Backup Report via Email Using a PowerShell Script

$ServerList = "C:\DBA\Servers.csv"
$OutputFile = "C:\DBA\Output.htm"
$emlist     = "sqldbanow@gmail.com"
$MailServer = "smtp.sqldbanow.com"

 

$HTML = '<style type="text/css">
#Header{font-family:"Trebuchet MS", Arial, Helvetica, sans-serif;width:100%;border-collapse:collapse;}
#Header td, #Header th {font-size:14px;border:1px solid #98bf21;padding:3px 7px 2px 7px;}
#Header th {font-size:14px;text-align:left;padding-top:5px;padding-bottom:4px;background-color:#A7C942;color:#fff;}
#Header tr.alt td {color:#000;background-color:#EAF2D3;}
</style>'

$HTML += "<HTML><BODY><Table border=1 cellpadding=0 cellspacing=0 width=100% id=Header>
  <TR>
    <TH><B>Database Name</B></TH>
    <TH><B>Recovery Model</B></TH>
    <TH><B>Last Full Backup Date</B></TH>
    <TH><B>Last Differential Backup Date</B></TH>
    <TH><B>Last Log Backup Date</B></TH>
  </TR>"

  

  

# Load the SQL Server Management Objects (SMO) assembly
[System.Reflection.Assembly]::LoadWithPartialName('Microsoft.SqlServer.SMO') | Out-Null

# Loop through each server listed in the CSV file
Import-Csv $ServerList | ForEach-Object {
    $ServerName = $_.ServerName
    $AppName    = $_.ApplicationName
    $HTML += "<TR bgColor='#ccff66'><TD colspan=5 align=center>$ServerName - $AppName</TD></TR>"
    $SQLServer = New-Object ('Microsoft.SqlServer.Management.Smo.Server') $ServerName

    # Check the backup history of every database on the instance
    Foreach ($Database in $SQLServer.Databases)
    {
        $DaysSince     = ((Get-Date) - $Database.LastBackupDate).Days
        $DaysSinceDiff = ((Get-Date) - $Database.LastDifferentialBackupDate).Days
        $DaysSinceLog  = ((Get-Date) - $Database.LastLogBackupDate).Days

        IF (($Database.Name) -ne 'tempdb' -and ($Database.Name) -ne 'model')
        {
            # Simple recovery model: log backups do not apply
            if ($Database.RecoveryModel -like "simple")
            {
                if ($DaysSince -gt 1)
                {
                    $HTML += "<TR>
                        <TD>$($Database.Name)</TD>
                        <TD>$($Database.RecoveryModel)</TD>
                        <TD bgcolor='RED'>$($Database.LastBackupDate)</TD>
                        <TD>$($Database.LastDifferentialBackupDate)</TD>
                        <TD>NA</TD>
                        </TR>"
                }
            }

            # Full recovery model: report the last log backup as well
            if ($Database.RecoveryModel -like "full")
            {
                if ($DaysSince -gt 1)
                {
                    $HTML += "<TR>
                        <TD>$($Database.Name)</TD>
                        <TD>$($Database.RecoveryModel)</TD>
                        <TD bgcolor='RED'>$($Database.LastBackupDate)</TD>
                        <TD>$($Database.LastDifferentialBackupDate)</TD>
                        <TD>$($Database.LastLogBackupDate)</TD>
                        </TR>"
                }
            }

            # Backup taken within the last day: highlight in green
            if ($DaysSince -lt 1)
            {
                $HTML += "<TR>
                    <TD>$($Database.Name)</TD>
                    <TD>$($Database.RecoveryModel)</TD>
                    <TD bgcolor='GREEN'>$($Database.LastBackupDate)</TD>
                    <TD>$($Database.LastDifferentialBackupDate)</TD>
                    <TD>$($Database.LastLogBackupDate)</TD>
                    </TR>"
            }
        }
    }
}

 

$HTML += "</Table></BODY></HTML>"

$HTML | Out-File $OutputFile

 

Function sendEmail  

 

param($from,$to,$subject,$smtphost,$htmlFileName)  

 

$body = Get-Content $htmlFileName 

$body = New-Object System.Net.Mail.MailMessage $from, "$to", $subject, $body 

$body.isBodyhtml = $true

$smtpServer = $MailServer

$smtp = new-object Net.Mail.SmtpClient($smtpServer)

$smtp.Send($body)

 

}  

$date = ( get-date ).ToString('MM/dd/yyyy')

$emlist

sendEmail sqldbanow@gmail.com $emlist "SQLDBANOW Test Server Backup Report for - $Date" $MailServer $OutputFile



---------------------------------------------

-- You need to create a CSV file with the server list at C:\DBA\Servers.csv, containing ServerName and ApplicationName columns, as in the example below.
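A minimal sample of what Servers.csv could contain; the only requirement, based on the script above, is that the header row includes ServerName and ApplicationName (the server and application names below are placeholders):

ServerName,ApplicationName
SQLPROD01,Payroll
SQLPROD02\INST1,CRM
SQLTEST01,Reporting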





Find the largest tables. Usually, we can prioritize index design based on slow queries that reference one or more of the largest tables.

-- Find the largest tables. Usually, we can prioritize index design based on slow queries that reference one or more of the largest tables.

-- Top 10 largest tables by used space
"select top 10 schema_name(tab.schema_id) + '.' + tab.name as [table],
cast(sum(spc.used_pages * 8)/1024.00 as numeric(36, 2)) as used_mb,
cast(sum(spc.total_pages * 8)/1024.00 as numeric(36, 2)) as allocated_mb
from sys.tables tab
join sys.indexes ind
on tab.object_id = ind.object_id
join sys.partitions part
on ind.object_id = part.object_id and ind.index_id = part.index_id
join sys.allocation_units spc
on part.partition_id = spc.container_id
group by schema_name(tab.schema_id) + '.' + tab.name
order by sum(spc.used_pages) desc;"
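Once a large table shows up near the top of this list, a quick follow-up is to check which indexes it already has; the table name below is a placeholder, not one returned by the query:

-- List the existing indexes on a table identified above (placeholder name)
EXEC sp_helpindex 'dbo.YourLargeTable';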



Review bad indexes

-- Review bad indexes
-- Possible bad nonclustered indexes (writes > reads)
-- Take into consideration the complete workload and how long your instance has been running.
-- Advice: do not drop or disable an index unless there is 100% certainty that it is no longer substantially beneficial.


SELECT OBJECT_NAME(s.[object_id]) AS [Table Name], i.name AS [Index Name], i.index_id,
    i.is_disabled, i.is_hypothetical, i.has_filter, i.fill_factor,
    user_updates AS [Total Writes], user_seeks + user_scans + user_lookups AS [Total Reads],
    user_updates - (user_seeks + user_scans + user_lookups) AS [Difference]
FROM sys.dm_db_index_usage_stats AS s WITH (NOLOCK)
INNER JOIN sys.indexes AS i WITH (NOLOCK)
    ON s.[object_id] = i.[object_id]
    AND i.index_id = s.index_id
WHERE OBJECTPROPERTY(s.[object_id], 'IsUserTable') = 1
    AND s.database_id = DB_ID()
    AND user_updates > (user_seeks + user_scans + user_lookups)
    AND i.index_id > 1
ORDER BY [Difference] DESC, [Total Writes] DESC, [Total Reads] ASC OPTION (RECOMPILE);
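If, after reviewing the complete workload, a nonclustered index really is write-only, one cautious option is to disable it first (a rebuild re-enables it) and drop it only once you are certain. The index and table names below are placeholders:

-- Disable a confirmed write-only nonclustered index (placeholder names); ALTER INDEX ... REBUILD re-enables it
ALTER INDEX [IX_CandidateIndex] ON [dbo].[SomeTable] DISABLE;

-- Drop it permanently only when you are sure it is no longer beneficial
DROP INDEX [IX_CandidateIndex] ON [dbo].[SomeTable];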


 


 

Improve the index design: review missing indexes

-- Review missing indexes
-- Missing index query


SELECT TOP 25
    dm_mid.database_id AS DatabaseID,
    dm_migs.avg_user_impact * (dm_migs.user_seeks + dm_migs.user_scans) AS Avg_Estimated_Impact,
    dm_migs.last_user_seek AS Last_User_Seek,
    OBJECT_NAME(dm_mid.OBJECT_ID, dm_mid.database_id) AS [TableName],
    'CREATE INDEX [IX_' + OBJECT_NAME(dm_mid.OBJECT_ID, dm_mid.database_id) + '_'
        + REPLACE(REPLACE(REPLACE(ISNULL(dm_mid.equality_columns, ''), ', ', '_'), '[', ''), ']', '')
        + CASE WHEN dm_mid.equality_columns IS NOT NULL AND dm_mid.inequality_columns IS NOT NULL THEN '_' ELSE '' END
        + REPLACE(REPLACE(REPLACE(ISNULL(dm_mid.inequality_columns, ''), ', ', '_'), '[', ''), ']', '') + ']'
        + ' ON ' + dm_mid.statement
        + ' (' + ISNULL(dm_mid.equality_columns, '')
        + CASE WHEN dm_mid.equality_columns IS NOT NULL AND dm_mid.inequality_columns IS NOT NULL THEN ',' ELSE '' END
        + ISNULL(dm_mid.inequality_columns, '') + ')'
        + ISNULL(' INCLUDE (' + dm_mid.included_columns + ')', '') AS Create_Statement
FROM sys.dm_db_missing_index_groups dm_mig
INNER JOIN sys.dm_db_missing_index_group_stats dm_migs
    ON dm_migs.group_handle = dm_mig.index_group_handle
INNER JOIN sys.dm_db_missing_index_details dm_mid
    ON dm_mig.index_handle = dm_mid.index_handle
WHERE dm_mid.database_ID = DB_ID()
ORDER BY Avg_Estimated_Impact DESC;
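The Create_Statement column builds a ready-to-review CREATE INDEX command from the missing-index DMVs; a typical generated value looks roughly like the statement below (database, table, and column names are invented for illustration, so review and rename before running anything):

-- Example shape of a generated Create_Statement (hypothetical names)
CREATE INDEX [IX_Orders_CustomerID_OrderDate] ON [YourDatabase].[dbo].[Orders] ([CustomerID],[OrderDate]) INCLUDE ([TotalDue])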

Tune inefficient queries in SQL Server

 --Tune inefficient queries in SQL Server

SELECT TOP 50
[Avg. MultiCore/CPU time(sec)] = qs.total_worker_time / 1000000 / qs.execution_count,
[Total MultiCore/CPU time(sec)] = qs.total_worker_time / 1000000,
[Avg. Elapsed Time(sec)] = qs.total_elapsed_time / 1000000 / qs.execution_count,
[Total Elapsed Time(sec)] = qs.total_elapsed_time / 1000000,
qs.execution_count,
[Avg. I/O] = (total_logical_reads + total_logical_writes) / qs.execution_count,
[Total I/O] = total_logical_reads + total_logical_writes,
Query = SUBSTRING(qt.[text], (qs.statement_start_offset / 2) + 1,
(
(
CASE qs.statement_end_offset
WHEN -1 THEN DATALENGTH(qt.[text])
ELSE qs.statement_end_offset
END - qs.statement_start_offset
) / 2
) + 1
),
Batch = qt.[text],
[DB] = DB_NAME(qt.[dbid]),
qs.last_execution_time,
qp.query_plan
FROM sys.dm_exec_query_stats AS qs
CROSS APPLY sys.dm_exec_sql_text(qs.[sql_handle]) AS qt
CROSS APPLY sys.dm_exec_query_plan(qs.plan_handle) AS qp
WHERE qs.execution_count > 5 -- more than 5 occurrences
ORDER BY [Total MultiCore/CPU time(sec)] DESC;
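After this query points at a candidate statement, it can help to measure that statement in isolation before and after any index or rewrite change; the SELECT below is only a placeholder for the statement taken from the Query column:

-- Measure a candidate statement in isolation (placeholder query)
SET STATISTICS IO ON;
SET STATISTICS TIME ON;

SELECT TOP (100) * FROM dbo.SomeLargeTable;   -- replace with the statement from the Query column

SET STATISTICS IO OFF;
SET STATISTICS TIME OFF;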