Wednesday, September 14, 2016

Script to Check if a Schema Exists and Create It if Not

USE [master]
GO

IF NOT EXISTS(SELECT schema_id FROM sys.schemas where name = 'DBA')
BEGIN
DECLARE @SQLcmd NVARCHAR(50) = 'CREATE SCHEMA [DBA] AUTHORIZATION [db_owner]';
EXEC sp_executesql @SQLcmd;
END
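
A quick follow-up query to confirm the schema and its owner:

SELECT schema_id, name, USER_NAME(principal_id) AS owner_name
FROM sys.schemas
WHERE name = 'DBA';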

Friday, September 9, 2016

Return a List of All Databases with Their Storage Amounts


USE [master]
GO

IF NOT EXISTS(SELECT schema_id FROM sys.schemas WHERE name = 'DBA')
BEGIN 
DECLARE @SQLcmd NVARCHAR(50) = 'CREATE SCHEMA [DBA] AUTHORIZATION [db_owner]';
EXEC sp_executesql @SQLcmd;
END 

IF NOT EXISTS(SELECT object_id FROM sys.tables WHERE name ='tDataDiskUsageHistory')
BEGIN
CREATE TABLE DBA.tDataDiskUsageHistory
(
DatabaseName nvarchar(50) NOT NULL,
DateTested date NOT NULL,
TimeTested time(7) NOT NULL,
ReservedDbSpace decimal(12, 2) NULL,
UsedDbSpace decimal(12, 2) NULL,
FreeSpacePercent decimal(12, 2) NULL,
LogSpaceUsed decimal(12, 2) NULL
)  ON [PRIMARY];

ALTER TABLE DBA.tDataDiskUsageHistory ADD CONSTRAINT
PK_tDataDiskUsageHistory PRIMARY KEY CLUSTERED
(
DatabaseName,
DateTested,
TimeTested
) WITH( STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ALLOW_ROW_LOCKS = ON, ALLOW_PAGE_LOCKS = ON) ON [PRIMARY];

END
GO

USE [master]
GO

CREATE PROCEDURE DBA.DataDiskUsage_Record_AllDatabases
(@DaysToRetain int = 0)
AS

DECLARE @loop INT;
DECLARE @count INT;
DECLARE @db_name NVARCHAR(50);
DECLARE @exec_string NVARCHAR(1000);
DECLARE @id INT;
DECLARE @free_space INT;

--Create table to hold the database list
DECLARE @current_databases TABLE
(
 id INT IDENTITY(1,1)
 , name nvarchar(50)
);

--create a table for holding all database free space
DECLARE @space_used TABLE
(
id INT IDENTITY(1,1)
, database_name NVARCHAR(50)
, reserved_db_space DECIMAL(12,2)
, used_db_space DECIMAL(12,2)
, free_space_percent DECIMAL(12,2) --matches FreeSpacePercent in DBA.tDataDiskUsageHistory
, log_space_used DECIMAL(12,2)
);

--get the names of all user databases
INSERT INTO @current_databases
SELECT name
FROM sys.databases
WHERE database_id > 4 --eliminate system databases
AND source_database_id IS NULL; --eliminate database snapshots

SET @loop = 1;
SELECT @count = MAX(id) FROM @current_databases;

--loop through each database and get the data file free space    
WHILE @loop <= @count
 BEGIN

 --get our working db
 SELECT @db_name = name
 FROM @current_databases
 WHERE id = @loop;

 --QUOTENAME protects against database names containing spaces or other special characters
 SET @exec_string = 'USE ' + QUOTENAME(@db_name) + ';

  DECLARE @DataSpaceReserved DECIMAL(12,2) = 0.0;
  DECLARE @DataSpaceUsed DECIMAL(12,2) = 0.0;
  DECLARE @LogSpaceUsed DECIMAL(12,2) = 0.0;

  SELECT
  @LogSpaceUsed = SUM(f.size/128.0)
  FROM sys.sysfiles f
  WHERE groupid = 0;-- log files

  SELECT
  @DataSpaceReserved = SUM(f.size/128.0)
  ,  @DataSpaceUsed = SUM(CAST(FILEPROPERTY(f.name,  ''spaceused'') AS int)/128.0)
  FROM sys.sysfiles f
  WHERE groupid != 0;-- data files

  SELECT
  ''' + @db_name + ''' as database_name
  , @DataSpaceReserved
  , @DataSpaceUsed
  , 100 * (@DataSpaceReserved - @DataSpaceUsed)/@DataSpaceReserved
  , @LogSpaceUsed';

print @exec_string;

 --pull our space data back and insert into our holding table
 INSERT @space_used
 EXECUTE (@exec_string);

 --next please            
 SET @loop = @loop + 1

END


INSERT INTO [DBA].[tDataDiskUsageHistory]
           ([DatabaseName]
           ,[DateTested]
           ,[TimeTested]
           ,[ReservedDbSpace]
           ,[UsedDbSpace]
           ,[FreeSpacePercent]
           ,[LogSpaceUsed])
SELECT
database_name
, CONVERT(date, GETDATE()) AS QueryDate
, CONVERT(time, GETDATE()) AS QueryTime
, reserved_db_space
, used_db_space
, free_space_percent
, log_space_used
FROM @space_used;

IF @DaysToRetain > 0
BEGIN
DELETE FROM DBA.tDataDiskUsageHistory WHERE DATEDIFF(d, DateTested, GETDATE()) > @DaysToRetain
END
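
A short usage example, assuming the schema, table, and procedure above were created in [master] as shown; the 90-day retention value is only an illustration:

GO

USE [master]
GO

--Capture the current space numbers for every user database, keeping 90 days of history
EXEC DBA.DataDiskUsage_Record_AllDatabases @DaysToRetain = 90;

--Review the collected history
SELECT DatabaseName, DateTested, TimeTested, ReservedDbSpace, UsedDbSpace, FreeSpacePercent, LogSpaceUsed
FROM DBA.tDataDiskUsageHistory
ORDER BY DateTested DESC, TimeTested DESC;
GO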

Thursday, August 25, 2016

Index Usage Query Using DMVs

SELECT
obj.name AS TableName
, idx.name AS IndexName
, ius.index_id
, ius.user_seeks
, ius.user_scans
, ius.user_lookups
, ius.user_updates
,  ius.user_seeks + ius.user_scans + ius.user_lookups + ius.user_updates AS TotalUserUsages
, last_user_seek
, last_user_scan
, last_user_lookup
, last_user_update
, idx.fill_factor
, idx.is_padded
, idx.is_primary_key
, idx.is_disabled
FROM sys.dm_db_index_usage_stats ius
INNER JOIN sys.objects obj
ON ius.object_id  = obj.object_id 
INNER JOIN sys.indexes idx
ON ius.object_id  = idx.object_id 
AND ius.index_id = idx.index_id
--WHERE obj.name = '[table name]'
--This will find indexes without user usage
WHERE ius.user_seeks + ius.user_scans + ius.user_lookups + ius.user_updates = 0

ORDER BY is_disabled, index_id

Wednesday, August 24, 2016

Defining an Index for the Rest of the World

Picture data as just a pile of information, like all the chapters of a book lying in a pile on the floor.

Indexes are like the indexes of a book:

  1. One usually puts the book in the order the author intended (the primary key).
  2. Several others might order specific things, like giving me a way to find the first page a character appears on, or all the fight scenes (the indexes).

Each of these lets you get a collection of pages or a specific value without having to start at page one and make a list of the values you want. For example, I want to know where Chad is first mentioned in a book. Without the index I have to start at page one and read until I find Chad, even if he only appears on the last page. With the index of characters I can look Chad up by name and it tells me the page number. As you can see, the lookup takes a lot less time and far fewer resources.


That is the good part, but in life nothing is free. For an index to be any good it has to be maintained when data changes, especially inserts and deletes, and every index slows those operations down. The key, then, is to add only indexes that have value for what the users are doing, so we look at how often an index is used before making a choice. For example, if you are only going to look for Chad once, the first time you read the book, there is no value in building and maintaining a list of character entry points, so you wouldn't waste time on it.
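
To put the analogy into T-SQL, here is a minimal sketch; the dbo.BookPages table, its columns, and the index name are hypothetical, invented only for the illustration:

CREATE TABLE dbo.BookPages
(
PageNumber int NOT NULL CONSTRAINT PK_BookPages PRIMARY KEY --the author's intended page order (primary key)
, CharacterName nvarchar(100) NULL
, PageText nvarchar(max) NULL
);

--The "list of characters": a nonclustered index on the column we search by
CREATE NONCLUSTERED INDEX IX_BookPages_CharacterName
ON dbo.BookPages (CharacterName);

--With the index this is a seek instead of reading every page,
--but every INSERT, UPDATE, and DELETE now also has to maintain IX_BookPages_CharacterName
SELECT MIN(PageNumber) AS FirstMention
FROM dbo.BookPages
WHERE CharacterName = N'Chad';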

Wednesday, June 1, 2016

Query to review job status

Queries all enabled Agent jobs and returns either jobs not in a success status, or jobs whose run time is greater than the average run time of the last X runs plus the standard deviation of those runs. The default window is 10 days, set in the opening @TargetDate statement. This reduces the number of jobs that need interrogating.


DECLARE @TargetDate DATE = DATEADD(d, -10, GETDATE());
--PRINT @TargetDate;
--PRINT FORMAT(MONTH(@TargetDate), 'd2');
DECLARE @RunTo INT = CAST(CAST(YEAR(@TargetDate) AS NCHAR(4)) + FORMAT(MONTH(@TargetDate), 'd2') + FORMAT(DAY(@TargetDate), 'd2') AS INT);

DECLARE @tJobHistory TABLE 
(job_id uniqueidentifier
, avgRunDurationLast10 decimal(10,2)
, stdRunDurationLast10 decimal(10,2)
);

WITH cteLast10DaysJobs AS
(
SELECT
job_id
, run_duration
FROM msdb..sysjobhistory
WHERE step_id = 0
AND run_status = 1
AND run_date >= @RunTo
)
INSERT INTO @tJobHistory
SELECT
job_id
, AVG(CAST(run_duration AS float)) AS average_run_duration
, COALESCE(STDEV(CAST(run_duration AS float)), 0) AS stddev_run_duration
FROM cteLast10DaysJobs
--WHERE  job_id = '0AF6CA6E-E573-4FB8-B065-EE9994F3898D'
GROUP BY job_id

SELECT
sj.name AS Job_Name
,sjh.[message] AS Job_Message
, CASE sjh.run_status
WHEN 0 THEN 'Failed'
WHEN 1 THEN 'Success'
        WHEN 2 THEN 'Retry'
WHEN 3 THEN 'Cancelled'
END AS Job_Status
,sjh.run_date AS Last_Run_Date
,sjh.run_time AS Job_Last_Run_Time
,sjh.run_duration AS Total_Run_Time
,jh.avgRunDurationLast10 AS Avg_Run_Time_Last_10_Runs
, jh.stdRunDurationLast10 AS  Stddev_Run_Time_Last_10_Runs
, sjh.run_duration - jh.avgRunDurationLast10 - jh.stdRunDurationLast10 AS Run_Time_Greater_Than_Dev
FROM msdb..sysjobs sj
INNER join @tJobHistory jh
ON sj.job_id = jh.job_id
INNER join msdb..sysjobhistory sjh
ON sj.job_id = sjh.job_id
WHERE sj.enabled = 1
AND sjh.step_id = 0
AND (sjh.run_duration - round(jh.avgRunDurationLast10,2) > jh.stdRunDurationLast10  OR run_status != 1)
AND instance_id in (SELECT MAX(instance_id) FROM msdb..sysjobhistory GROUP BY job_id)
ORDER BY
run_date desc
, sjh.run_duration - jh.avgRunDurationLast10 - jh.stdRunDurationLast10 DESC;

Friday, May 6, 2016

SQL Jobs change job step proxy

The purpose of this script is to set a proxy on each job step so that the owner of the job does not need access to the target database. This applies when a separate server is used for ETL and data storage. It also has a strong effect on the performance of the database server, as significantly less memory has to be allocated to the Integration Services service.

This assumes you can segregate the jobs by job category so that only the targeted steps are changed. It could also easily be done by job owner instead.
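
The script below assumes a usable proxy already exists (the proxy_id of 2 assigned to @proxy_value). If one does not, a minimal sketch for creating a credential and an SSIS proxy might look like the following; the credential name, account, password, and proxy name are placeholders rather than values from the original setup:

USE [master]
GO

--Credential for the account the packages should run under
CREATE CREDENTIAL [ETLProxyCredential]
WITH IDENTITY = N'DOMAIN\ETLServiceAccount', SECRET = N'<strong password here>';
GO

USE [msdb]
GO

--Create the Agent proxy and grant it the SSIS package execution subsystem (subsystem_id 11)
EXEC dbo.sp_add_proxy
@proxy_name = N'ETLProxy',
@credential_name = N'ETLProxyCredential',
@enabled = 1;

EXEC dbo.sp_grant_proxy_to_subsystem
@proxy_name = N'ETLProxy',
@subsystem_id = 11;
GO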


USE MSDB
GO

--List the existing proxies to find the proxy_id to use below
SELECT * FROM sysproxies;
--Use this to restrict the jobs to only the categories you want to change
DECLARE @categoryMatch nvarchar(30) = 'test_%';
--Set the proxy value to the proxy_id desired from the query above
DECLARE @proxy_value int = 2;
DECLARE @CurJobId nvarchar(50);
DECLARE @curStepId nvarchar(50);

SELECT sj.job_id, sjs.step_id, proxy_id
FROM sysjobs sj
INNER JOIN sysjobsteps sjs
ON sj.job_id = sjs.job_id
WHERE category_id IN ( select category_id FROM syscategories WHERE  name like @categoryMatch)
AND (sjs.proxy_id != @proxy_value OR proxy_id IS NULL);

DECLARE db_cursor CURSOR FOR
SELECT sj.job_id, sjs.step_id
FROM sysjobs sj
INNER JOIN  sysjobsteps sjs
ON sj.job_id = sjs.job_id
WHERE  category_id IN (SELECT category_id FROM syscategories WHERE  name like @categoryMatch)
AND (sjs.proxy_id != @proxy_value OR proxy_id IS NULL);


OPEN db_cursor
FETCH NEXT FROM db_cursor INTO @CurJobId, @curStepId

WHILE @@FETCH_STATUS = 0
BEGIN
PRINT 'Job_ID:  ' + @curJobId + '     Job Step:  ' + @curStepId;
EXEC dbo.sp_update_jobstep
@job_id = @curJobId
,@step_id = @curStepId
,@proxy_id = @proxy_value;

    FETCH NEXT FROM db_cursor INTO @CurJobId, @curStepId
END

CLOSE db_cursor
DEALLOCATE db_cursor

Friday, April 29, 2016

Best update database owner tools

In my never-ending quest to automate the setting of DB owners, here is the best script yet. It assumes there is a login called [Domain User]\[Server name]Server running the server.

USE Master
GO

DECLARE @userName nvarchar(50);
DECLARE @userNamePostfix nvarchar(10) = 'Server'; --'Admin';

IF  CHARINDEX('\', @@SERVERNAME, 0) > 0
SET @userName = '[Domain User]\' +  SUBSTRING(@@SERVERNAME, 0, CHARINDEX('\', @@SERVERNAME, 0)) + @userNamePostfix ;
ELSE
SET @userName = '[Domain User]\' +  @@SERVERNAME + @userNamePostfix;

--DECLARE @userName nvarchar(50) = '[Domain User]\' +  SUBSTRING(@@SERVERNAME, 0, CHARINDEX('\', @@SERVERNAME, 0)) + 'Admin';
-- Use this section to override if the pattern does not work
--SET @userName = 'override User';

PRINT @userName;

--Require the user to exist and be a sysadmin
IF EXISTS(SELECT sid FROM sys.syslogins WHERE name = @userName AND sysAdmin = 1)
BEGIN

--Should be modified to ignore DBs which are not assigned to a default value
DECLARE @prefix  nvarchar(200) = 'IF ''?'' NOT IN (SELECT name from sys.databases WHERE owner_sid = SUSER_ID(''' + @userName + ''') OR DB_NAME(database_id) IN (''master'', ''msdb'',''model'', ''tempdb'', ''distribution'')) BEGIN  ';
DECLARE @postfix  nvarchar(50) = ' END';
DECLARE @cmd  nvarchar(512);

SET @cmd =  @prefix + 'PRINT ''?''; USE ?  exec sp_changedbowner @loginame=''' + @userName + ''' ' + @postfix;
PRINT @cmd;
EXEC master.sys.sp_MSforeachdb  @command1 = @cmd;
PRINT 'Successfully set DB owner to ' + @userName;
END
ELSE
BEGIN
  PRINT @userName + ' NOT found';
END

Wednesday, April 13, 2016

Fix for Chrome in Reporting Services 2012 and 2008R2

Add the script below to the file:
In 2008R2
...\Program Files\Microsoft SQL Server\MSRS10_50.MSSQLSERVER\Reporting Services\ReportManager\Pages\Report.aspx

In 2012
...\Program Files\Microsoft SQL Server\MSRS12.MSSQLSERVER\Reporting Services\ReportManager\Pages\Report.aspx

Tuesday, March 22, 2016

Find the Number of Rows in Database Tables

Had a need to track empty tables in a DB to see if converted systems were correctly hooked up, and wanted to add filters. Sorry, I didn't keep the link to the original website.

SELECT
QUOTENAME(SCHEMA_NAME(sOBJ.schema_id)) + '.' + QUOTENAME(sOBJ.name) AS [TableName]
, SUM(sdmvPTNS.row_count) AS [RowCount]
FROM sys.objects AS sOBJ
INNER JOIN sys.dm_db_partition_stats AS sdmvPTNS
ON sOBJ.object_id = sdmvPTNS.object_id
WHERE 
      sOBJ.type = 'U'
      AND sOBJ.is_ms_shipped = 0x0
      AND sdmvPTNS.index_id < 2
--Find all by schema
-- AND SCHEMA_NAME(sOBJ.schema_id) = ''
GROUP BY
      sOBJ.schema_id
      , sOBJ.name
--Only find empty tables
--HAVING SUM(sdmvPTNS.row_count) = 0
ORDER BY [TableName]
, [RowCount];
GO

Monday, February 8, 2016

Add Log Percent Used Alert to All Non-System DBs

In my never-ending quest to automate more monitoring, I have added this to my pre-production deployment package. It assumes an operator named DBGroup has been created to receive the alerts (a sketch for creating one follows).
Also, Database Mail must be set up on the Agent.
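
If the DBGroup operator does not exist yet, a minimal sketch for creating it; the e-mail address is a placeholder, and a working Database Mail profile is assumed:

USE [msdb]
GO

IF NOT EXISTS (SELECT id FROM dbo.sysoperators WHERE name = N'DBGroup')
EXEC dbo.sp_add_operator
@name = N'DBGroup',
@enabled = 1,
@email_address = N'dba-team@yourdomain.com';
GO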

USE [msdb]
GO

DECLARE @RemoveExisting as bit = 1; --Set to 1 to purge any existing

DECLARE db_cursor CURSOR FOR
SELECT name
FROM master.sys.databases
WHERE name NOT IN ('model','master', 'msdb','tempdb')

DECLARE @DBname nvarchar(max) = '';
DECLARE @AlertName nvarchar(max) = '';
DECLARE @AlertText nvarchar(max) = '';

OPEN db_cursor
FETCH NEXT FROM db_cursor INTO @DBname

WHILE @@FETCH_STATUS = 0
BEGIN
SET @AlertName = @DBname  + N'_LogPercentUsed';
SET @AlertText = N'Databases|Percent Log Used|' + @DBname + '|>|90';

IF EXISTS(select id from sysalerts where name = @AlertName) AND  @RemoveExisting = 1
EXEC msdb.dbo.sp_delete_alert @AlertName;

IF NOT EXISTS(select id from sysalerts where name = @AlertName)
BEGIN
EXEC msdb.dbo.sp_add_alert @name= @AlertName ,
@message_id=0,
@severity=0,
@enabled=1,
@delay_between_responses=1715,
@include_event_description_in=1,
@category_name=N'[Uncategorized]',
@performance_condition= @AlertText
, @job_id=N'00000000-0000-0000-0000-000000000000';

EXEC msdb.dbo.sp_add_notification @alert_name= @AlertName, @operator_name=N'DBGroup', @notification_method = 1;

END

  FETCH NEXT FROM db_cursor INTO @DBname
END

CLOSE db_cursor
DEALLOCATE db_cursor