T-SQL query consuming large space in tempdb - sql-server

How do I find which query is consuming the most space in tempdb on SQL Server 2000? I do not want to use SQL Profiler, as there is not much free space on disk.
The issue is that this is the 2000 version, so no information can be pulled using DMVs.
How can I track, between 1 and 2 o'clock, which query on which database is making tempdb grow so much that notifications arrive saying a page could not be allocated in tempdb because it is full?

You can create a proc out of the script below, which can then be executed by a job. This is more or less a template and can be altered however you see fit.
EDIT: added additional comments below.
--Create temp table for sysprocesses records
IF OBJECT_ID('tempdb.dbo.#SYSPROC') IS NOT NULL
DROP TABLE #SYSPROC;
BEGIN
CREATE TABLE #SYSPROC
(
spid smallint NOT NULL,
dbid smallint NOT NULL,
blocked smallint NOT NULL,
lastwaittype nchar(32) NOT NULL,
cpu int NOT NULL,
physical_io int NOT NULL,
memusage int NOT NULL,
login_time datetime NOT NULL,
Last_batch datetime NOT NULL,
status nchar(30) NOT NULL,
cmd nchar(16) NOT NULL,
loginame nchar(128) NOT NULL,
sql_handle binary(20) NOT NULL,
sh_text text NULL,
snapshot_dt datetime NULL
)
END;
--Insert sysprocesses records into temp #SYSPROC
INSERT INTO #SYSPROC
(
spid,
dbid,
blocked,
lastwaittype,
cpu,
physical_io,
memusage,
login_time,
Last_batch,
status,
cmd,
loginame,
sql_handle,
snapshot_dt
)
SELECT
sp.spid,
sp.dbid,
sp.blocked,
sp.lastwaittype,
sp.cpu,
sp.physical_io,
sp.memusage,
sp.login_time,
sp.Last_batch,
sp.status,
sp.cmd,
sp.loginame,
sp.sql_handle,
GETDATE()
FROM master.dbo.sysprocesses sp
WHERE sp.spid > 50;
/*
Update temp #SYSPROC with the sql text. Since CROSS APPLY is not
available in SQL Server 2000, a loop is required to iterate
through each record.
*/
--SQL Server 2000 does not support DECLARE with an initializer, and varchar(max) does not exist there, so declare first and assign separately
DECLARE @min_spid smallint;
DECLARE @max_spid smallint;
DECLARE @sql_handle binary(20);
DECLARE @sql_text varchar(8000);
SELECT @min_spid = MIN(spid), @max_spid = MAX(spid) FROM #SYSPROC;
WHILE @min_spid <= @max_spid
BEGIN
--Set the @sql_handle variable to be evaluated by the fn_get_sql function
SELECT
@sql_handle = S.sql_handle
FROM #SYSPROC S
WHERE spid = @min_spid
--Identify the sql text for the session by passing the @sql_handle variable through fn_get_sql
--(SQL Server 2000 requires the :: prefix when calling system table-valued functions)
SELECT
@sql_text = H.[text]
FROM ::fn_get_sql(@sql_handle) H
--Update #SYSPROC with the sql text
UPDATE S
SET sh_text = @sql_text
FROM #SYSPROC S
WHERE spid = @min_spid
SET @min_spid = @min_spid + 1
END;
INSERT INTO <SOMETABLE YOU CREATE>
(
spid,
dbid,
blocked,
lastwaittype,
cpu,
physical_io,
memusage,
login_time,
Last_batch,
status,
cmd,
loginame,
sql_handle,
sh_text, --persist the captured statement text as well, since that is the point of the loop above
snapshot_dt
)
SELECT
spid,
dbid,
blocked,
lastwaittype,
cpu,
physical_io,
memusage,
login_time,
Last_batch,
status,
cmd,
loginame,
sql_handle,
sh_text,
snapshot_dt
FROM #SYSPROC
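For reference, here is a minimal sketch of the permanent table the <SOMETABLE YOU CREATE> placeholder stands for; the name dbo.SysprocSnapshot is made up for illustration, and the columns simply mirror #SYSPROC:
--Hypothetical permanent snapshot table; rename to suit your environment
CREATE TABLE dbo.SysprocSnapshot
(
spid smallint NOT NULL,
dbid smallint NOT NULL,
blocked smallint NOT NULL,
lastwaittype nchar(32) NOT NULL,
cpu int NOT NULL,
physical_io int NOT NULL,
memusage int NOT NULL,
login_time datetime NOT NULL,
Last_batch datetime NOT NULL,
status nchar(30) NOT NULL,
cmd nchar(16) NOT NULL,
loginame nchar(128) NOT NULL,
sql_handle binary(20) NOT NULL,
sh_text text NULL,
snapshot_dt datetime NULL
)
Schedule the job to run every few minutes during the 1-2 window the question mentions, then review the snapshots (physical_io, cpu, sh_text) to get a rough idea of which statements were active while tempdb was growing.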
Hope this helps!

Related

Resync target servers in SQL Server

I am working in a SQL Server environment with one master server and many target servers. It sometimes happens that, for one reason or another, a target server goes out of sync.
When that occurs, I have the option of manually running the following stored procedure to re-sync the target server:
exec sp_resync_targetserver @server_name = 'RMAPP11DV1\PROJECT'
My assignment is to automate the process so that we do not have to run it manually. I should write a script and schedule it as a job that runs at a scheduled time to selectively find and re-sync only the target servers that are currently out of sync.
This is my approach so far. It is not working as expected (it does not perform the re-sync when run), which is why I need input. Thanks in advance:
use msdb
set nocount on;
if exists (select * from tempdb.sys.all_objects where name like '%#targetstatus%') --deleting the table if it already exists
drop table #targetstatus
create table #targetstatus
(
server_id int not null,
server_name nvarchar(300) not null,
location nvarchar(350) null,
time_zone_adjustment int not null,
enlist_date datetime not null,
last_poll_date datetime not null,
status int not null,
unread_instructions int not null,
local_time datetime not null,
enlisted_by_nt_user nvarchar(100) not null,
poll_interval int not null
)
insert into #targetstatus
exec sp_help_targetserver
select * from #targetstatus
if exists (select * from tempdb.sys.all_objects where name like '%#needresync%') --deleting the table if it already exists
drop table #needresync
create table #needresync -- will hold the target servers needing to be re-synced
(
server_id int not null,
server_name nvarchar(300) not null,
location nvarchar(350) null,
time_zone_adjustment int not null,
enlist_date datetime not null,
last_poll_date datetime not null,
status int not null,
unread_instructions int not null,
local_time datetime not null,
enlisted_by_nt_user nvarchar(100) not null,
poll_interval int not null
)
insert into #needresync
select *
from #targetstatus
where status <> 1 -- we only want to run the syncing proc on targets whose status differs from 1
select * from #needresync
declare @target_server varchar(100);
set @target_server = ' '
while @target_server <> ' '
begin
set @target_server = (select max(server_name) from #needresync);
exec msdb.dbo.sp_resync_targetserver @server_name = '@target_server';
-- @target_server = @target_server + 1
end
You are not deleting the rows out of #needresync, so the loop can never finish; you must delete each row one by one inside the while loop.
However, a much easier method exists. You can query the systargetservers system table in msdb directly, without using any temp tables at all:
DECLARE @server sysname =
(SELECT TOP 1 server_name FROM dbo.systargetservers WHERE status = 2); -- 2 = Re-sync Pending
WHILE (@server IS NOT NULL)
BEGIN
EXEC sp_resync_targetserver @server;
SET @server =
(SELECT TOP 1 server_name FROM dbo.systargetservers WHERE status = 2);
END;
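For completeness, here is a minimal sketch of the delete-as-you-go fix described above, applied to the original #needresync loop. It is untested and assumes server_name uniquely identifies a row; note it also passes the variable itself rather than a string literal containing its name:
declare @target_server nvarchar(300);
while exists (select 1 from #needresync)
begin
set @target_server = (select max(server_name) from #needresync);
--pass the variable, not the literal '@target_server'
exec msdb.dbo.sp_resync_targetserver @server_name = @target_server;
--remove the processed row so the loop can terminate
delete from #needresync where server_name = @target_server;
end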

SQL agent job failing at a step despite error handling

My SQL Server instance has an agent job called Grand Master that runs on a schedule every minute, 24/7.
I have created another job that needs to be run manually from time to time. One of the first things it needs to do is disable and stop the Grand Master job from running while it is active.
Step 1 is to disable the GM, which works fine:
exec msdb..sp_update_job @job_name = 'Grand Master', @enabled = 0
Step 2, however, fails. Its job is to stop the GM from running IF it is running. It is not supposed to do anything if the GM is not currently running:
if exists (select 1
from msdb.dbo.sysjobs_view j
join msdb.dbo.sysjobactivity a on j.job_id = a.job_id
where a.run_requested_date is not null
and a.stop_execution_date is null
and j.name = 'Grand Master')
begin
exec msdb.dbo.sp_stop_job 'Grand Master'
end
Every time I run this job, regardless of the state of the GM, it fails on step 2 with this error:
Executed as user: NT AUTHORITY\SYSTEM. SQLServerAgent Error: Request to stop job Grand Master (from User NT AUTHORITY\SYSTEM) refused because the job is not currently running. [SQLSTATE 42000] (Error 22022). The step failed.
Does anyone have any ideas?
First stop it if it's running, and then disable the job. SQL Server might misinterpret the request to stop a disabled job...
If you have a disordered working environment, the above query may show different results than the Job Activity Monitor (imho, the GUI info is more reliable). You can use the following procedure to check whether a job is in the "Executing" state. The procedure is provided at your own risk.
/*
procedure result: 0=Not idle or suspended, 1=Executing, 2=WaitingForThread, 3=BetweenRetries, 4=Idle, 5=Suspended, [6=WaitingForStepToFinish], 7=PerformingCompletionActions
*/
CREATE PROCEDURE [dbo].[sp_get_job_state] (
@job_name VARCHAR(100)
, @job_state SMALLINT OUTPUT
)
AS
BEGIN
DECLARE @job_id UNIQUEIDENTIFIER
, @can_see_all_running_jobs INT = 1
, @job_owner SYSNAME = SUSER_SNAME()
, @res SMALLINT;
DECLARE @xp_results TABLE (
job_id UNIQUEIDENTIFIER NOT NULL
, last_run_date INT NOT NULL
, last_run_time INT NOT NULL
, next_run_date INT NOT NULL
, next_run_time INT NOT NULL
, next_run_schedule_id INT NOT NULL
, requested_to_run INT NOT NULL -- BOOL
, request_source INT NOT NULL
, request_source_id SYSNAME COLLATE database_default NULL
, running INT NOT NULL -- BOOL
, current_step INT NOT NULL
, current_retry_attempt INT NOT NULL
, job_state INT NOT NULL
);
SELECT @job_id = job_id
FROM msdb..sysjobs
WHERE name = @job_name;
INSERT INTO @xp_results (
job_id
, last_run_date
, last_run_time
, next_run_date
, next_run_time
, next_run_schedule_id
, requested_to_run
, request_source
, request_source_id
, running
, current_step
, current_retry_attempt
, job_state
)
EXECUTE master.dbo.xp_sqlagent_enum_jobs @can_see_all_running_jobs = @can_see_all_running_jobs
, @job_owner = @job_owner
, @job_id = @job_id;
SELECT @job_state = job_state
FROM @xp_results
END
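A hypothetical usage sketch for step 2 of the job, assuming the procedure above was created in msdb (the answer does not say which database, so adjust the schema to wherever you created it):
DECLARE @state SMALLINT;
EXEC msdb.dbo.sp_get_job_state @job_name = 'Grand Master', @job_state = @state OUTPUT;
IF @state = 1 -- 1 = Executing, per the result codes listed above
BEGIN
EXEC msdb.dbo.sp_stop_job @job_name = 'Grand Master';
END
If you follow the earlier suggestion, run this stop logic before the step that disables the job.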

Resume a WHILE loop from where it stopped in SQL

I have a while loop query that I only want to run until 11 PM every day. I'm aware this can be achieved with a WAITFOR statement, and then just ending the query.
However, on the following day, once I re-run my query, I want it to continue from where it stopped on the last run. So I'm thinking of creating a log table that will contain the last processed ID.
How can I achieve this?
DECLARE @MAX_Value BIGINT = ( SELECT MAX(ID) FROM dbo.TableA )
DECLARE @MIN_Value BIGINT = ( SELECT MIN(ID) FROM dbo.TableA )
WHILE (@MIN_Value < @MAX_Value )
BEGIN
INSERT INTO dbo.MyResults
/* Do some processing*/
….
….
….
SET @MIN_Value = @MIN_Value + 1
/*I only want the above processing to run until 11PM*/
/* Once it's 11PM, I want to save the last used @MIN_Value
into my LoggingTable (dbo.Logging) and kill the above processing.*/
/* Once I re-run the query I want my processing to restart from the
above @MIN_Value which is recorded in dbo.Logging */
END
Disclaimer: I do not recommend using WHILE loops in SQL Server, but since you commented that you want a solution in SQL, here you go:
-- First of all, I strongly recommend assigning the variable values a different way, to avoid the variables being NULL when the table is empty; you can also do it in a single SELECT.
-- Also, if something started running at 10:59:59, this lets the processing for that value finish rather than simply rolling back at 11.
CREATE TABLE dbo.ProcessingValueLog (
LogEntryId BIGINT IDENTITY(1,1) NOT NULL,
LastUsedValue BIGINT NOT NULL,
LastUsedDateTime DATETIME NOT NULL DEFAULT(GETDATE()),
CompletedProcessing BIT NOT NULL DEFAULT(0)
)
DECLARE @MAX_Value BIGINT = 0;
DECLARE @MIN_Value BIGINT = 0;
SELECT
@MIN_Value = MIN(ID),
@MAX_Value = MAX(ID)
FROM
dbo.TableA
SELECT TOP 1
@MIN_Value = LastUsedValue + 1 --resume at the value after the last one that completed
FROM
dbo.ProcessingValueLog
WHERE
CompletedProcessing = 1
ORDER BY
LastUsedDateTime DESC
DECLARE @CurrentHour TINYINT = DATEPART(HOUR, GETDATE());
DECLARE @LogEntryID BIGINT;
WHILE (@MIN_Value < @MAX_Value AND @CurrentHour < 23) --23 = 11 PM
BEGIN
INSERT INTO dbo.ProcessingValueLog (LastUsedValue)
VALUES (@MIN_Value)
SELECT @LogEntryID = SCOPE_IDENTITY()
-- Do some processing...
SET @MIN_Value = @MIN_Value + 1;
UPDATE dbo.ProcessingValueLog
SET CompletedProcessing = 1
WHERE LogEntryId = @LogEntryID
SET @CurrentHour = DATEPART(HOUR, GETDATE())
END
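To check the resume point after a run has been cut off at 11 PM, you can inspect the log table directly (a quick sketch against the table defined above):
--the most recent completed value; the next run resumes at this value + 1
SELECT TOP 1 LastUsedValue, LastUsedDateTime
FROM dbo.ProcessingValueLog
WHERE CompletedProcessing = 1
ORDER BY LastUsedDateTime DESC;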

How to optimize this query for inserting/updating millions of records in SQL Server

I have 4 tables in SQL Server: AspNetUsers, CustomerFile, CustomerOption and LastPullRecords. The application uploads customer records from an Excel file. The Excel file is converted into a DataTable, and then this stored procedure is called for each row of the DataTable.
There is a trigger on the CustomerFile table. In the stored procedure we first check whether FirstName, LastName, StreetAddress, City, State and Zip are unchanged; if so, we update only the officer details, otherwise we update all details and set the action to update ('U'), which sends the record to a third party the next day. Second, if the customer is not present, we add it and set the action to add ('A'). After that, if data is available, we update two other tables based on the customer record.
ALTER PROC [dbo].[InsertUpdateRecords]
(
@FullName NVARCHAR(50) = NULL,
@FirstName NVARCHAR(50) = NULL,
@LastName NVARCHAR(50) = NULL,
@StreetAddress NVARCHAR(50) = NULL,
@City NVARCHAR(50) = NULL,
@State NVARCHAR(50) = NULL,
@Zip INT = NULL,
@SSN NVARCHAR(50) = NULL,
@Email NVARCHAR(150) = NULL,
@OfficerEmail NVARCHAR(50) = NULL,
@OfficerId NVARCHAR(50) = NULL,
@OfficerName NVARCHAR(50) = NULL,
@Option NVARCHAR(50) = NULL,
@DownloadedFromFTP BIT = NULL,
@LastPullDate DATETIME = NULL
)
AS
BEGIN
DECLARE @IsActive BIT
DECLARE @FileID INT
DECLARE @CompanyId INT
SET @IsActive=1
--Get Company ID based on OfficerID
Select @CompanyId=CompanyId from AspNetUsers where Email=@OfficerEmail
select top (1) @FileID=cf.fileId from CustomerFile cf inner join AspNetUsers usr on usr.Id=cf.OfficerId where cf.SSN = @SSN and usr.CompanyId=@CompanyId order by cf.FileReceivedDate, cf.FileId desc
if ((@FileID<>'') or (@FileID is not null))
Begin
-- COMPARE IF ONLY OFFICER IS CHANGED
If EXISTS(select 1 from CustomerFile where FirstName=@FirstName and LastName=@LastName and StreetAddress=@StreetAddress and City=@City and State=@State and Zip=@Zip
and FileId=@FileID
)
BEGIN
UPDATE top (1) CustomerFile SET OfficerEmail=@OfficerEmail,
OfficerName=@OfficerName,Email=@Email,
----FileModifiedDate=GETDATE(),
DownloadedFromFTP=@DownloadedFromFTP,IsActive=@IsActive,OfficerId=@OfficerId
WHERE FileId=@FileID
END
Else
BEGIN
Update top (1) CustomerFile set FullName=@FullName, FirstName=@FirstName, LastName=@LastName, StreetAddress=@StreetAddress, City=@City,State=@State,Zip=@Zip,
OfficerEmail=@OfficerEmail,OfficerName=@OfficerName,Email=@Email,
--FileReceivedDate=GETDATE(),
FileModifiedDate=GETDATE(),DownloadedFromFTP=@DownloadedFromFTP,IsActive=@IsActive,Action='U'
where FileId=@FileID
END
End
Else
BEGIN
declare @IdentityOutput table ( ID int )
INSERT INTO CustomerFile(FullName,FirstName,LastName,StreetAddress,City,State,Zip,SSN,OfficerEmail,OfficerId,OfficerName,
FileReceivedDate,DownloadedFromFTP,IsActive,Action,Email
)
output inserted.FileId into @IdentityOutput
VALUES(@FullName,@FirstName,@LastName,@StreetAddress,@City,
@State,@Zip,@SSN,@OfficerEmail,@OfficerId,@OfficerName,
GETDATE(),@DownloadedFromFTP,@IsActive,'A',@Email)
select @FileID = (select ID from @IdentityOutput)
END
---------------------------------------------------------------------------
-- Set Option
---------------------------------------------------------------------------
if ((@Option<>'') or (@Option is not null))
Begin
if exists(select 1 from CustomerOption where CustomerFileID=@FileID)
Begin
Update CustomerOption Set [Option]=@Option where CustomerFileID=@FileID --Option is a reserved word, so it needs brackets
End
else
Begin
Insert into CustomerOption (CustomerFileID, [Option]) values (@FileID, @Option)
End
End
---------------------------------------------------------------------------
-- Insert Last Pull if exist
---------------------------------------------------------------------------
if ((@LastPullDate<>'') or (@LastPullDate is not null) or CONVERT(varchar(10),@LastPullDate,101)!='01/01/1900')
Begin
if ((@FileID<>'') OR (@FileID<>0))
Begin
if exists (Select * from LastPullRecords where CustomerId=@FileID and CompanyId=@CompanyId)
Begin
Update LastPullRecords
set LastPullDate=@LastPullDate,
IsSelfPull=1,
ModifiedDateTime=getdate()
where CustomerId=@FileID and CompanyId=@CompanyId
End
ELSE
Begin
Insert into LastPullRecords
(CompanyId,CustomerId,LastPullDate,IsSelfPull,IsRTS,CreatedDateTime)
values
(@CompanyId,@FileID,@LastPullDate,1,0,getdate())
End
End
End
END
The problem is that there could be thousands of records, and this query takes a very long time to upload them all. For testing I uploaded only 10K records and it took 13 minutes.
I tried sending the DataTable as a parameter by defining a custom user-defined table type for it, using a while loop, then a cursor, but none of these experiments made any difference.
Please suggest an optimized way to upload these records so it takes less time.
I think you can choose from two options, if you want to keep using .NET for this task:
use SqlBulkCopy to copy the data into an intermediate table (or maybe you can even write the insert without an intermediate table), then write one command to insert all new data and a second command to update all changed data; a sketch of those two commands follows below.
use Table-Valued Parameters instead of SqlBulkCopy and intermediate tables (available since SQL Server 2008).
Note that SqlBulkCopy can be slower than Table-Valued Parameters for small data sets, but faster for big data sets.
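As a rough illustration of the first option, the two set-based commands over a staging table might look like this. Staging_CustomerFile is a hypothetical table that SqlBulkCopy would load in one shot; the flagging logic mirrors the original procedure, but it matches on SSN alone for simplicity (the original also filters by company) and omits the officer-only branch:
--one UPDATE for every customer that already exists (flagged 'U' for the third-party feed)
UPDATE cf
SET cf.FullName = s.FullName, cf.FirstName = s.FirstName, cf.LastName = s.LastName,
cf.StreetAddress = s.StreetAddress, cf.City = s.City, cf.State = s.State, cf.Zip = s.Zip,
cf.OfficerEmail = s.OfficerEmail, cf.OfficerName = s.OfficerName, cf.Email = s.Email,
cf.FileModifiedDate = GETDATE(), cf.DownloadedFromFTP = s.DownloadedFromFTP,
cf.IsActive = 1, cf.Action = 'U'
FROM CustomerFile cf
INNER JOIN Staging_CustomerFile s ON s.SSN = cf.SSN --hypothetical staging table
--one INSERT for every customer that does not exist yet (flagged 'A')
INSERT INTO CustomerFile (FullName, FirstName, LastName, StreetAddress, City, State, Zip, SSN,
OfficerEmail, OfficerId, OfficerName, FileReceivedDate, DownloadedFromFTP, IsActive, Action, Email)
SELECT s.FullName, s.FirstName, s.LastName, s.StreetAddress, s.City, s.State, s.Zip, s.SSN,
s.OfficerEmail, s.OfficerId, s.OfficerName, GETDATE(), s.DownloadedFromFTP, 1, 'A', s.Email
FROM Staging_CustomerFile s
WHERE NOT EXISTS (SELECT 1 FROM CustomerFile cf WHERE cf.SSN = s.SSN)
Two statements replace thousands of per-row procedure calls; the CustomerOption and LastPullRecords updates can be rewritten the same way.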

Do stored procedures lock tables/rows?

Quite a simple question. In SQL 2008 if I have a stored procedure (see below) do I run the risk of a race condition between the first two statements or does the stored procedure put a lock on the things it touches like transactions do?
ALTER PROCEDURE [dbo].[usp_SetAssignedTo]
-- Add the parameters for the stored procedure here
@Server varchar(50),
@User varchar(50),
@UserPool varchar(50)
AS
BEGIN
SET NOCOUNT ON;
Declare @ServerUser varchar(50)
-- Find a free record
SELECT top 1 @ServerUser = UserName
from ServerLoginUsers
where AssignedTo is null and [TsServer] = @Server
--Set the free record to the user
Update ServerLoginUsers
set AssignedTo = @User, AssignedToDate = getdate(), SourcePool = @UserPool
where [TsServer] = @Server and UserName = @ServerUser
--report the record back if it was updated. Null if it was not available.
select *
from ServerLoginUsers
where [TsServer] = @Server
and UserName = @ServerUser
and AssignedTo = @User
END
You could get a race condition.
It can be done in one statement:
You can assign in an UPDATE
The lock hints allow another process to skip this row
The OUTPUT clause returns data to the caller
Try this... (edit: holdlock removed)
Update TOP (1) ServerLoginUsers WITH (ROWLOCK, READPAST)
SET
AssignedTo = @User, AssignedToDate = getdate(), SourcePool = @UserPool
OUTPUT INSERTED.* --OUTPUT goes after the SET clause, before WHERE
WHERE
AssignedTo is null and [TsServer] = @Server -- not needed -> and UserName = @ServerUser
If not, you may need a separate select
Update TOP (1) ServerLoginUsers WITH (ROWLOCK, READPAST)
SET
-- yes, assign in an update
@ServerUser = UserName,
-- write
AssignedTo = @User, AssignedToDate = getdate(), SourcePool = @UserPool
OUTPUT INSERTED.*
WHERE
AssignedTo is null and [TsServer] = @Server -- not needed -> and UserName = @ServerUser
SELECT ...
Please see this for more: SQL Server Process Queue Race Condition
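Putting it together, the original procedure could collapse to a single-statement version along these lines (a sketch assembled from the answer above, not a tested drop-in replacement):
ALTER PROCEDURE [dbo].[usp_SetAssignedTo]
@Server varchar(50),
@User varchar(50),
@UserPool varchar(50)
AS
BEGIN
SET NOCOUNT ON;
--claim one free record and return it in a single atomic statement;
--READPAST lets concurrent callers skip the row this statement has locked
UPDATE TOP (1) ServerLoginUsers WITH (ROWLOCK, READPAST)
SET AssignedTo = @User, AssignedToDate = getdate(), SourcePool = @UserPool
OUTPUT INSERTED.*
WHERE AssignedTo is null and [TsServer] = @Server;
--an empty result set means no free record was available
END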
