Thursday, September 15, 2011

Split functions

1.
CREATE FUNCTION dbo.Split(@String varchar(8000), @Delimiter char(1))
RETURNS @temptable TABLE (items varchar(8000))
AS
BEGIN
    DECLARE @idx int
    DECLARE @slice varchar(8000)
    SET @idx = 1
    IF @String IS NULL OR LEN(@String) < 1 RETURN
    WHILE @idx != 0
    BEGIN
        SET @idx = CHARINDEX(@Delimiter, @String)
        IF @idx != 0
            SET @slice = LEFT(@String, @idx - 1)   -- text before the next delimiter
        ELSE
            SET @slice = @String                   -- last (or only) piece
        IF LEN(@slice) > 0
            INSERT INTO @temptable(items) VALUES (@slice)
        SET @String = RIGHT(@String, LEN(@String) - @idx)
        IF LEN(@String) = 0 BREAK
    END
    RETURN
END
GO
select top 10 * from dbo.split('Chennai,Bangalore,Mumbai',',')

2.
CREATE FUNCTION [dbo].[fn_Split_Up_Ids] (@Param_Ids varchar(500))
RETURNS @Id_Table TABLE (IDField int)
AS
BEGIN
    IF (LEN(@Param_Ids) <= 0)
        RETURN
    DECLARE @CommaPos smallint
    SET @CommaPos = CHARINDEX(',', RTRIM(LTRIM(@Param_Ids)))
    IF @CommaPos = 0
        INSERT INTO @Id_Table
        VALUES (CONVERT(BIGINT, RTRIM(LTRIM(@Param_Ids))))
    ELSE
    BEGIN
        WHILE LEN(@Param_Ids) > 1
        BEGIN
            SET @CommaPos = CHARINDEX(',', RTRIM(LTRIM(@Param_Ids)))
            INSERT INTO @Id_Table
            VALUES (CONVERT(INT, SUBSTRING(RTRIM(LTRIM(@Param_Ids)), 1, @CommaPos - 1)))
            SET @Param_Ids = SUBSTRING(RTRIM(LTRIM(@Param_Ids)), @CommaPos + 1, LEN(RTRIM(LTRIM(@Param_Ids))))
            SET @CommaPos = CHARINDEX(',', RTRIM(LTRIM(@Param_Ids)))
            IF @CommaPos = 0
            BEGIN
                INSERT INTO @Id_Table VALUES (CONVERT(INT, RTRIM(LTRIM(@Param_Ids))))
                BREAK
            END
        END
    END
    RETURN
END
GO
select * from dbo.fn_Split_Up_Ids('1,2,3')

Error Handling in SP

BEGIN CATCH
   DECLARE @ErrorMessage NVARCHAR(4000)
   DECLARE @ErrorSeverity INT
   DECLARE @ErrorState INT
   DECLARE @ErrorCode INT
   SELECT
    @ErrorMessage = ERROR_MESSAGE(),
    @ErrorSeverity = ERROR_SEVERITY(),
    @ErrorState = ERROR_STATE(),
    @ErrorCode = ERROR_NUMBER()

   RAISERROR (@ErrorMessage, @ErrorSeverity, @ErrorState)   -- re-raise the original error text with its severity and state
   RETURN 1
END CATCH
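
A minimal sketch of where the block above sits inside a procedure; the procedure name, table, and UPDATE statement are placeholders, not part of the original post:

CREATE PROCEDURE dbo.usp_SampleWithErrorHandling   -- hypothetical procedure name
AS
BEGIN
    BEGIN TRY
        -- any statement that can fail goes here (placeholder)
        UPDATE dbo.SomeTable SET SomeColumn = 1 WHERE SomeId = 1
    END TRY
    BEGIN CATCH
        -- the CATCH body shown above goes here
        DECLARE @ErrorMessage NVARCHAR(4000), @ErrorSeverity INT, @ErrorState INT
        SELECT @ErrorMessage = ERROR_MESSAGE(), @ErrorSeverity = ERROR_SEVERITY(), @ErrorState = ERROR_STATE()
        RAISERROR (@ErrorMessage, @ErrorSeverity, @ErrorState)
        RETURN 1
    END CATCH
END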

Finding Dependent Tables

DECLARE @TempDepends Table(Name varchar(200),typeName varchar(20),updated varchar(10),selected varchar(10),columnName varchar(50))
INSERT INTO @TempDepends EXEC sp_depends @objname = N'vwAccount' ;
SELECT distinct Name FROM @TempDepends
DECLARE @TempDepend Table(oTypeName varchar(20),oObjName varchar(200),oOwner varchar(10),oSequence varchar(10))
INSERT INTO @TempDepend EXEC sp_MSdependencies N'Account', null, 1315327
SELECT distinct oObjName FROM @TempDepend
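
On SQL Server 2008 and later, the documented dependency DMVs can be used instead of sp_depends / sp_MSdependencies; a minimal sketch using the same object names as above:

-- What vwAccount depends on
SELECT referenced_schema_name, referenced_entity_name
FROM sys.dm_sql_referenced_entities(N'dbo.vwAccount', 'OBJECT')

-- What depends on Account
SELECT referencing_schema_name, referencing_entity_name
FROM sys.dm_sql_referencing_entities(N'dbo.Account', 'OBJECT')
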
-- Empty user tables (zero rows)
SELECT
    [TableName] = so.name,
    [RowCount] = MAX(si.rows)
FROM
    sysobjects so
    INNER JOIN sysindexes si ON si.id = so.id
WHERE
    so.xtype = 'U'
GROUP BY
    so.name
HAVING MAX(si.rows) = 0
ORDER BY
    2 DESC

DB space Findings

1.
xp_fixeddrives

2.
-- Run in the context of the database you want to check (dbo.sysfiles is per-database)
SELECT
    a.FILEID,
    CONVERT(decimal(12,2), ROUND(a.size/128.000, 2)) as [FILESIZEINMB],          -- size is in 8 KB pages; /128 converts to MB
    CONVERT(decimal(12,2), ROUND(FILEPROPERTY(a.name,'SpaceUsed')/128.000, 2)) as [SPACEUSEDINMB],
    CONVERT(decimal(12,2), ROUND((a.size - FILEPROPERTY(a.name,'SpaceUsed'))/128.000, 2)) as [FREESPACEINMB],
    a.name as [LOGICALFILENAME],                                                  -- sysfiles.name is the logical file name, not the database name
    a.FILENAME as [FILENAME]
FROM
   dbo.sysfiles a
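
On SQL Server 2005 and later, the same figures are available from the sys.database_files catalog view; a minimal equivalent sketch:

SELECT
    f.file_id,
    CONVERT(decimal(12,2), ROUND(f.size/128.000, 2)) AS [FILESIZEINMB],
    CONVERT(decimal(12,2), ROUND(FILEPROPERTY(f.name,'SpaceUsed')/128.000, 2)) AS [SPACEUSEDINMB],
    CONVERT(decimal(12,2), ROUND((f.size - FILEPROPERTY(f.name,'SpaceUsed'))/128.000, 2)) AS [FREESPACEINMB],
    f.name AS [LOGICALFILENAME],
    f.physical_name AS [FILENAME]
FROM sys.database_files f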

Queries for Checking OLAP instances

select * from $SYSTEM.DISCOVER_SESSIONS where session_current_database = 'orgCube'

select * from $system.discover_connections

select * from $system.discover_commands
select * from $system.discover_object_memory_usage
select * from $system.discover_object_activity
select * from $system.discover_locks

Providing User permissions on DB

IF EXISTS (SELECT * FROM sys.database_principals WHERE name = 'alias')
    DROP USER [alias]
GO
CREATE USER [alias] FOR LOGIN [alias]
GO
EXEC sp_addrolemember 'db_datareader', 'alias'
GO

Send mail with an HTML table in the message body

declare @AccountTable Table(RowIds int Identity(1,1),AccountName varchar(50),ModifiedDate datetime,ExpiryDate datetime)
declare @cntAccounts int
declare @flgAccounts int
declare @AccountName varchar(50)
declare @ModifiedDate datetime
declare @ExpiryDate datetime
declare @BodyText varchar(max)
set @flgAccounts=1
Insert into @AccountTable select * from dbo.Passwords where  DATEDIFF(DAY,Getdate(),expiry_date) <=15
select @cntAccounts=COUNT(1) from @AccountTable
if @cntAccounts > 0   -- send mail when at least one account is expiring
begin
DECLARE @AdminEmails NVARCHAR(1000)
SELECT @AdminEmails = 'jay@micro.com'
DECLARE @Subject NVARCHAR(500)
SELECT @Subject = 'Expiry Date of Accounts'
set @BodyText =
            N'<table border="1" cellpadding="2" cellspacing="0" >' +
            N'<tr bgcolor = "#cccccc"><th>Account Name</th><th>ModifiedDate</th><th>Expiry Date</th></tr>'
      
while (@flgAccounts <= @cntAccounts)
begin
      select @AccountName=accountName,@ModifiedDate=ModifiedDate,@ExpiryDate=ExpiryDate from @AccountTable where RowIds=@flgAccounts
      set @flgAccounts=@flgAccounts+1
      set @BodyText =@BodyText+ N'<tr bgcolor = ""><td>'+convert(varchar(50),@AccountName)+'</td>' +
            N'<td>'+convert(varchar(10),@ModifiedDate,102)+'</td>'+
            N'<td>'+convert(varchar(10),@ExpiryDate,102)+'</td></tr>'
end  
set @BodyText=@BodyText+'</table>'
EXEC msdb.dbo.sp_send_dbmail
@profile_name = 'Default'
,@body_format = 'HTML'
,@importance = 'High'
,@recipients = @AdminEmails
,@subject = @Subject
,@body = @BodyText
end

Session ID, command, and percentage complete in SQL Server

1.
SELECT command,
            start_time,
            percent_complete,
                  CAST(((DATEDIFF(s,start_time,GetDate()))/3600) as varchar) + ' hour, '
                  + CAST((DATEDIFF(s,start_time,GetDate())%3600)/60 as varchar) + 'min, '
                  + CAST((DATEDIFF(s,start_time,GetDate())%60) as varchar) + ' sec' as running_time,
            CAST((estimated_completion_time/3600000) as varchar) + ' hour, '
                  + CAST((estimated_completion_time %3600000)/60000 as varchar) + 'min, '
                  + CAST((estimated_completion_time %60000)/1000 as varchar) + ' sec' as est_time_to_go,
            dateadd(second,estimated_completion_time/1000, getdate()) as est_completion_time
                  ,s.text
FROM sys.dm_exec_requests r
CROSS APPLY sys.dm_exec_sql_text(r.sql_handle) s
WHERE r.command like '%Restore%'

2.
select session_id, percent_complete from sys.dm_exec_requests

3.
select r.session_id
,status
,substring(qt.text, (r.statement_start_offset/2) + 1,   -- offsets are 0-based byte positions; SUBSTRING is 1-based
((case when r.statement_end_offset = -1
then len(convert(nvarchar(max), qt.text)) * 2
else r.statement_end_offset end - r.statement_start_offset)/2) + 1)
as query_text --- this is the statement executing right now
,qt.dbid
,qt.objectid
,r.cpu_time
,r.total_elapsed_time
,r.reads
,r.writes
,r.logical_reads
,r.scheduler_id
from sys.dm_exec_requests r
cross apply sys.dm_exec_sql_text(sql_handle) as qt
where r.session_id > 50
order by r.scheduler_id, r.status, r.session_id;

Row count and size of all tables in a database

1. select '['+SCHEMA_NAME(t.[SCHEMA_ID])+'].['+t.NAME+']' AS [fulltable_name],
SCHEMA_NAME(t.[SCHEMA_ID]) as [schema_name],t.NAME as [table_name] ,i.rows
from sys.tables t
INNER JOIN sysindexes i ON (t.object_id = i.id AND i.indid < 2)order by t.NAME

2.
DECLARE @TABLESSIZE TABLE (TableId INT identity(1,1),TableName varchar(500),RowCounts int,Reserved varchar(50),Data varchar(50),index_size varchar(50),unused varchar(50))
DECLARE @TABLELIST TABLE (TableId INT identity(1,1),TableName varchar(500))
DECLARE @TABLENAME VARCHAR(500)
DECLARE @FLGTAB INT
DECLARE @CNTTAB INT
 INSERT INTO @TABLELIST select TABLE_NAME from INFORMATION_SCHEMA.TABLES where TABLE_TYPE='BASE TABLE' and TABLE_NAME like 'TOP%'   -- example filter: only tables whose names start with 'TOP'
SET @FLGTAB=1
SELECT @CNTTAB=COUNT(1) FROM @TABLELIST
 WHILE(@FLGTAB<=@CNTTAB)
BEGIN
SELECT @TABLENAME=TableName from @TABLELIST WHERE TableId=@FLGTAB
insert into @TABLESSIZE EXEC sp_spaceused @TABLENAME
SET @FLGTAB=@FLGTAB+1
END
SELECT * FROM @TABLESSIZE
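
A set-based alternative to the sp_spaceused loop, sketched against sys.dm_db_partition_stats (available on SQL Server 2005 and later; sizes are in KB):

SELECT
    s.name AS [schema_name],
    t.name AS [table_name],
    SUM(CASE WHEN p.index_id IN (0,1) THEN p.row_count ELSE 0 END) AS [row_counts],
    SUM(p.reserved_page_count) * 8 AS [reserved_kb],
    SUM(p.used_page_count) * 8 AS [used_kb]
FROM sys.dm_db_partition_stats p
INNER JOIN sys.tables t ON t.object_id = p.object_id
INNER JOIN sys.schemas s ON s.schema_id = t.schema_id
GROUP BY s.name, t.name
ORDER BY t.name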

Code to return a single-row result from a multiple-row table

DECLARE @SQLExec varchar(MAX)
SET @SQLExec = NULL
SELECT @SQLExec = COALESCE(@SQLExec + ''', DISK = ', '') + '''' + BackupFileName
FROM PublishDetails WITH (NOLOCK)
WHERE databasename = 'DWStage'
PRINT @SQLExec
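
The same variable-concatenation trick in a self-contained form; the table variable and sample file names below are made up for illustration, the PublishDetails query above works the same way:

DECLARE @Files TABLE (BackupFileName varchar(200))
INSERT INTO @Files VALUES ('D:\Backup\DWStage_1.bak'), ('D:\Backup\DWStage_2.bak')

DECLARE @List varchar(MAX)
SELECT @List = COALESCE(@List + ', ', '') + BackupFileName FROM @Files
PRINT @List   -- D:\Backup\DWStage_1.bak, D:\Backup\DWStage_2.bak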

Performance Considerations for SSIS

1. Set the connection's network packet size to 32767.
2. Consider using NOLOCK on source tables.
3. Select only the columns you need in the source query.
4. Use a shared lookup cache in the Lookup transform.
5. Consider whether each transformation is synchronous or asynchronous.
6. Keep data types as narrow as possible for less memory usage.
7. Do not perform excessive casting.
8. Sort: push sorting into the source query when possible; use the Sort transform only for sorting cross-database joins.
9. Use MERGE instead of the Slowly Changing Dimension (SCD) transform.
10. Use GROUP BY in the source query instead of the Aggregate transform.
11. Use INSERT INTO ... SELECT instead of a data flow when everything runs on a single SQL Server instance.
12. Weigh unnecessary delta detection against a simple full reload.
13. Use the SQL Server destination when the package runs on the destination server.
14. A commit size of 0 is fastest.
15. Drop some indexes based on load growth %:
    clustered indexes: +inf% (don't drop)
    single nonclustered index: >= ~100%
    multiple nonclustered indexes: >= ~10% (varies)
16. TRUNCATE, not DELETE.
17. In “Table or view” access mode, the OLE DB source adapter calls OpenRowset to get column metadata at the Validate phase. The data returned from OpenRowset includes more than just column metadata, so the adapter issues a “SET ROWCOUNT 1” statement to retrieve the column metadata. “SET ROWCOUNT 1” causes an inefficient execution plan (i.e. Nested Loop) to be cached and later reused in the subsequent Execute phase.
In “SQL command” access mode, the OLE DB source adapter calls “sp_prepare” to get column metadata at the Validate phase, and “sp_execute” at the Execute phase. The execution plan used at the Execute phase is a Hash Match, which is more efficient than a Nested Loop.
18. Avoid many small buffers. Tweak the values for DefaultMaxBufferRows and DefaultMaxBufferSize to get as many records into a buffer as possible. To optimize the Pipeline, the goal is to pass as many records as possible through the fewest number of buffers, especially when dealing with large data volumes.
19. DefaultMaxBufferRows and DefaultMaxBufferSize are configured separately for each Data Flow task. When integrating data from multiple data sources using a single Data Flow task, these two settings will only impact the data source components and transformations in that task.
20. Do not increase buffer size to the point where paging to disk starts to occur.
21. Design the package so that it does a full pull of data only the first time or on demand; from then on it should do an incremental pull. This greatly reduces the volume of data loaded, especially when volumes are likely to increase over the lifecycle of an application. For this purpose, use the CDC (Change Data Capture) feature of SQL Server 2008; for earlier versions of SQL Server, implement the incremental-pull logic yourself (a CDC sketch follows this list).
22. It is recommended to set two of the data flow task properties, BufferTempStoragePath and BLOBTempStoragePath, instead of using the default values. The locations provided should be on fast drives. It is also recommended that BufferTempStoragePath and BLOBTempStoragePath point to drives on separate spindles in order to maximize I/O throughput.
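
Referenced in item 21: a minimal sketch of enabling CDC on a source table; the database, schema, and table names are placeholders, not from the original post:

USE SourceDB    -- placeholder database
GO
EXEC sys.sp_cdc_enable_db
GO
EXEC sys.sp_cdc_enable_table
    @source_schema = N'dbo',
    @source_name   = N'Account',    -- placeholder table
    @role_name     = NULL           -- no gating role
GO
-- Changes are then read via the generated cdc.fn_cdc_get_all_changes_dbo_Account function,
-- between LSNs obtained from sys.fn_cdc_get_min_lsn('dbo_Account') and sys.fn_cdc_get_max_lsn().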

Drop duplicate records from table

1.
SELECT DISTINCT * INTO tab2 FROM tab1 -- distinct of all columns
DROP TABLE tab1
EXEC sp_rename 'tab2','tab1'

2.
WITH T1 AS (SELECT ROW_NUMBER() OVER (PARTITION BY ID, FNAME, LNAME ORDER BY ID) AS RNUM FROM DUPLICATE)
DELETE FROM T1 WHERE RNUM > 1
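
A self-contained illustration of the second approach; the temp table and sample rows are made up for demonstration:

CREATE TABLE #DUPLICATE (ID int, FNAME varchar(50), LNAME varchar(50))
INSERT INTO #DUPLICATE VALUES (1, 'Jay', 'Kumar'), (1, 'Jay', 'Kumar'), (2, 'Ram', 'Prasad')

;WITH T1 AS
(
    SELECT ROW_NUMBER() OVER (PARTITION BY ID, FNAME, LNAME ORDER BY ID) AS RNUM
    FROM #DUPLICATE
)
DELETE FROM T1 WHERE RNUM > 1    -- removes the second copy of (1, 'Jay', 'Kumar')

SELECT * FROM #DUPLICATE         -- two rows remain
DROP TABLE #DUPLICATE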