
Automate the database restore procedure - SQL Server


Automated Database restore - SQL Server 2005/2008/R2



Database Refresh Procedure



Make sure that the stored procedure “[dbo].[DB_Restore]” is available in the master database of the target instance.

Connect to the target instance and open a new query window in SSMS.



Get the backup file path and the name of the database to which the backup needs to be restored, and

execute the stored procedure as below:

USE MASTER
GO
EXECUTE [DB_Restore]
@DBName = 'Test',
@BackupFile_Path = 'C:\U_World\Shared\Test_bkp.bak'

Output:




Check the database to make sure that it is online.


Stored Procedure

USE [master]
GO
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO


IF OBJECT_ID('[dbo].[DB_Restore]') IS NOT NULL
BEGIN
DROP PROC [dbo].[DB_Restore];
END
GO

/*******************************************************************/
--Proc_Type      Date_Modified   Created_By   Description
--Automation     19-Oct-2012     UDAY         Initial
--
--@DBName          : Database name to be restored
--@BackupFile_Path : Fully qualified backup file name
--EXECUTE [DB_Restore] 'Test', 'C:\U_World\Shared\Test_bkp.bak'
/*******************************************************************/
CREATE PROCEDURE [dbo].[DB_Restore] (
@DBName SYSNAME,
@BackupFile_Path VARCHAR(255))
AS
BEGIN
SET NOCOUNT ON
DECLARE @DBFilename VARCHAR(100),
@DBLogFilename VARCHAR(100),
@DBDataFile VARCHAR(100),
@v_strDBLogFile VARCHAR(100),
@ExecSQL NVARCHAR(1000),
@ExecSQL1 NVARCHAR(1000),
@MoveSQL NVARCHAR(4000),
@ReplaceFlag NVARCHAR(50),
@Temp NVARCHAR(1000),
@ListSQL NVARCHAR(4000),
@ServerVersion NVARCHAR(20),
@RestorePath VARCHAR(500)

BEGIN TRY

   /**** Set @ReplaceFlag depending on whether the database already exists ****/
SET @ReplaceFlag = ''

IF EXISTS (SELECT name FROM sys.databases WHERE name = @DBName)
BEGIN
SET @ReplaceFlag = ', REPLACE'
END


/*** Create table ##FILE_LIST to hold the file info from the backup file ***/
SET @ListSQL = ''
SET @ListSQL = @ListSQL + 'IF (EXISTS (SELECT 1 FROM TEMPDB..SYSOBJECTS WHERE NAME = ''##FILE_LIST''))'
SET @ListSQL = @ListSQL + ' BEGIN'
SET @ListSQL = @ListSQL + '   DROP TABLE ##FILE_LIST '
SET @ListSQL = @ListSQL + ' END '

SET @ListSQL = @ListSQL + ' CREATE TABLE ##FILE_LIST ('
SET @ListSQL = @ListSQL + '   LogicalName VARCHAR(64),'
SET @ListSQL = @ListSQL + '   PhysicalName VARCHAR(130),'
SET @ListSQL = @ListSQL + '   [Type] VARCHAR(1),'
SET @ListSQL = @ListSQL + '   FileGroupName VARCHAR(64),'
SET @ListSQL = @ListSQL + '   Size DECIMAL(20, 0),'
SET @ListSQL = @ListSQL + '   MaxSize DECIMAL(25,0),'
SET @ListSQL = @ListSQL + '   FileID BIGINT,'
SET @ListSQL = @ListSQL + '   CreateLSN DECIMAL(25,0),'
SET @ListSQL = @ListSQL + '   DropLSN DECIMAL(25,0),'
SET @ListSQL = @ListSQL + '   UniqueID UNIQUEIDENTIFIER,'
SET @ListSQL = @ListSQL + '   ReadOnlyLSN DECIMAL(25,0),'
SET @ListSQL = @ListSQL + '   ReadWriteLSN DECIMAL(25,0),'
SET @ListSQL = @ListSQL + '   BackupSizeInBytes DECIMAL(25,0),'
SET @ListSQL = @ListSQL + '   SourceBlockSize INT,'
SET @ListSQL = @ListSQL + '   filegroupid INT,'
SET @ListSQL = @ListSQL + '   loggroupguid UNIQUEIDENTIFIER,'
SET @ListSQL = @ListSQL + '   differentialbaseLSN DECIMAL(25,0),'
SET @ListSQL = @ListSQL + '   differentialbaseGUID UNIQUEIDENTIFIER,'
SET @ListSQL = @ListSQL + '   isreadonly BIT,'
SET @ListSQL = @ListSQL + '   ispresent BIT'

SELECT @ServerVersion = CAST(SERVERPROPERTY('PRODUCTVERSION') AS NVARCHAR)

IF @ServerVersion LIKE '10.%'
BEGIN
-- SQL Server 2008/R2 adds a TDE thumbprint column to the RESTORE FILELISTONLY output
SET @ListSQL = @ListSQL + ', TDEThumbprint VARBINARY(32)'
END

SET @ListSQL = @ListSQL + ')'
EXEC (@ListSQL)

INSERT INTO ##FILE_LIST EXEC ('RESTORE FILELISTONLY FROM DISK = ''' + @BackupFile_Path + '''')
ALTER TABLE ##FILE_LIST ADD D_File VARCHAR(150)

/*** Get the default file location. It helps when the database does not already exist ***/
SELECT TOP (1) @RestorePath = physical_name FROM sys.master_files;

SET @RestorePath = REPLACE(@RestorePath, RIGHT(@RestorePath, CHARINDEX('\', REVERSE(@RestorePath)) - 1), '')

/*** Capture information of the data files - MDF, NDF and LDF ***/
/*** We have two cases: 1. The database already exists in the instance ***/
/*** 2. New database restore (ELSE) ***/
IF EXISTS (SELECT 1 FROM SYS.SYSDATABASES WHERE NAME = @DBName)
BEGIN
DECLARE @DataFile VARCHAR(150),
@LogFile VARCHAR(150);

SELECT TOP 1 @DataFile = physical_name
FROM sys.master_files
WHERE Database_ID = DB_ID(@DBName) AND TYPE = 0

SELECT TOP 1 @LogFile = physical_name
FROM sys.master_files
WHERE Database_ID = DB_ID(@DBName) AND TYPE = 1

UPDATE ##FILE_LIST SET D_File = @DataFile WHERE TYPE = 'D' AND FileID = 1

UPDATE ##FILE_LIST SET D_File = LEFT(@DataFile, LEN(@DataFile) - 4) + '_' + CONVERT(VARCHAR(5), FILEID) + '.ndf'
WHERE TYPE = 'D' AND FileID > 1

UPDATE ##FILE_LIST SET D_File = LEFT(@LogFile, LEN(@LogFile) - 4) + '_' + CONVERT(VARCHAR(5), FILEID) + '_log.ldf' WHERE TYPE = 'L'

END
ELSE
BEGIN
UPDATE ##FILE_LIST SET D_File = @RestorePath + @DBName + '.mdf'
WHERE TYPE = 'D' AND FileID = 1

UPDATE ##FILE_LIST SET D_File = @RestorePath + @DBName + '_' + CONVERT(VARCHAR(5), FILEID) + '.ndf'
WHERE TYPE = 'D' AND FileID > 1

-- For a new database restore, also map the log file(s)
UPDATE ##FILE_LIST SET D_File = @RestorePath + @DBName + '_' + CONVERT(VARCHAR(5), FILEID) + '_log.ldf'
WHERE TYPE = 'L'

END

   /*** Loop through all the files: mdf, ndf and ldf ****/
   /*** Form a string: "MOVE" the MDF, MOVE the LDF etc. ***/
DECLARE CurFiles CURSOR FOR
SELECT 'MOVE N''' + LogicalName + ''' TO N''' + D_File + ''''
FROM ##FILE_LIST

SET @MoveSQL = ''

OPEN CurFiles
FETCH NEXT FROM CurFiles INTO @Temp

WHILE @@Fetch_Status = 0
BEGIN
SET @MoveSQL = @MoveSQL + @Temp + ', '
FETCH NEXT FROM CurFiles INTO @Temp
END

CLOSE CurFiles
DEALLOCATE CurFiles

/*** Kill all connections to the database before restore ***/
PRINT 'Killing active connections to the database : ' + @DBName + CHAR(10)

SET @ExecSQL = ''
SELECT  @ExecSQL = @ExecSQL + 'kill ' + CONVERT(CHAR(10), spid) + ' '
FROM    master.dbo.sysprocesses
WHERE   DB_NAME(dbid) = @DBName AND dbid <> 0 AND spid <> @@spid
AND status <> 'background'
AND status IN ('runnable', 'sleeping')

EXEC (@ExecSQL)

/*** Restore Database ***/
PRINT 'Restoring "' + @DBName + '" database from "' + @BackupFile_Path + '"' + CHAR(10)

SET @ExecSQL = 'RESTORE DATABASE [' + @DBName + ']'
SET @ExecSQL = @ExecSQL + ' FROM DISK = ''' + @BackupFile_Path + ''''
SET @ExecSQL = @ExecSQL + ' WITH FILE = 1,'
SET @ExecSQL = @ExecSQL + @MoveSQL
SET @ExecSQL = @ExecSQL + ' NOREWIND, '
SET @ExecSQL = @ExecSQL + ' NOUNLOAD, STATS=10 '
SET @ExecSQL = @ExecSQL + @ReplaceFlag

PRINT '/**************************************************/'
PRINT '/****** Start Restore Operation *******************/'
PRINT '/**************************************************/'

EXEC (@ExecSQL)

PRINT CHAR(10)
PRINT '/**************************************************/'
PRINT '/****** Restore Operation Completed ***************/'
PRINT '/**************************************************/'

END TRY
BEGIN CATCH
PRINT 'Restore failed with error: ' + ERROR_MESSAGE()

IF (EXISTS (SELECT 1 FROM TEMPDB..sysobjects WHERE NAME = '##FILE_LIST'))
BEGIN
DROP TABLE ##FILE_LIST
END
RETURN
END CATCH
/*** Drop the temp table ***/
IF (EXISTS (SELECT 1 FROM TEMPDB..sysobjects WHERE NAME = '##FILE_LIST'))
BEGIN
   DROP TABLE ##FILE_LIST
END

PRINT CHAR(10) + 'Database "' + @DBName + '" Restored Successfully '
SET NOCOUNT OFF
END




How to find Primary key and Foreign keys using T-SQL - SQL Server

Find the relationships between tables in sql server
Get Primary Key and Foreign Key details using T-SQL


How to find all related tables of a given table?

You get a new database and start working on it. Whenever we work with a new database, we first need to understand the table structure and relationships. The overall structure can be understood from schema / entity diagrams.

But when it is time to start developing SQL code on the new database, an easy way to find the dependent objects of a given table is to use “sp_help” or “Alt+F1”.

Here I am giving one more handy way to find out these details.

Have a look at the stored procedure “[usp_get_related_Tables]” below.

IF EXISTS (SELECT 1 FROM sys.sysobjects WHERE TYPE = 'P' AND NAME = 'usp_get_related_Tables')
BEGIN
DROP PROCEDURE [dbo].[usp_get_related_Tables];
END
GO
CREATE PROCEDURE [dbo].[usp_get_related_Tables] (
@tbl_Schema VARCHAR(50) = NULL,
@tbl_Name VARCHAR(100) = NULL)
AS
BEGIN
SELECT TC.CONSTRAINT_SCHEMA AS 'Table_Schema',
TC.TABLE_NAME AS 'Table_Name',
TC.CONSTRAINT_NAME AS 'PrimaryKey_Name',
CCU1.COLUMN_NAME AS 'PrimaryKey_Column',
COALESCE(RC.CONSTRAINT_NAME, 'N/A') AS 'ForeignKey_Name',
COALESCE(CCU2.Column_Name, 'N/A') AS 'ForeignKey_Column',
CASE WHEN TC2.TABLE_NAME IS NULL THEN 'N/A'
ELSE TC.CONSTRAINT_SCHEMA + '.' + TC2.TABLE_NAME END AS 'ForeignKey_Table'
FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS TC
INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE CCU1 ON
TC.TABLE_NAME = CCU1.TABLE_NAME AND
TC.TABLE_SCHEMA = CCU1.TABLE_SCHEMA AND
TC.CONSTRAINT_NAME = CCU1.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS RC ON
TC.CONSTRAINT_NAME = RC.UNIQUE_CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS TC2 ON
TC2.CONSTRAINT_NAME = RC.CONSTRAINT_NAME
LEFT JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE CCU2 ON
RC.CONSTRAINT_NAME = CCU2.CONSTRAINT_NAME
WHERE TC.CONSTRAINT_TYPE = 'PRIMARY KEY' AND
TC.CONSTRAINT_SCHEMA = ISNULL(@tbl_Schema, TC.CONSTRAINT_SCHEMA) AND
TC.TABLE_NAME = ISNULL(@tbl_Name, TC.TABLE_NAME)
ORDER BY TC.TABLE_NAME,
 TC.CONSTRAINT_NAME,
 RC.CONSTRAINT_NAME
END


To know relationship (Primary key/ Foreign Key) details for all tables

EXEC [dbo].[usp_get_related_Tables]



To know relationship (Primary key/ Foreign Key) details for all tables under a given schema “Sales”

EXEC [dbo].[usp_get_related_Tables] @tbl_Schema='Sales'


To know relationship (Primary key/ Foreign Key) details for a given Table “Employee”

EXEC [dbo].[usp_get_related_Tables] @tbl_Name='Employee'



This Procedure was tested on SQL Server 2005, 2008 and on 2008R2.


Incremental uploads using SQL Server Integration services (SSIS)


Incremental Uploads Using SSIS

Incremental Uploads:

We have got a request for an SSIS package to perform incremental uploads between two SQL Server instances in two different locations.

Requirement:

There are two databases available on two different instances.
An SSIS package is required to accomplish the below tasks.

  1. Sync data for the table Employee from source to destination
  2. Compare records based on a key value
  3. If any new records are found, insert them into the destination
  4. Update all existing records from source to destination

Environment:
Source: SQL_Instance: INHYDUDAYA; Database: Source
Destination: SQL_Instance: INHYDUDAYA\SQL2008R2; Database: Destination

Table information:

Connect to Source Instance:
USE Source
GO
CREATE TABLE Source_Employee (
ID INT IDENTITY NOT NULL PRIMARY KEY,
[First_Name] VARCHAR(50) NOT NULL,
[Last_Name] VARCHAR(50),
[SSL] VARCHAR(18),
[DLNO] VARCHAR(25),
[UpdatedOn] DATETIME NULL,
[CreatedOn] DATETIME NOT NULL DEFAULT (GETDATE())
)
GO
INSERT INTO Source_Employee ([First_Name],[Last_Name],[SSL],[DLNO])
SELECT 'Jason','Mag','SA-MYk9989001','DL-SA0545678'
UNION
SELECT 'Carry','Uyon','WC-KAP9989001','DL-WC0545887'
UNION
SELECT 'Chrish','Lott','AT-LKU8788954','DL-AT059675'
UNION
SELECT 'Kourav','Mishra','NY-NYU5669877','DL-NY0073987'
GO
SELECT * FROM Source_Employee

Connect to Destination Instance:

USE Destination
GO
CREATE TABLE Dest_Employee (
ID INT NOT NULL PRIMARY KEY,
[First_Name] VARCHAR(50) NOT NULL,
[Last_Name] VARCHAR(50),
[SSL] VARCHAR(18),
[DLNO] VARCHAR(25),
[UpdatedOn] DATETIME NULL,
[CreatedOn] DATETIME NOT NULL DEFAULT (GETDATE())
)
GO
SELECT * FROM Dest_Employee


Now the tables are ready; we need to start building the SSIS package.

  1. Open SQL Server 2008 R2 Business Intelligence Development Studio.
  2. Create a new SSIS package and name it “Incremental Uploads.dtsx” as below.

  3. Add a new data flow task and name it “Data_Flow_Sync_SourceNDestination”.

  4. Create two OLE DB connection managers, one for the source and one for the destination. Since these are test instances, I am using the SA account in the connection managers.

  5. Open the data flow task. Add an “OLE DB Source”, map it to the source connection manager and select the table name “Source_Employee”.

  6. Now the data flow task looks like below:

  7. Add a “Lookup” transformation and connect it from the “OLE DB Source”.
  8. Double click on the “Lookup” component. Choose “Connection type” as “OLE DB Connection manager” and set “Specify how to handle rows with no matching entries” to “Ignore failure”.

  9. Go to the next tab, “Connection”. Select the OLE DB connection manager “Destination connection manager” and the table “Dest_Employee”.

  10. Go to the next tab, “Columns”. Map the columns on which the lookup has to be performed; this acts as the WHERE condition while performing the sync operation. Here we have to map “ID” from “Available Input Columns” to “Available Lookup Columns”, and check all columns under “Available Lookup Columns”.

  11. For clarity, update the “Output_Alias” column names to “Out_ID”, “Out_First_Name”, “Out_Last_Name”, etc. as below.

  12. Go to the “Error Output” tab and choose “Ignore Failure” for the column “Error”.

  13. Click on OK. Now the data flow task looks like below.



  14. Add a new “Conditional Split” transformation and connect it from the “Lookup” transformation.
  15. While connecting from the “Lookup”, choose “Lookup Match Output”.

  16. Now the data flow task looks like below.

  17. Double click on the Conditional Split transformation, give the “Output Name” as “New Rows” and assign the condition ISNULL(Out_ID). It means there is no corresponding Out_ID available at the destination, which in turn means a new row.

  18. Now add a condition to find the modified rows. Give the Output Name as “Updated”. Compare source and destination columns with the || (OR) operator; it filters the rows where any of these columns has changed: (([First_Name] != [Out_First_Name]) || ([Last_Name] != [Out_Last_Name]) || ([SSL] != [Out_SSL]) || ([DLNO] != [Out_DLNO]) || ([UpdatedOn] != [Out_UpdatedOn]) || ([CreatedOn] != [Out_CreatedOn]))
  19. Change the Default Output name to “Unchanged Rows”, meaning rows which do not fall into either of these two conditions are unchanged records.

  20. Now click on “Configure Error Output” and choose “Ignore failure” when an error occurs for the output “Updated”. If the source and destination tables are already in sync, with no new records and no updates required, the package should still execute without failure; of course it then neither inserts nor updates anything.

  21. Click on OK and now the data flow task looks like below.

  22. Add an “OLE DB Destination” and connect it from the Conditional Split transformation. While connecting, choose “New Rows” as the output.

  23. Click on OK. Now open the “OLE DB Destination”, map it to the destination connection manager and select the table name “Dest_Employee”.

  24. Go to the “Mappings” tab and map the columns accordingly, as shown in the figure below.

  25. Click on OK; now the data flow looks like below:

  26. Add an “OLE DB Command” transformation to the data flow and name it “OLE DB Command_Update Changed Rows”.
  27. Connect the “OLE DB Command” from the Conditional Split transformation. While connecting, choose “Updated” as the output.

  28. Click on OK. Now open the OLE DB Command and select the destination connection manager.
  29. Go to the Component Properties tab and give the SQL command as below.
UPDATE dbo.Dest_Employee
SET
First_Name = ?
,Last_Name = ?
,SSL = ?
,DLNO = ?
,UpdatedOn = ?
,CreatedOn = ?
WHERE ID = ?

           

  30. Go to the next tab, “Column Mappings”, and map the columns according to the parameters given in the update statement. In the update statement, “ID” is the last parameter, hence we have to map it to the last “Available Destination Column” as below.

  31. Click on OK. Now the package is ready and the data flow task looks like below.

  32. Now execute the package. Remember there are no rows available at the destination, hence initially it inserts all rows from source to destination.

  33. Now check the destination table.

  34. You can see that all rows have been loaded into the destination from the source.

  35. Now insert 3 rows and update two existing rows at the source, then run the package again.
USE Source
GO
INSERT INTO Source_Employee (First_Name, Last_Name, SSL, DLNO)
SELECT 'Chan','Yano','CH-PP89977345','DL-CH0587332'
UNION
SELECT 'Krishnan','Gopal','ID-IN8854687','DL-IN994532'
UNION
SELECT 'Krish','Manlon','KD-KP8814356','DL-ASJ9KI0112'

USE Source
GO
UPDATE Source_Employee
SET First_Name = First_Name + ' Updated',
Last_Name = Last_Name + ' Updated',
UpdatedOn = GETDATE()
WHERE ID IN (2,4)

USE Source
GO
SELECT * FROM Source_Employee

  36. After inserting the new records and updating two rows, the source table looks like:

  37. Now execute the package:

  38. Now you can see that 3 new rows were inserted and 2 rows updated.
  39. Go to the destination table and check it to make sure both are in sync.

How to control windows services from SQL Server?



How to control windows services from SQL Server?
Monitor SQL Server services using T-SQL

XP_SERVICECONTROL:
It helps a DBA check the status of, and control, Windows services from SQL Server.

Syntax: XP_SERVICECONTROL <Action>, <Service Name>

Action: There are a total of 5 actions that can be performed on services.

  • Start: To start a service
  • Stop: To stop a service
  • Pause: To pause a service
  • Continue: To resume a paused service
  • QueryState: To know the current status of a service

Service Name: Can be any Windows service

Example:
-- To know the status of SQL Server Agent
EXEC MASTER..XP_SERVICECONTROL 'QueryState', 'SQLSERVERAGENT'
GO
-- Start a service - PostgreSQL 9.2 database service
EXEC MASTER..XP_SERVICECONTROL 'Start', 'postgresql-x64-9.2'
GO
-- To know the status of Distributed Transaction Coordinator
EXEC MASTER..XP_SERVICECONTROL 'QueryState', 'MSDTC'




-- Stop SQL EXPRESS service
EXEC MASTER..XP_SERVICECONTROL 'Stop', 'MSSQL$SQLEXPRESS'
GO
WAITFOR DELAY '00:00:05' ---- 5 second delay
GO
-- Get status of SQL EXPRESS service
EXEC MASTER..XP_SERVICECONTROL 'QueryState', 'MSSQL$SQLEXPRESS'
GO
WAITFOR DELAY '00:00:05' ---- 5 second delay
GO
-- Start SQL EXPRESS service
EXEC MASTER..XP_SERVICECONTROL 'Start', 'MSSQL$SQLEXPRESS'
GO



Note: It really helps a DBA in monitoring Windows services, and the process can be automated, as sketched below.
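For example, here is a minimal sketch of such automation, assuming xp_servicecontrol returns a single status column for the 'QueryState' action; the output is captured into a temporary table and an error is raised if the service is not running, so a scheduled SQL Agent job step can fail and notify an operator:

-- Minimal sketch: capture the QueryState output and raise an error if the service is not running.
-- Assumption: xp_servicecontrol returns one row with a single state column for 'QueryState'.
CREATE TABLE #ServiceState (CurrentState VARCHAR(100));

INSERT INTO #ServiceState (CurrentState)
EXEC MASTER..XP_SERVICECONTROL 'QueryState', 'SQLSERVERAGENT';

IF NOT EXISTS (SELECT 1 FROM #ServiceState WHERE CurrentState LIKE '%Running%')
BEGIN
    RAISERROR ('SQLSERVERAGENT is not running.', 16, 1);
END

DROP TABLE #ServiceState;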
        




Monitor SQL Server CPU utilization, I/O Usage and Memory Usage



CPU Usage, I/O Usage and Memory Usage of database

Database level / Database wise CPU, memory and I/O usage
As part of a DBA's daily checklist, we need to monitor a few parameters of a database throughout the day, including CPU, memory and I/O utilization. Here are T-SQL scripts to monitor SQL Server instances database-wise.
CPU Utilization:
WITH DB_CPU_Stats
AS
(SELECT DatabaseID,
DB_NAME(DatabaseID) AS [DatabaseName],
SUM(total_worker_time) AS [CPU_Time(Ms)]
FROM sys.dm_exec_query_stats AS qs
CROSS APPLY (SELECT CONVERT(INT, value) AS [DatabaseID]
 FROM sys.dm_exec_plan_attributes(qs.plan_handle)
 WHERE attribute = N'dbid') AS epa
GROUP BY DatabaseID)
SELECT ROW_NUMBER() OVER (ORDER BY [CPU_Time(Ms)] DESC) AS [row_num],
DatabaseName,
[CPU_Time(Ms)],
CAST([CPU_Time(Ms)] * 1.0 / SUM([CPU_Time(Ms)])
OVER () * 100.0 AS DECIMAL(5, 2)) AS [CPUPercent]
FROM DB_CPU_Stats
WHERE DatabaseID > 4 -- system databases
AND DatabaseID <> 32767 -- ResourceDB
ORDER BY row_num
OPTION (RECOMPILE);



CPU Utilization History:
The query retrieves the SQL Server instance CPU usage for the last 10 minutes.
/***** CPU Utilization history *****/
-- Get CPU Utilization History (SQL Server 2008 and above)
DECLARE @ts BIGINT
SELECT @ts = (SELECT cpu_ticks / (cpu_ticks / ms_ticks)
FROM sys.dm_os_sys_info);

SELECT TOP (10) SQLProcessUtilization AS [SQLServer_Process_CPU_Utilization],
SystemIdle AS [System_Idle_Process],
100 - SystemIdle - SQLProcessUtilization AS [Other_Process_CPU_Utilization],
DATEADD(ms, -1 * (@ts - [timestamp]), GETDATE()) AS [Event_Time]
FROM (SELECT record.value('(./Record/@id)[1]', 'int') AS record_id,
record.value('(./Record/SchedulerMonitorEvent/SystemHealth/SystemIdle)[1]', 'int') AS [SystemIdle],
record.value('(./Record/SchedulerMonitorEvent/SystemHealth/ProcessUtilization)[1]', 'int') AS [SQLProcessUtilization],
[timestamp]
     FROM (SELECT [timestamp],
CONVERT(xml, record) AS [record]
            FROM sys.dm_os_ring_buffers
            WHERE ring_buffer_type = N'RING_BUFFER_SCHEDULER_MONITOR'
AND record LIKE '%<SystemHealth>%') AS x
) AS y
ORDER BY record_id DESC;

-- Get CPU Utilization History (SQL 2005 Only)
DECLARE @ts_now BIGINT;
SELECT @ts_now = cpu_ticks / CONVERT(FLOAT, cpu_ticks_in_ms)
FROM sys.dm_os_sys_info

SELECT TOP (10) SQLProcessUtilization AS [SQL_Server_Process_CPU_Utilization],
SystemIdle AS [System_Idle_Process],
100 - SystemIdle - SQLProcessUtilization AS [Other_Process_CPU_Utilization],
DATEADD(ms, -1 * (@ts_now - [timestamp]), GETDATE()) AS [Event_Time]
FROM (SELECT record.value('(./Record/@id)[1]', 'int') AS record_id,
record.value('(./Record/SchedulerMonitorEvent/SystemHealth/SystemIdle)[1]', 'int') AS [SystemIdle],
record.value('(./Record/SchedulerMonitorEvent/SystemHealth/ProcessUtilization)[1]', 'int') AS [SQLProcessUtilization],
[timestamp]
     FROM (SELECT [timestamp],
CONVERT(xml, record) AS [record]
           FROM sys.dm_os_ring_buffers
           WHERE ring_buffer_type = N'RING_BUFFER_SCHEDULER_MONITOR'
AND record LIKE '%<SystemHealth>%') AS x
          ) AS y
ORDER BY record_id DESC;

SQLServer_Process_CPU_Utilization: Percentage of CPU utilized by the SQL Server instance
System_Idle_Process: Percentage of CPU that is idle
Other_Process_CPU_Utilization: Percentage of CPU utilized by all processes other than the SQL Server instance
Event_Time: Time when these details were captured

I/O Usage by database:

/****** I/O Usage by database ****/
SELECT Name AS [Database Name]
,SUM(num_of_reads) AS [Number of Reads]
,SUM(num_of_writes) AS [Number of Writes]
FROM sys.dm_io_virtual_file_stats(NULL, NULL) I
INNER JOIN sys.databases D ON I.database_id = D.database_id
GROUP BY Name
ORDER BY [Number of Reads] DESC;


Memory usage by Database:

DECLARE @total_buffer INT;
SELECT @total_buffer = cntr_value
FROM sys.dm_os_performance_counters
WHERE RTRIM([object_name]) LIKE '%Buffer Manager'
AND counter_name = 'Total Pages';

;WITH src AS
(SELECT database_id,
db_buffer_pages = COUNT_BIG(*)
FROM sys.dm_os_buffer_descriptors
      --WHERE database_id BETWEEN 5 AND 32766
GROUP BY database_id
)
SELECT [db_name] = CASE [database_id] WHEN 32767 THEN 'Resource DB' ELSE DB_NAME([database_id]) END,
db_buffer_pages,
db_buffer_MB = db_buffer_pages / 128,
db_buffer_percent = CONVERT(DECIMAL(6,3),
db_buffer_pages * 100.0 / @total_buffer)
FROM src
ORDER BY db_buffer_MB DESC;



Low Level SQL Server Architecture




SQL Server Architecture

Here I would like to describe the process architecture when a new request is submitted to SQL Server.

I submitted a query to SQL Server from an application and got the reply “data inserted successfully”. What are the overall processes that worked inside?

At Client:

1. User enter data and click on submit

2. The client database library transforms the original request into a sequence of one or more Transact-SQL statements to be sent to SQL Server. These statements are encapsulated in one or more Tabular Data Stream (TDS) packets and passed to the database network library

3. The database network library uses the network library available in the client computer to repackage the TDS packets as network protocol packets.

4. The network protocol packets are sent to the server computer network library across the network

At Server:

5. The extracted TDS packets are sent to Open Data Services (ODS), where the original query is extracted.

6. ODS sends the query to the relational engine

7. A connection is established to the relational engine and a SID is assigned to the connection

At Relational Engine:

8. Checks permissions and determines whether the query can be executed by the user associated with the request

9. The query is sent to the Query Parser

  • It checks that the T-SQL is written correctly
  • Builds a parse tree / sequence tree

10. The parse tree is sent to the Algebrizer

  • Verifies all the columns, objects and data types
  • Aggregate binding (determines the location of aggregates such as GROUP BY and MAX)
  • Builds a query processor tree in binary format

11. The query processor tree is sent to the Optimizer

  • Based on the query processor tree and the histograms (statistics), builds an optimized execution plan
  • Stores the execution plan in the plan cache and sends it to the database engine

At Database Engine:

12. The database engine maps a batch into different tasks

13. Each task is associated with a process

14. Each process is assigned a Windows thread or a Windows fiber. The worker thread takes care of this.

15. The thread/fiber is sent to the execution queue and waits for CPU time.

16. The thread/fiber identifies the table location where the data needs to be stored

17. Goes to the file header, checks the PFS, GAM and SGAM pages and goes to the correct page

18. Verifies that the page is not corrupted, using Torn Page Detection / Checksum, and writes the data

19. If required, allocates new pages and stores data on them. Once the data is stored/updated/added on a page, it updates the below locations

  • PFS - Page Free Space
  • Page Header - Checksum / Torn Page Detection (sector info)
  • BCM - Bulk Changed Map
  • DCM - Differential Changed Map

20. In this process:

  • The memory manager takes care of allocating buffers, new pages, etc.
  • The lock manager takes care of acquiring the appropriate locks on objects/pages and releasing them when the task is completed
  • The thread scheduler schedules the threads for CPU time
  • The I/O manager establishes the memory bus for read/write operations between memory and disk and vice versa
  • The deadlock / resource / scheduler monitors watch over the processes

21. Once the process is completed, the result set is submitted to the relational engine and the same process is followed to send the result set back to the client application.

22. The connection will be closed and the SID is removed.


I have collected this information from various articles and from reading Books Online.

If someone wants to add or update anything, please suggest or assist me on this.

Simple report using SSRS 2012




Simple Sales Report using SSRS - 2012
I wanted to explore the different types of SSRS reports, so I am starting with a simple report.
To make the reports more meaningful, I am going to create a table with data which can help us look into different types of reports.

T-SQL Script:

USE DBAConnect
GO
IF EXISTS (SELECT 1 FROM SYS.SYSOBJECTS WHERE TYPE = 'U' AND NAME = 'SALES')
BEGIN
DROP TABLE [DBO].[SALES]
END
CREATE TABLE [dbo].[SALES] (
ID INT IDENTITY NOT NULL PRIMARY KEY,
Organization VARCHAR(100) NOT NULL,
Country VARCHAR(100) NOT NULL,
Zone VARCHAR(20),
Product VARCHAR(100) NOT NULL,
SYear CHAR(4) NOT NULL,
Total_SoldOut INT DEFAULT (0) NOT NULL,
Total_Price MONEY DEFAULT (0.00) NOT NULL)
GO
INSERT INTO [dbo].[SALES] (Organization, Country, Zone, Product, SYear, Total_SoldOut, Total_Price)
VALUES ('CaBerry','USA','CA','OPhone',2010,14000,5600000),
('CaBerry','USA','WC','OPhone',2011,3000,1200000),
('CaBerry','USA','KL','OPhone',2010,5400,2160000),
('CaBerry','USA','CA','kPAD',2010,72,86400),
('CaBerry','USA','WC','kPAD',2012,56,67200),
('CaBerry','USA','KL','kPAD',2011,8,9600),
('OWNnOW','NewZealand','NZ-E','VM-R332',2010,12,1200000),
('OWNnOW','NewZealand','NZ-W','VM-R332',2011,16,1600000),
('OWNnOW','NewZealand','NZ-S','Router-R319',2010,56,672000),
('OWNnOW','NewZealand','NZ-E','Router-R319',2010,89,1068000),
('OWNnOW','NewZealand','NZ-W','BrC-100A',2012,34,272000),
('OWNnOW','NewZealand','NZ-S','BrC-100A',2011,109,872000)
GO
SELECT * FROM [dbo].[SALES]
GO
IF EXISTS (SELECT 1 FROM SYS.SYSOBJECTS WHERE TYPE = 'P' AND NAME = 'usp_Sales_Report')
BEGIN
DROP PROCEDURE [DBO].[usp_Sales_Report]
END
GO
CREATE PROCEDURE [dbo].[usp_Sales_Report]
AS
BEGIN
SET NOCOUNT ON
SELECT 'Organization' = Organization,
'Country' = Country,
'Zone' = Zone,
'Product' = Product,
'Year' = SYear,
'Total_SoldOut' = Total_SoldOut,
'Total_Price' = Total_Price
FROM [dbo].[SALES]
END
GO
EXEC [dbo].[usp_Sales_Report]

Now we are ready with the required data. Create a new SSRS report using “SQL Server Data Tools”.



As we are creating a new report using Wizard just follow the instructions.




  

A stored procedure is being called here. The result set returned by the procedure will be used in
report.

 



Now the report looks like below.

Add an image to the report.

After adding the image, update the report name as below and execute the report.
  

It's just a simple report that retrieves a dataset using a stored procedure. Our main intention is to look into SSRS functionality rather than the query / procedure used for the result set retrieved from the database.


Parameterized report using SSRS 2012



Parameterized report using SSRS 2012



In the previous post we created a simple report. We are going to use the same report to demonstrate a parameterized report.
Under the same report solution, copy and paste Simple_Report and rename it “Parameterized_Report”.







Now we can modify the report to accept parameters.

Now create a new stored procedure to accept the parameters as below.

USE [DBAConnect]
GO
CREATE PROCEDURE [dbo].[usp_Sales_Report_Parameterized] (
@Organization VARCHAR(100) = 'ALL',
@Country VARCHAR(100) = 'ALL',
@Zone VARCHAR(20) = 'ALL',
@Product VARCHAR(100) = 'ALL',
@Year CHAR(4) = 'ALL')
AS
BEGIN
SET NOCOUNT ON
DECLARE @Filter VARCHAR(1000), @SQL VARCHAR(8000)
SET @Filter = ''

IF (@Organization <> 'ALL')
BEGIN
SET @Filter = ' AND Organization = ''' + @Organization + ''''
END

IF (@Country <> 'ALL')
BEGIN
SET @Filter = @Filter + ' AND Country = ''' + @Country + ''''
END

IF (@Zone <> 'ALL')
BEGIN
SET @Filter = @Filter + ' AND Zone = ''' + @Zone + ''''
END

IF (@Product <> 'ALL')
BEGIN
SET @Filter = @Filter + ' AND Product = ''' + @Product + ''''
END

IF (@Year <> 'ALL')
BEGIN
SET @Filter = @Filter + ' AND SYear = ''' + @Year + ''''
END
SET @SQL = '
SELECT ''Organization'' = Organization,
''Country'' = Country,
''Zone'' = Zone,
''Product'' = Product,
''Year'' = SYear,
''Total_SoldOut'' = Total_SoldOut,
''Total_Price'' = Total_Price
FROM [dbo].[SALES]
WHERE 1 = 1 ' + @Filter

PRINT @SQL
EXECUTE (@SQL)
END


I have written a stored procedure that accepts parameters and retrieves the result set dynamically. Now we'll change the report. Follow the steps below.


Change the procedure execution statement to
“EXEC [usp_Sales_Report_Parameterized]  @Organization”




Add a new data set to support List of Organizations as an input to the report.


Use the query to retrieve the input set
SELECT 'ALL' AS Organization FROM SALES
UNION
SELECT DISTINCT Organization AS Organization FROM SALES;





Now map the newly created dataset to the parameter “Organization”.










Now preview the report. By default “ALL” is selected for Organization, hence all records are retrieved.

Now select an organization from the list.



Now let’s include all parameters as below.
  • @Country
  • @Zone
  • @Product
  • @Year



Parameters are added. Now add datasets to provide inputs for parameters as below.

DataSet:
Country:
SELECT 'ALL' AS Country
UNION
SELECT DISTINCT Country AS Country FROM SALES

Zone:
SELECT 'ALL' AS Zone
UNION
SELECT DISTINCT Zone AS Zone FROM SALES

Product:
SELECT 'ALL' AS Product
UNION
SELECT DISTINCT Product AS Product FROM SALES

Year:
SELECT 'ALL' AS [Year]
UNION
SELECT DISTINCT SYear AS [Year] FROM SALES



As we did the mapping between the dataset and the parameter for “Organization”, follow the same steps and map all datasets to the corresponding parameters. Finally it looks like below.



Now preview the report to check the output. As designed, by default all parameters are shown as “ALL”.



Now select Organization and Country values.




Now select Zone and Product values as below.



This is a simple way to implement parameterized reports.



Drill Down Reports using SQL Server Reporting Services 2012



Drill Down Reports using SQL Server Reporting Services 2012

We will see how to deal with drill-down reports using SSRS 2012. For this report you can create the table and populate the data from the previous post.

Let’s create a report using SQL Server 2012 Data tools. Create SSRS report using SSRS report wizard.


Substitute the query in the query designer window.



SELECT SYear AS 'Year',
Country,
Organization,
Zone,
Product,
Total_SoldOut,
Total_Price
FROM SALES
ORDER BY [Year]






Now the report looks like below.


Make the required alignments and update the report name; now the report looks like below.


Preview the report to make sure it is working fine and retrieving data.


Now we'll go ahead and design a drill-down report based on year.

Under row group add a parent group for the column year.




Preview the report to have a drill down report.

Now edit the group details as below.



Now preview the report.


Expand the year and check the view.


To give a clearer view, we will remove the column “Year” (repeated values).


Preview the report; this will be the final view of the drill-down report.


This is the way to implement a drill-down view for SSRS reports.


How to split a string using T-SQL - Split a string in SQL Server





We have a requirement to capture the list of keywords in a given list of statements.

There is a table called “SearchQueries”. It holds a list of queries; the requirement is to split each statement into individual words and store them in a table “KeyWords”.



Now the requirement is to split and store keywords from each record as below along with the number of occurrences.  

Example:
KeyWord     Count
Describe    12
Change      2
Data        10
Capture     1
Etc.

Script to create table and populate with data:

USE DBAConnect
GO
CREATE TABLE SearchQueries (ID INT IDENTITY, Query NVARCHAR(MAX))
GO
INSERT INTO SearchQueries (Query)
SELECT 'How to design 2012 SSRS reports using SQL Server Data Tools'
UNION
SELECT 'What are the new features added in SSRS 2012'
UNION
SELECT 'Describe Change Data Capture for SSIS in SQL Server 2012'
UNION
SELECT 'What are the new features added in SSIS 2012'
UNION
SELECT 'SSIS 2012 Support for Variable Number of Columns in a Flat File'
UNION
SELECT 'SSIS Package Format Changed and the Specs are Open Source New in SSIS 2012'
UNION
SELECT 'What are the new features added in SSAS 2012'
GO

Create a table to hold the keywords:

CREATE TABLE KeyWords (ID INT IDENTITY, Word NVARCHAR(255))

Now create a stored procedure to split the given statement into individual words.

IF EXISTS (SELECT 1 FROM SYS.SYSOBJECTS WHERE TYPE = 'P' AND NAME = 'usp_split_string')
BEGIN
DROP PROCEDURE usp_split_string;
END
GO
CREATE PROCEDURE usp_split_string (@Txt NVARCHAR(MAX))
AS
BEGIN
SET NOCOUNT ON;

DECLARE @temp TABLE
(
KeyWord NVARCHAR(250)
);

DECLARE @data NVARCHAR(MAX),
@delimiter NVARCHAR(10),
@pos INT,
@start INT,
@len INT,
@end INT;

SELECT @data = @Txt,
@delimiter = ' ',
@len = LEN('.' + @delimiter + '.') - 2,
@end = LEN(@data) + 1,
@start = 1,
@pos = 0;

WHILE (@pos < @end)
BEGIN
SET @pos = CHARINDEX(@delimiter, @data, @start);
IF (@pos = 0) SET @pos = @end;

INSERT @temp (KeyWord)
SELECT SUBSTRING(@data, @start, @pos - @start);

SET @start = @pos + @len;
END

INSERT INTO KeyWords (word)
SELECT KeyWord FROM @temp;
END

The stored procedure splits the statement into individual words. For example, if we input the statement “SQL Server 2012” to the stored procedure, it splits the statement into words and stores them in the table “KeyWords”:
i.e.
SQL
Server
2012

Now we will apply the same logic to the existing table “SearchQueries” and prepare a list of keywords from the table.

BEGIN
DECLARE @Max INT,
@i INT,
@STMT VARCHAR(100),
@Category VARCHAR(200);

TRUNCATE TABLE KeyWords;

SELECT @Max = MAX(ID) FROM SearchQueries;
SET @i = 1;
WHILE (@i <= @Max)
BEGIN
SELECT @STMT = Query
FROM SearchQueries
WHERE ID = @i;

EXEC usp_split_string @STMT;

SET @i = @i + 1;
END
END

Execute the above script to accomplish the task.

Check the table for the list of keywords.


Finally capture the keywords and corresponding counts.

SELECT Word,
COUNT(1) AS 'Total_Count'
FROM KeyWords
GROUP BY Word
ORDER BY Total_Count DESC;






Removing duplicate records from a table



How to remove the duplicate records from table?


On a daily basis we often need to remove duplicate records from a table.
Here is an example of removing duplicate records.

USE Test
GO
CREATE TABLE Test (id INT, name VARCHAR(100), age INT)
GO
INSERT INTO Test
SELECT 1, 'A', 34
UNION ALL
SELECT 1, 'A', 34
UNION ALL
SELECT 2, 'B', 37
UNION ALL
SELECT 3, 'C', 21
UNION ALL
SELECT 3, 'C', 21
UNION ALL
SELECT 3, 'C', 21
GO
SELECT id, name, age FROM Test



GO
-- Now delete the duplicate records
WITH CTE (id, name, age, Duplicates)
AS
(
SELECT id, name, age,
ROW_NUMBER() OVER (PARTITION BY id, name, age ORDER BY id) AS Duplicates
FROM Test
)
DELETE FROM CTE
WHERE Duplicates > 1
GO
Now check the table to make sure the duplicates have been removed.


SQL Server database backup automation using T-SQL



I recently worked on a request as part of daily maintenance: a backup maintenance plan had failed due to low disk space.

“SQL Task     Description: Executing the query "BACKUP DATABASE CA1011_DB TO DISK = ..." failed with the following error: "A nonrecoverable I/O error occurred on file "C:\Backup\ CA1011_DB \ CA1011_DB _backup_2013_06_05_010000_7196421.bak:" 112(There is not enough space on the disk.).  BACKUP DATABASE is terminating abnormally."

We had designed a maintenance plan that performs a backup of all selected databases; after the backups, a task was added to clean up the backup files. But the problem is that while performing the backups, all of the previous day's backups are still present in the folders, and they can be deleted only after the backup job completes.

So if the total backup size for all databases is 200 GB, we need 400 GB of free space on the drive. To avoid this issue we created a T-SQL script which performs the backup and cleans up the old backups.

Existing Maintenance Plan:

Step1: Perform backup for Database-1
Step2: Perform backup for Database-2
----------------------------------------------------
----------------------------------------------------
StepN: Perform backup for Database-N
StepN+1: Delete the previous day backup from all folders

New T-SQL Script:

Step1: Perform backup for Database-1
Step2: Deletes previous day backup for Database-1
Step3: Perform backup for Database-2
Step4: Deletes previous day backup for Database-2
----------------------------------------------------
----------------------------------------------------
StepN: Perform backup for Database-N
StepN+1: Deletes previous day backup for Database-N

Below is the script that performs a backup and deletes the previous day's backups.



USE MASTER;
GO

IF EXISTS (SELECT 1 FROM SYS.SYSOBJECTS WHERE NAME = 'usp_Backup_Database' AND TYPE = 'P')
BEGIN
DROP PROCEDURE [usp_Backup_Database];
END
GO

CREATE PROC usp_Backup_Database (@db_name NVARCHAR(50),
 @file_path NVARCHAR(256))
AS
BEGIN

SET NOCOUNT ON
DECLARE @fileName NVARCHAR(256); -- Filename for backup
DECLARE @fileDate VARCHAR(20); -- Used for file name
DECLARE @DeleteDate DATETIME = GETDATE(); -- Cutoff date

-- Get date to include with the file name.
SELECT @fileDate = CONVERT(VARCHAR(20), GETDATE(), 112);

-- Build the file path and file name.
SET @fileName = @file_path + @db_name + '_' + @fileDate + '.BAK';

-- Backup the database.
BACKUP DATABASE @db_name TO DISK = @fileName WITH INIT;

-- Delay 10 sec
WAITFOR DELAY '000:00:10'

-- Purge old backup files from the disk.
EXEC master.sys.xp_delete_file 0, @file_path, 'BAK', @DeleteDate, 0;

END
GO

Now execute the stored procedure:

Parameters:
@db_name: Database name that needs to be backup
@file_path: File path where backup needs to be stored.

Note: File path format should be “:\............\Backup\”

Procedure execution:

USE MASTER;
GO
EXEC usp_Backup_Database @db_name = N'Source',
@file_path = N'C:\Bkptest\Source\'


This is really helpful when we need to deal with disk space.   

Just to demonstrate, I have shown an example with one database; the same can be looped over all user databases using a table variable and a WHILE loop, as sketched below.
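A minimal sketch of that loop, assuming the usp_Backup_Database procedure above and a hypothetical backup root folder C:\Bkptest\ with one sub-folder per database, could look like this:

-- Sketch only: loop over user databases and call usp_Backup_Database for each.
-- The folder layout (C:\Bkptest\<db>\) is an assumption; adjust it to your environment.
DECLARE @DBList TABLE (ID INT IDENTITY(1,1), name SYSNAME);
DECLARE @i INT = 1, @max INT, @db SYSNAME, @path NVARCHAR(256);

INSERT INTO @DBList (name)
SELECT name FROM sys.databases WHERE database_id > 4;   -- user databases only

SELECT @max = MAX(ID) FROM @DBList;

WHILE (@i <= @max)
BEGIN
    SELECT @db = name FROM @DBList WHERE ID = @i;
    SET @path = N'C:\Bkptest\' + @db + N'\';
    EXEC usp_Backup_Database @db_name = @db, @file_path = @path;
    SET @i = @i + 1;
END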


SQL Server: Incorrect PFS free space information for page (1:xxxx) in object ID xxxxxx:

SQL Server: Incorrect PFS free space information for page (1:xxxx) in object ID xxxxxx:


"Incorrect PFS free space information for page (1:233791) in
object ID 367392428, index ID 1, partition ID 72057594180730880, alloc unit ID 72057594222018560 (type LOB data). Expected value   0_PCT_FULL, actual value 100_PCT_FULL.  CHECKDB found 0 allocation errors and 1 consistency errors in table 'xxxxxxx' (object ID 367392428).  


We have a maintenance plan which is scheduled to run on a weekly basis and checks the database integrity of specified user databases on a production instance.


I found the above error in the SQL Server log. From the error message we can clearly say that it is not page corruption. There is a special page called PFS (Page Free Space) which tracks how full each page is; it helps the free-space scan while inserting data.


In this case the PFS value is 100%, indicating the page is full, whereas in reality the page is empty. DBCC commands fail due to this wrong information.


Resolution:


To resolve this I have tried the below procedure.


  1. Take a full backup of the database (based on the size, if it's a huge DB take a backup of just the table using “SELECT * INTO <Backup_Table> FROM <Corrupted_Table>”)
  2. From the SQL log we can find the object (table/index) name and ID
  3. Put the database in single user mode
ALTER DATABASE <Database_Name>
SET SINGLE_USER
WITH ROLLBACK IMMEDIATE


  4. Run DBCC CHECKTABLE on the specific table / index
DBCC CHECKTABLE (<Corrupted_Table>) WITH ALL_ERRORMSGS


  5. It will reproduce the “PFS” error.
  6. To fix this, execute the below two statements one after the other.
DBCC CHECKTABLE (<Corrupted_Table>, REPAIR_FAST) WITH ALL_ERRORMSGS
and
DBCC CHECKTABLE (<Corrupted_Table>, REPAIR_REBUILD) WITH ALL_ERRORMSGS


  7. For most cases the problem will be fixed by the above two checks.
  8. If not, go ahead and execute the below statement
DBCC CHECKTABLE (<Corrupted_Table>, REPAIR_ALLOW_DATA_LOSS) WITH ALL_ERRORMSGS


  9. The first time, it executes and completes with the same PFS error message.
  10. Re-execute step number 8; this time it fixes the issue and avoids the PFS error.
  11. Put the database in multi user mode
ALTER DATABASE <Database_Name>
SET MULTI_USER



If it does not get resolved, put the database in single user mode, back up the corrupted table (using SELECT * INTO), script out the table creation with all key relationships, drop the corrupted table, rename the backup table to the original table name, and recreate all keys and relationships.

If it still does not get resolved, there is only one option left: restore the database from the latest possible backup.


“Ctrl + R” is not working in SQL Server 2012: “Ctrl+R was pressed .Waiting for second key of chord..”

“Ctrl + R” is not working in SQL Server 2012: “Ctrl+R was pressed .Waiting for second key of chord..”

The keyboard shortcut “Ctrl+R” is used to show or hide the query results pane in SQL Server 2005/2008/R2, whereas in SQL Server 2012 it does not work by default.

After installing SQL Server 2012, if you press Ctrl+R it echoes the message:
“Ctrl+R was pressed. Waiting for second key of chord..”

It's a very handy shortcut for SQL developers. Now let's see how to configure SQL Server 2012 SSMS to use the “Ctrl+R” functionality.
Open SQL Server 2012 Management Studio.
Go to Tools -> Options -> Keyboard




Under “show commands containing” select “Window.ShowResultsPane“


Select “SQL Query Editor” under “use new shortcut in”.




Once it is selected, make sure the cursor is placed in the “Press shortcut keys” text box and press Ctrl+R on the keyboard.




Click on “Assign” and then click on “OK”.
Then you would be able to use the shortcut Ctrl+R to show/hide the output window.

SQL Server Backup & Restore Q&A


Backup and Restore Q&A

 

1. How does the database recovery model impact database backups?

Ans:

First, the database recovery model is responsible for the retention of transaction log entries. The setting determines whether transaction log backups need to be issued on a regular basis, i.e. every minute, five minutes, hourly, etc., in order to keep the transaction log small and/or maintain a log shipping solution.

  • Simple – Committed transactions are removed from the log when the check point process occurs.

  • Bulk Logged – Committed transactions are only removed when the transaction log backup process occurs.

  • Full – Committed transactions are only removed when the transaction log backup process occurs.

 2. Are the native SQL Server 2005 backups in clear text or encrypted?

Ans:

SQL Server 2008 introduced natively encrypted database backups. Prior to SQL Server 2008, a third-party product was necessary to encrypt database backups.

 3. How can I verify that backups are occurring on a daily basis?

Ans:

  • Check all backup jobs history

  • Review the SQL Server error log for backup related entries.

  • Query the msdb.dbo.backupset table for the backup related entries.

  • Review the file system where the backups are issued to validate they exist.
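To illustrate the msdb.dbo.backupset option above, a query sketch along these lines (it reads the standard msdb history tables; the one-day window is an assumption) can list what was backed up recently:

-- Sketch: list backups recorded in msdb for the last day.
SELECT bs.database_name,
       bs.type,                       -- D = full, I = differential, L = log
       bs.backup_start_date,
       bs.backup_finish_date,
       bmf.physical_device_name
FROM msdb.dbo.backupset AS bs
JOIN msdb.dbo.backupmediafamily AS bmf
     ON bs.media_set_id = bmf.media_set_id
WHERE bs.backup_start_date >= DATEADD(DAY, -1, GETDATE())
ORDER BY bs.backup_start_date DESC;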

 4. How do you know if your database backups are restorable?

Ans:

  • Issue the RESTORE VERIFYONLY command to validate the backup. For validating LiteSpeed backups use XP_restore_verifyonly

  • Randomly retrieve tapes from off site and work through the restore process with your team to validate the database is restored in a successful manner.
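As a quick illustration of the RESTORE VERIFYONLY option above (the file path is illustrative):

-- Verify that the backup set is readable and complete without restoring it.
RESTORE VERIFYONLY
FROM DISK = N'C:\Backup\MyDB_Full.bak';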

5. What are some common reasons why database restores fail?

Ans:

  • Sufficient space not available on drive

  • User may not have sufficient permissions to perform the restore

  • Unable to gain exclusive use of the database.

  • LSN’s are out of sequence so the backups cannot be restored.

  • Syntax error such as with the WITH MOVE command.

  • Version problem

  • Might be wrong backup location specified

  • Service account may not have permissions on backup folder

 6. What are the permissions required to perform backup and Restore?

Ans:

The user must be a member of either of the below roles

Backup:

  • sysadmin – fixed server role

  • db_owner –  fixed database role

  • db_backupoperator – fixed database role

Restore:

  • Sysadmin – fixed server role

  • Dbcreator – fixed server role

  • db_owner – fixed database role

 7. How can you be notified if a native SQL Server database backup or restore fails via the native tools?

Ans:

  • Setup SQL Server Alerts to be sent to Operators on a failure condition.

  • Include RAISERROR or TRY\CATCH logic in your backup or restore code to alert on the failure.

 8. Can all successful SQL Server backup entries be prevented from writing to the SQL Server Error Log by a single trace flag?

Ans:

Yes – just enable trace flag 3226, for example as below.
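A minimal sketch (this enables it globally for the current instance run; adding -T3226 as a startup parameter persists it across restarts):

-- Suppress successful backup messages in the error log for this instance.
DBCC TRACEON (3226, -1);

-- Check which trace flags are currently active.
DBCC TRACESTATUS (-1);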

 9. What are some common post restore processes?

Ans:

  • Sync the logins and users

  • Validate the data is accurate by running dbcc commands

  • Notify the team\user community

  • Cleanse the data to remove sensitive data i.e. SSN’s, credit card information, customer names, personal information, etc.

  • Change database properties i.e. recovery model, read-only, etc.

10. Explain how you could automate the backup and restore process?

Ans:

  • Backups can be automated by using a cursor to loop through each of the databases and backup each one

  • Restores can also be automated by looping over the files, reading from the system tables (backup or log shipping) or reading from a table as a portion of a custom solution
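For the restore side, one approach (a sketch only; it reads the standard msdb history tables, and the generated commands should be reviewed before running) is to generate RESTORE statements from the most recent full backup of each database:

-- Sketch: generate RESTORE commands from the latest full backup recorded in msdb.
SELECT 'RESTORE DATABASE [' + bs.database_name + '] FROM DISK = N''' +
       bmf.physical_device_name + ''' WITH REPLACE, RECOVERY;' AS restore_command
FROM msdb.dbo.backupset AS bs
JOIN msdb.dbo.backupmediafamily AS bmf
     ON bs.media_set_id = bmf.media_set_id
WHERE bs.type = 'D'
  AND bs.backup_finish_date = (SELECT MAX(b2.backup_finish_date)
                               FROM msdb.dbo.backupset AS b2
                               WHERE b2.database_name = bs.database_name
                                 AND b2.type = 'D');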

 11. What is the database that has the backup and restores system tables?  What are the backup and restore system tables?  What do each of the tables do?

Ans:

 The MSDB database is the database with the backup and restores system tables. Here are the backup and restore system tables and their purpose:

  • backupfile – contains one row for each data file or log file backed up

  • backupmediafamily – contains one row for each media family

  • backupmediaset – contains one row for each backup media set

  • backupset – contains one row for each backup set

  • restorefile – contains one row for each restored file

  • restorefilegroup – contains one row for each restored filegroup

  • restorehistory – contains one row for each restore operation

 12. How can full backups be issued without interrupting the LSN’s?

Ans:

Issue the BACKUP command with the COPY_ONLY option
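For example (the database name and path are illustrative):

-- A copy-only full backup does not reset the differential base or break the backup chain.
BACKUP DATABASE AdventureWorks
TO DISK = N'C:\Backup\AdventureWorks_CopyOnly.bak'
WITH COPY_ONLY, INIT, STATS = 10;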

 13. How is a point in time recovery performed?

Ans:

It depends on which backup types are issued.  In this example let’s assume that full, differential and transaction log backups are issued.

  • Restore the most recent full backup with the NORECOVERY clause

  • Restore the most recent differential backup with the NORECOVERY clause

  • Restore all of the subsequent transaction log backups with the NORECOVERY clause except the last transaction log backup

  • Restore the last transaction log backup with the RECOVERY clause and a STOPAT statement if the entire transaction log does not need to be applied
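A sketch of that sequence (the database name, file names and STOPAT time are illustrative):

RESTORE DATABASE MyDB FROM DISK = N'C:\Backup\MyDB_Full.bak' WITH NORECOVERY, REPLACE;
RESTORE DATABASE MyDB FROM DISK = N'C:\Backup\MyDB_Diff.bak' WITH NORECOVERY;
RESTORE LOG MyDB FROM DISK = N'C:\Backup\MyDB_Log_01.trn' WITH NORECOVERY;
-- Last log backup: stop at the required point in time and bring the database online.
RESTORE LOG MyDB FROM DISK = N'C:\Backup\MyDB_Log_02.trn'
WITH RECOVERY, STOPAT = '2013-06-05 10:30:00';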

 14. What are your recommendations to design a backup and recovery solution? Simply what is Backup Check list?

Ans:

  • Determine What is Needed

  • Recovery Model

  • Select Backup Types

  • Backup Schedule

  • Backup Process

  • Document

  • Backup to Disk

  • Archive to Tape

  • Backup to Different Drives

  • Secure Backup Files

  • Encrypt or Password Protect Backup Files

  • Compress Backup Files

  • How Much to Keep on Disk

  • Online Backups

  • Run Restore Verify only

  • Offsite Storage

 15. Consider a scenario where you issue a full backup.  Then issue some transaction log backups, next a differential backup, followed by more transaction log backups, then another differential and finally some transaction log backups.  If the SQL Server crashes and if all the differential backups are bad, when is the latest point in time you can successfully restore the database?  Can you recover the database to the current point in time without using any of the differential backups?

Ans:

You can recover to the current point in time, as long as you have all the transaction log backups available and they are all valid. Differential backups do not affect the transaction log backup chain.

 16. What are the three basic phases for database recovery and in what order do they occur?

Ans:

  • Analysis

  • Redo – rolls forward committed transactions

  • Undo – rolls back any incomplete transactions

 17. What options/arguments can be specified in a BACKUP LOG statement to keep inactive log records from being truncated?

Ans:

  • SQL Server 2000: NO_TRUNCATE

  • SQL Server 2005/2008: NO_TRUNCATE, COPY_ONLY

18. What are all of the backup \Restore options and their associated value?

Ans:

Backup Options:

  • Full – Online operation to backup all objects and data in a single database

  • Differential – Backup all extents with data changes since the last full backup

  • Transaction log – Backup all transaction in the database transaction log since the last transaction log backup

  • File – Backup of a single file to be included with the backup when a full backup is not possible due to the overall database size

  • File group – Backup of a single file group to be included with the backup when a full backup is not possible due to the overall database size

  • Cold backup – Offline file system backup of the databases

  • Partial Backup – When we want to perform read-write filegroups and want to exclude read-only filegroups from backup. It will be useful for huge databases (Data warehousing)

  • Third party tools – A variety of third party tools are available to perform the operations above in addition to enterprise management, advanced features, etc.

Restore Options:

  • Restore an entire database from a full database backup (a complete restore).

  • Restore part of a database (a partial restore).

  • Restore specific files or filegroups to a database (a file restore).

  • Restore specific pages to a database (a page restore).

  • Restore a transaction log onto a database (a transaction log restore).

  • Revert a database to the point in time

 19. How much time does it take to perform a full backup of a 500 GB database using a third-party tool (LiteSpeed) versus without a third-party tool, and how much time does it take to restore the same full backup with and without LiteSpeed?

Ans:

There is no specific time we can quote for a BACKUP & RESTORE operation.

It depends on a lot of factors like disk I/O, network, processors, etc.

 SQL Server 2005:

Database Size: 1.2 TB

Time taken to Backup with Litespeed :  3:20 Hrs (80 % of compression)

Time Taken to Restore: 6Hrs

 Database Size: 800 GB

Time Taken to Backup using Native Method:  11 Hrs

I never tried restoring huge DBs using the native method, meaning native backups.

SQL Server 2000:

A Database of 20 GB will take 14 Min to Backup and 22 Min to Restore the Backup

 20. What are the issues you faced in backup and restore process?
Ans:

Common Errors in Backup:

Error 3201 – when performing a backup to a network share

Sol:

Where SQL Server disk access is concerned, everything depends on the rights of the SQL Server service startup account. If you are unable to back up to a network share, check that the service startup account has write rights to that share.

 Error: Cannot open the backup device:

Sol: 

Either the specified location is missing or the service account under which the SQL Agent is running does not have the permissions on that folder.

 Common Errors in Restore:

Error 3205 – Too many backup devices specified for backup or restore;

Sol:

The most common cause for this error is because you are trying to restore a SQL Server 2005 backup on a SQL Server 2000 instance

 Error 4305 – an earlier transaction log backup is required

Sol:

There are one or more transaction log backups that need to be restored before the current transaction log backup. Using the LSN numbers we can identify the prior log backups, as sketched below.
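A sketch of checking the LSN chain from msdb (the database name is illustrative):

-- Log backups must be restored in first_lsn/last_lsn order; a gap indicates a missing backup.
SELECT database_name,
       backup_start_date,
       first_lsn,
       last_lsn
FROM msdb.dbo.backupset
WHERE database_name = N'MyDB'
  AND type = 'L'
ORDER BY backup_start_date;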

21. How to perform the tail log backup?

Ans:

As normal log backup we can perform the tail log backup. We have two options to consider

WITH NORECOVERY: when the database is online and you are planning to perform a restore after the tail-log backup. It puts the database into the restoring state to make sure no transactions are performed after the tail-log backup.

WITH CONTINUE_AFTER_ERROR: when the database is offline and does not start. Remember, we can only perform a log backup on a damaged database when the log files themselves are not damaged.
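A sketch of a tail-log backup (the database name and path are illustrative):

-- Back up the tail of the log and leave the database in the RESTORING state.
BACKUP LOG MyDB
TO DISK = N'C:\Backup\MyDB_TailLog.trn'
WITH NORECOVERY, INIT;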

22. What is the difference between NO_LOG and TRUNCATE_ONLY?

Ans:

Both remove the inactive part of the log without making a backup copy of it, truncating the log by discarding all but the active portion. This option frees space. NO_LOG and TRUNCATE_ONLY are synonyms.
After truncating the log using either NO_LOG or TRUNCATE_ONLY, the changes recorded in the log are not recoverable. For recovery purposes, immediately execute BACKUP DATABASE to take a full or differential backup. Always try to avoid truncating the log, as it breaks the log chain; until the next full or differential backup, the database is not protected from media failure.

 23. Consider a situation where I have to take a backup of one database of 60 GB. My hard drive lacked sufficient space at that moment. I don’t find 64GB free on any drive. Fortunately, I have 3 different drives where I can hold 20 GB on each drive. How can you perform the backup to three different drives? How can you restore those files? Is this really possible?

Ans:

Yes it is possible. We can split the backup files into different places and the same can be restored.

  • BACKUP DATABASE AdventureWorks
    TO DISK = 'D:\Backup\MultiFile\AdventureWorks1.bak',
    DISK = 'E:\Backup\MultiFile\AdventureWorks2.bak',
    DISK = 'F:\Backup\MultiFile\AdventureWorks3.bak'

  • RESTORE DATABASE [AdventureWorks]
    FROM DISK = N'D:\Backup\MultiFile\AdventureWorks1.bak',
    DISK = N'E:\Backup\MultiFile\AdventureWorks2.bak',
    DISK = N'F:\Backup\MultiFile\AdventureWorks3.bak'

24. What is piecemeal Restore?

Ans:

Consider a database of 3 TB where the primary filegroup is a read-write filegroup of 500 GB and the other filegroups are read-only, totaling 2.5 TB. We do not actually need to back up the read-only filegroups every time; here we can perform partial backups.

A piecemeal restore allows us to restore the primary filegroup first so that the database can come online, and the remaining filegroups can be restored later while transactions are already running against the primary filegroup. It is mostly suitable for data warehousing databases.
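A sketch of the sequence (database, filegroup and file names are illustrative; for a read-only filegroup that has not changed since its backup, no extra log backups are needed for the final step):

-- Bring the read-write PRIMARY filegroup online first.
RESTORE DATABASE MyDW
FILEGROUP = 'PRIMARY'
FROM DISK = N'C:\Backup\MyDW_Partial.bak'
WITH PARTIAL, NORECOVERY;

RESTORE LOG MyDW FROM DISK = N'C:\Backup\MyDW_Log.trn' WITH RECOVERY;

-- The database is now online; restore the read-only filegroup afterwards.
RESTORE DATABASE MyDW
FILEGROUP = 'ReadOnly_FG'
FROM DISK = N'C:\Backup\MyDW_ReadOnlyFG.bak'
WITH RECOVERY;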

 25 Have you ever perform the backup using T-SQL? Can you explain about different backup options?

Ans:

BACKUP [DATABASE/LOG] <File/FileGroup>

TO <Backup Device>

MIRROR TO <Backup performed to different locations>

MIRROR TO <>

MIRROR TO < Only 3 mirrors can be specified >

WITH <Options>

Below are the General WITH options

  • Backup Set Options

  • COPY_ONLY  - Full backup on full recovery mode db’s. No chain breaking

  • COMPRESSION | NO_COMPRESSION – DB compression

  • DESCRIPTION

  • NAME

  • PASSWORD – Assigns a password to the backup set; the same password is required to restore it

  • EXPIREDATE – The backup set expires and can be overwritten only after the given date

  • RETAINDAYS – number of days that must elapse before this backup media set can be overwritten

  • Media Set Options

  • NOINIT | INIT – Append to the existing backup sets on the media | Overwrite all existing backup sets

  • NOSKIP | SKIP – Check backup set expiration before overwriting | Skip the expiration check

  • NOFORMAT | FORMAT – Preserve the existing media header | Write a new media header, creating a new media set

  • MEDIADESCRIPTION

  • MEDIANAME

  • MEDIAPASSWORD

  • BLOCKSIZE

  • Data Transfer Options

  • BUFFERCOUNT

  • MAXTRANSFERSIZE

  • Error Management Options

  • NO_CHECKSUM | CHECKSUM

  • STOP_ON_ERROR | CONTINUE_AFTER_ERROR – Instructs BACKUP to fail if a page checksum does not verify | Continue after error

  • Compatibility Options

  • RESTART

  • Monitoring Options

  • STATS  - Shows Percentage completed

  • Tape Options

  • REWIND | NOREWIND

  • UNLOAD | NOUNLOAD

  • Log-specific Options

  • NORECOVERY – Performs a tail-log backup and leaves the database in RESTORING mode

  • STANDBY – Performs a tail-log backup and leaves the database in read-only (standby) mode; requires an undo file

  • NO_TRUNCATE – Specifies that the log not be truncated and causes the Database Engine to attempt the backup regardless of the state of the database
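
A minimal sketch combining a few of the options above, assuming a database Test and a hypothetical backup path:

BACKUP DATABASE Test
TO DISK = 'D:\Backup\Test_full.bak'
WITH COPY_ONLY, COMPRESSION,          -- COMPRESSION is available from SQL Server 2008 onwards
     CHECKSUM, STOP_ON_ERROR,
     NAME = 'Test ad-hoc copy', STATS = 10;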

26. Have you ever performed a restore using T-SQL? Can you explain the different restore options?

Ans:

RESTORE [DATABASE/LOG] <File/FileGroup>

FROM  <Backup Device>

WITH <Options>

Below are the general WITH options; a sample restore command follows the list.

  • MOVE 'logical_file_name_in_backup' TO 'operating_system_file_name'

  • REPLACE

  • RESTART

  • RESTRICTED_USER

  • Backup Set Options

  • FILE

  • PASSWORD

  • Media Set Options

  • MEDIANAME

  • MEDIAPASSWORD

  • BLOCKSIZE

  • Data Transfer Options

  • BUFFERCOUNT

  • MAXTRANSFERSIZE

  • Error Management Options

  • CHECKSUM | NO_CHECKSUM

  • STOP_ON_ERROR | CONTINUE_AFTER_ERROR

  • Monitoring Options

  • STATS [ = percentage ]

  • Tape Options

  • REWIND | NOREWIND

  • UNLOAD | NOUNLOAD

  • Replication_WITH_option

  • KEEP_REPLICATION

  • Point_in_time_options

  • STOPAT = { 'datetime' | @datetime_var }

  • STOPATMARK = { 'lsn:lsn_number' }

  • STOPBEFOREMARK
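
A minimal sketch combining a few of the options above; it restores a backup of Test under a new name, with hypothetical logical file names and paths:

RESTORE DATABASE Test_Copy
FROM DISK = 'D:\Backup\Test_full.bak'
WITH MOVE 'Test' TO 'E:\Data\Test_Copy.mdf',          -- logical data file assumed to be named Test
     MOVE 'Test_log' TO 'F:\Log\Test_Copy_log.ldf',   -- logical log file assumed to be named Test_log
     REPLACE, STATS = 10;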

 27. Can you restore master database? If yes how?

Ans:

All server-level information, including login information, is stored in the master database. Schedule a regular backup for master; below is the process to restore it.

  • Start the sql server in single user mode (-m)

  • Issue the restore command with replace from SQLCMD prompt

  • RESTORE DATABASE master FROM <backup_device> WITH REPLACE

  • Restart the sql server in normal mode

  • All databases that were present at the time of the master backup should come back online, because everything about them is tracked in the master database.

  • If any databases are missing, we can manually attach their MDF/LDF files.

 28. How can we rebuild the system databases?

Ans:

We usually rebuild the system databases when they are corrupted.

Rebuilding deletes the system databases and recreates them, so all existing information in them is lost.

Before rebuild:

  • Locate all recent backup of system databases

  • Make a note of the MDF and LDF file locations, the server configuration, and the build/hotfix/service pack level applied

Rebuild:

  • Locate the SQL Server installation media and run setup.exe from the command prompt, passing the argument “/ACTION=REBUILDDATABASE”

  • Review the summary.txt once the rebuild completes

Post Rebuild:

  • Restore all the system databases from existing backups

  • Move the system databases mdf/ldf files to the actual locations

 29. How can we rebuild Resource system database?

Ans:

In SQL Server 2008, open the installation wizard (Installation Center), select “Maintenance” from the left navigation pane, and click “Repair”. This rebuilds the resource database.

 30. As a database is recovering, after which phase will the database be available/online?

Ans

SQL Server 2000: After the Undo phase.

SQL Server 2005/2008:  In all editions but Enterprise, after the Undo phase (if running FULL recovery model). With Enterprise edition, after the Redo phase. Fast recovery is possible because transactions that were uncommitted when a crash occurred reacquire whatever locks they held before the crash. While these transactions are being rolled back, their locks protect them from interference by users.

On the Enterprise Edition of SQL Server 2005/2008, users are allowed access after the REDO phase; the key point is that REDO always completes first, and fast recovery brings the database online before UNDO finishes.

31. How do you respond to the increasing transaction log file?

Ans:

Alternatives for responding to a full transaction log include the following (a short sketch of the most common responses follows the list):

  • Backing up the log.

  • Freeing disk space so that the log can automatically grow.

  • Adding a log file on a different disk.

  • Completing or killing a long-running transaction.

  • Moving the log file to a disk drive with sufficient space.

  • Increasing the size of a log file.

  • Shrinking the log file.
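
A minimal sketch of the most common responses, assuming a database Sales in the FULL recovery model (names and paths are hypothetical):

DBCC SQLPERF(LOGSPACE);                                -- check how full each database log is
BACKUP LOG Sales TO DISK = 'D:\Backup\Sales_log.trn';  -- back up the log to free the inactive portion
ALTER DATABASE Sales                                   -- or add a log file on a disk with free space
ADD LOG FILE (NAME = Sales_log2, FILENAME = 'E:\Log\Sales_log2.ldf', SIZE = 4096MB);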

 32. How to rebuild the system databases in SQL 2008?

Ans:

  • First we need to confirm that the master database is corrupted; SQL Server cannot start without the master database. By checking the error logs we can confirm that master is corrupted.

  • To rebuild the master database we have to use setup.exe from the command prompt. There is not much difference between 2005 and 2008 apart from a few command-line switches.

  • Find the setup.exe file (C:\……………………….\100\Setup BootStrap\Release\setup.exe)

  • Run the below command from dos prompt

c:\Program Files\Microsoft SQL Server\100\Setup Bootstrap\Release>setup.exe
  /QUIET
  /ACTION=REBUILDDATABASE
  /INSTANCENAME=<Default / Named>
  /SQLSYSADMINACCOUNTS=<Service Account>
  [/SAPWD=<Mandatory when using Mixed Mode>]
  [/SQLCOLLATION=<Give new collation if you want to change default>]

  • When setup has completed rebuilding the system databases, it returns to the command prompt with no messages (it always prints the version first). Examine the “Summary” log file (100\setup bootstrap\logs) to verify the rebuild was completely successful.

 33. Do we need installation DVD or complete binaries to rebuild system databases?

Ans:

The answer is NO, and this is a nice enhancement in 2008. While installing SQL Server, the system database files are copied to <MSSQL.InstanceName>\MSSQL\Binn\Templates.

In this directory you will find the master, model, and msdb data and log files that were copied from your installation source as part of setup. When we rebuild the system databases, setup uses these template files to recreate them.

34. What should we do if we cannot find these database files at the ….Templates\ location?

Ans:

There are two options available.

  • Use the Repair feature of Setup (available from the Maintenance option of the SQL Server Installation Center installed on your machine)

OR

  • Manually copy the necessary file(s) yourself. On your media source find the directory of your platform (x86, x64, or ia64). Then go to the following directory:

setup\sql_engine_core_inst_msi\PFiles\SqlServr\MSSQL.X\MSSQL\Binn\Template

Once you have copied the file(s) into the Templates directory (or run Repair), re-run setup with the syntax described above.

 35. Can we rebuild resource database?

Ans:

Yes! To rebuild these database files you would need to run Repair from the Installation Center.

 36. Can we rebuild msdb?

Ans:

Yes! We can restore msdb directly from a valid backup. If no valid backup is available, rebuild the system databases as described above.

 37. What if I have applied Updates / HotFixes after installation?

Ans:

As with SQL Server 2005, if for any reason you rebuild system databases or repair the resource database, you should apply your latest update even if you restore backups of system databases.

 38. How to restore Master or Msdb database from a valid backup?

Ans:

  • Stop and start the SQL Server Database Engine in single-user mode (using the startup parameter -m)

  • Restore the Master Database from SQLCMD prompt

  • From dos prompt using SQLCMD connect to the sql server and run the restore script

  • RESTORE DATABASE MASTER FROM DISK='D:\MASTER_FULL.BAK' WITH REPLACE

  • Stop and start the SQL Server Database Engine in normal mode

  • Restore MSDB Database

  • Connect to management studio and run the restore script for msdb

  • RESTORE DATABASE MSDB FROM DISK='D:\MSDB_FULL.BAK' WITH REPLACE

39. What is the difference between Hot and Cold Backup?

Ans:

Performing a backup while the database is online is called a hot backup. Stopping the SQL Server service and copying the MDF and LDF files is called a cold backup, which is rarely done in production.

 40. What are the restore options available?

Ans:

When you restore a backup, you can choose from three restore options.

  1. WITH RECOVERY – The database is ready to use; users can connect and change data.

  2. WITH NORECOVERY – The database is not yet ready; more backups still have to be applied. Users cannot connect because the database is in RESTORING state (database snapshots are an exception).

  3. STANDBY / Read-Only – The database is usable but read-only; users can connect but cannot change data. A recovered (running) database cannot be switched to standby; only a database left in the NORECOVERY state can be moved to standby. This option is specified while restoring a database or transaction log. A sketch of the three options follows.
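
A minimal sketch of the three options, assuming a database Test and hypothetical paths:

RESTORE DATABASE Test FROM DISK = 'D:\Backup\Test_full.bak' WITH NORECOVERY;   -- restoring, more backups to come
RESTORE LOG Test FROM DISK = 'D:\Backup\Test_log1.trn'
        WITH STANDBY = 'D:\Backup\Test_undo.dat';                              -- readable, can still roll forward
RESTORE LOG Test FROM DISK = 'D:\Backup\Test_log2.trn' WITH RECOVERY;          -- online and writable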


Preparing for MCSA Exam 70-461 – MS SQL Server 2012 exam


MCSA Certification Prep | Exam 461: Querying Microsoft SQL Server 2012

SQL Server Certification preparation MCSA Exam 461


 

  Review: Just go through this video to get an overall view of preparing for the 70-461 exam.

What is the Difference between NUMERIC / DECIMAL and FLOAT / REAL in SQL Server 2012?



Q. What is the Difference between NUMERIC / DECIMAL and FLOAT / REAL in SQL Server 2012?

Ans:

Before digging into these data types we must have a look at precision and scale.

Precision: The maximum total number of decimal digits that can be stored, both to the left and to the right of the decimal point.

Scale: The maximum number of decimal digits that can be stored to the right of the decimal point

Example:

Take an example number: 1433890.2554

Here the precision is 11 (all eleven digits: seven before the decimal point and four after).

The scale is 4 (the four digits 2554 to the right of the decimal point).

To store this number into a variable it must be declared with the proper precision and scale.

USE udayarumilli;

GO

DECLARE @Test DECIMAL(11,4) = 1433890.2554;

SELECT @Test

Output is:

—————————————
1433890.2554

(1 row(s) affected)

From the above example, if we change the precision to 10 and try to assign the same number, the execution fails with an arithmetic overflow error.

USE udayarumilli;
GO
DECLARE @Test DECIMAL(10,4) = 1433890.2554;
SELECT @Test

Output would be:

Msg 8115, Level 16, State 8, Line 1
Arithmetic overflow error converting numeric to data type numeric.

(1 row(s) affected)

Because the precision was reduced to 10, DECIMAL(10,4) leaves only six digits for the integer part, while 1433890 has seven, so an arithmetic overflow error is raised.

Now we’ll have a look into these data types.

NUMERIC / DECIMAL: These data types are identical in functionality and represent exact values with fixed precision and scale. They are most apt for situations where we need accurate values, such as price, balance, and credit amounts. Please have a look here to know more about these data types.

REAL / FLOAT: These approximate data types can be used when exact accuracy is not required, for example to represent very large or very small numbers for scientific purposes, average counts, approximate values, etc. Please have a look here to know more about these data types.

We’ll demonstrate with an example and see how these data types vary in handling the same number.

Declare a variable of type FLOAT and try to convert it into NUMERIC.

DECLARE @Var_F FLOAT = 1234567.02265;

SELECT  @Var_F AS 'Float_Value',

        CAST(@Var_F AS NUMERIC(20,10)) AS 'Numeric_Converted';

 

Can you guess the output of converted value?

Float_Value            Numeric_Converted

———————- —————————————

1234567.02265          1234567.0226499999

(1 row(s) affected)

You can clearly observe the difference between the numbers. This is why columns of type FLOAT should not be compared directly with exact values in a table; be very careful when using such columns and conversions in a WHERE clause. The sketch below shows the problem.
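
A minimal sketch continuing the example above; the converted value no longer compares equal to the literal we started with:

DECLARE @Var_F FLOAT = 1234567.02265;
SELECT CASE WHEN CAST(@Var_F AS NUMERIC(20,10)) = 1234567.02265
            THEN 'Equal' ELSE 'Not equal' END AS 'Comparison';
-- Returns 'Not equal' because the float holds only an approximation of the value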

 

 

Conversion functions in SQL Server 2012



Conversion functions  – SQL Server 2012

 

Before starting with data conversions, we’ll have a look at why data conversion is needed.

Can’t SQL Server do it automatically?

Yes! SQL Server can convert data internally; this is called implicit data conversion.

Implicit Data Conversion:

While evaluating SQL expressions, the database engine implicitly converts data based on data type precedence. The binary data type has the lowest precedence and user-defined data types have the highest.

For example consider the below query

SELECT 10+'10'

Here 10 is an integer and ‘10’ is a string. If we run the above statement, the output is 20.

While executing the statement, SQL Server compares the data type precedence of INT and VARCHAR. As INT has higher precedence than VARCHAR, it implicitly converts the string ‘10’ to the integer 10, hence the result is 20.

Now we’ll see one more example as below

SELECT GETDATE()

SELECT GETDATE()+10

Here GETDATE() returns a DATETIME value and 10 is an integer. Since DATETIME has higher precedence than INT, the integer is implicitly converted and 10 days are added to the current date.

SQL Server implicitly converts lower-precedence values to the higher-precedence data type in an expression. Implicit conversions can sometimes cause performance overhead; to take control of the conversion we have to use conversion functions explicitly.

Explicit Conversion:

Below are the conversion functions available in SQL Server 2012.

  • CAST

  • CONVERT

  • PARSE

  • TRY_CAST

  • TRY_CONVERT

  • TRY_PARSE

CAST AND CONVERT: These functions convert data across data types and are similar in functionality. The major difference is that CONVERT supports an extra “style” argument that specifies the format of the converted data, which helps when dealing with DATETIME values.

These are the traditional conversion functions and their basic usage is straightforward:

SELECT CAST('10' AS INT)+10 AS 'Total'

SELECT CONVERT(INT,10)+10 AS 'Total'

Now we’ll see how these are differentiating for “DATETIME” data values.

SELECT 'OP Completed on: '+CAST(SYSDATETIME() AS VARCHAR(30)) AS 'Msg'

SELECT 'OP Completed on: '+CONVERT(VARCHAR(30),SYSDATETIME(),106) AS 'Msg'

Here SYSDATETIME() is a system function that returns the current date and time, and 106 is the style in which the value is converted (dd mon yyyy).

The CONVERT function gives more flexibility in handling date-related data types; a few common styles are shown below.
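
A minimal sketch of a few common styles (101, 103, and 112 are standard CONVERT style numbers):

SELECT CONVERT(VARCHAR(30), SYSDATETIME(), 101) AS 'US mm/dd/yyyy',
       CONVERT(VARCHAR(30), SYSDATETIME(), 103) AS 'British dd/mm/yyyy',
       CONVERT(VARCHAR(30), SYSDATETIME(), 112) AS 'ISO yyyymmdd';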

Please check here to know more about these functions.

PARSE:

PARSE is a string conversion function introduced in SQL Server 2012. It converts strings to other data types and lets us indicate the culture to use for the conversion; the culture can be any culture supported by the .NET Framework. PARSE tries its best to convert strings to the intended data type.

It is not a native SQL Server function; it depends on the .NET CLR, so there is some performance overhead. Prefer CAST and CONVERT wherever possible.

Let’s have a look at few examples.

-- Convert string to Int

SELECT PARSE('100' AS INT) AS 'Integer';

-- Convert string to Decimal

SELECT PARSE('100.334' AS DECIMAL(10,3)) AS 'Decimal';

-- Convert string to DateTime using US format

SELECT PARSE('03/07/2013' AS DATETIME USING 'en-US') AS 'DateTime';

-- Convert string to DateTime using INDIA format

SELECT PARSE('03/07/2013' AS DATETIME USING 'en-IN') AS 'DateTime';

The PARSE function can be more flexible in handling DATETIME values; it does its best to convert the data.

CAST and CONVERT require the input in an acceptable format and fail otherwise, whereas PARSE can still succeed in these situations. Below are the examples.

-- Let’s try to convert String to datetime using CONVERT

SELECT CONVERT(DATETIME,'WEDNESDAY Jul 03 2013')

GO

-- Let’s try to convert String to datetime using CAST

SELECT CAST('WEDNESDAY Jul 03 2013' AS DATETIME)

Both CONVERT and CAST fail to convert the string “WEDNESDAY Jul 03 2013” to DATETIME.

Now we’ll check the same conversion operation with PARSE

-- Lets try to convert String to datetime using PARSE

SELECT PARSE('WEDNESDAY Jul 03 2013' AS DATETIME) AS 'Str2Date'

Please have a look at here for more details.

TRY_CAST, TRY_CONVERT, TRY_PARSE:

The difference between the functions without TRY and their counterparts with TRY is that those without TRY fail if the value isn’t convertible, whereas the TRY_ versions return NULL in that case.

Let’s have a look at below example to know differences between with and without TRY.

-- When we try to convert a string to INT using CAST

SELECT CAST('T100' AS INT) AS 'With CAST';

GO
-- When we try to convert a string to INT using CONVERT

SELECT CONVERT(INT,'T100') AS 'With CONVERT';

GO
-- When we try to convert a string to INT using PARSE

SELECT PARSE('T100' AS INT) AS 'With PARSE';
GO

Now we’ll try some examples with “TRY_”

-- When we try to convert a string to INT using CAST

SELECT TRY_CAST('T100' AS INT) AS 'With CAST';
GO

-- When we try to convert a string to INT using CONVERT

SELECT TRY_CONVERT(INT,'T100') AS 'With CONVERT';
GO

-- When we try to convert a string to INT using PARSE

SELECT TRY_PARSE('T100' AS INT) AS 'With PARSE';
GO

Please check the below links to know more about these functions.

TRY_CAST

TRY_CONVERT

TRY_PARSE

Difference between CHARINDEX and PATINDEX



CHARINDEX: A string function that returns the starting position of a specified substring within a string.

PATINDEX: A string function that returns the starting position of the first occurrence of a pattern; wildcard characters can be used in the pattern.

Let’s see some examples to understand the exact difference

First create a table and insert some test data.

-- Test Database
USE udayarumilli
GO
CREATE TABLE MaintLog (Maint_ID INT IDENTITY,

             Activity VARCHAR(100),

             Log_Desc TEXT);

GO

INSERT INTO MaintLog (Activity,Log_Desc)
SELECT 'Maintenace Plan - 112','Executed as admin. DBCC executed for all databases without no errors'
UNION
SELECT 'SSIS Package - S11','Executed as admin. SSIS executed for databases without no errors and imported data into db_199'
UNION
SELECT 'Maintenace Plan - 113','Executed as admin. DBCC executed for database db_tran_113 without no errors'
UNION
SELECT 'SSIS Package - S12','Executed as admin. SSIS executed for databases without no errors and imported data into db_134';

Let’s try these examples with the “Activity” column:

SELECT  Activity,
        CHARINDEX('Maint',Activity)
FROM    MaintLog;

GO

SELECT Activity,
       PATINDEX('%Maint%',Activity)
FROM   MaintLog
WHERE  PATINDEX('%Maint%',Activity) > 0;

Now let’s try the same with the “Log_Desc” column; the sketch below shows the difference.
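
A minimal sketch of the behaviour with the TEXT column: CHARINDEX does not accept text/ntext arguments directly, so the column has to be converted first, whereas PATINDEX can search it as-is and also supports wildcards (the search patterns here are just illustrations):

-- CHARINDEX needs the TEXT column converted before it can search it
SELECT Activity,
       CHARINDEX('DBCC', CAST(Log_Desc AS VARCHAR(MAX))) AS 'DBCC_Position'
FROM   MaintLog;

-- PATINDEX searches the TEXT column directly and supports wildcards
SELECT Activity,
       PATINDEX('%db[_]1[0-9][0-9]%', Log_Desc) AS 'DbName_Position'
FROM   MaintLog
WHERE  PATINDEX('%db[_]1[0-9][0-9]%', Log_Desc) > 0;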

Briefly PATINDEX = CHARINDEX + Wildcard Search
