Find specific word in column - sql-server

I am using SQL Server 2008.
My tables are:
Location
------------------------
Id | LocationName
------------------------
1 | Bodakdev
2 | Thaltej Road
3 | Andheri East
4 | Noida Sector 2
Company
--------------------------------------------------------------------------
CId | Address | LocationId
--------------------------------------------------------------------------
11 | 301, GNFC Infotower, Bodakdev, | NULL
12 | 307/308,Arundeep Complex | NULL
13 | 7 Krishana Dyeing Compund, Nagardas rd., Andheri | NULL
14 | B-23 ,Ground Floor,Sector 2 | NULL
--------------------------------------------------------------------------
Currently, LocationId in the Company table is NULL for every row. If Address contains any location name, then LocationId should be updated accordingly.
For example, the Address of CId 11 contains Bodakdev, so its LocationId should be set to 1; likewise, the Address of CId 13 contains the word Andheri, so its LocationId should be set to 3.
Required output:
CId | Address | LocationId
--------------------------------------------------------------------------
11 | 301, GNFC Infotower, Bodakdev, | 1
12 | 307/308,Arundeep Complex | NULL
13 | 7 Krishana Dyeing Compund, Nagardas rd., Andheri | 3
14 | B-23 ,Ground Floor,Sector 2 | 4
--------------------------------------------------------------------------
I have tried the query below:
SELECT
(LEN(Address) - LEN(REPLACE(Address, LocationName, ''))) / LEN(LocationName)
If Address contains the location name, it returns the number of occurrences; otherwise it returns 0. But it does not give the correct output. How can I do this? Any suggestion would be appreciated.

Try the following approach:
Step 1: create a function that splits a string by any delimiter and returns the output as a table.
CREATE FUNCTION [dbo].[fnSplit](
    @sInputList VARCHAR(8000)        -- List of delimited items
  , @sDelimiter VARCHAR(8000) = ','  -- delimiter that separates items
) RETURNS @List TABLE (item VARCHAR(8000))
BEGIN
    DECLARE @sItem VARCHAR(8000)
    WHILE CHARINDEX(@sDelimiter, @sInputList, 0) <> 0
    BEGIN
        SELECT
            @sItem = RTRIM(LTRIM(SUBSTRING(@sInputList, 1, CHARINDEX(@sDelimiter, @sInputList, 0) - 1))),
            @sInputList = RTRIM(LTRIM(SUBSTRING(@sInputList, CHARINDEX(@sDelimiter, @sInputList, 0) + LEN(@sDelimiter), LEN(@sInputList))))

        IF LEN(@sItem) > 0
            INSERT INTO @List SELECT @sItem
    END

    IF LEN(@sInputList) > 0
        INSERT INTO @List SELECT @sInputList -- Put the last item in

    RETURN
END
Step 2: use the following query to get the desired output.
DECLARE @Location AS TABLE (Id INT, Name VARCHAR(MAX))
DECLARE @Company  AS TABLE (CId INT, Address VARCHAR(MAX), LocationId INT)

INSERT INTO @Location VALUES(1,'Bodakdev')
INSERT INTO @Location VALUES(2,'Thaltej Road')
INSERT INTO @Location VALUES(3,'Andheri East')
INSERT INTO @Location VALUES(4,'Noida Sector 2')

INSERT INTO @Company VALUES(11,'301, GNFC Infotower, Bodakdev,' , NULL)
INSERT INTO @Company VALUES(12,'307/308,Arundeep Complex' , NULL)
INSERT INTO @Company VALUES(13,'7 Krishana Dyeing Compund, Nagardas rd., Andheri' , NULL)
INSERT INTO @Company VALUES(14,'B-23 ,Ground Floor,Sector 2' , NULL)

UPDATE A
SET
    LocationId = B.Id
FROM @Company AS A, @Location AS B
WHERE
    1 = CASE WHEN
        (
            SELECT COUNT(*)
            FROM dbo.fnSplit(B.Name, ' ')
            WHERE A.Address LIKE '%' + item + '%'
        ) > 0 THEN 1 ELSE 0 END
This is one way to do it. It could also be done with full-text search.
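For comparison, here is a hedged, set-based sketch of the same idea written directly against the Location and Company tables from the question (dbo.fnSplit is the function from step 1; picking the lowest Id is just one way to resolve multiple matches):
UPDATE C
SET LocationId = X.Id
FROM Company AS C
CROSS APPLY (
    SELECT TOP (1) L.Id
    FROM Location AS L
    CROSS APPLY dbo.fnSplit(L.LocationName, ' ') AS S
    WHERE C.Address LIKE '%' + S.item + '%'   -- any word of the location name appears in the address
    ORDER BY L.Id                             -- deterministic pick when several locations match
) AS X
WHERE C.LocationId IS NULL;
Because of the CROSS APPLY, companies with no matching location word are simply not touched and keep LocationId = NULL; very short tokens such as '2' can still produce false matches, just as in the query above.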

Related

SQL SERVER update or insert after left join

I have a Table Animals
Id | Name | Count | -- (other columns not relevant)
1 | horse | 11
2 | giraffe | 20
I want to try to insert or update values from a CSV string
Is it possible to do something like the following in 1 query?
;with results as
(
select * from
(
values ('horse'), ('giraffe'), ('lion')
)
animal_csv(aName)
left join animals on
animals.[Name] = animal_csv.aName
)
update results
set
[Count] = 1 + animals.[Count]
-- various other columns are set here
where Id is not null
--else
--insert into results ([Name], [Count]) values (results.aName, 1)
-- (essentially Where id is null)
It looks like what you're looking for is a table variable or temporary table rather than a common table expression.
If I understand your problem correctly, you are building a result set based on data you're getting from a CSV, merging it by incrementing values, and then returning that result set.
As I read your code, it looks as if your results would look like this:
aName | Id | Name | Count
horse | 1 | horse | 12
giraffe | 2 | giraffe | 21
lion | | |
I think what you're looking for in your final result set is this:
Name | Count
horse | 12
giraffe | 21
lion | 1
First, you can get from your csv and table to a resultset in a single CTE statement:
;WITH animal_csv AS (SELECT * FROM (VALUES('horse'),('giraffe'), ('lion')) a(aName))
SELECT ISNULL(Name, aName) Name
, CASE WHEN [Count] IS NULL THEN 1 ELSE 1 + [Count] END [Count]
FROM animal_csv
LEFT JOIN animals
ON Name = animal_csv.aName
Or, if you want to build your resultset using a table variable:
DECLARE @Results TABLE
(
    Name VARCHAR(30)
    , Count INT
)
;WITH animal_csv AS (SELECT * FROM (VALUES('horse'),('giraffe'), ('lion')) a(aName))
INSERT @Results
SELECT ISNULL(Name, aName) Name
, CASE WHEN [Count] IS NULL THEN 1 ELSE 1 + [Count] END [Count]
FROM animal_csv
LEFT JOIN animals
ON Name = animal_csv.aName
SELECT * FROM @Results
Or, if you just want to use a temporary table, you can build it like this (temp tables are deleted when the connection is released/closed or when they're explicitly dropped):
;WITH animal_csv AS (SELECT * FROM (VALUES('horse'),('giraffe'), ('lion')) a(aName))
SELECT ISNULL(Name, aName) Name
, CASE WHEN [Count] IS NULL THEN 1 ELSE 1 + [Count] END [Count]
INTO #results
FROM animal_csv
LEFT JOIN animals
ON Name = animal_csv.aName
SELECT * FROM #results
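If the goal really is to update-or-insert the Animals table itself in a single statement, a MERGE (available since SQL Server 2008) may be worth a look. The following is only a hedged sketch built from the table and CSV values in the question; it assumes Id is an identity (or otherwise auto-populated) column:
;WITH animal_csv AS (SELECT * FROM (VALUES ('horse'), ('giraffe'), ('lion')) a(aName))
MERGE Animals AS tgt
USING animal_csv AS src
    ON tgt.[Name] = src.aName
WHEN MATCHED THEN
    UPDATE SET tgt.[Count] = tgt.[Count] + 1        -- existing animal: bump the count
WHEN NOT MATCHED THEN
    INSERT ([Name], [Count]) VALUES (src.aName, 1); -- new animal: start at 1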

SQL Dynamic Charindex

I have a field in a SQL table that I need to parse via CHARINDEX, but the little caveat is that I don't know how many pieces there are.
The field data would look like the following:
(Image: "filename=a.jpg"), (Image: "filename=b.jpg")
The problem is that I'm not sure how many filenames there will be in this string, so I need to build this out dynamically; it could be 1 or it could be 100.
Any suggestions?
Thanks
Since you cannot know in advance how many values you will extract from each value, I would suggest representing the results as records, not columns.
If you are using SQL Server 2016 or higher, you can use function STRING_SPLIT() to turn CSV parts to records. Then, SUBSTRING() and CHARINDEX() can be used to extract the relevant information:
DECLARE @t TABLE ([txt] VARCHAR(200))
INSERT INTO @t VALUES ('(Image: "filename=a.jpg"),(Image: "filename=b.jpg")')
SELECT value, SUBSTRING(
value,
CHARINDEX('=', value) + 1,
LEN(value) - CHARINDEX('=', value) - 2
)
FROM @t t
CROSS APPLY STRING_SPLIT(t.txt , ',')
Demo on DB Fiddle:
value | (No column name)
:------------------------ | :---------------
(Image: "filename=a.jpg") | a.jpg
(Image: "filename=b.jpg") | b.jpg
NB : this assumes that the value to extract is always located after the first equal sign and until 2 characters before the end of string. If the pattern is different, you may need to adapt the SUBSTRING()/CHARINDEX() calls.
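If that assumption does not hold, a hedged variant is to read the value up to the closing double quote instead of stopping exactly 2 characters before the end of the string (same sample data; STRING_SPLIT() still needs SQL Server 2016+):
DECLARE @t TABLE ([txt] VARCHAR(200))
INSERT INTO @t VALUES ('(Image: "filename=a.jpg"),(Image: "filename=b.jpg")')

SELECT value,
       SUBSTRING(value,
                 CHARINDEX('=', value) + 1,                   -- start right after the '='
                 CHARINDEX('"', value, CHARINDEX('=', value) + 1)
                   - CHARINDEX('=', value) - 1) AS filename   -- stop before the closing quote
FROM @t t
CROSS APPLY STRING_SPLIT(t.txt, ',')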
The real issue is: this is breaking 1NF (first normal form). You should never, ever store more than one piece of data in one cell. Such CSV formats are a pain in the neck, and you really should use a related side table to store your image hints one by one.
Nevertheless, this can be handled:
--A mockup table
DECLARE @mockup TABLE(ID INT IDENTITY,YourString VARCHAR(1000));
INSERT INTO @mockup VALUES
('(Image: "filename=a.jpg"), (Image: "filename=b.jpg") ')
,('(Image: "filename=aa.jpg"), (Image: "filename=bb.jpg"), (Image: "filename=cc.jpg"), (Image: "filename=dd.jpg"), (Image: "filename=ee.jpg")');
--Pick one element by its position:
DECLARE @position INT=2;
SELECT CAST('<x>' + REPLACE(t.YourString,',','</x><x>') + '</x>' AS XML)
.value('/x[position()=sql:variable("@position")][1]','nvarchar(max)')
FROM @mockup t;
The trick is to transform the string to XML and use XQuery to fetch the needed element by its position. The intermediate XML looks like this:
<x>(Image: "filename=a.jpg")</x>
<x> (Image: "filename=b.jpg") </x>
You can use some more replacements and L/RTRIM() to get it cleaner.
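For instance, a hedged sketch of the same position-based pick with the surrounding blanks trimmed away (the table variable and position are redeclared here just to keep the snippet self-contained):
DECLARE @mockup TABLE(ID INT IDENTITY, YourString VARCHAR(1000));
INSERT INTO @mockup VALUES ('(Image: "filename=a.jpg"), (Image: "filename=b.jpg") ');

DECLARE @position INT = 2;

SELECT LTRIM(RTRIM(
           CAST('<x>' + REPLACE(t.YourString, ',', '</x><x>') + '</x>' AS XML)
           .value('/x[position()=sql:variable("@position")][1]', 'nvarchar(max)')
       )) AS PickedElement        -- returns '(Image: "filename=b.jpg")' without the leading/trailing blanks
FROM @mockup t;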
Read table data
And if you want to create a clean side table and you need all data neatly separated, you can use a bit more of the same:
SELECT CAST('<x><y><z>'
+ REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(
t.YourString,'(','') --no opening parenthesis
,')','') --no closing parenthesis
,'"','') --no quotes
,' ','') --no blanks
,'=','</z><z>') --Split at "="
,':','</z></y><y><z>') --Split at ":"
,',','</z></y></x><x><y><z>') --Split at ","
+ '</z></y></x>' AS XML)
FROM @mockup t;
This returns
<x>
<y>
<z>Image</z>
</y>
<y>
<z>filename</z>
<z>a.jpg</z>
</y>
</x>
<x>
<y>
<z>Image</z>
</y>
<y>
<z>filename</z>
<z>b.jpg</z>
</y>
</x>
And with this you would get a clean EAV (entity-attribute-value) table:
WITH Casted AS
(
SELECT ID
,CAST('<x><y><z>'
+ REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(
t.YourString,'(','')
,')','')
,'"','')
,' ','')
,'=','</z><z>')
,':','</z></y><y><z>')
,',','</z></y></x><x><y><z>')
+ '</z></y></x>' AS XML) AS CastedToXml
FROM @mockup t
)
SELECT ROW_NUMBER() OVER(ORDER BY (SELECT NULL)) AS ID
,ID AS oldId
,eachElement.value('y[1]/z[1]','varchar(max)') AS DataType
,eachElement.value('y[2]/z[1]','varchar(max)') AS ContentType
,eachElement.value('y[2]/z[2]','varchar(max)') AS Content
FROM Casted
CROSS APPLY CastedToXml.nodes('/x') A(eachElement)
The result
+----+-------+----------+-------------+---------+
| ID | oldId | DataType | ContentType | Content |
+----+-------+----------+-------------+---------+
| 1 | 1 | Image | filename | a.jpg |
+----+-------+----------+-------------+---------+
| 2 | 1 | Image | filename | b.jpg |
+----+-------+----------+-------------+---------+
| 3 | 2 | Image | filename | aa.jpg |
+----+-------+----------+-------------+---------+
| 4 | 2 | Image | filename | bb.jpg |
+----+-------+----------+-------------+---------+
| 5 | 2 | Image | filename | cc.jpg |
+----+-------+----------+-------------+---------+
| 6 | 2 | Image | filename | dd.jpg |
+----+-------+----------+-------------+---------+
| 7 | 2 | Image | filename | ee.jpg |
+----+-------+----------+-------------+---------+
I used a table-valued function:
ALTER FUNCTION [dbo].[Fn_sqllist_to_table](@list AS VARCHAR(8000),
                                           @delim AS VARCHAR(10))
RETURNS @listTable TABLE(
    Position INT,
    Value VARCHAR(8000))
AS
BEGIN
    DECLARE @myPos INT
    SET @myPos = 1

    WHILE Charindex(@delim, @list) > 0
    BEGIN
        INSERT INTO @listTable
                    (Position,Value)
        VALUES      (@myPos,LEFT(@list, Charindex(@delim, @list) - 1))

        SET @myPos = @myPos + 1

        IF Charindex(@delim, @list) = Len(@list)
            INSERT INTO @listTable
                        (Position,Value)
            VALUES      (@myPos,'')

        SET @list = RIGHT(@list, Len(@list) - Charindex(@delim, @list))
    END

    IF Len(@list) > 0
        INSERT INTO @listTable
                    (Position,Value)
        VALUES      (@myPos,@list)

    RETURN
END
By calling it via
select * into #test from tableX as T
cross apply dbo.[Fn_sqllist_to_table](fieldname, '(')
and then I just used SUBSTRING on the value to populate the final table.

How to check which column in an INSERT does not have the correct data type?

Imagine I have 200 columns in one INSERT statement, and I occasionally get a "Cannot convert" error for one of the columns. The thing is, I do not know which column causes this error.
Is there any way in T-SQL or mybatis to check WHICH column has the incorrect format? (I have just date, char, numeric). I can use ISNUMERIC, ISDATE for every column, but this is not so elegant.
I'm using mybatis in Java, so I cannot use any PreparedStatement or so.
You could build a query that tries to convert each of the suspected columns and limit it to the rows where one of the conversion attempts fails.
Mostly the bad data will be in CHAR or VARCHAR columns that are being cast or converted to a datetime or numeric type, so you can limit your research to those.
Also, the error message should tell you which value failed to convert to which type, which can further narrow down the fields you need to investigate.
A simplified example using table variables:
declare @T1 table (id int identity(1,1) primary key, field1 varchar(30), field2 varchar(30), field3 varchar(30));
declare @T2 table (id int identity(1,1) primary key, field1_int int, field2_date date, field3_dec decimal(10,2));
insert into @T1 (field1, field2, field3) values
('1','2018-01-01','1.23'),
('not an int','2018-01-01','1.23'),
('1','not a date','1.23'),
('1','2018-01-01','not a decimal'),
(null,'2018-01-01','1.23'),
('1',null,'1.23'),
('1','2018-01-01',null)
;
select top 1000
id,
case when try_convert(int, field1) is null then field1 end as field1,
case when try_convert(date, field2) is null then field2 end as field2,
case when try_convert(decimal(10,4), field3) is null then field3 end as field3
from @T1
where
try_convert(int, coalesce(field1, '0')) is null
or try_convert(date, coalesce(field2, '1900-01-01')) is null
or try_convert(decimal(10,4), coalesce(field3, '0.0')) is null;
Returns:
id field1 field2 field3
-- ---------- ----------- -------------
2 not an int NULL NULL
3 NULL not a date NULL
4 NULL NULL not a decimal
If the source data doesn't have too much bad data, you could try to fix the source data first.
Or use TRY_CONVERT for the problematic columns with bad data.
For example:
insert into @T2 (field1_int, field2_date, field3_dec)
select
try_convert(int, field1),
try_convert(date, field2),
try_convert(decimal(10,4), field3)
from @T1;
With larger imports - especially when you expect issues - a two-step approach is highly recommended:
import the data into a very tolerant staging table (all NVARCHAR(MAX))
check, evaluate, manipulate and correct whatever is needed, then do the real insert from there
Here is a generic approach you might adapt to your needs. It will check all of the table's values against a type-map table and output every value that fails TRY_CAST (needs SQL Server 2012+).
A table to mock up the staging table (partly borrowed from LukStorms' answer - thanks!):
CREATE TABLE #T1 (id INT IDENTITY(1,1) PRIMARY KEY
,fldInt VARCHAR(30)
,fldDate VARCHAR(30)
,fldDecimal VARCHAR(30));
GO
INSERT INTO #T1 (fldInt, fldDate, fldDecimal) values
('1','2018-01-01','1.23'),
('blah','2018-01-01','1.23'),
('1','blah','1.23'),
('1','2018-01-01','blah'),
(null,'2018-01-01','1.23'),
('1',null,'1.23'),
('1','2018-01-01',null);
--a type map (might be taken from INFORMATION_SCHEMA of an existing target table automatically)
DECLARE @type_map TABLE(ColumnName VARCHAR(100),ColumnType VARCHAR(100));
INSERT INTO @type_map VALUES('fldInt','int')
                           ,('fldDate','date')
                           ,('fldDecimal','decimal(10,2)');
--The staging table's name
DECLARE @TableName NVARCHAR(100)='#T1';
--dynamically created statements for each column
DECLARE @columnSelect NVARCHAR(MAX)=
(SELECT
  ' UNION ALL SELECT id ,''' + tm.ColumnName + ''',''' + tm.ColumnType + ''',' + QUOTENAME(tm.ColumnName)
  + ',CASE WHEN TRY_CAST(' + QUOTENAME(tm.ColumnName) + ' AS ' + tm.ColumnType + ') IS NULL THEN 0 ELSE 1 END ' +
  'FROM ' + QUOTENAME(@TableName)
 FROM @type_map AS tm
 FOR XML PATH('')
);
--The final dynamically created statement
DECLARE @cmd NVARCHAR(MAX)=
'SELECT tbl.*
FROM
(
SELECT 0 AS id,'''' AS ColumnName,'''' AS ColumnType,'''' AS ColumnValue,0 AS IsValid WHERE 1=0 '
+ @columnSelect +
') AS tbl
WHERE tbl.IsValid = 0;'
--Execution with EXEC()
EXEC(@cmd);
The result:
+----+------------+---------------+-------------+---------+
| id | ColumnName | ColumnType | ColumnValue | IsValid |
+----+------------+---------------+-------------+---------+
| 2 | fldInt | int | blah | 0 |
+----+------------+---------------+-------------+---------+
| 5 | fldInt | int | NULL | 0 |
+----+------------+---------------+-------------+---------+
| 3 | fldDate | date | blah | 0 |
+----+------------+---------------+-------------+---------+
| 6 | fldDate | date | NULL | 0 |
+----+------------+---------------+-------------+---------+
| 4 | fldDecimal | decimal(10,2) | blah | 0 |
+----+------------+---------------+-------------+---------+
| 7 | fldDecimal | decimal(10,2) | NULL | 0 |
+----+------------+---------------+-------------+---------+
The statement created is like here:
SELECT tbl.*
FROM
(
SELECT 0 AS id,'' AS ColumnName,'' AS ColumnType,'' AS ColumnValue,0 AS IsValid WHERE 1=0
UNION ALL SELECT id
,'fldInt'
,'int'
,[fldInt]
,CASE WHEN TRY_CAST([fldInt] AS int) IS NULL THEN 0 ELSE 1 END
FROM [#T1]
UNION ALL SELECT id
,'fldDate'
,'date',[fldDate]
,CASE WHEN TRY_CAST([fldDate] AS date) IS NULL THEN 0 ELSE 1 END
FROM [#T1]
UNION ALL SELECT id
,'fldDecimal'
,'decimal(10,2)'
,[fldDecimal]
,CASE WHEN TRY_CAST([fldDecimal] AS decimal(10,2)) IS NULL THEN 0 ELSE 1 END
FROM [#T1]
) AS tbl
WHERE tbl.IsValid = 0;

How to retrieve unique records having unique values in two columns from a table in SQL Server

I want to query a table where I need a result that contains unique values from two columns together. For example:
Table
EnquiryId | EquipmentId | Price
-----------+--------------+-------
1 | E20 | 10
1 | E50 | 40
1 | E60 | 20
2 | E30 | 90
2 | E20 | 10
2 | E90 | 10
3 | E90 | 10
3 | E60 | 10
For each EnquiryId, EquipmentId will be unique in the table. Now I want a result where I can get something like this
EnquiryId | EquipmentId | Price
-----------+--------------+-------
1 | E20 | 10
2 | E30 | 90
3 | E90 | 10
In the result, each EnquiryId present in the table should be displayed only once.
If, for example, I have 3 EquipmentIds "E20, E50, E60" for EnquiryId 1, any random EquipmentId should be displayed from those three values only.
Any help would be appreciated. Thank you in advance.
QUERY
;WITH cte AS
(
SELECT *,
ROW_NUMBER() OVER
(PARTITION BY enquiryID
ORDER BY enquiryID ) AS RN
FROM tbl
)
SELECT enquiryID,equipmentID,Price
FROM cte
WHERE RN=1
FIND FIDDLE HERE
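If a genuinely random equipment per enquiry is wanted, as the question mentions, a hedged variant of the same query is to order the ROW_NUMBER() by NEWID() (tbl being the table name used above):
;WITH cte AS
(
    SELECT *,
           ROW_NUMBER() OVER
           (PARTITION BY enquiryID
            ORDER BY NEWID()) AS RN   -- NEWID() gives a random order within each enquiry
    FROM tbl
)
SELECT enquiryID, equipmentID, Price
FROM cte
WHERE RN = 1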
The following code should help you.
Sorry that I ended up with a lengthy solution. Run it in SSMS and see the result.
Declare @tab table (EnquiryId int, EquipmentId varchar(10), Price int)
Insert into @tab values
(1,'E20',10),
(1,'E50',40),
(1,'E60',20),
(2,'E30',90),
(2,'E20',10),
(2,'E90',10),
(3,'E90',10),
(3,'E60',10)
----------------------------------------------
Declare @s int = 1
Declare @e int, @z varchar(10)
Declare @Equipment table (EquipmentId varchar(10), ind int)
Insert into @Equipment (EquipmentId) Select Distinct EquipmentId From @tab
Declare @Enquiry table (id int identity(1,1), EnquiryId int, EquipmentId varchar(10))
Insert into @Enquiry (EnquiryId) Select Distinct EnquiryId From @tab
Set @e = @@ROWCOUNT

While @s <= @e
begin
    Select Top 1 @z = T.EquipmentId
    From @tab T
    Join @Enquiry E On T.EnquiryId = E.EnquiryId
    Join @Equipment Eq On Eq.EquipmentId = T.EquipmentId
    Where E.id = @s
    And Eq.ind is Null
    Order by NEWID()

    update @Enquiry
    Set EquipmentId = @z
    Where id = @s

    update @Equipment
    Set ind = 1
    Where EquipmentId = @z

    Set @s = @s + 1
End

Select T.EnquiryId, T.EquipmentId, T.Price
From @tab T
left join @Enquiry E on T.EnquiryId = E.EnquiryId
Where T.EquipmentId = E.EquipmentId
You can use GROUP BY (the typical way) to remove duplicate values.
The basic steps are:
Alter the table and add an identity column.
Group by the columns that can be duplicated.
Delete those records (see the sketch below).
Check here: Remove Duplicate Rows from a Table in SQL Server
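A hedged sketch of that idea, using ROW_NUMBER() instead of an added identity column (tbl and its columns as in the question; note that this physically deletes the duplicate rows):
;WITH d AS
(
    SELECT *,
           ROW_NUMBER() OVER (PARTITION BY EnquiryId ORDER BY EquipmentId) AS rn
    FROM tbl
)
DELETE FROM d     -- deleting through the CTE removes the extra rows per EnquiryId
WHERE rn > 1;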

Insert random Data content in SQL Server 2008

I know there are several topics on this, but none of them was suitable for me, that's why I took the chance to ask you again.
I have a table which has columns UserID, FirstName, Lastname.
I need to insert 300,000 records, and the values in each column have to be unique, for example:
UserID0001, John00001, Doe00001
UserID0002, John00002, Doe00002
UserID0003, John00003, Doe00003
I hope there is an easy way :)
Thank you in advance.
Best,
Lyubo
;with sequence as (
select N = row_number() over (order by @@SPID)
from sys.all_columns c1, sys.all_columns c2
)
insert into [Table] (UserID, FirstName, Lastname)
select
'UserID' + right('000000' + cast(N as varchar(10)), 6),
'John' + right('000000' + cast(N as varchar(10)), 6),
'Doe' + right('000000' + cast(N as varchar(10)), 6)
from sequence where N <= 300000
You could use the ROW_NUMBER function to generate different numbers like this:
SQL Fiddle
MS SQL Server 2008 Schema Setup:
CREATE TABLE dbo.users(
Id INT IDENTITY(1,1) PRIMARY KEY CLUSTERED,
user_id VARCHAR(20),
first_name VARCHAR(20),
last_name VARCHAR(20)
);
GO
DECLARE @NoOfRows INT = 7;
INSERT INTO dbo.users(user_id, first_name, last_name)
SELECT 'User_'+n, 'John_'+n, 'Doe_'+n
FROM(
SELECT REPLACE(STR(ROW_NUMBER()OVER(ORDER BY (SELECT NULL))),' ','0') n FROM(
select TOP(@NoOfRows) 1 x from sys.objects A,sys.objects B,sys.objects C,sys.objects D,sys.objects E,sys.objects F,sys.objects G
)X
)N
Query 1:
SELECT * FROM dbo.users
Results:
| ID | USER_ID | FIRST_NAME | LAST_NAME |
-----------------------------------------------------------
| 1 | User_0000000001 | John_0000000001 | Doe_0000000001 |
| 2 | User_0000000002 | John_0000000002 | Doe_0000000002 |
| 3 | User_0000000003 | John_0000000003 | Doe_0000000003 |
| 4 | User_0000000004 | John_0000000004 | Doe_0000000004 |
| 5 | User_0000000005 | John_0000000005 | Doe_0000000005 |
| 6 | User_0000000006 | John_0000000006 | Doe_0000000006 |
| 7 | User_0000000007 | John_0000000007 | Doe_0000000007 |
Just change the @NoOfRows to 300000 to get the number of rows you are looking for.
I've adapted a script found in this article:
DECLARE @RowCount INT
DECLARE @RowString VARCHAR(14)
DECLARE @First VARCHAR(14)
DECLARE @Last VARCHAR(14)
DECLARE @ID VARCHAR(14)

SET @ID = 'UserID'
SET @First = 'John'
SET @Last = 'Doe'
SET @RowCount = 1

WHILE @RowCount < 300001
BEGIN
    SET @RowString = CAST(@RowCount AS VARCHAR(10))
    SET @RowString = REPLICATE('0', 6 - DATALENGTH(@RowString)) + @RowString

    INSERT INTO TestTableSize (
        UserID
        ,FirstName
        ,LastName
    )
    VALUES
        (@ID + @RowString
        , @First + @RowString
        , @Last + @RowString)

    SET @RowCount = @RowCount + 1
END
