Search for value of split field - sql-server

I have a field that stores multiple foreign keys, and I need to search it for a given value.
Say admin_ids :: 24,56,78
How do I search that field and get the following results:
24 = true; 6 = false; 7 = false
I cannot change the schema.
Any ideas?

What I've done in the past is create a table-valued function to split the string into its distinct values, provided they're delimited by the same...delimiter.
There are examples out there on the web, for instance:
http://www.sqlservercentral.com/blogs/querying-microsoft-sql-server/2013/09/19/how-to-split-a-string-by-delimited-char-in-sql-server/
Here is an example, converted into a script:
DECLARE
    @Data NVARCHAR(MAX) = '24,56,78',
    @Delimiter NVARCHAR(MAX) = ','

DECLARE @Values TABLE (ID INT IDENTITY(1,1), Data NVARCHAR(MAX))
DECLARE @Users TABLE (ID INT)

INSERT INTO @Users (ID)
VALUES (1),(6),(7),(24),(30),(56)

DECLARE @Iterator INT
SET @Iterator = 1

DECLARE @FoundIndex INT
SET @FoundIndex = CHARINDEX(@Delimiter, @Data)

WHILE (@FoundIndex > 0)
BEGIN
    INSERT INTO @Values (Data)
    SELECT Data = LTRIM(RTRIM(SUBSTRING(@Data, 1, @FoundIndex - 1)))

    SET @Data = SUBSTRING(@Data,
                          @FoundIndex + DATALENGTH(@Delimiter) / 2,
                          LEN(@Data))
    SET @Iterator = @Iterator + 1
    SET @FoundIndex = CHARINDEX(@Delimiter, @Data)
END

INSERT INTO @Values (Data)
SELECT Data = LTRIM(RTRIM(@Data))

SELECT
    usr.ID,
    CASE ISNULL(val.Data, -1)
        WHEN -1 THEN 'False'
        ELSE 'True'
    END
FROM @Users usr
LEFT JOIN @Values val ON val.Data = usr.ID
Hopefully this gives you enough to get started, if a TVF is the way you want to go :)
P.S. There are lots of ways to do this and I'm sure some are better than the one presented...this works, and if the field is only varchar(255), the efficiency should be fine.
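As an aside: on SQL Server 2016 or later you could skip the manual loop and use the built-in STRING_SPLIT. A minimal sketch against the same sample data as above (TRY_CAST guards against any stray non-numeric fragments):
DECLARE @admin_ids NVARCHAR(MAX) = '24,56,78';
DECLARE @Users TABLE (ID INT);
INSERT INTO @Users (ID) VALUES (1), (6), (7), (24), (30), (56);

SELECT usr.ID,
       CASE WHEN s.value IS NULL THEN 'False' ELSE 'True' END AS Found
FROM @Users usr
LEFT JOIN STRING_SPLIT(@admin_ids, ',') AS s
    ON TRY_CAST(s.value AS INT) = usr.ID;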

Related

How to iterate over a string of varying length, replacing different abbreviations with their full text, where all abbreviations are separated by a semicolon

My problem is this: I have a field in a table that contains values like this:
NP
NP;MC;PE
MC;AB;AT;MI;TC;WM
OS
OG
I want to convert these abbreviations to their full name. i.e. NP becomes Nuclear Power, OG becomes Oil and Gas, MI becomes Military etc.
My desired output would be:
Nuclear Power
Nuclear Power;Military;Pesticides
and so on.
I'm creating this as a function. I got it working for just one abbreviation and then the same for two. However, my issue is that I may have 5 or 7 abbreviations. I know my current approach is dreadful, but I cannot figure out how to loop it in the right way.
Please note: I've shortened the list of abbreviations for StackOverflow but there's 25 in total.
Please further note: I did the function bottom up (I don't know why) and got the two-value and single-value cases working. I've removed anything I did for three or more values, as nothing I did worked.
ALTER FUNCTION [dbo].[get_str_full]
(
    -- Add the parameters for the function here
    @str_input VARCHAR(250)
)
RETURNS VARCHAR(250)
AS
BEGIN
    -- Declare the return variable here
    DECLARE @Result VARCHAR(250)
    DECLARE @TEMPSTRING VARCHAR(250)
    DECLARE @TEMPSTRING_RIGHT AS VARCHAR(250)
    -- DECLARE @PI_COUNT BIGINT
    DECLARE @COUNTER INT
    DECLARE @TOTAL_VALS BIGINT
    DECLARE @STRING_ST VARCHAR(250)
    DECLARE @POS_STR BIGINT
    DECLARE @REMAINING_STR VARCHAR(250)
    -- Used for easy loop skips
    DECLARE @LEFTSKIP AS BIGINT

    SET @LEFTSKIP = 1
    SET @Result = @str_input
    SET @STRING_ST = @Result
    SET @COUNTER = (LEN(@Result) - LEN(REPLACE(@Result, ';', ''))) + 1
    SET @TOTAL_VALS = (LEN(@Result) - LEN(REPLACE(@Result, ';', ''))) + 1

    -- If the string has a semicolon then there's more than one PI value
    IF CHARINDEX(';', @Result) > 0
    BEGIN
        WHILE @COUNTER > 0
        BEGIN
            IF @TOTAL_VALS >= 3 -- If counter is more than 2 then there's three or more
            BEGIN
                DECLARE @TEMP_VAL BIGINT
                SET @TEMP_VAL = 5
            END
            ELSE IF @TOTAL_VALS = 2 -- There's 2
            BEGIN
                -- Do left two chars first
                IF @LEFTSKIP = 1
                BEGIN
                    SET @TEMPSTRING = LEFT(@Result, 2)
                    SELECT @TEMPSTRING = CASE @TEMPSTRING
                                             WHEN 'MC' THEN 'Military Contracting'
                                             WHEN 'NP' THEN 'Nuclear'
                                             WHEN 'OG' THEN 'Oil & Gas'
                                             WHEN 'OS' THEN 'Oil Sands'
                                             WHEN 'PM' THEN 'Palm Oil'
                                             WHEN 'PE' THEN 'Pesticides'
                                             ELSE @TEMPSTRING
                                         END
                    SET @LEFTSKIP = 2
                END
                ELSE IF @LEFTSKIP = 2
                BEGIN
                    SET @TEMPSTRING_RIGHT = RIGHT(@Result, 2)
                    SELECT @TEMPSTRING_RIGHT = CASE @TEMPSTRING_RIGHT
                                                   WHEN 'MC' THEN 'Military Contracting'
                                                   WHEN 'NP' THEN 'Nuclear'
                                                   WHEN 'OG' THEN 'Oil & Gas'
                                                   WHEN 'OS' THEN 'Oil Sands'
                                                   WHEN 'PM' THEN 'Palm Oil'
                                                   WHEN 'PE' THEN 'Pesticides'
                                                   ELSE @TEMPSTRING_RIGHT
                                               END
                END
            END
            SET @COUNTER = @COUNTER - 1
        END
        SET @Result = CONCAT(@TEMPSTRING, ';', @TEMPSTRING_RIGHT)
    END
    ELSE
    BEGIN
        SET @Result = REPLACE(@Result, 'MC', 'Military Contracting')
        SET @Result = REPLACE(@Result, 'NP', 'Nuclear Power')
        SET @Result = REPLACE(@Result, 'OG', 'Oil & Gas')
        SET @Result = REPLACE(@Result, 'OS', 'Oil Sands')
        SET @Result = REPLACE(@Result, 'PM', 'Palm Oil')
        SET @Result = REPLACE(@Result, 'PE', 'Pesticides')
    END

    -- Return the result of the function
    RETURN @Result
END
First, some easily consumable sample data:
DECLARE @translation TABLE (tCode VARCHAR(10), tString VARCHAR(40));
DECLARE @t TABLE (String VARCHAR(1000));

INSERT @t VALUES ('PE;N'), ('NP'), ('NP;MC;PE;XX');
INSERT @translation VALUES ('N', 'Nukes'), ('NP', 'Nuclear Power'), ('MC', 'Military'),
                           ('PE', 'Pesticides');
Note my updated sample data, which includes "XX" (which has no match) and an "N" for "Nukes", which would wreck any solution that leverages REPLACE. If you are on SQL Server 2017+ you can use STRING_SPLIT and STRING_AGG.
SELECT
    OldString = t.String,
    NewString = STRING_AGG(ISNULL(tx.tString, items.[value]), ';')
FROM @t AS t
OUTER APPLY STRING_SPLIT(t.String, ';') AS items
LEFT JOIN @translation AS tx
    ON items.[value] = tx.tCode
GROUP BY t.String;
Returns:
OldString      NewString
-------------  ------------------------------------
NP             Nuclear Power
NP;MC;PE;XX    Nuclear Power;Military;Pesticides;XX
PE;N           Pesticides;Nukes
You should really fix your table design so that you do not store multiple pieces of info in one column.
If you would like it as a function, I would strongly recommend an inline Table-Valued function rather than a scalar function.
If you have SQL Server version 2017+ you can use STRING_SPLIT and STRING_AGG for this.
CREATE OR ALTER FUNCTION GetFullStr
( @str varchar(250) )
RETURNS TABLE
AS RETURN
(
SELECT STRING_AGG(ISNULL(v.FullStr, s.value), ';') result
FROM STRING_SPLIT(@str, ';') s
LEFT JOIN (VALUES
('MC', 'Military Contracting'),
('NP', 'Nuclear'),
('OG', 'Oil & Gas'),
('OS', 'Oil Sands'),
('PM', 'Palm Oil'),
('PE', 'Pesticides')
) v(Abbr, FullStr) ON v.Abbr = s.value
);
GO
You can, and should, replace the VALUES with a real table.
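For example, a minimal sketch of that, assuming a hypothetical dbo.Abbreviation lookup table with the same two columns as the VALUES list:
CREATE TABLE dbo.Abbreviation (Abbr varchar(10) PRIMARY KEY, FullStr varchar(100));

INSERT INTO dbo.Abbreviation (Abbr, FullStr)
VALUES ('MC', 'Military Contracting'), ('NP', 'Nuclear'), ('OG', 'Oil & Gas'),
       ('OS', 'Oil Sands'), ('PM', 'Palm Oil'), ('PE', 'Pesticides');

-- then the join inside the function becomes:
-- LEFT JOIN dbo.Abbreviation v ON v.Abbr = s.value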
On 2016 you would need FOR XML PATH instead of STRING_AGG:
CREATE OR ALTER FUNCTION GetFullStr
( @str varchar(250) )
RETURNS TABLE
AS RETURN
(
SELECT STUFF(
(SELECT ';' + ISNULL(v.FullStr, s.value)
FROM STRING_SPLIT(@str, ';') s
LEFT JOIN (VALUES
('MC', 'Military Contracting'),
('NP', 'Nuclear'),
('OG', 'Oil & Gas'),
('OS', 'Oil Sands'),
('PM', 'Palm Oil'),
('PE', 'Pesticides')
) v(Abbr, FullStr) ON v.Abbr = s.value
FOR XML PATH(''), TYPE
).value('text()[1]', 'varchar(2500)'),
1, 1, '')
);
GO
You use it like this:
SELECT s.result AS FullStr
FROM table
OUTER APPLY GetFullStr(value) AS s;
-- alternatively
SELECT (SELECT * FROM GetFullStr(value)) AS FullStr
FROM table;
You could assign your abbreviation mappings to a TABLE variable and then use that for your REPLACE. You could build this into a function, then pass your string values in.
The test below returns Military:Nuclear Power:XX.
declare @mapping table (abbrev varchar(50), fullname varchar(100))

insert into @mapping (abbrev, fullname)
values ('NP', 'Nuclear Power'),
       ('MC', 'Military')

declare @testString varchar(100), @newString varchar(100)
set @testString = 'MC:NP:XX'
set @newString = @testString

SELECT @newString = REPLACE(@newString, abbrev, fullname) FROM @mapping

select @newString

Replacing individual digits in a CustomerID field SQL Server

I have some customer data that needs to be anonymised. I have customer IDs which consist of numbers.
For example:
CustomerID
3937487
I need to swap each digit with an alternative, which should be enough for my requirement. Based on the following lookup table
The only issue I'm having is when I use the REPLACE function on the field:
REPLACE(REPLACE(CustomerID,2,9),9,6)
which gives me
CustomerID
3637487
It's swapping the digit 2 to a 9, then that same 9 to a 6. It needs to only replace the digits ONCE.
As I'm going to be changing millions of records in one go, using temp tables isn't possible from a performance perspective. Can this be done in one query, recursively?
I can't think of any way of accomplishing this in a single query. If I wanted to do this, I'd create a function, something along the lines of:
CREATE FUNCTION [dbo].[AnonymiseId]
(
    @Id [int]
)
RETURNS [int]
AS
BEGIN
    -- Declare the return variable here
    DECLARE @ResultVar int;
    DECLARE @substitutions nvarchar(10) = '7295380146';
    DECLARE @stringId nvarchar(100) = CONVERT(nvarchar(100), @Id);
    DECLARE @i int = 1;
    DECLARE @substituteStringId nvarchar(100) = '';

    WHILE @i <= LEN(@stringId)
    BEGIN
        DECLARE @char nvarchar(1) = SUBSTRING(@stringId, @i, 1);
        DECLARE @charValue int = CONVERT(int, @char);
        DECLARE @subsChar nvarchar(1) = SUBSTRING(@substitutions, @charValue + 1, 1);
        SET @substituteStringId = CONCAT(@substituteStringId, @subsChar);
        SET @i = @i + 1;
    END

    SET @ResultVar = CONVERT(int, @substituteStringId);

    -- Return the result of the function
    RETURN @ResultVar;
END
GO
and then just use it in the query
SELECT dbo.AnonymiseId(CustomerID) FROM ???
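Side note: if you are on SQL Server 2017 or later, the built-in TRANSLATE function substitutes each character exactly once in a single pass, which sidesteps the cascading-REPLACE problem from the question. A minimal sketch using the same digit mapping as the function above (dbo.Customers is a placeholder table name):
SELECT TRY_CAST(TRANSLATE(CAST(CustomerID AS varchar(20)),
                          '0123456789', '7295380146') AS int) AS AnonymisedId
FROM dbo.Customers;  -- placeholder table; swap in your own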

Foreach update with parameter separated with a delimiter in SQL Server stored procedure

UPDATE SampleTable
SET Schemaname = @SchemaName,
    SchemaCode = @SchemaCode,
    ForeignKeyColumn = @ForeignKeyColumn,
    IsChildSchema = @IsChildSchema,
    ModifiedBy = @ModifiedBy,
    ModifiedDate = @ModifiedDate
WHERE
    DataSchemaID = @DataSchemaId
My @ForeignKeyColumn parameter is
2233^SITE_CLM_NUMBER,2236^SITE_ID_N,
Can anyone help me update ForeignKeyColumn = 'SITE_CLM_NUMBER' where DataSchemaID = 2233, and ForeignKeyColumn = 'SITE_ID_N' where DataSchemaID = 2236?
It's easy to pass multiple parameter values to a query, using a Table Valued Parameter. These are available in all versions of SQL Server since 2008.
First, you need to create a Table type with the fields you want:
CREATE TYPE dbo.KeyValueType AS TABLE
( [Key] int, [Value] nvarchar(50) )
This allows you to specify a parameter of type KeyValueType with the Key/Value combinations you want, e.g. @updatedColumns.
You can join the target table with the TVP to update rows with matching DataSchemaID values:
Create Procedure UpdateSchemas(...., @updatedColumns dbo.KeyValueType READONLY)
As
UPDATE SampleTable
SET
    Schemaname = @SchemaName
    ,SchemaCode = @SchemaCode
    ,ForeignKeyColumn = t.[Value]
    ,IsChildSchema = @IsChildSchema
    ,ModifiedBy = @ModifiedBy
    ,ModifiedDate = @ModifiedDate
FROM SampleTable
INNER JOIN @updatedColumns t
    ON t.[Key] = DataSchemaID
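A minimal sketch of declaring and passing the TVP from T-SQL (the other procedure parameters are elided, as in the definition above):
DECLARE @cols dbo.KeyValueType;

INSERT INTO @cols ([Key], [Value])
VALUES (2233, 'SITE_CLM_NUMBER'),
       (2236, 'SITE_ID_N');

EXEC UpdateSchemas /* ...other parameters..., */ @updatedColumns = @cols;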
You can add a SplitString function, like this one:
How to Split String by Character into Separate Columns in SQL Server
CREATE FUNCTION [dbo].[Split]
(
    @String varchar(max)
    ,@Delimiter char
)
RETURNS @Results table
(
    Ordinal int
    ,StringValue varchar(max)
)
as
begin
    set @String = isnull(@String, '')
    set @Delimiter = isnull(@Delimiter, '')

    declare
        @TempString varchar(max) = @String
        ,@Ordinal int = 0
        ,@CharIndex int = 0

    set @CharIndex = charindex(@Delimiter, @TempString)

    while @CharIndex != 0 begin
        set @Ordinal += 1

        insert @Results values
        (
            @Ordinal
            ,substring(@TempString, 0, @CharIndex)
        )

        set @TempString = substring(@TempString, @CharIndex + 1, len(@TempString) - @CharIndex)
        set @CharIndex = charindex(@Delimiter, @TempString)
    end

    if @TempString != '' begin
        set @Ordinal += 1

        insert @Results values
        (
            @Ordinal
            ,@TempString
        )
    end

    return
end
Now you can easily extract each part of your input parameter.
declare @I int;
declare @TMP nvarchar(255);
declare @Pairs nvarchar(max) = @ForeignKeyColumn; -- keep the raw parameter, since @ForeignKeyColumn is reassigned inside the loop

set @I = 1;
set @TMP = (select StringValue from Split(@Pairs, ',') where Ordinal = 1);

while @TMP is not null
begin
    set @DataSchemaID = (select StringValue from Split(@TMP, '^') where Ordinal = 1);
    set @ForeignKeyColumn = (select StringValue from Split(@TMP, '^') where Ordinal = 2);

    -- Update your table here with @ForeignKeyColumn and @DataSchemaID values

    set @I = @I + 1;
    set @TMP = (select StringValue from Split(@Pairs, ',') where Ordinal = @I);
end
PS: If you are using SQL Server 2016 or later, it already includes a STRING_SPLIT function, so you won't need to add your own: https://learn.microsoft.com/en-us/sql/t-sql/functions/string-split-transact-sql
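For what it's worth, a minimal sketch of splitting those pairs with the built-in STRING_SPLIT (SQL Server 2016+), assuming the same @ForeignKeyColumn format as in the question; the result set can then be joined to SampleTable on DataSchemaID, much like the TVP approach above:
DECLARE @ForeignKeyColumn nvarchar(max) = N'2233^SITE_CLM_NUMBER,2236^SITE_ID_N,';

SELECT DataSchemaID     = LEFT(pair.value, CHARINDEX('^', pair.value) - 1),
       ForeignKeyColumn = SUBSTRING(pair.value, CHARINDEX('^', pair.value) + 1, LEN(pair.value))
FROM STRING_SPLIT(@ForeignKeyColumn, ',') AS pair
WHERE CHARINDEX('^', pair.value) > 0;  -- skips the empty element left by the trailing comma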

Executing a query on csv data stored in an ntext column

Say that the raw text of CSV exports and associated timestamps are stored in a database, where one record is equivalent to one export.
Does anyone have a way to execute a query on the CSV file stored in that field without creating a second connection to the database or exporting the data to a file and then reopening it using the csv text driver?
Assume that:
1) you can't write out a physical file onto the server in the solution
2) you can't make a second connection to the server with OPENROWSET (servers, usernames & passwords change)
3) it must be a 100% SQL solution - it must be able to be run as an SP
4) you only need to work with one record at a time - the solution doesn't need to account for selecting from multiple CSV files stored in the DB.
My solution would be to create a UDF that will parse the CSV data into a table variable. Then, in the SP, retrieve the CSV, pass it to the UDF, then run the query against the table variable.
First, create a UDF to return a table from the CSV value (uses CHAR(13) to determine new lines, may need to be altered to work with your data):
CREATE FUNCTION [dbo].[fnParseCSV] (@InputString NVARCHAR(MAX), @Delimiter NCHAR(1) = ',')
RETURNS @tbl TABLE (ID int, Val NVARCHAR(64)) AS
BEGIN
    DECLARE @singleLine nvarchar(max)
    DECLARE @id int
    DECLARE @val varchar(64)

    WHILE LEN(@InputString) > 0 BEGIN
        IF CHARINDEX(CHAR(13), @InputString) > 0 BEGIN
            SELECT @singleLine = SUBSTRING(@InputString, 1, CHARINDEX(CHAR(13), @InputString) - 1)
            IF CHARINDEX(@Delimiter, @singleLine) > 0 BEGIN
                SELECT @id = CONVERT(int, SUBSTRING(@singleLine, 1, CHARINDEX(@Delimiter, @singleLine) - 1))
                SELECT @val = RIGHT(@singleLine, LEN(@singleLine) - CHARINDEX(@Delimiter, @singleLine))
                INSERT INTO @tbl (ID, Val) VALUES (@id, @val)
            END
            SELECT @InputString = RIGHT(@InputString, LEN(@InputString) - CHARINDEX(CHAR(13), @InputString))
        END
        ELSE
        BEGIN
            IF CHARINDEX(@Delimiter, @InputString) > 0
            BEGIN
                SELECT @id = CONVERT(int, SUBSTRING(@InputString, 1, CHARINDEX(@Delimiter, @InputString) - 1))
                SELECT @val = RIGHT(@InputString, LEN(@InputString) - CHARINDEX(@Delimiter, @InputString))
                INSERT INTO @tbl (ID, Val) VALUES (@id, @val)
            END
            SET @InputString = ''
        END
    END
    RETURN
END
Then run the query against that output:
select * from dbo.fnParseCsv('123,val1' + char(13) + '456,val2' + CHAR(13) + '789,val3', ',')
You could set up a series of user-defined functions which could parse through the column. It would likely be slow and wouldn't be robust at all.
As an example though (with no real error checking, etc. and only minimally tested):
IF OBJECT_ID('dbo.Test_CSV_Search') IS NOT NULL
DROP TABLE dbo.Test_CSV_Search
GO
CREATE TABLE dbo.Test_CSV_Search
(
my_id INT IDENTITY NOT NULL,
txt VARCHAR(MAX) NOT NULL,
CONSTRAINT PK_Test_CSV_Search PRIMARY KEY CLUSTERED (my_id)
)
GO
INSERT INTO dbo.Test_CSV_Search (txt) VALUES ('11, 12, 13, 14,15,16
21,22, 23,24, 25,26
31,22,33,34,35,36')
GO
IF OBJECT_ID('dbo.Get_CSV_Row') IS NOT NULL
DROP FUNCTION dbo.Get_CSV_Row
GO
CREATE FUNCTION dbo.Get_CSV_Row
    (@my_id INT, @col_num SMALLINT, @search_value VARCHAR(100))
RETURNS @results TABLE (row_num INT, row_txt VARCHAR(MAX))
AS
BEGIN
    DECLARE
        @csv_txt VARCHAR(MAX),
        @full_row VARCHAR(MAX),
        @start_pos INT,
        @end_pos INT,
        @col_txt VARCHAR(100),
        @cur_col SMALLINT,
        @line_start INT,
        @line_end INT,
        @row_num INT

    SELECT @csv_txt = txt + CHAR(10) FROM dbo.Test_CSV_Search WHERE my_id = @my_id

    SELECT
        @line_start = 1,
        @cur_col = 1,
        @start_pos = 1,
        @row_num = 1

    WHILE (CHARINDEX(CHAR(10), @csv_txt, @line_start) > 0)
    BEGIN
        SELECT
            @line_end = CHARINDEX(CHAR(10), @csv_txt, @line_start),
            @end_pos = CHARINDEX(',', @csv_txt, @start_pos)

        WHILE (@cur_col < @col_num)
        BEGIN
            SET @start_pos = @end_pos + 1
            SET @end_pos = CHARINDEX(',', @csv_txt, @start_pos)
            SET @cur_col = @cur_col + 1
        END

        IF (RTRIM(LTRIM(SUBSTRING(@csv_txt, @start_pos, @end_pos - @start_pos))) = @search_value)
        BEGIN
            INSERT INTO @results (row_num, row_txt) VALUES (@row_num, RTRIM(LTRIM(SUBSTRING(@csv_txt, @line_start, @line_end - @line_start))))
        END

        SELECT
            @line_start = @line_end + 1,
            @start_pos = @line_end + 1,
            @cur_col = 1,
            @row_num = @row_num + 1
    END

    RETURN
END
GO
SELECT * FROM dbo.Get_CSV_Row(1, 1, '11')

How do you count the number of occurrences of a certain substring in a SQL varchar?

I have a column that has values formatted like a,b,c,d. Is there a way to count the number of commas in that value in T-SQL?
The first way that comes to mind is to do it indirectly by replacing the comma with an empty string and comparing the lengths
Declare @string varchar(1000)
Set @string = 'a,b,c,d'
select len(@string) - len(replace(@string, ',', ''))
Quick extension of cmsjr's answer that works for strings with more than one character.
CREATE FUNCTION dbo.CountOccurrencesOfString
(
    @searchString nvarchar(max),
    @searchTerm nvarchar(max)
)
RETURNS INT
AS
BEGIN
    return (LEN(@searchString) - LEN(REPLACE(@searchString, @searchTerm, ''))) / LEN(@searchTerm)
END
Usage:
SELECT * FROM MyTable
where dbo.CountOccurrencesOfString(MyColumn, 'MyString') = 1
You can compare the length of the string with one where the commas are removed:
len(value) - len(replace(value,',',''))
The answer by @cmsjr has a problem in some instances.
His answer was to do this:
Declare @string varchar(1000)
Set @string = 'a,b,c,d'
select len(@string) - len(replace(@string, ',', ''))
This works in most scenarios, however, try running this:
DECLARE @string VARCHAR(1000)
SET @string = 'a,b,c,d ,'
SELECT LEN(@string) - LEN(REPLACE(@string, ',', ''))
REPLACE removes the final comma, leaving a trailing space, which LEN then ignores (LEN excludes trailing blanks). This results in a returned value of 5 when you'd expect 4. Here is another way to do this which will work even in this special scenario:
DECLARE @string VARCHAR(1000)
SET @string = 'a,b,c,d ,'
SELECT LEN(REPLACE(@string, ',', '**')) - LEN(@string)
Note that you don't need to use asterisks. Any two-character replacement will do. The idea is that you lengthen the string by one character for each instance of the character you're counting, then subtract the length of the original. It's basically the reverse of the original method, and it doesn't come with the strange trimming side-effect.
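A quick way to see the underlying behaviour (LEN ignores trailing spaces, which is what makes the original version miscount here):
SELECT LEN('a,b,c,d ') AS WithTrailingSpace,   -- 7: the trailing space is not counted
       LEN('a,b,c,d')  AS WithoutTrailingSpace; -- 7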
Building on @Andrew's solution, you'll get much better performance using a non-procedural table-valued function and CROSS APPLY:
SET ANSI_NULLS ON
GO
SET QUOTED_IDENTIFIER ON
GO
/* Usage:
SELECT t.[YourColumn], c.StringCount
FROM YourDatabase.dbo.YourTable t
CROSS APPLY dbo.CountOccurrencesOfString('your search string', t.[YourColumn]) c
*/
CREATE FUNCTION [dbo].[CountOccurrencesOfString]
(
    @searchTerm nvarchar(max),
    @searchString nvarchar(max)
)
RETURNS TABLE
AS
RETURN
SELECT (DATALENGTH(@searchString) - DATALENGTH(REPLACE(@searchString, @searchTerm, ''))) / NULLIF(DATALENGTH(@searchTerm), 0) AS StringCount
Declare @string varchar(1000)
DECLARE @SearchString varchar(100)
Set @string = 'as as df df as as as'
SET @SearchString = 'as'
select ((len(@string) - len(replace(@string, @SearchString, ''))) -
        (len(@string) - len(replace(@string, @SearchString, ''))) % 2) / len(@SearchString)
The accepted answer is correct;
extending it to use two or more characters in the substring:
Declare @string varchar(1000)
Declare @substring varchar(10)
Set @string = 'aa,bb,cc,dd'
Set @substring = 'aa'
select (len(@string) - len(replace(@string, @substring, ''))) / len(@substring)
I think Darrel Lee has a pretty good answer. Replace CHARINDEX() with PATINDEX(), and you can do some weak regex searching along a string, too...
Like, say you use this for @pattern:
set @pattern = '%[-.|!,' + char(9) + ']%'
Why would you maybe want to do something crazy like this?
Say you're loading delimited text strings into a staging table, where the field holding the data is something like a varchar(8000) or nvarchar(max)...
Sometimes it's easier/faster to do ELT (Extract-Load-Transform) with data rather than ETL (Extract-Transform-Load), and one way to do this is to load the delimited records as-is into a staging table, especially if you want a simpler way to see the exceptional records rather than deal with them as part of an SSIS package...but that's a holy war for a different thread.
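A minimal sketch of that idea (the sample string and counter variable are my own assumptions; the pattern is the one above). Note that PATINDEX, unlike CHARINDEX, has no start-position argument, so the loop trims off the part already scanned:
DECLARE @s nvarchar(max) = N'a-b.c|d!e,f' + CHAR(9) + N'g';
DECLARE @pattern nvarchar(100) = N'%[-.|!,' + CHAR(9) + N']%';
DECLARE @hits int = 0,
        @pos int = PATINDEX(@pattern, @s);

WHILE @pos > 0
BEGIN
    SET @hits = @hits + 1;
    SET @s = SUBSTRING(@s, @pos + 1, LEN(@s));  -- drop everything up to and including the match
    SET @pos = PATINDEX(@pattern, @s);
END

SELECT @hits AS DelimiterHits;  -- 6 for the sample string above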
If we know LEN has a limitation with trailing spaces, why can't we replace the spaces first?
Then we know there is no space to confuse LEN.
len(replace(@string, ' ', '-')) - len(replace(replace(@string, ' ', '-'), ',', ''))
Use this code; it works perfectly.
I have created a SQL function that accepts two parameters. The first parameter is the long string that we want to search in; it can accept a string length of up to 1500 characters (of course you can extend it, or even change it to the text data type).
The second parameter is the substring whose occurrences we want to count (its length is up to 200 characters; of course you can change it to what you need). The output is an integer representing the frequency count. Enjoy it.
CREATE FUNCTION [dbo].[GetSubstringCount]
(
    @InputString nvarchar(1500),
    @SubString NVARCHAR(200)
)
RETURNS int
AS
BEGIN
    declare @K int, @StrLen int, @Count int, @SubStrLen int

    set @SubStrLen = (select len(@SubString))
    set @Count = 0
    Set @K = 1
    set @StrLen = (select len(@InputString))

    While @K <= @StrLen
    Begin
        if ((select substring(@InputString, @K, @SubStrLen)) = @SubString)
        begin
            if ((select CHARINDEX(@SubString, @InputString)) > 0)
            begin
                set @Count = @Count + 1
            end
        end
        Set @K = @K + 1
    end

    return @Count
end
In SQL 2017 or higher, you can use this:
declare @hits int = 0
select @hits = count(*) from STRING_SPLIT('F609,4DFA,8499', ',');
select @hits  -- 3 delimited items; subtract 1 if you want the number of commas
Improved version based on top answer and other answers:
Wrapping the string with delimiters ensures that LEN works properly. Making the replace character string one character longer than the match string removes the need for division.
CREATE FUNCTION dbo.MatchCount(@value nvarchar(max), @match nvarchar(max))
RETURNS int
AS
BEGIN
    RETURN LEN('[' + REPLACE(@value, @match, REPLICATE('*', LEN('[' + @match + ']') - 1)) + ']') - LEN('[' + @value + ']')
END
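For example, re-using the tricky trailing-space case from earlier in the thread:
SELECT dbo.MatchCount('a,b,c,d ,', ',') AS CommaCount;  -- returns 4, even with the trailing ' ,'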
DECLARE @records varchar(400)
SELECT @records = 'a,b,c,d'
select LEN(@records) as 'Before removing Commas', LEN(@records) - LEN(REPLACE(@records, ',', '')) as 'After Removing Commas'
The following should do the trick for both single character and multiple character searches:
CREATE FUNCTION dbo.CountOccurrences
(
    @SearchString VARCHAR(1000),
    @SearchFor VARCHAR(1000)
)
RETURNS TABLE
AS
RETURN (
    SELECT COUNT(*) AS Occurrences
    FROM (
        SELECT ROW_NUMBER() OVER (ORDER BY O.object_id) AS n
        FROM sys.objects AS O
    ) AS N
    JOIN (
        VALUES (@SearchString)
    ) AS S (SearchString)
        ON SUBSTRING(S.SearchString, N.n, LEN(@SearchFor)) = @SearchFor
);
GO
---------------------------------------------------------------------------------------
-- Test the function for single and multiple character searches
---------------------------------------------------------------------------------------
DECLARE @SearchForComma VARCHAR(10) = ',',
        @SearchForCharacters VARCHAR(10) = 'de';

DECLARE @TestTable TABLE
(
    TestData VARCHAR(30) NOT NULL
);

INSERT INTO @TestTable
(
    TestData
)
VALUES
('a,b,c,de,de ,d e'),
('abc,de,hijk,,'),
(',,a,b,cde,,');

SELECT TT.TestData,
       CO.Occurrences AS CommaOccurrences,
       CO2.Occurrences AS CharacterOccurrences
FROM @TestTable AS TT
OUTER APPLY dbo.CountOccurrences(TT.TestData, @SearchForComma) AS CO
OUTER APPLY dbo.CountOccurrences(TT.TestData, @SearchForCharacters) AS CO2;
The function can be simplified a bit using a table of numbers (dbo.Nums):
RETURN (
    SELECT COUNT(*) AS Occurrences
    FROM dbo.Nums AS N
    JOIN (
        VALUES (@SearchString)
    ) AS S (SearchString)
        ON SUBSTRING(S.SearchString, N.n, LEN(@SearchFor)) = @SearchFor
);
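If you don't already have a numbers table, a minimal sketch for creating one (the name dbo.Nums and column n match the query above; the 10,000-row size is an arbitrary assumption):
CREATE TABLE dbo.Nums (n int NOT NULL PRIMARY KEY);

INSERT INTO dbo.Nums (n)
SELECT TOP (10000) ROW_NUMBER() OVER (ORDER BY (SELECT NULL))
FROM sys.all_objects AS a CROSS JOIN sys.all_objects AS b;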
I finally wrote this function, which should cover all the possible situations, by adding a char prefix and suffix to the input. This char is chosen to be different from any of the chars contained in the search parameter, so it can't affect the result.
CREATE FUNCTION [dbo].[CountOccurrency]
(
    @Input nvarchar(max),
    @Search nvarchar(max)
)
RETURNS int AS
BEGIN
    declare @SearchLength as int = len('-' + @Search + '-') - 2;
    declare @containerIndex as int = 255;
    declare @container as char(1) = char(@containerIndex);

    -- find a character that does not appear in the search string
    WHILE ((CHARINDEX(@container, @Search) > 0) and (@containerIndex > 0))
    BEGIN
        set @containerIndex = @containerIndex - 1;
        set @container = char(@containerIndex);
    END;

    set @Input = @container + @Input + @container
    RETURN (len(@Input) - len(replace(@Input, @Search, ''))) / @SearchLength
END
usage
select dbo.CountOccurrency('a,b,c,d ,', ',')
Declare @MainStr nvarchar(200)
Declare @SubStr nvarchar(10)
Set @MainStr = 'nikhildfdfdfuzxsznikhilweszxnikhil'
Set @SubStr = 'nikhil'
Select (Len(@MainStr) - Len(REPLACE(@MainStr, @SubStr, ''))) / Len(@SubStr)
This T-SQL code finds and prints all occurrences of pattern @p in sentence @s. You can do any processing on the sentence afterward.
declare @old_hit int = 0
declare @hit int = 0
declare @i int = 0
declare @s varchar(max) = 'alibcalirezaalivisualization'
declare @p varchar(max) = 'ali'

while @i < len(@s)
begin
    set @hit = charindex(@p, @s, @i)
    if @hit > @old_hit
    begin
        set @old_hit = @hit
        set @i = @hit + 1
        print @hit
    end
    else
        break
end
the result is:
1
6
13
20
I ended up using a recursive CTE for this:
CREATE TABLE #test (
[id] int,
[field] nvarchar(500)
)
INSERT INTO #test ([id], [field])
VALUES (1, 'this is a test string http://url, and https://google.com'),
(2, 'another string, hello world http://example.com'),
(3, 'a string with no url')
SELECT *
FROM #test
;WITH URL_count_cte ([id], [url_index], [field])
AS
(
SELECT [id], CHARINDEX('http', [field], 0)+1 AS [url_index], [field]
FROM #test AS [t]
WHERE CHARINDEX('http', [field], 0) != 0
UNION ALL
SELECT [id], CHARINDEX('http', [field], [url_index])+1 AS [url_index], [field]
FROM URL_count_cte
WHERE CHARINDEX('http', [field], [url_index]) > 0
)
-- total urls
SELECT COUNT(1)
FROM URL_count_cte
-- urls per row
SELECT [id], COUNT(1) AS [url_count]
FROM URL_count_cte
GROUP BY [id]
Using this function, you can get the number of repetitions of words in a text.
/****** Object: UserDefinedFunction [dbo].[fn_getCountKeywords] Script Date: 22/11/2021 17:52:00 ******/
DROP FUNCTION IF EXISTS [dbo].[fn_getCountKeywords]
GO
/****** Object: UserDefinedFunction [dbo].[fn_getCountKeywords] Script Date: 22/11/2021 17:52:00 ******/
SET ANSI_NULLS OFF
GO
SET QUOTED_IDENTIFIER ON
GO
-- =============================================
-- Author:      m_Khezrian
-- Create date: 2021/11/22-17:52
-- Description: Return Count Keywords In Input Text
-- =============================================
Create OR Alter Function [dbo].[fn_getCountKeywords]
    (@Text nvarchar(max)
    ,@Keywords nvarchar(max)
    )
RETURNS @Result TABLE
(
    [ID] int Not Null IDENTITY PRIMARY KEY
    ,[Keyword] nvarchar(max) Not Null
    ,[Cnt] int Not Null Default(0)
)
/*With ENCRYPTION*/ As
Begin
    Declare @Key nvarchar(max);
    Declare @Cnt int;
    Declare @I int;
    Set @I = 0;
    --Set @Text = QUOTENAME(@Text);

    Insert Into @Result
        ([Keyword])
    Select Trim([value])
    From String_Split(@Keywords, N',')
    Group By [value]
    Order By Len([value]) Desc;

    Declare CntKey_Cursor Insensitive Cursor For
        Select [Keyword]
        From @Result
        Order By [ID];

    Open CntKey_Cursor;
    Fetch Next From CntKey_Cursor Into @Key;

    While (@@Fetch_Status = 0) Begin
        Set @Cnt = 0;
        While (PatIndex(N'%' + @Key + '%', @Text) > 0) Begin
            Set @Cnt += 1;
            Set @I += 1;
            Set @Text = Stuff(@Text, PatIndex(N'%' + @Key + '%', @Text), len(@Key), N'{' + Convert(nvarchar, @I) + '}');
            --Set @Text = Replace(@Text, @Key, N'{' + Convert(nvarchar, @I) + '}');
        End--While

        Update @Result
        Set [Cnt] = @Cnt
        Where ([Keyword] = @Key);

        Fetch Next From CntKey_Cursor Into @Key;
    End--While

    Close CntKey_Cursor;
    Deallocate CntKey_Cursor;

    Return
End
GO

--Test
Select *
From dbo.fn_getCountKeywords(
    N'📣 MARKET IMPACT Euro area Euro CPIarea annual inflation up to 3.0% MaCPIRKET forex'
    ,N'CPI ,core,MaRKET , Euro area'
)
Go
Reference https://learn.microsoft.com/en-us/sql/t-sql/functions/string-split-transact-sql?view=sql-server-ver15
Example:
SELECT s.*
,s.[Number1] - (SELECT COUNT(Value)
FROM string_split(s.[StringColumn],',')
WHERE RTRIM(VALUE) <> '')
FROM TableName AS s
Applies to: SQL Server 2016 (13.x) and later
You can use the following stored procedure to fetch the comma-separated values.
IF EXISTS (SELECT * FROM sys.objects
           WHERE object_id = OBJECT_ID(N'[dbo].[sp_parsedata]') AND type in (N'P', N'PC'))
    DROP PROCEDURE [dbo].[sp_parsedata]
GO
create procedure sp_parsedata
    (@cid integer, @st varchar(1000))
as
    declare @coid integer
    declare @c integer
    declare @c1 integer

    select @c1 = len(@st) - len(replace(@st, ',', ''))
    set @c = 0

    delete from table1 where complainid = @cid;

    while (@c <= @c1)
    begin
        if (@c < @c1)
        begin
            select @coid = cast(replace(left(@st, CHARINDEX(',', @st, 1)), ',', '') as integer)
            select @st = SUBSTRING(@st, CHARINDEX(',', @st, 1) + 1, LEN(@st))
        end
        else
        begin
            select @coid = cast(@st as integer)
        end
        insert into table1 (complainid, courtid) values (@cid, @coid)
        set @c = @c + 1
    end
The Replace/Len test is cute, but probably very inefficient (especially in terms of memory).
A simple function with a loop will do the job.
CREATE FUNCTION [dbo].[fn_Occurences]
(
    @pattern varchar(255),
    @expression varchar(max)
)
RETURNS int
AS
BEGIN
    DECLARE @Result int = 0;
    DECLARE @index BigInt = 0
    DECLARE @patLen int = len(@pattern)

    SET @index = CHARINDEX(@pattern, @expression, @index)
    While @index > 0
    BEGIN
        SET @Result = @Result + 1;
        SET @index = CHARINDEX(@pattern, @expression, @index + @patLen)
    END
    RETURN @Result
END
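For example, a quick usage check (my own sample string):
SELECT dbo.fn_Occurences(',', 'a,b,c,d') AS Occurrences;  -- returns 3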
Perhaps you should not store data that way. It is bad practice to ever store a comma-delimited list in a field. It is very inefficient for querying. This should be a related table.
