Querying JSON whose root is an array of objects in SQL Server

I have a column in a SQL table that holds a JSON value like the one below:
[
{"address":{"value":"A9"},
"value":{"type":11,"value":"John"}},
{"address":{"value":"A10"},
"value":{"type":11,"value":"Doe"}}]
The MSDN examples for JSON_VALUE and JSON_QUERY assume a JSON object at the root. How can I query the above to return the rows that have "address" equal to A9 and "value" equal to John? I'm using SQL Azure.

Something like this:
declare @json nvarchar(max) = '[
{"address":{"value":"A9"},
"value":{"type":11,"value":"John"}},
{"address":{"value":"A10"},
"value":{"type":11,"value":"Doe"}}]'

select a.*
from openjson(@json) r
cross apply openjson(r.value)
with (
    address nvarchar(200) '$.address.value',
    name    nvarchar(200) '$.value.value'
) a
where a.address = N'A9'
  and a.name = N'John'
outputs:
address name
------- -----
A9 John
(1 row affected)
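Since the original question involves a table column rather than a variable, the same pattern can be applied with CROSS APPLY against that column. A minimal sketch, assuming a hypothetical table dbo.MyTable with an id column and the JSON stored in a column named JsonColumn:
select t.id, j.address, j.name
from dbo.MyTable t                 -- hypothetical table and column names
cross apply openjson(t.JsonColumn)
with (
    address nvarchar(200) '$.address.value',
    name    nvarchar(200) '$.value.value'
) j
where j.address = N'A9'
  and j.name = N'John'
Rows whose JSON contains no matching element are simply filtered out by the WHERE clause.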

This may not be entirely relevant to the OP's post, as the usage is different; however, it is possible to retrieve arbitrary items from a root-level unnamed JSON array, e.g.:
declare @json nvarchar(max) = '[
{"address":
{"value":"A9"},
"value":
{"type":11,"value":"John"}
},
{"address":
{"value":"A10"},
"value":
{"type":11,"value":"Doe"}
}
]'
select
    JSON_VALUE(
        JSON_QUERY(@json, '$[0]'),
        '$.address.value') as 'First address.value',
    JSON_VALUE(
        JSON_QUERY(@json, '$[1]'),
        '$.address.value') as 'Second address.value'
Output:
First address.value Second address.value
A9 A10
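As a side note, the nested JSON_QUERY should not be strictly necessary here: the T-SQL JSON path syntax accepts an array index directly, so the following sketch (against the same @json variable) is expected to return the same result:
select
    JSON_VALUE(@json, '$[0].address.value') as 'First address.value',
    JSON_VALUE(@json, '$[1].address.value') as 'Second address.value'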

Related

Parsing a JSON based on a condition in SQL Server

This is the JSON definition that will be provided (just a short example), along with the code I have implemented to get the expected result:
declare @json nvarchar(max)
set @json = '{
"testJson":{
"testID":"Test1",
"Value":[
{
"Value1":"",
"Value2":"",
"Value3":"",
"Type": "1A"
},
{
"Value1":"123",
"Value2":"456",
"Value3":"Automatic",
"Type": "2A"
},
{
"Value1":"789",
"Value2":"159",
"Value3":"Manual",
"Value4":"Success" ,
"Type": "3A"
}
]
}
}'
select
    'ValueFields' as groupDef,
    -- b.[key],
    -- c.[key],
    STRING_AGG(c.value, ' | ') as val
from
    openjson(@json, '$.testJson.Value') as b
cross apply
    openjson(b.value) as c
where
    b.[key] not in (select b.[key]
                    from openjson(@json, '$.testJson.Value') as b
                    where b.value like ('%1A%'))
As you can see, each element in the array can have a different number of attributes (Value1, ..., Value4, ...), and I only need to consider the elements whose Type attribute is not equal to "1A". The query gives me the requested result; however, I am wondering how I can improve its performance, given that I'm using the LIKE operator in the sub-select and the original JSON document could contain a considerable number of array elements.
…
select b.Value --,c.value
from
    openjson(@json, '$.testJson.Value')
    with
    (
        Value nvarchar(max) '$' as json,
        Type  varchar(100)  '$.Type'
    ) as b
--cross apply openjson(b.Value) as c
where b.Type <> '1A'
SELECT
    'ValueFields' as groupDef,
    J.value as val
FROM
    OPENJSON(@json, '$.testJson.Value') J
WHERE
    JSON_VALUE([value], '$.Type') <> '1A'
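If the goal is to reproduce the OP's aggregated output without the LIKE sub-select, the first approach can be combined with the original STRING_AGG. A sketch, assuming the same @json variable and the same "all values, pipe-separated" output as the OP's query:
select
    'ValueFields' as groupDef,
    STRING_AGG(c.value, ' | ') as val
from
    openjson(@json, '$.testJson.Value')
    with
    (
        Value nvarchar(max) '$' as json,
        Type  varchar(100)  '$.Type'
    ) as b
cross apply openjson(b.Value) as c     -- expands every attribute of the kept elements
where b.Type <> '1A'
Note that the filter is now a plain equality check on the projected Type column, so the JSON array is shredded only once.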

How to merge 3 columns and 10 rows in one single column as json data

I have 10 rows and 3 columns; out of the 3 columns, 2 have data in JSON format.
I am trying to merge the data of all rows into one single column holding JSON data that is valid:
if row 1 has JSON, it should join that JSON with the column name, then the next row, and so on.
I have been able to do so, but my data is repeated like this:
[ {
"role":"poweruser",
"account":"active",
"Name" : "Rose"
},
{
"role":"Admin",
"account":"inactive",
"Name" : "Rose"
}]
The output I am trying to get is like this:
[{
"name":"Rose" {
"role":"poweruser",
"account":"active"
},
{
"role":"Admin",
"account":"inactive"
}
}]
and my table structure is like this
select role,account,(another case statement) as name
from table1 inner join table2 on table1.id = table2.id
but I want the JSON not to repeat the name: one name, with the other two columns as nested output.
I literally have no clue what I should be doing; any suggestion is welcome.
Microsoft SQL Server has some integrated support for handling JSON; you can check out Microsoft's documentation on FOR JSON and OPENJSON.
Working with the "table structure" query and the little example data you provided, I get this:
-- create sample data
declare @user table
(
    userid int,
    username nvarchar(20)
);
insert into @user (userid, username) values
(1, 'Rose');

declare @userrole table
(
    userid int,
    rolename nvarchar(20),
    rolestatus nvarchar(20)
);
insert into @userrole (userid, rolename, rolestatus) values
(1, 'poweruser', 'active'),
(1, 'admin', 'inactive');
A very basic query where SQL Server does all the work:
select u.username as 'Name',
       roles.rolename as 'Role',
       roles.rolestatus as 'Account'
from @user u
join @userrole Roles
  on Roles.userid = u.userid
for json auto; -- specify that we want JSON output with AUTOmatic structure
This gives:
[{"Name":"Rose","roles":[{"Role":"poweruser","Account":"active"},{"Role":"admin","Account":"inactive"}]}]
Which can be formatted as:
[
{
"Name":"Rose",
"Roles":
[
{
"Role":"poweruser",
"Account":"active"
},
{
"Role":"admin",
"Account":"inactive"
}
]
}
]
Please expand your question (with more example data) if this is not what you are after.
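If more control over the nesting is needed than FOR JSON AUTO provides, a correlated subquery with FOR JSON PATH can shape the inner array explicitly. A sketch against the same @user / @userrole sample data (the nested property name Roles is my choice here, not something mandated by SQL Server):
select u.username as 'Name',
       (select r.rolename   as 'Role',
               r.rolestatus as 'Account'
        from @userrole r
        where r.userid = u.userid
        for json path) as 'Roles'   -- an inner FOR JSON result is embedded as a nested array
from @user u
for json path;
This should produce essentially the same shape as the FOR JSON AUTO result above, but the nesting no longer depends on join order and table aliases.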

How can I read below Json column stored in SQL Server using openjson?

Declare @ResponseText nvarchar(4000)
set @ResponseText = '{
"submissions": [
{
"xml_id":"id_x5d94851726b470.68571510",
"fields": [
{"fieldvalue":"customerEmail#xyzdomain.com","fieldid":"57282490"},
{"fieldvalue":"123","fieldid":"57282423"},
{"fieldvalue":"12345-678900","fieldid":"57282500"},
{"fieldvalue":"Test Message here ","fieldid":"57282564"}
]
}
]
}'
SELECT *
FROM OPENJSON (@ResponseText, '$.submissions') WITH (
    ID NVARCHAR(100) '$.xml_id',
    $.fields.field NVARCHAR(100) ...
)
and so on for the rest of the record? I get NULL for the remaining fields under the fields array.
You can try it like this:
Declare @ResponseText nvarchar(4000)
set @ResponseText = '{
"submissions": [
{
"xml_id":"id_x5d94851726b470.68571510",
"fields": [
{"fieldvalue":"customerEmail#xyzdomain.com","fieldid":"57282490"},
{"fieldvalue":"123","fieldid":"57282423"},
{"fieldvalue":"12345-678900","fieldid":"57282500"},
{"fieldvalue":"Test Message here ","fieldid":"57282564"}
]
}
]
}'
--The query
SELECT A.ID
      ,B.*
FROM OPENJSON (@ResponseText, '$.submissions')
     WITH (ID NVARCHAR(100) '$.xml_id'
          ,fields NVARCHAR(MAX) AS JSON) A
OUTER APPLY OPENJSON(A.fields)
     WITH (fieldvalue NVARCHAR(150)
          ,fieldid BIGINT) B;
The result
ID                           fieldvalue                   fieldid
id_x5d94851726b470.68571510  customerEmail@xyzdomain.com  57282490
id_x5d94851726b470.68571510  123                          57282423
id_x5d94851726b470.68571510  12345-678900                 57282500
id_x5d94851726b470.68571510  Test Message here            57282564
The idea in short:
You started correctly by using the WITH clause to read the xml_id. The property fields is nothing other than another element on the same level, but we return it AS JSON. This allows us to add another APPLY OPENJSON(), pass in the fragment we got from $.fields, and use another WITH clause to get the two properties of the objects within the array.
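Since the question title mentions a column stored in SQL Server rather than a variable, the same chain works against a table. A minimal sketch, assuming a hypothetical table dbo.Responses with the JSON in an NVARCHAR(MAX) column named ResponseText:
SELECT A.ID, B.fieldvalue, B.fieldid
FROM dbo.Responses R                                   -- hypothetical table and column names
CROSS APPLY OPENJSON (R.ResponseText, '$.submissions')
     WITH (ID NVARCHAR(100) '$.xml_id'
          ,fields NVARCHAR(MAX) AS JSON) A
OUTER APPLY OPENJSON(A.fields)
     WITH (fieldvalue NVARCHAR(150)
          ,fieldid BIGINT) B;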

OPENJSON unable to parse Chinese characters

I'm trying to convert my JSON data into a table format in SQL Server.
The following is my JSON data:
[{
"emp_no": "001",
"emp_designation":"Data Admin",
"emp_name": "Peter",
"emp_name2": "彼特"
},
{
"emp_no": "002",
"emp_designation":"Software Engineer",
"emp_name": "Lee",
"emp_name2": "李"
}]
What I have tried is:
DECLARE @JSON NVARCHAR(MAX)
set @JSON='[{
"emp_no": "001",
"emp_designation":"Data Admin",
"emp_name": "Peter",
"emp_name2": "彼特"},
{
"emp_no": "002",
"emp_designation":"Software Engineer",
"emp_name": "Lee",
"emp_name2": "李"
}]'
--Method 1
SELECT * INTO #emp_temp FROM OPENJSON(@JSON)
WITH (emp_no varchar(20),
emp_designation varchar(50),
emp_name NVARCHAR(100),
emp_name2 NVARCHAR(100))
SELECT * FROM #Emp_temp
DROP TABLE #Emp_temp
--Method 2
SELECT
JSON_Value (EMP.VALUE, '$.emp_no') as emp_no,
JSON_Value (EMP.VALUE, '$.emp_designation') as emp_designation,
JSON_Value (EMP.VALUE, '$.emp_name') as emp_name,
JSON_Value (EMP.VALUE, '$.emp_name2') as emp_name2
INTO #Emp_temp2
FROM OPENJSON (@JSON) as EMP
SELECT * FROM #Emp_temp2
DROP TABLE #Emp_temp2
However, both temp tables return the following result, with the Chinese characters shown as "?":
Temp table select result
emp_no | emp_designation   | emp_name | emp_name2
001    | Data Admin        | Peter    | ??
002    | Software Engineer | Lee      | ?
Any idea how to preserve the original Chinese characters after parsing the data into the temp table?
Thanks.
Edit:
I know it can work by putting an extra N in front of the JSON literal:
set @JSON=N'[
{ "emp_no": "001...
.....
But the JSON is actually a parameter of a stored procedure, so I cannot simply add an N like set @JSON = 'N' + @JSON;
that would corrupt the JSON data and cause an error.
ALTER PROCEDURE [dbo].[SP_StoreEmpInfo]
    @JSON NVARCHAR(max)
    @JSON = 'N' + @JSON
    /* Will cause an invalid JSON format error */
    SELECT
        JSON_Value (EMP.VALUE, '$.emp_no') as.....
Try adding N before your string literal in the SET to indicate that it contains Unicode characters, like this:
DECLARE @JSON NVARCHAR(MAX)
set @JSON=N'[{
"emp_no": "001",
"emp_designation":"Data Admin",
"emp_name": "Peter",
"emp_name2": "彼特"},
{
"emp_no": "002",
"emp_designation":"Software Engineer",
"emp_name": "Lee",
"emp_name2": "李"
}]'
This question may help as background:
What does N' stand for in a SQL script? (the prefix used before character literals in an insert script)
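Regarding the stored-procedure case mentioned in the edit: if the parameter is declared as NVARCHAR(MAX), the Chinese characters are already preserved inside the procedure; the loss to "?" happens when the caller builds or passes the JSON as a non-Unicode (varchar) string, so no 'N' + @JSON trick is needed. A sketch under that assumption (SP_StoreEmpInfo is the procedure name from the question; the calling code is illustrative only):
-- Inside the procedure the parameter is already NVARCHAR(MAX); no N prefix is needed here.
ALTER PROCEDURE [dbo].[SP_StoreEmpInfo]
    @JSON NVARCHAR(MAX)
AS
BEGIN
    SELECT emp_no, emp_designation, emp_name, emp_name2
    FROM OPENJSON(@JSON)
    WITH (emp_no varchar(20),
          emp_designation varchar(50),
          emp_name NVARCHAR(100),
          emp_name2 NVARCHAR(100));
END;
GO
-- The caller must pass the JSON as Unicode, e.g. an N'...' literal or an nvarchar variable:
EXEC [dbo].[SP_StoreEmpInfo] @JSON = N'[{"emp_no":"001","emp_name2":"彼特"}]';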

How to reverse the OPENJSON() function in SQL server?

The SQL Server OPENJSON() function can take a JSON document and convert it into a SQL table of key/value pairs, e.g.:
DECLARE @json NVARCHAR(MAX);
SET @json = '{
"key1": "val1",
"key2": "val2",
"key3": "val3"
}';
SELECT * FROM OPENJSON(@json, '$')
Result:
key value type
--------------------
key1 val1 1
key2 val2 1
key3 val3 1
What is the best general-purpose method for converting this key/value table back into a json array?
Why? If we can do this with a single function, it opens up a range of JSON modifications that are otherwise not possible in SQL Server, e.g.:
Re-order elements
Rename properties (key names)
Split json array into smaller arrays / combine json arrays
Compare json arrays (which key/value elements exists in both jsons? What are the differences?)
Clean json (remove syntactical whitespace/newlines to compress it)
Now, I could start with a simple CONCAT('"',[key],'":"',[value]) and then do a comma-list aggregation. But if I want code that is both easy to apply across my codebase and works for all data types, this is not a simple task. Looking at the JSON format definition, the conversion should take into account a) the six different data types, b) escape characters, c) SQL NULL / JSON null handling, and d) whatever I may have overlooked. I.e., at minimum, the example below should be supported:
DECLARE @test_json NVARCHAR(MAX);
SET @test_json = '{
"myNull": null,
"myString": "start_\\_\"_\/_\b_\f_\n_\r_\t_\u2600_stop",
"myNumber": 3.14,
"myBool": true,
"myArray": ["1", 2],
"myObject": {"key":"val"}
}'
SELECT * FROM OPENJSON(@test_json, '$')
Result:
key value type
------------------------------------------------
myNull NULL 0
myString start_\_"_/___ _ _ _☀_stop 1
myNumber 3.14 2
myBool true 3
myArray ["1", 2] 4
myObject {"key":"val"} 5
For the string-aggregation part, we have long suffered the 'FOR XML PATH'-pain. Luckily we have STRING_AGG() on SQL2017/AzureDB, and I will accept a solution depending on STRING_AGG().
You can do this with the FOR JSON clause:
select * from table for json auto
My result:
[{"LogId":1,"DtLog":"2017-09-30T21:04:45.6700000","FileId":1},
{"LogId":2,"DtLog":"2017-09-30T21:08:35.8633333","FileId":3},{"LogId":3,"DtLog":"2017-09-30T21:08:36.4433333","FileId":2},{"LogId":4,"DtLog":"2017-09-30T21:08:36.9866667","FileId":12},{"LogId":5,"DtLog":"2017-09-30T21:15:22.5366667","FileId":13},{"LogId":6,"DtLog":"2017-09-30T21:38:43.7866667","FileId":17}]
I use STRING_AGG:
declare @json table ( Name varchar(80), Value varchar(max) )

insert into @json
select [Key], Value from openjson(@attributes)

insert into @json values ( 'name', @name )
insert into @json values ( 'title', @title )
insert into @json values ( 'description', @description )

set @attributes = '{' + (select STRING_AGG( '"' + Name + '":"' +
    REPLACE(Value, '"', '\"') + '"', ',') from @json) + '}'
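Neither answer covers all of the data types and escaping listed in the question. A more general sketch, assuming STRING_AGG and STRING_ESCAPE are available (SQL Server 2016/2017+ or Azure SQL DB) and relying on the OPENJSON type codes (0 = null, 1 = string, 2 = number, 3 = bool, 4 = array, 5 = object):
-- Rebuild a JSON object from the key/value/type rows produced by OPENJSON.
-- Note: STRING_AGG does not guarantee element order without WITHIN GROUP (ORDER BY ...).
SELECT '{' + STRING_AGG(
           '"' + STRING_ESCAPE([key], 'json') + '":' +
           CASE [type]
               WHEN 0 THEN 'null'                                      -- SQL NULL -> JSON null
               WHEN 1 THEN '"' + STRING_ESCAPE([value], 'json') + '"'  -- string: re-escape and re-quote
               ELSE [value]                                            -- number, bool, array, object pass through as-is
           END, ',') + '}' AS rebuilt_json
FROM OPENJSON(@test_json, '$');
STRING_ESCAPE re-applies the escaping that OPENJSON removed from string values, while numbers, booleans and nested arrays/objects are emitted verbatim.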
