I have created a stored procedure in Snowflake to dynamically pivot a table and create a view from JSON data. The query creates a view with 370 columns, as I wanted, but querying the view is far too slow: fetching even a single row takes up to 20 minutes.
create or replace procedure create_pivot_view()
returns string
language javascript
execute as caller as
$$
var cols_query = ` select
concat('\\'',
listagg(
distinct f.value:signalName::STRING,'\\',\\''),'\\'') as signal_name_list
FROM
(select *
FROM
PM_POWER),table(flatten(input=>c1:data:baseData:signals, mode=>'ARRAY')) as f`;
var stmt1 = snowflake.createStatement({sqlText: cols_query});
var results1 = stmt1.execute();
results1.next();
var col_list = results1.getColumnValue(1);
pivot_query = `
create or replace view AWSS3_PM.PUBLIC.PM_POWER_CN8000_V2 as
select * from (
select figures,stats,SignalName,id,Latitude,Longitude,Altitude
from (
select
c1:id::STRING as id,
c1:data:baseData:unitID::varchar as UnitID,
c1:data:baseData:latitude::varchar as Latitude,
c1:data:baseData:longitude::varchar as Longitude,
c1:data:baseData:altitude::varchar as Altitude,
c1:timestamp::varchar as TimeStamp,
f.value:"dataValue"::varchar as SignalDataValue,
f.value:"dataValueValid"::varchar as SignalDataValueValid,
f.value:"signalID"::varchar as SignalID,
f.value:"units"::varchar as SignalUnits,
f.value:"dataValueEnum"::varchar as SignalDataEnum,
f.value:"signalName"::varchar as SignalName
from
(
select *
FROM
PM_POWER), table(flatten(input=>c1:data:baseData:signals, mode=>'ARRAY')) as f
) flt
unpivot (figures for stats in(UnitID,SignalDataValue, SignalDataValueValid, SignalID, SignalUnits, SignalDataEnum, TimeStamp))
) up
pivot (min(up.figures) for up.SignalName in (${col_list}
))
`;
var stmt2 = snowflake.createStatement({sqlText: pivot_query});
stmt2.execute();
return pivot_query;
$$;
call create_pivot_view();
Any suggestions to speed up this approach, or an alternative that would improve the query's performance, would be greatly appreciated.
A single row of JSON data looks like the below:
{
"data": {
"baseData": {
"altitude": 0,
"altitudeValid": "false",
"customerID": "CN",
"kind": "Power",
"latitude": 0,
"latitudeValid": "false",
"longitude": 0,
"longitudeValid": "false",
"name": "Predictive Maintenance Data.Power",
"signals": [
{
"dataValue": 3112900,
"dataValueValid": "true",
"signalID": 3424,
"signalName": "mainGeneratorPower",
"units": "kW"
},
{
"dataValue": 1.4035000801086426,
"dataValueValid": "true",
"signalID": 380,
"signalName": "DB_HandlePos",
"units": "V"
},
{
"dataValue": 2,
"dataValueEnum": "Reverse",
"dataValueValid": "true",
"signalID": 813,
"signalName": "reverserPos",
"units": "none"
},
{
"dataValue": 291400,
"dataValueValid": "true",
"signalID": 586,
"signalName": "tractiveEffort",
"units": "none"
},
{
"dataValue": 8,
"dataValueEnum": "T 8",
"dataValueValid": "true",
"signalID": 899,
"signalName": "throttlePos",
"units": "none"
},
{
"dataValue": 47.05950164794922,
"dataValueValid": "true",
"signalID": 1805,
"signalName": "AWTF",
"units": "°C"
},
{
"dataValue": 0.2971585690975189,
"dataValueValid": "true",
"signalID": 4925,
"signalName": "ChpDty",
"units": "none"
},
{
"dataValue": 20.14109992980957,
"dataValueValid": "true",
"signalID": 4835,
"signalName": "LDBBLWA",
"units": "A"
},
{
"dataValue": 36.02000045776367,
"dataValueValid": "true",
"signalID": 2669,
"signalName": "IcGVelM",
"units": "km/hr"
},
{
"dataValue": 479185.125,
"dataValueValid": "true",
"signalID": 1070,
"signalName": "WPEgILP",
"units": "PSIG"
},
{
"dataValue": 293026.875,
"dataValueValid": "true",
"signalID": 1799,
"signalName": "WPEgOtP",
"units": "PSIG"
},
{
"dataValue": 926750,
"dataValueValid": "true",
"signalID": 4698,
"signalName": "MR2 Prs",
"units": "PSIG"
},
{
"dataValue": 24,
"dataValueEnum": "ON",
"dataValueValid": "true",
"signalID": 664,
"signalName": "MVCC>",
"units": "none"
},
{
"dataValue": 907422.625,
"dataValueValid": "true",
"signalID": 4804,
"signalName": "SR Pres",
"units": "PSIG"
}
],
"unitID": "CN 8000",
"ver": "1.0.0"
},
"baseType": "PredictiveMaintenanceData"
},
"dataName": "CN8000.Prod.PredictiveMaintenanceData",
"id": "18a89f9e-9620-4453-a546-23412025e7c0",
"tags": {
"iaapl.access.level1": "Private",
"iaapl.access.level2": "OEM",
"iaapl.internal.deviceID": "",
"iaapl.internal.deviceName": "",
"iaapl.internal.encodeTime": "2021-02-25T07:41:19.000Z",
"iaapl.internal.sender": "Intelligent",
"iaapl.software.name": "",
"iaapl.software.partNumber": 0,
"iaapl.software.version": ""
},
"timestamp": "2021-02-25T07:32:31.000Z"
}
The result should look like:
Thanks in Advance
The bits where you type (select * FROM PM_POWER) could just be PM_POWER.
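For example, the flatten join in your view could read like this (a small sketch reusing the PM_POWER table and signals path from your procedure):

```sql
-- same flatten, without the extra "select *" wrapper
select f.value:signalName::string as SignalName
from PM_POWER,
     table(flatten(input => c1:data:baseData:signals, mode => 'ARRAY')) as f;
```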
Your JSON is malformed.
You are reading from data:baseData:signals, but there is no signals node in your data.
If you want help with your view's performance, I would remove the whole stored procedure part and first get a SELECT that produces the result you want from the demo data you provide, because presently the input, code, and expected output don't line up.
Update:
So your example data still does not generate the desired output. If this is a performance-improvement project, it's super frustrating not to have input + code = output to work from.
Anyway, here is your data and code mixed together:
with pm_power as (
select parse_json('{
"data": {
"baseData": {
"altitude": 0,
"altitudeValid": "false",
"customerID": "CN",
"kind": "Power",
"latitude": 0,
"latitudeValid": "false",
"longitude": 0,
"longitudeValid": "false",
"name": "Predictive Maintenance Data.Power",
"signals": [
{
"dataValue": 3112900,
"dataValueValid": "true",
"signalID": 3424,
"signalName": "mainGeneratorPower",
"units": "kW"
},
{
"dataValue": 1.4035000801086426,
"dataValueValid": "true",
"signalID": 380,
"signalName": "DB_HandlePos",
"units": "V"
},
{
"dataValue": 2,
"dataValueEnum": "Reverse",
"dataValueValid": "true",
"signalID": 813,
"signalName": "reverserPos",
"units": "none"
},
{
"dataValue": 291400,
"dataValueValid": "true",
"signalID": 586,
"signalName": "tractiveEffort",
"units": "none"
},
{
"dataValue": 8,
"dataValueEnum": "T 8",
"dataValueValid": "true",
"signalID": 899,
"signalName": "throttlePos",
"units": "none"
},
{
"dataValue": 47.05950164794922,
"dataValueValid": "true",
"signalID": 1805,
"signalName": "AWTF",
"units": "°C"
},
{
"dataValue": 0.2971585690975189,
"dataValueValid": "true",
"signalID": 4925,
"signalName": "ChpDty",
"units": "none"
},
{
"dataValue": 20.14109992980957,
"dataValueValid": "true",
"signalID": 4835,
"signalName": "LDBBLWA",
"units": "A"
},
{
"dataValue": 36.02000045776367,
"dataValueValid": "true",
"signalID": 2669,
"signalName": "IcGVelM",
"units": "km/hr"
},
{
"dataValue": 479185.125,
"dataValueValid": "true",
"signalID": 1070,
"signalName": "WPEgILP",
"units": "PSIG"
},
{
"dataValue": 293026.875,
"dataValueValid": "true",
"signalID": 1799,
"signalName": "WPEgOtP",
"units": "PSIG"
},
{
"dataValue": 926750,
"dataValueValid": "true",
"signalID": 4698,
"signalName": "MR2 Prs",
"units": "PSIG"
},
{
"dataValue": 24,
"dataValueEnum": "ON",
"dataValueValid": "true",
"signalID": 664,
"signalName": "MVCC>",
"units": "none"
},
{
"dataValue": 907422.625,
"dataValueValid": "true",
"signalID": 4804,
"signalName": "SR Pres",
"units": "PSIG"
}
],
"unitID": "CN 8000",
"ver": "1.0.0"
},
"baseType": "PredictiveMaintenanceData"
},
"dataName": "CN8000.Prod.PredictiveMaintenanceData",
"id": "18a89f9e-9620-4453-a546-23412025e7c0",
"tags": {
"iaapl.access.level1": "Private",
"iaapl.access.level2": "OEM",
"iaapl.internal.deviceID": "",
"iaapl.internal.deviceName": "",
"iaapl.internal.encodeTime": "2021-02-25T07:41:19.000Z",
"iaapl.internal.sender": "Intelligent",
"iaapl.software.name": "",
"iaapl.software.partNumber": 0,
"iaapl.software.version": ""
},
"timestamp": "2021-02-25T07:32:31.000Z"
}')as c1
)
select *
from (
select
figures,
stats,
SignalName,
id,
Latitude,
Longitude,
Altitude
from (
select
c1:id::STRING as id,
c1:data:baseData:unitID::varchar as UnitID,
c1:data:baseData:latitude::varchar as Latitude,
c1:data:baseData:longitude::varchar as Longitude,
c1:data:baseData:altitude::varchar as Altitude,
c1:timestamp::varchar as TimeStamp,
f.value:"dataValue"::varchar as SignalDataValue,
f.value:"dataValueValid"::varchar as SignalDataValueValid,
f.value:"signalID"::varchar as SignalID,
f.value:"units"::varchar as SignalUnits,
f.value:"dataValueEnum"::varchar as SignalDataEnum,
f.value:"signalName"::varchar as SignalName
from PM_POWER
,table( flatten(input=>c1:data:baseData:signals, mode=>'ARRAY') ) as f
) flt
unpivot (figures for stats in(UnitID, SignalDataValue, SignalDataValueValid, SignalID, SignalUnits, SignalDataEnum, TimeStamp))
) up
pivot (min(up.figures) for up.SignalName in (
'mainGeneratorPower','DB_HandlePos','reverserPos','tractiveEffort','throttlePos','AWTF','LDBBLWA','IcGVelM','WPEgOtP','MR2 Prs','SR Pres','ChpDty','MVCC>','WPEgILP'
))
| STATS | ID | LATITUDE | LONGITUDE | ALTITUDE | 'mainGeneratorPower' | 'DB_HandlePos' | 'reverserPos' | 'tractiveEffort' | 'throttlePos' | 'AWTF' | 'LDBBLWA' | 'IcGVelM' | 'WPEgOtP' | 'MR2 Prs' | 'SR Pres' | 'ChpDty' | 'MVCC>' | 'WPEgILP' |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| UNITID | 18a89f9e-9620-4453-a546-23412025e7c0 | 0 | 0 | 0 | CN 8000 | CN 8000 | CN 8000 | CN 8000 | CN 8000 | CN 8000 | CN 8000 | CN 8000 | CN 8000 | CN 8000 | CN 8000 | CN 8000 | CN 8000 | CN 8000 |
| SIGNALID | 18a89f9e-9620-4453-a546-23412025e7c0 | 0 | 0 | 0 | 3424 | 380 | 813 | 586 | 899 | 1805 | 4835 | 2669 | 1799 | 4698 | 4804 | 4925 | 664 | 1070 |
| SIGNALUNITS | 18a89f9e-9620-4453-a546-23412025e7c0 | 0 | 0 | 0 | kW | V | none | none | none | °C | A | km/hr | PSIG | PSIG | PSIG | none | none | PSIG |
| TIMESTAMP | 18a89f9e-9620-4453-a546-23412025e7c0 | 0 | 0 | 0 | 2021-02-25T07:32:31.000Z | 2021-02-25T07:32:31.000Z | 2021-02-25T07:32:31.000Z | 2021-02-25T07:32:31.000Z | 2021-02-25T07:32:31.000Z | 2021-02-25T07:32:31.000Z | 2021-02-25T07:32:31.000Z | 2021-02-25T07:32:31.000Z | 2021-02-25T07:32:31.000Z | 2021-02-25T07:32:31.000Z | 2021-02-25T07:32:31.000Z | 2021-02-25T07:32:31.000Z | 2021-02-25T07:32:31.000Z | 2021-02-25T07:32:31.000Z |
| SIGNALDATAVALUEVALID | 18a89f9e-9620-4453-a546-23412025e7c0 | 0 | 0 | 0 | true | true | true | true | true | true | true | true | true | true | true | true | true | true |
| SIGNALDATAENUM | 18a89f9e-9620-4453-a546-23412025e7c0 | 0 | 0 | 0 |  |  | Reverse |  | T 8 |  |  |  |  |  |  |  | ON |  |
| SIGNALDATAVALUE | 18a89f9e-9620-4453-a546-23412025e7c0 | 0 | 0 | 0 | 3112900 | 1.4035000801086426 | 2 | 291400 | 8 | 47.05950164794922 | 20.14109992980957 | 36.02000045776367 | 293026.875 | 926750 | 907422.625 | 0.2971585690975189 | 24 | 479185.125 |
An answer:
This SQL gets the answer in the same structure, BUT timestamp and unitid are not rows, as those are all the same value, so it seems having them as rows is a mistake and they should be columns.
select
case lower(o.key )
when 'datavalue' then 'SignalDataValue'
when 'datavaluevalid' then 'SignalDataValueValid'
when 'signalid' then 'SignalID'
when 'units' then 'SignalUnits'
when 'datavalueenum' then 'SignalDataEnum'
when 'signalname' then 'SignalName'
end as stats,
c1:id::STRING as id,
c1:data:baseData:unitID::varchar as UnitID,
c1:data:baseData:latitude::varchar as Latitude,
c1:data:baseData:longitude::varchar as Longitude,
c1:data:baseData:altitude::varchar as Altitude,
c1:timestamp::varchar as TimeStamp
,max(iff(f.value:"signalName"='mainGeneratorPower', o.value, null)) as "'mainGeneratorPower'"
,max(iff(f.value:"signalName"='DB_HandlePos', o.value, null)) as "'DB_HandlePos'"
,max(iff(f.value:"signalName"='reverserPos', o.value, null)) as "'reverserPos'"
,max(iff(f.value:"signalName"='tractiveEffort', o.value, null)) as "'tractiveEffort'"
,max(iff(f.value:"signalName"='throttlePos', o.value, null)) as "'throttlePos'"
,max(iff(f.value:"signalName"='AWTF', o.value, null)) as "'AWTF'"
,max(iff(f.value:"signalName"='LDBBLWA', o.value, null)) as "'LDBBLWA'"
,max(iff(f.value:"signalName"='IcGVelM', o.value, null)) as "'IcGVelM'"
,max(iff(f.value:"signalName"='WPEgOtP', o.value, null)) as "'WPEgOtP'"
,max(iff(f.value:"signalName"='MR2 Prs', o.value, null)) as "'MR2 Prs'"
,max(iff(f.value:"signalName"='SR Pres', o.value, null)) as "'SR Pres'"
,max(iff(f.value:"signalName"='ChpDty', o.value, null)) as "'ChpDty'"
,max(iff(f.value:"signalName"='MVCC>', o.value, null)) as "'MVCC>'"
,max(iff(f.value:"signalName"='WPEgILP', o.value, null)) as "'WPEgILP'"
from PM_POWER
,table( flatten(input=>c1:data:baseData:signals, mode=>'ARRAY') ) as f
,table( flatten(input=>f.value, mode=>'OBJECT')) as o
WHERE o.key != 'signalName'
GROUP BY 1,2,3,4,5,6,7
ORDER BY 1;
This gives the result in essentially the same shape as the table above, but with UnitID and TimeStamp as columns, which means your stored procedure needs to build that SQL. If you are happy that this is more performant, you can work out how to generate the query above inside your stored procedure.
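One way the procedure might build that query, a rough, untested sketch reusing the PM_POWER table and the AWSS3_PM.PUBLIC.PM_POWER_CN8000_V2 view name from the question (signal names containing quotes are not handled):

```javascript
// Inside a JavaScript stored procedure body ($$ ... $$), replacing the pivot_query section.
// 1) Collect the distinct signal names.
var names_stmt = snowflake.createStatement({sqlText: `
    select distinct f.value:signalName::string
    from PM_POWER,
         table(flatten(input => c1:data:baseData:signals, mode => 'ARRAY')) as f`});
var rs = names_stmt.execute();

// 2) Build one conditional-aggregation column per signal name.
var col_exprs = [];
while (rs.next()) {
    var name = rs.getColumnValue(1);
    col_exprs.push('max(iff(f.value:"signalName" = \'' + name + '\', o.value, null)) as "\'' + name + '\'"');
}

// 3) Splice the columns into the query shown above and create the view.
var view_sql = `
create or replace view AWSS3_PM.PUBLIC.PM_POWER_CN8000_V2 as
select
    case lower(o.key)
        when 'datavalue'      then 'SignalDataValue'
        when 'datavaluevalid' then 'SignalDataValueValid'
        when 'signalid'       then 'SignalID'
        when 'units'          then 'SignalUnits'
        when 'datavalueenum'  then 'SignalDataEnum'
    end as stats,
    c1:id::string as id,
    c1:data:baseData:unitID::varchar as UnitID,
    c1:data:baseData:latitude::varchar as Latitude,
    c1:data:baseData:longitude::varchar as Longitude,
    c1:data:baseData:altitude::varchar as Altitude,
    c1:timestamp::varchar as TimeStamp,
    ` + col_exprs.join(',\n    ') + `
from PM_POWER,
     table(flatten(input => c1:data:baseData:signals, mode => 'ARRAY')) as f,
     table(flatten(input => f.value, mode => 'OBJECT')) as o
where o.key != 'signalName'
group by 1,2,3,4,5,6,7`;

snowflake.createStatement({sqlText: view_sql}).execute();
return view_sql;
```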
Related
There is a part of a JSON file:
{
"payload": {
"orders": [
{
"quantity": 1,
"platinum": 4,
"visible": true,
"order_type": "sell",
"user": {
"reputation": 5,
"region": "en",
"last_seen": "2022-11-17T08:15:43.360+00:00",
"ingame_name": "Noxxat",
"id": "5b50d73859d885026b523cd1",
"avatar": null,
"status": "offline"
},
"platform": "pc",
"region": "en",
"creation_date": "2020-09-04T15:30:41.000+00:00",
"last_update": "2021-11-19T09:41:43.000+00:00",
"id": "5f525da1c98cd000d7513813"
},
{
"order_type": "sell",
"visible": true,
"quantity": 2,
"platinum": 6,
"user": {
"reputation": 3,
"region": "en",
"last_seen": "2022-11-18T14:22:53.023+00:00",
"ingame_name": "Dhatman",
"id": "5b79921649262103f74b6585",
"avatar": null,
"status": "offline"
},
"platform": "pc",
"region": "en",
"creation_date": "2020-11-06T10:32:32.000+00:00",
"last_update": "2022-10-11T16:51:55.000+00:00",
"id": "5fa526406ff3660486ef556c"
},
{
"quantity": 1,
"visible": true,
"platinum": 5,
"order_type": "sell",
"user": {
"reputation": 4,
"region": "en",
"last_seen": "2022-11-18T18:31:49.199+00:00",
"ingame_name": "TheronGuardxx",
"avatar": "user/avatar/5e235e94ab7656047a86f70c.png?7b1e90d474a62c6ba3c2d3ef06aed927",
"id": "5e235e94ab7656047a86f70c",
"status": "offline"
},
"platform": "pc",
"region": "en",
"creation_date": "2020-12-17T22:46:57.000+00:00",
"last_update": "2022-10-15T23:37:01.000+00:00",
"id": "5fdbdfe13e8c4f017f5e3352"
}
]
}
}
How to find the minimum amount of platinum in this file?
As I understand it, I need to make a loop that will go through the entire file and assign a new value to the variable min if the current amount of platinum is less than the amount currently written in min.
But what should the code look like?
At the moment I have written a block that finds the amount of platinum, the seller's alias and the number of items from the last element of the JSON-file.
num = 1
flagSell = 0
while flagSell == 0:
if r_json["payload"]["orders"][len(r_json["payload"]["orders"]) - num]["user"]['status'] == 'ingame':
if r_json["payload"]["orders"][len(r_json["payload"]["orders"]) - num]["region"] == 'en':
if r_json["payload"]["orders"][len(r_json["payload"]["orders"]) - num]["order_type"] == 'sell':
min = r_json["payload"]["orders"][len(r_json["payload"]["orders"]) - num]["platinum"]
author = r_json["payload"]["orders"][len(r_json["payload"]["orders"]) - num]["user"]["ingame_name"]
quantity = r_json["payload"]["orders"][len(r_json["payload"]["orders"]) - num]["quantity"]
flagSell = 1
else:
num += 1
else:
num += 1
else:
num += 1
Try using the built-in function min() to find the minimum order according to the platinum key (data is your dictionary from the question):
min_order = min(data["payload"]["orders"], key=lambda o: o["platinum"])
print("Min Platinum =", min_order["platinum"])
print("Name =", min_order["user"]["ingame_name"])
print("Quantity =", min_order["quantity"])
Prints:
Min Platinum = 4
Name = Noxxat
Quantity = 1
EDIT: If you want to search for a minimum in orders where order_type == 'sell':
min_order = min(
(o for o in data["payload"]["orders"] if o["order_type"] == "sell"),
key=lambda o: o["platinum"],
)
print("Min Platinum =", min_order["platinum"])
print("Name =", min_order["user"]["ingame_name"])
print("Quantity =", min_order["quantity"])
I've got the response from an HTTP GET request as a JSON object, and I want to append another node to that JSON and pass it to the next HTTP request. I got the following response data:
{
"settlementFolio": "0002",
"settlementID": 283,
"businessUnitID": 43,
"routeID": 955,
"chargeAmount": 22248.0000,
"paymentAmount": 68560.6000,
"isPaid": false,
"hasValidCrewConfiguration": true,
"settlementDate": "2020-09-21T00:00:00",
"charged": 68560.6000,
"packageRemain": 7500.0000,
"totalBoxes": 280,
"detail": [
{
"settlementTransactionID": 1,
"settlementTransactionDescription": "1 Ventas Netas",
"charge": 0.0,
"payment": 61060.6000,
"isAutomatic": true,
"isForClient": false,
"isCharge": false,
"isChecked": false,
"settlementTransactionClientDetail": [
{
"settlementTransactionID": 3456,
"clientID": 392342,
"saleNote": 11792,
"amount": 30530.3000,
"reference": "",
"clientName": "OXXO NUEVA MAYAPAN",
"isChecked": false
},
{
"settlementTransactionID": 3456,
"clientID": 391322,
"saleNote": 11793,
"amount": 30530.3000,
"reference": "",
"clientName": "OXXO CHICHI SUAREZ",
"isChecked": false
}
]
}
],
"productTypeCounter": [
{
"centralCostID": 1,
"centralCostName": "Refrescos",
"sellUnit": 280,
"cNomina": 0.0000
}
]
}
I need to append a node with "settlementTransactionID": 2, so that the result looks like this:
{
"settlementFolio": "0002",
"settlementID": 283,
"businessUnitID": 43,
"routeID": 955,
"chargeAmount": 22248.0000,
"paymentAmount": 68560.6000,
"isPaid": false,
"hasValidCrewConfiguration": true,
"settlementDate": "2020-09-21T00:00:00",
"charged": 68560.6000,
"packageRemain": 7500.0000,
"totalBoxes": 280,
"detail": [
{
"settlementTransactionID": 1,
"settlementTransactionDescription": "1 Ventas Netas",
"charge": 0.0,
"payment": 61060.6000,
"isAutomatic": true,
"isForClient": false,
"isCharge": false,
"isChecked": false,
"settlementTransactionClientDetail": [
{
"settlementTransactionID": 3456,
"clientID": 392342,
"saleNote": 11792,
"amount": 30530.3000,
"reference": "",
"clientName": "OXXO NUEVA MAYAPAN",
"isChecked": false
},
{
"settlementTransactionID": 3456,
"clientID": 391322,
"saleNote": 11793,
"amount": 30530.3000,
"reference": "",
"clientName": "OXXO CHICHI SUAREZ",
"isChecked": false
}
]
},
{
"settlementTransactionID": 2,
"settlementTransactionDescription": "2 Envase Faltante",
"charge": 0.0,
"payment": 7500.0000,
"isAutomatic": true,
"isForClient": false,
"isCharge": false,
"isChecked": false,
"settlementTransactionClientDetail": [
{
"settlementTransactionID": 3461,
"clientID": 0,
"saleNote": null,
"amount": 7500.0000,
"reference": "202|75.00|100.000",
"clientName": "",
"isChecked": false
}
]
}
],
"productTypeCounter": [
{
"centralCostID": 1,
"centralCostName": "Refrescos",
"sellUnit": 280,
"cNomina": 0.0000
}
]
}
I am using something like this
String addMore = '${detail}';
def detail = new groovy.json.JsonSlurper().parseText(addMore);
detail.put(deposito);
vars.put("detail", new groovy.json.JsonBuilder(detail).toPrettyString());
log.info("",vars.get('detail'));
The log shows the following error
2020-09-23 09:09:25,857 ERROR o.a.j.e.JSR223PostProcessor: Problem in JSR223 script, JSR223 PostProcessor
javax.script.ScriptException: groovy.lang.MissingMethodException: No signature of method: java.util.ArrayList.put() is applicable for argument types: (org.apache.groovy.json.internal.LazyMap) values: [[settlementTransactionID:11, settlementTransactionDescription:11 Deposito de Efectivo, ...]]
Possible solutions: sum(), pop(), get(int), get(int), sum(java.lang.Object), plus(java.lang.Object)
at org.codehaus.groovy.jsr223.GroovyScriptEngineImpl.eval(GroovyScriptEngineImpl.java:320) ~[groovy-jsr223-3.0.3.jar:3.0.3]
at org.codehaus.groovy.jsr223.GroovyCompiledScript.eval(GroovyCompiledScript.java:71) ~[groovy-jsr223-3.0.3.jar:3.0.3]
at javax.script.CompiledScript.eval(CompiledScript.java:89) ~[java.scripting:?]
at org.apache.jmeter.util.JSR223TestElement.processFileOrScript(JSR223TestElement.java:222) ~[ApacheJMeter_core.jar:5.3]
at org.apache.jmeter.extractor.JSR223PostProcessor.process(JSR223PostProcessor.java:45) [ApacheJMeter_components.jar:5.3]
at org.apache.jmeter.threads.JMeterThread.runPostProcessors(JMeterThread.java:940) [ApacheJMeter_core.jar:5.3]
at org.apache.jmeter.threads.JMeterThread.executeSamplePackage(JMeterThread.java:572) [ApacheJMeter_core.jar:5.3]
at org.apache.jmeter.threads.JMeterThread.processSampler(JMeterThread.java:489) [ApacheJMeter_core.jar:5.3]
at org.apache.jmeter.threads.JMeterThread.run(JMeterThread.java:256) [ApacheJMeter_core.jar:5.3]
at java.lang.Thread.run(Thread.java:832) [?:?]
Caused by: groovy.lang.MissingMethodException: No signature of method: java.util.ArrayList.put() is applicable for argument types: (org.apache.groovy.json.internal.LazyMap) values: [[settlementTransactionID:11, settlementTransactionDescription:11 Deposito de Efectivo, ...]]
Possible solutions: sum(), pop(), get(int), get(int), sum(java.lang.Object), plus(java.lang.Object)
at org.codehaus.groovy.runtime.ScriptBytecodeAdapter.unwrap(ScriptBytecodeAdapter.java:70) ~[groovy-3.0.3.jar:3.0.3]
at org.codehaus.groovy.runtime.callsite.PojoMetaClassSite.call(PojoMetaClassSite.java:46) ~[groovy-3.0.3.jar:3.0.3]
at org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCall(CallSiteArray.java:47) ~[groovy-3.0.3.jar:3.0.3]
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:125) ~[groovy-3.0.3.jar:3.0.3]
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:139) ~[groovy-3.0.3.jar:3.0.3]
at Script158.run(Script158.groovy:31) ~[?:?]
at org.codehaus.groovy.jsr223.GroovyScriptEngineImpl.eval(GroovyScriptEngineImpl.java:317) ~[groovy-jsr223-3.0.3.jar:3.0.3]
... 9 more
We cannot really help you, as it's unclear what these ${detail} and ${deposito} variables mean and contain.
If you get a deposito JMeter Variable which looks like:
{
"settlementTransactionID": 2,
"settlementTransactionDescription": "2 Envase Faltante",
"charge": 0.0,
"payment": 7500.0000,
"isAutomatic": true,
"isForClient": false,
"isCharge": false,
"isChecked": false,
"settlementTransactionClientDetail": [
{
"settlementTransactionID": 3461,
"clientID": 0,
"saleNote": null,
"amount": 7500.0000,
"reference": "202|75.00|100.000",
"clientName": "",
"isChecked": false
}
]
}
and you want to add it to the response of a certain sampler and store the result in another JMeter Variable, you can use a JSR223 PostProcessor with the following code:
def originalResponse = new groovy.json.JsonSlurper().parse(prev.getResponseData())
originalResponse.detail.add(new groovy.json.JsonSlurper().parseText(vars.get('deposito')))
vars.put('modifiedResponse', new groovy.json.JsonBuilder(originalResponse).toPrettyString())
assuming everything goes well you will get the JSON payload you're looking for in the ${modifiedResponse} JMeter Variable.
More information:
Apache Groovy - Parsing and producing JSON
Apache Groovy - Why and How You Should Use It
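If you would rather keep your original variable-based approach, the key point from the error is that JsonSlurper returns a List for a JSON array, so the method you want is add(), not put(). A sketch along those lines, assuming the detail and deposito JMeter Variables hold the JSON array and the JSON object shown above:

```groovy
import groovy.json.JsonSlurper
import groovy.json.JsonBuilder

// parseText() gives a List for a JSON array and a Map for a JSON object
def detail = new JsonSlurper().parseText(vars.get('detail'))
def deposito = new JsonSlurper().parseText(vars.get('deposito'))

detail.add(deposito)                                          // List.add(), not Map.put()
vars.put('detail', new JsonBuilder(detail).toPrettyString())  // store the merged array back
log.info(vars.get('detail'))
```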
How can I get the data out of this array stored in a variant column in Snowflake? I don't care if it's a new table, a view, or a query. There is a second column of type varchar(256) that contains a unique ID.
If you can just help me read the "confirmed" data and the "editorIds" data, I can probably take it from there. Many thanks!
Output example would be
UniqueID ConfirmationID EditorID
u3kd9 xxxx-436a-a2d7 nupd
u3kd9 xxxx-436a-a2d7 9l34c
R3nDo xxxx-436a-a3e4 5rnj
yP48a xxxx-436a-a477 jTpz8
yP48a xxxx-436a-a477 nupd
[
{
"confirmed": {
"Confirmation": "Entry ID=xxxx-436a-a2d7-3525158332f0: Confirmed order submitted.",
"ConfirmationID": "xxxx-436a-a2d7-3525158332f0",
"ConfirmedOrders": 1,
"Received": "8/29/2019 4:31:11 PM Central Time"
},
"editorIds": [
"xxsJYgWDENLoX",
"JR9bWcGwbaymm3a8v",
"JxncJrdpeFJeWsTbT"
] ,
"id": "xxxxx5AvGgeSHy8Ms6Ytyc-1",
"messages": [],
"orderJson": {
"EntryID": "xxxxx5AvGgeSHy8Ms6Ytyc-1",
"Orders": [
{
"DropShipFlag": 1,
"FromAddressValue": 1,
"OrderAttributes": [
{
"AttributeUID": 548
},
{
"AttributeUID": 553
},
{
"AttributeUID": 2418
}
],
"OrderItems": [
{
"EditorId": "aC3f5HsJYgWDENLoX",
"ItemAssets": [
{
"AssetPath": "https://xxxx573043eac521.png",
"DP2NodeID": "10000",
"ImageHash": "000000000000000FFFFFFFFFFFFFFFFF",
"ImageRotation": 0,
"OffsetX": 50,
"OffsetY": 50,
"PrintedFileName": "aC3f5HsJYgWDENLoX-10000",
"X": 50,
"Y": 52.03909266409266,
"ZoomX": 100,
"ZoomY": 93.75
}
],
"ItemAttributes": [
{
"AttributeUID": 2105
},
{
"AttributeUID": 125
}
],
"ItemBookAttribute": null,
"ProductUID": 52,
"Quantity": 1
}
],
"SendNotificationEmailToAccount": true,
"SequenceNumber": 1,
"ShipToAddress": {
"Addr1": "Addr1",
"Addr2": "0",
"City": "City",
"Country": "US",
"Name": "Name",
"State": "ST",
"Zip": "00000"
}
}
]
},
"orderNumber": null,
"status": "order_placed",
"submitted": {
"Account": "350000",
"ConfirmationID": "xxxxx-436a-a2d7-3525158332f0",
"EntryID": "xxxxx-5AvGgeSHy8Ms6Ytyc-1",
"Key": "D83590AFF0CC0000B54B",
"NumberOfOrders": 1,
"Orders": [
{
"LineItems": [],
"Note": "",
"Products": [
{
"Price": "00.30",
"ProductDescription": "xxxxxint 8x10",
"Quantity": 1
},
{
"Price": "00.40",
"ProductDescription": "xxxxxut Black 8x10",
"Quantity": 1
},
{
"Price": "00.50",
"ProductDescription": "xxxxx"
},
{
"Price": "00.50",
"ProductDescription": "xxxscount",
"Quantity": 1
}
],
"SequenceNumber": "1",
"SubTotal": "00.70",
"Tax": "1.01",
"Total": "00.71"
}
],
"Received": "8/29/2019 4:31:10 PM Central Time"
},
"tracking": null,
"updatedOn": 1.598736670503000e+12
}
]
So, this is how I'd query that exact JSON assuming the data is in column var in table x:
SELECT x.var[0]:confirmed:ConfirmationID::varchar as ConfirmationID,
f.value::varchar as EditorID
FROM x,
LATERAL FLATTEN(input => var[0]:editorIds) f
;
Since your sample output doesn't match the JSON that you provided, I will assume that this is what you need.
Also, as a note, your JSON includes outer [ ] which indicates that the entire JSON string is inside an array. This is the reason for var[0] in my query. If you have multiple records inside that array, then you should remove that. In general, you should exclude those and instead load each record into the table separately. I wasn't sure whether you could make that change, so I just wanted to make note.
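If the outer array really can contain several records and reloading isn't an option, one alternative (a sketch against the same assumed table x and column var) is to flatten the outer array as well, so each record produces its own ConfirmationID/EditorID rows:

```sql
SELECT r.value:confirmed:ConfirmationID::varchar AS ConfirmationID,
       e.value::varchar                          AS EditorID
FROM x,
     LATERAL FLATTEN(input => x.var)             r,  -- one row per record in the outer array
     LATERAL FLATTEN(input => r.value:editorIds) e;  -- one row per editor id within the record
```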
Does HERE have data on property parcel boundaries?
I am looking for the coordinates of individual properties to overlay their maps.
Unfortunately, we do not have this kind of data available. Maybe this might be interesting for you instead:
With a reverse geocode request you can retrieve the shape of a postal district for a given latitude and longitude.
This example retrieves the shape and details of the first address around a specified location in Chicago (41.8839,-87.6389) using a 150 meter radius to retrieve the address. The expected address is: 425 W Randolph St, Chicago, IL 60606, United States.
The addition of the additionaldata=IncludeShapeLevel,postalCode parameter ensures that the shape of the postal district is also included in the response. Reverse geocoding requests can be made using the reversegeocode endpoint and adding the prox parameter to the request URL. The number of results returned can be restricted using the maxresults parameter.
https://reverse.geocoder.ls.hereapi.com/6.2/reversegeocode.json?prox=41.8839%2C-87.6389%2C150&mode=retrieveAddresses&maxresults=1&additionaldata=IncludeShapeLevel%2CpostalCode&gen=9&apiKey=xxx
{
"Response": {
"MetaInfo": {
"Timestamp": "2020-07-27T09:56:24.943+0000",
"NextPageInformation": "2"
},
"View": [
{
"_type": "SearchResultsViewType",
"ViewId": 0,
"Result": [
{
"Relevance": 1,
"Distance": 16.3,
"MatchLevel": "houseNumber",
"MatchQuality": {
"Country": 1,
"State": 1,
"County": 1,
"City": 1,
"District": 1,
"Street": [
1
],
"HouseNumber": 1,
"PostalCode": 1
},
"MatchType": "pointAddress",
"Location": {
"LocationId": "NT_puy2gbuVuGd-an6zGdSyNA_xADM",
"LocationType": "address",
"DisplayPosition": {
"Latitude": 41.88403,
"Longitude": -87.63881
},
"NavigationPosition": [
{
"Latitude": 41.88401,
"Longitude": -87.63845
}
],
"MapView": {
"TopLeft": {
"Latitude": 41.8851542,
"Longitude": -87.6403199
},
"BottomRight": {
"Latitude": 41.8829058,
"Longitude": -87.6373001
}
},
"Address": {
"Label": "100 N Riverside Plz, Chicago, IL 60606, United States",
"Country": "USA",
"State": "IL",
"County": "Cook",
"City": "Chicago",
"District": "West Loop",
"Street": "N Riverside Plz",
"HouseNumber": "100",
"PostalCode": "60606",
"AdditionalData": [
{
"value": "United States",
"key": "CountryName"
},
{
"value": "Illinois",
"key": "StateName"
},
{
"value": "Cook",
"key": "CountyName"
},
{
"value": "N",
"key": "PostalCodeType"
}
]
},
"MapReference": {
"ReferenceId": "1190062166",
"MapId": "NAAM20117",
"MapVersion": "Q1/2020",
"MapReleaseDate": "2020-06-29",
"Spot": 0.59,
"SideOfStreet": "left",
"CountryId": "21000001",
"StateId": "21002247",
"CountyId": "21002623",
"CityId": "21002647",
"BuildingId": "9000000000002726912",
"AddressId": "79186499",
"RoadLinkId": "499349060"
},
"Shape": {
"_type": "WKTShapeType",
"Value": "MULTIPOLYGON (((-87.6339 41.88446, -87.6338 41.8813, -87.63239 41.88132, -87.63238 41.88067, -87.63378 41.88068, -87.63376 41.8794, -87.63377 41.87812, -87.6352 41.87811, -87.6352 41.87682, -87.63665 41.87678, -87.63663 41.87666, -87.63664 41.87658, -87.6367 41.87664, -87.63674 41.87678, -87.63706 41.87677, -87.6374 41.87807, -87.63756 41.87861, -87.63774 41.87936, -87.63794 41.88062, -87.63791 41.8819, -87.63779 41.88322, -87.63764 41.88449, -87.63727 41.88574, -87.63739 41.88602, -87.63603 41.88695, -87.63559 41.88717, -87.63248 41.8871, -87.63248 41.88703, -87.63374 41.88703, -87.63386 41.887, -87.63395 41.88702, -87.6339 41.88446)), ((-87.64102 41.87676, -87.64104 41.87804, -87.63955 41.87805, -87.63959 41.87933, -87.63966 41.88058, -87.63969 41.88187, -87.63976 41.88318, -87.6398 41.88446, -87.64022 41.88445, -87.64022 41.8846, -87.64025 41.88479, -87.64035 41.8851, -87.64047 41.88571, -87.63981 41.88572, -87.64062 41.88625, -87.64063 41.88639, -87.64064 41.88678, -87.63989 41.88679, -87.63993 41.88758, -87.6401 41.88769, -87.64035 41.88782, -87.64054 41.8879, -87.6407 41.88793, -87.64076 41.88828, -87.64085 41.88859, -87.63996 41.88847, -87.63999 41.88906, -87.63971 41.88905, -87.63961 41.88882, -87.63954 41.8887, -87.63918 41.88675, -87.63873 41.8864, -87.63841 41.88588, -87.6383 41.88573, -87.63812 41.88522, -87.63825 41.88449, -87.63845 41.88321, -87.63855 41.88231, -87.63858 41.88104, -87.63855 41.88061, -87.63836 41.87935, -87.63787 41.87794, -87.63778 41.87751, -87.63752 41.87751, -87.63752 41.87731, -87.63775 41.87728, -87.6377 41.87687, -87.63784 41.87684, -87.63778 41.87676, -87.64102 41.87676)))"
}
}
}
]
}
]
}
}
See also https://developer.here.com/blog/how-to-get-the-shape-of-an-area-using-the-here-geocoder-api
Below is the JSON returned from Twitter:
{
"created_at": "Sat, 11 Feb 2012 06:38:28 +0000",
"entities": {
"hashtags": [
{
"text": "Shubhdin",
"indices": [
9,
18
]
}
],
"urls": [],
"user_mentions": [
{
"screen_name": "SAMdLaw",
"name": "Sabyasachi Mohapatra",
"id": 104420398,
"id_str": "104420398",
"indices": [
0,
8
]
}
]
},
"from_user": "nilayshah80",
"from_user_id": 213599118,
"from_user_id_str": "213599118",
"from_user_name": "Nilay Shah",
"geo": {
"coordinates": [
18.6003,
73.825
],
"type": "Point"
},
"id": 168222351106899968,
"id_str": "168222351106899968",
"iso_language_code": "in",
"metadata": {
"result_type": "recent"
},
"profile_image_url": "http://a2.twimg.com/profile_images/1528184590/IMG_0465_normal.JPG",
"profile_image_url_https": "https://si0.twimg.com/profile_images/1528184590/IMG_0465_normal.JPG",
"source": "<a href="http://twabbit.wordpress.com/" rel="nofollow">twabbit</a>",
"text": "#SAMdLaw #Shubhdin mitra",
"to_user": "SAMdLaw",
"to_user_id": 104420398,
"to_user_id_str": "104420398",
"to_user_name": "Sabyasachi Mohapatra",
"in_reply_to_status_id": 168219865197461505,
"in_reply_to_status_id_str": "168219865197461505"
},
{
"created_at": "Sun, 12 Feb 2012 01:54:07 +0000",
"entities": {
"hashtags": [
{
"text": "IWIllAlwaysLoveYou",
"indices": [
88,
107
]
}
],
"urls": [],
"user_mentions": [],
"media": [
{
"id": 168513175187238912,
"id_str": "168513175187238912",
"indices": [
108,
128
],
"media_url": "http://p.twimg.com/Alat1wsCMAAh-wE.jpg",
"media_url_https": "https://p.twimg.com/Alat1wsCMAAh-wE.jpg",
"url": "http://shortener.twitter.com/dRc4dXH3",
"display_url": "pic.twitter.com/dRc4dXH3",
"expanded_url": "http://twitter.com/RIPWhitneyH/status/168513175183044608/photo/1",
"type": "photo",
"sizes": {
"orig": {
"w": 395,
"h": 594,
"resize": "fit"
},
"large": {
"w": 395,
"h": 594,
"resize": "fit"
},
"thumb": {
"w": 150,
"h": 150,
"resize": "crop"
},
"small": {
"w": 340,
"h": 511,
"resize": "fit"
},
"medium": {
"w": 395,
"h": 594,
"resize": "fit"
}
}
}
]
},
"from_user": "RIPWhitneyH",
"from_user_id": 19319043,
"from_user_id_str": "19319043",
"from_user_name": "RIP Whitney Houston",
"geo": null,
"id": 168513175183044608,
"id_str": "168513175183044608",
"iso_language_code": "en",
"metadata": {
"recent_retweets": 8,
"result_type": "popular"
},
"profile_image_url": "http://a2.twimg.com/profile_images/1820957590/images__13__normal.jpg",
"profile_image_url_https": "https://si0.twimg.com/profile_images/1820957590/images__13__normal.jpg",
"source": "<a href="http://twitter.com/">web</a>",
"text": "R-T if you think that the Grammy's should organize an \"R.I.P. Whitney Houston\" tribute. #IWIllAlwaysLoveYou http://shortener.twitter.com/dRc4dXH3",
"to_user": null,
"to_user_id": null,
"to_user_id_str": null,
"to_user_name": null
},
If you notice, media under entities is not present in the first of the two tweets above, and when I tried to call the snippet below, it gave me a null reference error:
MediaUrl = (from user in tweet["entities"]["media"]
select new mediaUrl
{
shortUrl = (string)user["url"],
longUrl = (string)user["expanded_url"],
url = (string)user["media_url"],
start = user["indices"][0].ToString(),
end = user["indices"][1].ToString(),
mediaType = (string)user["type"],
}).ToList()
The same code works for entities/urls, hashtags, and user mentions, but not for media.
I also tried this -> Get JSON object node, but I am still getting a null reference exception.
In the first tweet, the entities object doesn't have a media property, so when evaluating the first tweet, your code is equivalent to:
MediaUrl = (from user in (IEnumerable<JToken>)null
select new mediaUrl
{
shortUrl = (string)user["url"],
longUrl = (string)user["expanded_url"],
url = (string)user["media_url"],
start = user["indices"][0].ToString(),
end = user["indices"][1].ToString(),
mediaType = (string)user["type"],
}).ToList()
which will throw an ArgumentNullException, because that code runs the query over a null collection.
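One way to avoid that, assuming you are using Json.NET (where the JToken indexer simply returns null for a missing property), is to check for the media token before running the query. A sketch along these lines:

```csharp
// A missing "media" property yields a null JToken; guard "entities" explicitly as well
JToken media = tweet["entities"] == null ? null : tweet["entities"]["media"];

MediaUrl = media == null
    ? new List<mediaUrl>()                  // no media on this tweet
    : (from user in media
       select new mediaUrl
       {
           shortUrl = (string)user["url"],
           longUrl = (string)user["expanded_url"],
           url = (string)user["media_url"],
           start = user["indices"][0].ToString(),
           end = user["indices"][1].ToString(),
           mediaType = (string)user["type"],
       }).ToList()
```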
I finally got it working. It's not the most appropriate solution, but it works for me.
I created a separate method for parsing media. I pass the entity in as a string, and in that method I check whether the entity string contains "media" or not. If yes, I parse the media JSON; otherwise I return null. See the snippet below.
if (Entities != string.Empty)
{
if (Entities.Contains("\"media\":"))
{
JObject searchResult = JObject.Parse(Entities);
returnMedia = (from user in searchResult["media"]
select new mediaUrl
{
shortUrl = (string)user["url"],
longUrl = (string)user["expanded_url"],
url = (string)user["media_url"],
start = user["indices"][0].ToString(),
end = user["indices"][1].ToString(),
mediaType = (string)user["type"],
}).ToList();
}
}
This works for me. If you have a better solution, please let me know.