One to many relationship of products sold with different time periods - pivot-table

I need to track the performance of one brand (store brand) versus all other brands. I believe this requires a one to many relationship.
My data set looks like this:
Date
Type
Product Number
Sales
01.2020
Store Brand
2345
500
01.2020
Brand
1111
400
01.2020
Brand
2222
425
01.2020
Brand
3333
450
02.2020
Brand
1111
300
02.2020
Brand
2222
325
02.2020
Brand
3333
350
03.2020
Store Brand
2346
600
But I need to transform it so that it will look like this:
Year
Month
Store Brand Product Number
Brand Product Number
Store Brand Sales
Brand Sales
2020
1
2345
1111
500
400
2020
1
2345
2222
500
425
2020
1
2345
3333
500
450
2020
2
2345
1111
0
300
2020
2
2345
2222
0
325
2020
2
2345
3333
0
350
2020
3
2346
1112
600
0
2020
3
2346
2223
600
0
2020
3
2346
3332
600
0
I have a mapping table that shows which brand products can be compared with a store brand product. Which looks like this:
Store Brand
Brand
2345
1111
2345
2222
2345
3333
2346
1112
2346
2223
2346
3332
For every time period in the data set all brand products need to be compared to the store brand. If in a certain period no store brand products were sold, then these need to be listed as 0, or as an empty cell. Vice versa, if only store brands were sold in a certain time period, then the comparable brand sales need to be listed as 0, or as an empty cell.
I am using Excel 365, so if possible, please only post answers that I can implement in Excel 365, because I cannot use Power BI Desktop at the moment.
Please let me know if anything is unclear or if I need to add additional information.
Thank you!

// Power Query (M): reshapes a monthly sales fact table so that, for every period,
// each "Brand" row is paired with that period's "Store Brand" product and sales.
// Periods where only the store brand sold are filled in from a mapping table with
// Brand Sales = 0; periods with no store-brand row get Store Brand Sales = 0.
let
// _mapping: scrape the Store Brand -> Brand comparable-products table from the
// Stack Overflow question page (the 6th HTML table on the page).
_mapping =
let
Source = Web.BrowserContents(
"https://stackoverflow.com/questions/68349163/one-to-many-relationship-of-products-sold-with-different-time-periods?noredirect=1#comment120797753_68349163"
),
#"Extracted Table From Html" = Html.Table(
Source,
{
{"Column1", "DIV.s-table-container:nth-child(6) > TABLE.s-table > * > TR > :nth-child(1)"},
{"Column2", "DIV.s-table-container:nth-child(6) > TABLE.s-table > * > TR > :nth-child(2)"}
},
[RowSelector = "DIV.s-table-container:nth-child(6) > TABLE.s-table > * > TR"]
),
#"Promoted Headers" = Table.PromoteHeaders(
#"Extracted Table From Html",
[PromoteAllScalars = true]
),
#"Changed Type" = Table.TransformColumnTypes(
#"Promoted Headers",
{{"Store Brand", Int64.Type}, {"Brand", Int64.Type}}
)
in
#"Changed Type",
// _fact: scrape the raw sales fact table (Date / Type / Product Number / Sales)
// from the same page (the 2nd HTML table).
_fact =
let
Source = Web.BrowserContents(
"https://stackoverflow.com/questions/68349163/one-to-many-relationship-of-products-sold-with-different-time-periods?noredirect=1#comment120797753_68349163"
),
#"Extracted Table From Html" = Html.Table(
Source,
{
{"Column1", "DIV.s-table-container:nth-child(2) > TABLE.s-table > * > TR > :nth-child(1)"},
{"Column2", "DIV.s-table-container:nth-child(2) > TABLE.s-table > * > TR > :nth-child(2)"},
{"Column3", "DIV.s-table-container:nth-child(2) > TABLE.s-table > * > TR > :nth-child(3)"},
{"Column4", "DIV.s-table-container:nth-child(2) > TABLE.s-table > * > TR > :nth-child(4)"}
},
[RowSelector = "DIV.s-table-container:nth-child(2) > TABLE.s-table > * > TR"]
),
#"Promoted Headers" = Table.PromoteHeaders(
#"Extracted Table From Html",
[PromoteAllScalars = true]
),
#"Changed Type" = Table.TransformColumnTypes(
#"Promoted Headers",
{
{"Date", type text},
{"Type", type text},
{"Product Number", Int64.Type},
{"Sales", Int64.Type}
}
)
in
#"Changed Type",
// Index preserves the original row order so the final output can be re-sorted
// after grouping/expanding.
#"Added Index" = Table.AddIndexColumn(_fact, "Index", 1, 1, Int64.Type),
// Dates arrive as "MM.YYYY" text, so the part before the dot is the month.
#"Split Column by Delimiter" = Table.SplitColumn(
#"Added Index",
"Date",
Splitter.SplitTextByDelimiter(".", QuoteStyle.Csv),
{"month", "year"}
),
#"Changed Type" = Table.TransformColumnTypes(
#"Split Column by Delimiter",
{{"month", Int64.Type}, {"year", Int64.Type}}
),
#"Reordered Columns" = Table.ReorderColumns(
#"Changed Type",
{"Index", "month", "year", "Type", "Product Number", "Sales"}
),
// One nested table ("ad") per (year, month) period.
// NOTE(review): column names containing spaces (e.g. Product Number) normally
// need #"..." quoting in an M type literal — verify this pastes correctly.
#"Grouped Rows" = Table.Group(
#"Reordered Columns",
{"year", "month"},
{
{
"ad",
each _,
type table [
Index = number,
month = nullable number,
year = nullable number,
Type = nullable text,
Product Number = nullable number,
Sales = nullable number
]
}
}
),
// For each period: sort so the "Store Brand" row comes first, then fill its
// product number and sales down onto every "Brand" row, and keep only the
// Brand rows. Result is empty when the period had no Brand rows at all.
#"Added Custom" = Table.AddColumn(
#"Grouped Rows",
"Custom",
each
let
x = [ad],
#"Sorted Rows" = Table.Sort(x, {{"Type", Order.Descending}}),
#"Added Custom" = Table.AddColumn(
#"Sorted Rows",
"Brand Sales",
each if [Type] <> "Store Brand" then [Sales] else null
),
#"Added Custom1" = Table.AddColumn(
#"Added Custom",
"Store Brand Product Number",
each if [Type] = "Store Brand" then [Product Number] else null
),
#"Filled Down" = Table.FillDown(#"Added Custom1", {"Store Brand Product Number"}),
#"Added Custom2" = Table.AddColumn(
#"Filled Down",
"Store Brand Sales",
each if [Type] = "Store Brand" then [Sales] else null
),
#"Filled Down1" = Table.FillDown(#"Added Custom2", {"Store Brand Sales"}),
#"Filtered Rows" = Table.SelectRows(#"Filled Down1", each ([Type] = "Brand")),
#"Reordered Columns1" = Table.ReorderColumns(
#"Filtered Rows",
{
"Index",
"year",
"month",
"Type",
"Product Number",
"Sales",
"Brand Sales",
"Store Brand Product Number",
"Store Brand Sales"
}
),
#"Renamed Columns" = Table.RenameColumns(
#"Reordered Columns1",
{{"Product Number", "Brand Product Number"}}
),
#"Removed Columns" = Table.RemoveColumns(#"Renamed Columns", {"Type", "Sales"}),
#"Reordered Columns2" = Table.ReorderColumns(
#"Removed Columns",
{
"Index",
"year",
"month",
"Store Brand Product Number",
"Brand Product Number",
"Store Brand Sales",
"Brand Sales"
}
)
in
#"Reordered Columns2"
),
// Fallback for store-brand-only periods: when Custom above came out empty,
// left-join the mapping table on the store-brand product number to list the
// comparable Brand products, each with Brand Sales = 0.
#"Added Custom1" = Table.AddColumn(
#"Added Custom",
"Custom.1",
each
if Table.IsEmpty([Custom]) = true then
let
x = [ad],
#"Merged Queries" = Table.NestedJoin(
x,
{"Product Number"},
_mapping,
{"Store Brand"},
"_mapping",
JoinKind.LeftOuter
),
#"Expanded _mapping" = Table.ExpandTableColumn(
#"Merged Queries",
"_mapping",
{"Brand"},
{"Brand"}
),
#"Removed Columns1" = Table.RemoveColumns(#"Expanded _mapping", {"Type"}),
#"Reordered Columns3" = Table.ReorderColumns(
#"Removed Columns1",
{"Index", "year", "month", "Product Number", "Sales", "Brand"}
),
#"Renamed Columns1" = Table.RenameColumns(
#"Reordered Columns3",
{
{"Product Number", "Store Brand Product Number"},
{"Sales", "Store Brand Sales"},
{"Brand", "Brand Product Number"}
}
),
#"Added Custom3" = Table.AddColumn(#"Renamed Columns1", "Brand Sales", each 0),
#"Reordered Columns4" = Table.ReorderColumns(
#"Added Custom3",
{
"Index",
"year",
"month",
"Store Brand Product Number",
"Brand Product Number",
"Store Brand Sales",
"Brand Sales"
}
)
in
#"Reordered Columns4"
else
[Custom]
),
// Flatten the per-period nested tables back into one table.
#"Removed Other Columns" = Table.SelectColumns(#"Added Custom1", {"Custom.1"}),
#"Expanded Custom.1" = Table.ExpandTableColumn(
#"Removed Other Columns",
"Custom.1",
{
"Index",
"year",
"month",
"Store Brand Product Number",
"Brand Product Number",
"Store Brand Sales",
"Brand Sales"
},
{
"Index",
"year",
"month",
"Store Brand Product Number",
"Brand Product Number",
"Store Brand Sales",
"Brand Sales"
}
),
// Restore the original fact-row order, drop padding rows with no Index, carry
// the last known store-brand product forward, and show 0 instead of null when
// the store brand had no sales in a period.
#"Sorted Rows" = Table.Sort(#"Expanded Custom.1", {{"Index", Order.Ascending}}),
#"Filtered Rows" = Table.SelectRows(#"Sorted Rows", each ([Index] <> null)),
#"Filled Down" = Table.FillDown(#"Filtered Rows", {"Store Brand Product Number"}),
#"Replaced Value" = Table.ReplaceValue(
#"Filled Down",
null,
0,
Replacer.ReplaceValue,
{"Store Brand Sales"}
)
in
#"Replaced Value"

Related

Snowflake: JSON data flatten

I have below sql to fetch the data from JSON file, but my file contains array of data with multiple values.
SELECT
select
DISTINCT
,REPLACE(DOCUMENT:"_id"::VARCHAR(50),'guests-','') GUEST_ID
,PARSE_JSON(DOCUMENT):"_rev"::string as GUEST_REVISION_ID
,PARSE_JSON(DOCUMENT):personal_info:addresses:address_id::varchar(255) as ADDRESS_ID
,PARSE_JSON(DOCUMENT):"personal_info":"addresses[]":"address_type"::varchar(255) as ADDRESS_CODE
,UPPER(regexp_replace(PARSE_JSON(DOCUMENT):"personal_info":"addresses[]":"address_line1"::VARCHAR(255),'[\n\r]','')) as ADDRESS_LINE_1
,UPPER(regexp_replace(PARSE_JSON(DOCUMENT):"personal_info":"addresses[]":"address_line2"::VARCHAR(255),'[\n\r]','')) as ADDRESS_LINE_2
,UPPER(regexp_replace(PARSE_JSON(DOCUMENT):"personal_info":"addresses[]":"city"::VARCHAR(255),'[\n\r]','')) as CITY_NAME
,UPPER(PARSE_JSON(DOCUMENT):"personal_info":"addresses[]":"state"::varchar(255)) as STATE_CODE
,UPPER(PARSE_JSON(DOCUMENT):"personal_info":"addresses[]":"country"::varchar(255)) as COUNTRY
,PARSE_JSON(DOCUMENT):"personal_info":"addresses[]":"postal_code"::varchar(255) as POSTAL_CODE
,UPPER(PARSE_JSON(DOCUMENT):"personal_info":"addresses[]":"country_code"::varchar(255)) as COUNTRY_CODE
,UPPER(PARSE_JSON(DOCUMENT):"personal_info":"addresses[]":"first_name"::varchar(255)) as ADDRESS_FIRST_NAME
,UPPER(PARSE_JSON(DOCUMENT):"personal_info":"addresses[]":"last_name"::varchar(255)) as ADDRESS_LAST_NAME
,PARSE_JSON(DOCUMENT):"personal_info":"addresses[]":"phone_number"::varchar(255) as PHONE_NUMBER
,CASE
WHEN LOWER(PARSE_JSON(DOCUMENT):"personal_info":"addresses[]":"primary") = 'true' THEN 1
WHEN LOWER(PARSE_JSON(DOCUMENT):"personal_info":"addresses[]":"primary") = 'false' THEN 0
ELSE NULL END as FLAG
from test
Sample Data :
{
"_id":"guests-240c8ef1-65f0-11e9-8e7e-8568b9f986fb",
"personal_info": {
"addresses": [
{
"address_id":"555148381793213101",
"address_line1":"509 BROADLEAF LANE",
"address_type":"generic",
"city":"MCKINNEY",
"country":"United States",
"country_code":"US",
"postal_code":"75070",
"primary": true,
"state":"TX"
},
{
"address_id":"856855604204997103",
"address_line1":"11 Blossom Dr",
"address_line2":"Basking Ridge",
"address_type":"billing",
"city":"Basking Ridge",
"country":"United States",
"country_code":"US",
"email_address":"deb_ron.fischang#att.net",
"first_name":"Deborah",
"last_name":"Fischang",
"phone_number":"9086723249",
"postal_code":"07920",
"primary": false,
"state":"NJ"
},
{
"address_id":"856855604204997103",
"address_line1":"11 Blossom Dr",
"address_line2":"Basking Ridge",
"address_type":"generic",
"city":"Basking Ridge",
"country":"United States",
"country_code":"US",
"email_address":"deb_ron.fischang#att.net",
"first_name":"Deborah",
"last_name":"Fischang",
"phone_number":"9086723249",
"postal_code":"07920",
"primary": false,
"state":"NJ"
}
]
How can I get this data in multiple rows based on the number of addresses in the array? I tried LATERAL FLATTEN but it's not working.
You may use something like this:
-- Flatten personal_info.addresses so each address object in the array becomes
-- its own row. Inside the SELECT, VALUE refers to the current array element
-- produced by LATERAL FLATTEN, so fields are read with VALUE:key instead of
-- re-parsing DOCUMENT for every column.
SELECT
DISTINCT
REPLACE(DOCUMENT:"_id"::VARCHAR(50),'guests-','') GUEST_ID
,VALUE:address_id::varchar(255) as ADDRESS_ID
,VALUE:"address_type"::varchar(255) as ADDRESS_CODE
,regexp_replace(VALUE:"address_line1"::VARCHAR(255),'[\n\r]','') as ADDRESS_LINE_1
,regexp_replace(VALUE:"address_line2"::VARCHAR(255),'[\n\r]','') as ADDRESS_LINE_2
,regexp_replace(VALUE:"city"::VARCHAR(255),'[\n\r]','') as CITY_NAME
from test, lateral flatten( input => PARSE_JSON(DOCUMENT):"personal_info":"addresses" ) f;

Updating JSON in postgres based on dynamic input of type json array

I have a column in postgres table which is of JSON type and looks something like
{
"Name": "Some Name",
"Stages": [
{
"Title": "Early Flight",
"Tags": [....],
"Date": "2021-11-05T00:00:00",
"CloseDate": ""
},
{
"Title": "Midway Flight",
"Tags": [....],
"Date": "2021-11-05T00:00:00",
"CloseDate": ""
},
{
"Title": "Pro Flight",
"Tags": [....],
"Date": "2021-11-05T00:00:00",
"CloseDate": ""
},
{
"Title": "Expert Start",
"Tags": [....],
"Date": "2021-11-05T00:00:00",
"CloseDate": ""
}
]
}
I want to update the Date for each of the items that are provided in the newInputItem,
meaning the Date for Midway Flight and Expert Flight needs to change.
I tried using CTE as below but the query updates only the first element of the input array in this case its just Midway Flight that gets updated.
WITH newInputItem as
(
select
arr.newInputItem ::json ->> 'Title' as State,
(arr.newInputItem ::json ->> 'NewDate')::timestamp as NewDate
from
json_array_elements('[
{"Title" : "Midway Flight", "Date" : "01 / 01 / 1777"},
{"Title" : "Expert Flight", "Date" : "01 / 01 / 1999"}
]') WITH ORDINALITY arr(newInputItem, index)
),
oldItem AS
(
SELECT
('{Stages,' || index - 1 || ',"Date"}')::TEXT[] AS path,
user_id,
arr.oldItem ::json ->> 'Title' AS title
FROM
department.Process_Instance
jsonb_array_elements(process_instance_data -> 'Stages') WITH ORDINALITY arr(oldItem, index)
WHERE
department.Process_Instance."user_id" = 17
)
UPDATE
department.Process_Instance pi
SET
process_instance_data = jsonb_set(process_instance_data, oldItem.path, to_json(newInputItem.NewDate)::JSONB)
FROM
oldItem,
newInputItem
WHERE
pi.user_id = oldItem.user_id
AND oldItem.title = newInputItem.State;
In order to make several updates into the same jsonb data within the same query, you need to create an aggregate function based on the standard jsonb_set function :
-- 5-argument wrapper around the built-in 4-argument jsonb_set, usable as an
-- aggregate transition function: x is the accumulated document (NULL on the
-- first call), y is the row's original document, so COALESCE(x, y) seeds the
-- aggregate state with the original jsonb and then threads updates through it.
CREATE OR REPLACE FUNCTION jsonb_set (x jsonb, y jsonb, p text[], z jsonb, b boolean)
RETURNS jsonb LANGUAGE sql IMMUTABLE AS
$$ SELECT jsonb_set (COALESCE(x, y), p, z, b) ; $$ ;
-- Aggregate form of jsonb_set: applies one jsonb_set per input row to the same
-- document, so several paths can be updated within a single GROUP BY group.
CREATE AGGREGATE jsonb_set_agg(jsonb, text[], jsonb, boolean)
( sfunc = jsonb_set, stype = jsonb) ;
Then, as you can't call an aggregate function directly in the SET clause of an UPDATE statement, you have to insert an additional cte before your UPDATE statement :
WITH newInputItem as
(
    -- One row per stage to update: (Title, NewDate).
    -- FIX: the input objects use the key "Date", so read 'Date' here —
    -- the original ->> 'NewDate' matched nothing and always produced NULL.
    select
        arr.newInputItem ::json ->> 'Title' as State,
        (arr.newInputItem ::json ->> 'Date')::timestamp as NewDate
    from
        json_array_elements('[
            {"Title" : "Midway Flight", "Date" : "01 / 01 / 1777"},
            {"Title" : "Expert Flight", "Date" : "01 / 01 / 1999"}
        ]') WITH ORDINALITY arr(newInputItem, index)
), oldItem AS
(
    -- One row per existing stage with the jsonb_set path to its Date field.
    -- FIX: the comma before jsonb_array_elements makes this an implicit
    -- LATERAL cross join; without it the statement is a syntax error.
    -- FIX: process_instance_data is selected here so the "final" CTE can
    -- pass it to jsonb_set_agg (it was out of scope in the original).
    SELECT
        ('{Stages,' || index - 1 || ',"Date"}')::TEXT[] AS path,
        user_id,
        process_instance_data,
        arr.oldItem ::json ->> 'Title' AS title
    FROM
        department.Process_Instance,
        jsonb_array_elements(process_instance_data -> 'Stages') WITH ORDINALITY arr(oldItem, index)
    WHERE
        department.Process_Instance."user_id" = 17
), final AS
(
    -- Fold every matching (stage path, new date) pair into one updated jsonb
    -- document per user via the jsonb_set_agg aggregate.
    SELECT oldItem.user_id
         , jsonb_set_agg( oldItem.process_instance_data, oldItem.path,
                          to_json(newInputItem.NewDate)::JSONB, True) AS data_final
    FROM oldItem
    INNER JOIN newInputItem
        ON oldItem.title = newInputItem.State
    GROUP BY oldItem.user_id
)
UPDATE
    department.Process_Instance pi
SET
    process_instance_data = final.data_final
FROM
    final
WHERE
    pi.user_id = final.user_id ;

Snowflake: JSON Data in Array

JSON data as below
{name : Mike, job : [{name: abc, value: 123},{name: def,value: 456}]}
How to retrieve the value of name = abc and def?
EDIT (SOLUTION): I found the solution myself, thanks.
-- Parse the JSON once in a CTE, then LATERAL FLATTEN the "job" array so each
-- element (job.value) becomes its own row; outer => true keeps the row even
-- when the array is missing or empty.
WITH x AS (
SELECT parse_json('{"name" : "Mike", "job" : [{"name": "abc", "value": "123"},{"name": "def","value": "456"}]}' ) as payload_json)
select x.payload_json:name,
job.value:name::varchar as name,
job.value:value::varchar as value
from x,
lateral flatten( input => x.payload_json:job, outer => true) as job;
I got the answer myself as below
WITH x AS (
SELECT parse_json('{"name" : "Mike", "job" : [{"name": "abc", "value": "123"},{"name": "def","value": "456"}]}' ) as payload_json)
select x.payload_json:name,
job.value:name::varchar as name,
job.value:value::varchar as value
from x,
lateral flatten( input => x.payload_json:job, outer => true) as job;

data set with 1 array & 2 nested objects yield <0 rows> (or 0-length row.names) error with tidyjson

I'm working with an AOL data set that I have passed through prettify(). The types and lengths of the data are:
> json_types(People)
document.id type
1 , 1 array
> json_lengths(People)
document.id length
1 , 1, 4
A glimpse of the data after it has gone through prettify():
{
"distinct_id": "159d26d852bc2-0218a9eedf5d02-1d326f50-13c680-159d26d852c2cc",
"time": 1485294450309,
"properties": {
"$browser": "Chrome",
"$browser_version": 55,
"$city": "San Francisco",
"$country_code": "US",
"$email": "amir.movafaghi#mixpanel.com",
"$initial_referrer": "$direct",
"$initial_referring_domain": "$direct",
"$name": "Amir MOvafaghi",
"$os": "Mac OS X",
"$region": "California",
"$timezone": "America/Los_Angeles",
"$transactions": [
{
"$amount": 0.99,
"$time": "2017-01-24T13:43:30.000Z"
}
],
"Favorite Genre": "Rock",
"Lifetime Song Play Count": 1,
"Lifetime Song Purchase Count": 1,
"Plan": "Premium"
},
"last_seen": 1485294450309,
"labels": [
]
},
I set up my transformation as such:
people_b <- People %>%
gather_array %>% # stack the user data
spread_values(
distinct_id = jstring("distinct_id"),
time_id = jnumber("time"),
last_seen = jstring("last_seen"),
label = jstring("label")) %>% # extract user data
enter_object("properties") %>% # stack the properties
spread_values(
browser = jstring("$browser"),
browser_version = jnumber("$browser_version"),
city = jstring("$city"),
country_code = jstring("$country_code"),
email = jstring("$email"),
initial_referrer = jstring("$initial_referrer"),
initial_referring_domain = jstring("$initial_referring_domain"),
name = jstring("$name"),
operating_system = jstring("$os"),
region = jstring("$region"),
timezone = jstring("$timezone"),
favorite_genre = jstring("Favorite Genre"),
first_login_date = jstring("First Login Date"),
lifetime_song_play_count = jnumber("Lifetime Song Play Count"),
lifetime_song_purchase_count = jnumber("Lifetime Song Purchase Count"),
plan = jstring("Plan")) %>% #extract the properties)
enter_object("transactions") %>% #stack the transactions
gather_array %>%
spread_values(
amount = jnumber("$amount"),
transaction_time = jstring("$time")) %>% # extract the transactions
select(distinct_id, time_id, last_seen, label, browser, browser_version, city, country_code, email, initial_referrer,
initial_referring_domain, name, operating_system, region, timezone, favorite_genre,
first_login_date,lifetime_song_play_count, lifetime_song_purchase_count, plan, amount, transaction_time)
However I receive an error code:
> people_b
[1] distinct_id time_id last_seen label
[5] browser browser_version city country_code
[9] email initial_referrer initial_referring_domain name
[13] operating_system region timezone favorite_genre
[17] first_login_date lifetime_song_play_count lifetime_song_purchase_count plan
[21] amount transaction_time
<0 rows> (or 0-length row.names)
sample output from a second data set (that I still need to tidy):
> event_b
name distinct_id label time sampling_factor browser_type
1 Page Loaded 159f0ddf9c437c-0b4d95a6f3b9be-123a6850-13c680-159f0ddf9c525a list() 1.485776e+12 1 Chrome
2 Page Loaded 159f0ddf9c437c-0b4d95a6f3b9be-123a6850-13c680-159f0ddf9c525a list() 1.485776e+12 1 Chrome
3 Sign Up 159f0ddf9c437c-0b4d95a6f3b9be-123a6850-13c680-159f0ddf9c525a list() 1.485776e+12 1 Chrome
4 Page Loaded 159f0ddf9c437c-0b4d95a6f3b9be-123a6850-13c680-159f0ddf9c525a list() 1.485776e+12 1 Chrome
5 Song Played 159f0ddf9c437c-0b4d95a6f3b9be-123a6850-13c680-159f0ddf9c525a list() 1.485776e+12 1 Chrome
6 Song Played 159f0ddf9c437c-0b4d95a6f3b9be-123a6850-13c680-159f0ddf9c525a list() 1.485776e+12 1 Chrome
7 Song Purchased 159f0ddf9c437c-0b4d95a6f3b9be-123a6850-13c680-159f0ddf9c525a list() 1.485776e+12 1 Chrome
8 Plan Downgraded 159f0ddf9c437c-0b4d95a6f3b9be-123a6850-13c680-159f0ddf9c525a list() 1.485776e+12 1 Chrome
It looks to me like your issue is in the enter_object('transactions') component of your pipeline. In your JSON object, you have the key $transactions, so you are using the wrong path. Changing to '$transactions' seemed to work.
...
enter_object("$transactions") %>% #stack the transactions
...
And the full example. Note that I removed gather_array since your example is only a single object.
json <- '{
"distinct_id": "159d26d852bc2-0218a9eedf5d02-1d326f50-13c680-159d26d852c2cc",
"time": 1485294450309,
"properties": {
"$browser": "Chrome",
"$browser_version": 55,
"$city": "San Francisco",
"$country_code": "US",
"$email": "amir.movafaghi#mixpanel.com",
"$initial_referrer": "$direct",
"$initial_referring_domain": "$direct",
"$name": "Amir MOvafaghi",
"$os": "Mac OS X",
"$region": "California",
"$timezone": "America/Los_Angeles",
"$transactions": [
{
"$amount": 0.99,
"$time": "2017-01-24T13:43:30.000Z"
}
],
"Favorite Genre": "Rock",
"Lifetime Song Play Count": 1,
"Lifetime Song Purchase Count": 1,
"Plan": "Premium"
},
"last_seen": 1485294450309,
"labels": [
]
}'
# Build a one-row-per-transaction data frame from the JSON string: top-level
# scalars first, then descend into "properties", then into the "$transactions"
# array. The key is literally "$transactions" — entering "transactions" (no
# "$") matches nothing and yields the <0 rows> result from the question.
people_b <- json %>%
spread_values(
distinct_id = jstring("distinct_id"),
time_id = jnumber("time"),
last_seen = jstring("last_seen"),
label = jstring("label")) %>% # extract user data
enter_object("properties") %>% # stack the properties
spread_values(
browser = jstring("$browser"),
browser_version = jnumber("$browser_version"),
city = jstring("$city"),
country_code = jstring("$country_code"),
email = jstring("$email"),
initial_referrer = jstring("$initial_referrer"),
initial_referring_domain = jstring("$initial_referring_domain"),
name = jstring("$name"),
operating_system = jstring("$os"),
region = jstring("$region"),
timezone = jstring("$timezone"),
favorite_genre = jstring("Favorite Genre"),
first_login_date = jstring("First Login Date"),
lifetime_song_play_count = jnumber("Lifetime Song Play Count"),
lifetime_song_purchase_count = jnumber("Lifetime Song Purchase Count"),
plan = jstring("Plan")) %>% #extract the properties)
enter_object("$transactions") %>% #<<<--- EDITED HERE
gather_array %>%
spread_values(
amount = jnumber("$amount"),
transaction_time = jstring("$time")) %>% # extract the transactions
select(distinct_id, time_id, last_seen, label, browser, browser_version, city, country_code, email, initial_referrer,
initial_referring_domain, name, operating_system, region, timezone, favorite_genre,
first_login_date,lifetime_song_play_count, lifetime_song_purchase_count, plan, amount, transaction_time)
# One transaction in the sample JSON, so the result has exactly one row.
nrow(people_b)
## [1] 1

Condition for while loop not working, unable to populate tableview cells

I'm a beginner to ios and am building my first app...so go easy on me :-)
I am trying to display the results from a web service in tableview cells. However my while loop condition is failing. I moved the nslog into the while loop and it clearly prints the objects in the array as you would expect. However there is something wrong when it ends I believe.
Does anyone have any ideas as to why it is failing?
Basically I'm trying to get 'Recipe name' and 'smallImageUrls; from the web service results and print that in a tableview cell.
I know my tableview cell wont print properly as is. I'll address that in a bit. Probably extract the urls into one array and then use that array to in the tableviewcell method.
Any assistance anyone can provide is greatly appreciated. I just find it strange as it is clearly going through the while loop without any issues. :-S
No real error message just:
self SearchedYummlyViewController * 0x12b39570
matchesCount int 40
in the left part of the pane at the bottom.
- (void)connectionDidFinishLoading:(NSURLConnection *)connection
{
self.searchYummlyRecipeResults = [NSJSONSerialization JSONObjectWithData:self.yummlyRecipesNSData options:nil error:nil];
self.tempYummlyResultsMatches = [[NSMutableArray alloc]init];
int matchesCount = 0;
while([self.searchYummlyRecipeResults[#"matches"]objectAtIndex:matchesCount]){
[self.tempYummlyRecipeMatch addObject:[[self.searchYummlyRecipeResults[#"matches"]objectAtIndex:matchesCount]objectForKey:#"recipeName"]];
[self.tempYummlyRecipeMatch addObject:[[self.searchYummlyRecipeResults[#"matches"]objectAtIndex:matchesCount]objectForKey:#"smallImageUrls"]];
NSLog(#"tempYummlyRecipeMatch array's contents: %#", self.tempYummlyRecipeMatch);
matchesCount++;
}
}
- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath
{
static NSString *CellIdentifier = #"Cell";
UITableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:CellIdentifier];
// Configure the cell...
NSString *recipeString = self.tempYummlyRecipeMatch[indexPath.row];
cell.textLabel.text = recipeString;
//The below is code on how to add a image from a web service to a cell. I dont think that will work but my code cant make it down there yet so I'll tackle that after while loop issue.
NSURL *url = [NSURL URLWithString:self.tempYummlyRecipeMatch[indexPath.row+1];
UIImage *image = [UIImage imageWithData: [NSData dataWithContentsOfURL:url]];
cell.imageView.image = image;
return cell;
}
Example of the webservice output below:
Recipe List: {
attribution = {
html = "<a href='http://www.yummly.com/recipes/onion-soup'>onion soup recipes</a> search powered by <img src='http://static.yummly.com/api-logo.png'/>";
logo = "http://static.yummly.com/api-logo.png";
text = "onion soup recipes: search powered by Yummly";
url = "http://www.yummly.com/recipes/onion-soup";
};
criteria = {
allowedIngredients = (
);
excludedIngredients = (
);
facetFields = (
);
maxResults = 6;
requirePictures = 0;
resultsToSkip = 0;
terms = (
onion,
soup
);
};
facetCounts = {
};
matches = (
{
attributes = {
};
flavors = {
bitter = "0.1666666666666667";
meaty = "0.3333333333333333";
piquant = 0;
salty = "0.5";
sour = "0.3333333333333333";
sweet = "0.5";
};
id = "French-onion-soup-sandwiches-309090";
ingredients = (
"olive oil",
"sea salt",
"fresh thyme leaves",
"granulated sugar",
"yellow onions",
"unsalted butter",
"beef broth",
"cracked black pepper",
"gruyere cheese",
bread
);
rating = 0;
recipeName = "French Onion Soup Sandwiches";
smallImageUrls = (
"http://i.yummly.com/French-onion-soup-sandwiches-309090-273265.s.jpg"
);
sourceDisplayName = "Joy the Baker";
totalTimeInSeconds = 0;
},
{
attributes = {
course = (
"Main Dishes"
);
};
flavors = {
bitter = 1;
meaty = "0.1666666666666667";
piquant = 0;
salty = 1;
sour = "0.1666666666666667";
sweet = "0.3333333333333333";
};
id = "Awesome-Slow-Cooker-Pot-Roast-Allrecipes";
ingredients = (
"condensed cream of mushroom soup",
"onion soup mix",
"pot roast",
water
);
rating = "4.69";
recipeName = "Awesome Slow Cooker Pot Roast";
smallImageUrls = (
"http://i2.yummly.com/Awesome-Slow-Cooker-Pot-Roast-Allrecipes-2.s.png",
"http://i.yummly.com/Awesome-Slow-Cooker-Pot-Roast-Allrecipes-58919.s.png"
);
sourceDisplayName = AllRecipes;
totalTimeInSeconds = 29400;
},
{
attributes = {
course = (
Soups,
Appetizers
);
cuisine = (
French
);
holiday = (
Thanksgiving
);
};
flavors = {
bitter = "0.1666666666666667";
meaty = "0.1666666666666667";
piquant = 0;
salty = "0.1666666666666667";
sour = "0.1666666666666667";
sweet = "0.1666666666666667";
};
id = "French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364";
ingredients = (
"stick butter",
"french bread",
"yellow onion",
"low sodium chicken broth",
"gruyere cheese",
"minced garlic",
"dry white wine",
"worcestershire sauce",
"beef broth"
);
rating = 0;
recipeName = "French Onion Soup";
smallImageUrls = (
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-1512.s.jpg",
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-1199.s.jpg",
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-220.s.jpg"
);
sourceDisplayName = "The Pioneer Woman";
totalTimeInSeconds = 0;
},
{
attributes = {
course = (
Soups
);
};
flavors = "<null>";
id = "Curried-sweet-potato-soup-333086";
ingredients = (
broth,
salt,
"sweet potatoes",
"lite coconut milk",
onion,
"coconut oil",
"curry powder",
garlic,
spinach,
lime,
"chopped cilantro",
"red pepper flakes",
"brown sugar",
peas
);
rating = 0;
recipeName = "Curried Sweet Potato Soup";
smallImageUrls = (
"http://i.yummly.com/Curried-sweet-potato-soup-333086-295006.s.jpg"
);
sourceDisplayName = "Camille Styles";
totalTimeInSeconds = 0;
},
{
attributes = {
};
flavors = "<null>";
id = "French-onion-soup-grilled-cheese-308496";
ingredients = (
"unsalted butter",
"kosher salt",
"sweet onions",
"unsalted beef stock",
pepper,
"gruyere cheese",
"dry sherry",
"italian bread",
"fresh thyme"
);
rating = 0;
recipeName = "French Onion Soup Grilled Cheese";
smallImageUrls = (
"http://i.yummly.com/French-onion-soup-grilled-cheese-308496-272505.s.jpg"
);
sourceDisplayName = "The Kitchn";
totalTimeInSeconds = 0;
},
{
attributes = {
};
flavors = {
bitter = 1;
meaty = 1;
piquant = 0;
salty = 1;
sour = 1;
sweet = "0.8333333333333334";
};
id = "Vidalia-onion-soup-with-wild-rice-and-blue-cheese-305366";
ingredients = (
"vidalia onions",
"chicken stock",
herb,
pepper,
"wild rice",
"unsalted butter",
baguette,
salt,
"extra virgin olive oil",
other
);
rating = 0;
recipeName = "Vidalia Onion Soup with Wild Rice and Blue Cheese";
smallImageUrls = (
"http://i.yummly.com/Vidalia-onion-soup-with-wild-rice-and-blue-cheese-305366-268824.s.jpg"
);
sourceDisplayName = "Smitten Kitchen";
totalTimeInSeconds = 0;
},
Example of tempYummlyRecipeResults nslog
tempYummlyRecipeMatch array's contents: (
"French Onion Soup Sandwiches",
(
"http://i.yummly.com/French-onion-soup-sandwiches-309090-273265.s.jpg"
)
)
2013-02-27 22:13:34.334 CustomTableView[435:11303] tempYummlyRecipeMatch array's contents: (
"French Onion Soup Sandwiches",
(
"http://i.yummly.com/French-onion-soup-sandwiches-309090-273265.s.jpg"
),
"Awesome Slow Cooker Pot Roast",
(
"http://i2.yummly.com/Awesome-Slow-Cooker-Pot-Roast-Allrecipes-2.s.png",
"http://i.yummly.com/Awesome-Slow-Cooker-Pot-Roast-Allrecipes-58919.s.png"
)
)
2013-02-27 22:13:34.335 CustomTableView[435:11303] tempYummlyRecipeMatch array's contents: (
"French Onion Soup Sandwiches",
(
"http://i.yummly.com/French-onion-soup-sandwiches-309090-273265.s.jpg"
),
"Awesome Slow Cooker Pot Roast",
(
"http://i2.yummly.com/Awesome-Slow-Cooker-Pot-Roast-Allrecipes-2.s.png",
"http://i.yummly.com/Awesome-Slow-Cooker-Pot-Roast-Allrecipes-58919.s.png"
),
"French Onion Soup",
(
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-1512.s.jpg",
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-1199.s.jpg",
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-220.s.jpg"
)
)
2013-02-27 22:13:34.335 CustomTableView[435:11303] tempYummlyRecipeMatch array's contents: (
"French Onion Soup Sandwiches",
(
"http://i.yummly.com/French-onion-soup-sandwiches-309090-273265.s.jpg"
),
"Awesome Slow Cooker Pot Roast",
(
"http://i2.yummly.com/Awesome-Slow-Cooker-Pot-Roast-Allrecipes-2.s.png",
"http://i.yummly.com/Awesome-Slow-Cooker-Pot-Roast-Allrecipes-58919.s.png"
),
"French Onion Soup",
(
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-1512.s.jpg",
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-1199.s.jpg",
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-220.s.jpg"
),
"Curried Sweet Potato Soup",
(
"http://i.yummly.com/Curried-sweet-potato-soup-333086-295006.s.jpg"
)
)
2013-02-27 22:13:34.335 CustomTableView[435:11303] tempYummlyRecipeMatch array's contents: (
"French Onion Soup Sandwiches",
(
"http://i.yummly.com/French-onion-soup-sandwiches-309090-273265.s.jpg"
),
"Awesome Slow Cooker Pot Roast",
(
"http://i2.yummly.com/Awesome-Slow-Cooker-Pot-Roast-Allrecipes-2.s.png",
"http://i.yummly.com/Awesome-Slow-Cooker-Pot-Roast-Allrecipes-58919.s.png"
),
"French Onion Soup",
(
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-1512.s.jpg",
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-1199.s.jpg",
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-220.s.jpg"
),
"Curried Sweet Potato Soup",
(
"http://i.yummly.com/Curried-sweet-potato-soup-333086-295006.s.jpg"
),
"French Onion Soup Grilled Cheese",
(
"http://i.yummly.com/French-onion-soup-grilled-cheese-308496-272505.s.jpg"
)
)
2013-02-27 22:13:34.335 CustomTableView[435:11303] tempYummlyRecipeMatch array's contents: (
"French Onion Soup Sandwiches",
(
"http://i.yummly.com/French-onion-soup-sandwiches-309090-273265.s.jpg"
),
"Awesome Slow Cooker Pot Roast",
(
"http://i2.yummly.com/Awesome-Slow-Cooker-Pot-Roast-Allrecipes-2.s.png",
"http://i.yummly.com/Awesome-Slow-Cooker-Pot-Roast-Allrecipes-58919.s.png"
),
"French Onion Soup",
(
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-1512.s.jpg",
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-1199.s.jpg",
"http://i.yummly.com/French-Onion-Soup-The-Pioneer-Woman-Cooks-_-Ree-Drummond-41364-220.s.jpg"
),
"Curried Sweet Potato Soup",
(
"http://i.yummly.com/Curried-sweet-potato-soup-333086-295006.s.jpg"
),
"French Onion Soup Grilled Cheese",
(
"http://i.yu
At the end of your connectionDidFinishLoading, you should reload your table view.
[self.tableView reloadData];
Just updating your array does not instantly update your table view.
EDIT: You are right, your while loop condition is incorrect. Currently, it only checks if the object at index matchesCount is not nil (somewhat similar to while(object) or while(object != nil)). I think what you are looking for is if the number of objects in the array is greater than matchesCount so as to prevent an array index out of bounds exception. To do so, you need to use while([self.searchYummlyRecipeResults[#"matches"] count] > matchesCount).

Resources