I am trying to merge two JSON inputs (in this example they come from files, but later they will come from a Google Pub/Sub input) from these:
orderID.json:
{"orderID":"test1","orderPacked":"Yes","orderSubmitted":"Yes","orderVerified":"Yes","stage":1}
combined.json:
{"barcode":"95590","name":"Ash","quantity":6,"orderID":"test1"}
{"barcode":"95591","name":"Beat","quantity":6,"orderID":"test1"}
{"barcode":"95592","name":"Cat","quantity":6,"orderID":"test1"}
{"barcode":"95593","name":"Dog","quantity":6,"orderID":"test2"}
{"barcode":"95594","name":"Scar","quantity":6,"orderID":"test2"}
into something like this (using orderID as the unique primary key; a plain-Python sketch of the intended merge follows the example):
output.json:
{"orderID":"test1","orderPacked":"Yes","orderSubmitted":"Yes","orderVerified":"Yes","stage":1,"barcode":"95590","name":"Ash","quantity":6}
{"orderID":"test1","orderPacked":"Yes","orderSubmitted":"Yes","orderVerified":"Yes","stage":1,"barcode":"95591","name":"Beat","quantity":6}
{"orderID":"test1","orderPacked":"Yes","orderSubmitted":"Yes","orderVerified":"Yes","stage":1,"barcode":"95592","name":"Cat","quantity":6}
My code currently looks like this; it was adapted from "join two json in Google Cloud Platform with dataflow":
from __future__ import absolute_import
import argparse
import apache_beam as beam
import json
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.options.pipeline_options import StandardOptions
from google.api_core import datetime_helpers
from google.api_core.exceptions import InternalServerError
from google.api_core.exceptions import ServiceUnavailable
from google.api_core.exceptions import TooManyRequests
from google.cloud import bigquery
def run(argv=None):
    """Build and run the pipeline."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--topic',
        type=str,
        help='Pub/Sub topic to read from')
    parser.add_argument(
        '--topic2',
        type=str,
        help='Pub/Sub topic to match with'
    )
    parser.add_argument(
        '--output',
        help=('Output local filename'))
    args, pipeline_args = parser.parse_known_args(argv)

    options = PipelineOptions(pipeline_args)
    options.view_as(SetupOptions).save_main_session = True
    options.view_as(StandardOptions).streaming = True

    p = beam.Pipeline(options=options)

    orderID = (p | 'read from text1' >> beam.io.ReadFromText('orderID.json')
               # 'Read from orderID PubSub' >> beam.io.ReadFromPubSub(topic=args.topic2)
               | 'Parse JSON to Dict' >> beam.Map(lambda e: json.loads(e))
               | 'key_orderID' >> beam.Map(lambda orders: (orders['orderID'], orders))
               )

    orders_si = beam.pvalue.AsDict(orderID)

    orderDetails = (p | 'read from text' >> beam.io.ReadFromText('combined.json')
                    | 'Parse JSON to Dict1' >> beam.Map(lambda e: json.loads(e)))
    # 'Read from PubSub' >> beam.io.ReadFromPubSub(topic=args.topic))

    def join_orderID_orderDetails(order, order_dict):
        return order.update(order_dict[order['orderID']])

    joined_dicts = orderDetails | beam.Map(join_orderID_orderDetails, order_dict=orders_si)

    joined_dicts | beam.io.WriteToText('beam.output')

    p.run()
    # result.wait_until_finish()


if __name__ == '__main__':
    run()
But my output in beam.output now just shows:
None
None
None
Can someone point out what I am doing wrong here?
What makes this question different from the reported duplicate post is:
- Why are my results "None"?
- What am I doing wrong here?
I suspect these are the issues:
- The "order" variable - is it correctly referenced in "join_orderID_orderDetails"?
- The call to "join_orderID_orderDetails" in "joined_dicts" - is that correctly referenced too? (A sketch of what I expect each merged element to look like follows below.)