Saving data to DynamoDB using Apache Spark

I have an application where:

1. I read JSON files from S3 into a DataFrame using SqlContext.read.json.
2. I then apply some transformations to the DataFrame.
3. Finally, I want to save the records to DynamoDB, using one of the record's values as the key and the other JSON fields as values/columns.

I am trying something like:

JobConf jobConf = new JobConf(sc.hadoopConfiguration());
jobConf.set("dynamodb.servicename", "dynamodb");
jobConf.set("dynamodb.input.tableName", "my-dynamo-table");   // Pointing to DynamoDB table
jobConf.set("dynamodb.endpoint", "dynamodb.us-east-1.amazonaws.com");
jobConf.set("dynamodb.regionid", "us-east-1");
jobConf.set("dynamodb.throughput.read", "1");
jobConf.set("dynamodb.throughput.read.percent", "1");
jobConf.set("dynamodb.version", "2011-12-05");

jobConf.set("mapred.output.format.class", "org.apache.hadoop.dynamodb.write.DynamoDBOutputFormat");
jobConf.set("mapred.input.format.class", "org.apache.hadoop.dynamodb.read.DynamoDBInputFormat");

DataFrame df = sqlContext.read().json("s3n://mybucket/abc.json");
RDD<String> jsonRDD = df.toJSON();
JavaRDD<String> jsonJavaRDD = jsonRDD.toJavaRDD();
PairFunction<String, Text, DynamoDBItemWritable> keyData = new PairFunction<String, Text, DynamoDBItemWritable>() {
    public Tuple2<Text, DynamoDBItemWritable> call(String row) {
        DynamoDBItemWritable writeable = new DynamoDBItemWritable();
        try {
            System.out.println("JSON : " + row);
            JSONObject jsonObject = new JSONObject(row);

            System.out.println("JSON Object: " + jsonObject);

            Map<String, AttributeValue> attributes = new HashMap<String, AttributeValue>();
            AttributeValue attributeValue = new AttributeValue();
            attributeValue.setS(row);
            attributes.put("values", attributeValue);

            AttributeValue attributeKeyValue = new AttributeValue();
            attributeValue.setS(jsonObject.getString("external_id"));
            attributes.put("primary_key", attributeKeyValue);

            AttributeValue attributeSecValue = new AttributeValue();
            attributeValue.setS(jsonObject.getString("123434335"));
            attributes.put("creation_date", attributeSecValue);
            writeable.setItem(attributes);
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        return new Tuple2(new Text(row), writeable);
    }
};

JavaPairRDD<Text, DynamoDBItemWritable> pairs = jsonJavaRDD
        .mapToPair(keyData);

Map<Text, DynamoDBItemWritable> map = pairs.collectAsMap();
System.out.println("Results : " + map);
pairs.saveAsHadoopDataset(jobConf);    

However, I do not see any data being written to DynamoDB, and I do not get any error messages either.

1 answer

I'm not sure exactly what is going wrong, but your approach seems more complicated than it needs to be.

Here is how I write an RDD to DynamoDB:

val ddbInsertFormattedRDD = inputRDD.map { case (skey, svalue) =>
    val ddbMap = new util.HashMap[String, AttributeValue]()

    val key = new AttributeValue()
    key.setS(skey.toString)
    ddbMap.put("DynamoDbKey", key)


    val value = new AttributeValue()
    value.setS(svalue.toString)
    ddbMap.put("DynamoDbKey", value)

    val item = new DynamoDBItemWritable()
    item.setItem(ddbMap)

    (new Text(""), item)
}

val ddbConf = new JobConf(sc.hadoopConfiguration)
ddbConf.set("dynamodb.output.tableName", "my-dynamo-table")
ddbConf.set("dynamodb.throughput.write.percent", "0.5")
ddbConf.set("mapred.input.format.class", "org.apache.hadoop.dynamodb.read.DynamoDBInputFormat")
ddbConf.set("mapred.output.format.class", "org.apache.hadoop.dynamodb.write.DynamoDBOutputFormat")
ddbInsertFormattedRDD.saveAsHadoopDataset(ddbConf)
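
If you want to keep starting from your DataFrame, a rough, untested sketch of the same idea adapted to your JSON rows could look like the following. It assumes every record really has an external_id field and that org.json is on the classpath, as in your snippet:

import org.apache.spark.SparkContext._
import org.apache.hadoop.io.Text
import org.apache.hadoop.dynamodb.DynamoDBItemWritable
import com.amazonaws.services.dynamodbv2.model.AttributeValue
import org.json.JSONObject

// One JSON string per row, same as df.toJSON in your code
val df = sqlContext.read.json("s3n://mybucket/abc.json")
val jsonRDD = df.toJSON

val ddbItems = jsonRDD.map { row =>
    val attributes = new java.util.HashMap[String, AttributeValue]()

    // Whole JSON document stored under a single attribute
    attributes.put("values", new AttributeValue().withS(row))

    // Hash key taken from the external_id field of each record
    val externalId = new JSONObject(row).getString("external_id")
    attributes.put("primary_key", new AttributeValue().withS(externalId))

    val item = new DynamoDBItemWritable()
    item.setItem(attributes)

    // The output format ignores the key, so an empty Text is enough
    (new Text(""), item)
}

// ddbConf as configured above
ddbItems.saveAsHadoopDataset(ddbConf)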

Note that writes go to the table named by dynamodb.output.tableName; your JobConf only sets dynamodb.input.tableName, which as far as I know is only used when reading. Does that help?

