-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path shell.sample
43 lines (29 loc) · 1.33 KB
/
shell.sample
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
import org.apache.hadoop.io.Text;
import org.apache.hadoop.dynamodb.DynamoDBItemWritable
import org.apache.hadoop.dynamodb.read.DynamoDBInputFormat
import org.apache.hadoop.hive.dynamodb.read.HiveDynamoDBInputFormat
import org.apache.hadoop.dynamodb.write.DynamoDBOutputFormat
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.io.LongWritable
// Hadoop job configuration for DynamoDB I/O: read from table "test",
// write to table "testOutput".
// `val` (not `var`): this reference is never reassigned after creation.
val jobConf = new JobConf(sc.hadoopConfiguration)
jobConf.set("dynamodb.input.tableName", "test")
jobConf.set("dynamodb.output.tableName", "testOutput")
// The DynamoDB connector classes implement the legacy org.apache.hadoop.mapred
// API, hence the mapred.* (not mapreduce.*) configuration keys.
jobConf.set("mapred.output.format.class", "org.apache.hadoop.dynamodb.write.DynamoDBOutputFormat")
jobConf.set("mapred.input.format.class", "org.apache.hadoop.dynamodb.read.DynamoDBInputFormat")
import java.util.HashMap
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
// Sample (name, value) pairs to insert into DynamoDB.
// `val` throughout: none of these references are ever reassigned.
val aryRDD = sc.parallelize(Array(("name3", "value3"), ("name4", "value4")))
// Reshape each pair into the (Text, DynamoDBItemWritable) record form that
// DynamoDBOutputFormat expects. The Text key is not meaningful to the writer,
// so an empty Text is emitted; destructuring `case (name, value)` replaces the
// opaque tuple accessors a._1 / a._2.
val ddbInsertFormattedRDD = aryRDD.map { case (name, value) =>
  // A DynamoDB item is a map of attribute name -> AttributeValue.
  val ddbMap = new HashMap[String, AttributeValue]()
  val itemKey = new AttributeValue()
  itemKey.setS(name)
  ddbMap.put("name", itemKey)
  val itemValue = new AttributeValue()
  itemValue.setS(value)
  ddbMap.put("value", itemValue)
  val item = new DynamoDBItemWritable()
  item.setItem(ddbMap)
  (new Text(""), item)
}
// Write the formatted records to DynamoDB using the output table and format
// class configured in jobConf ("dynamodb.output.tableName" = "testOutput").
ddbInsertFormattedRDD.saveAsHadoopDataset(jobConf)
// Separate insert path through Spark SQL into a Hive table named "ddbtable".
// NOTE(review): presumably this Hive table is backed by DynamoDB (the script
// imports HiveDynamoDBInputFormat) — confirm against the table's DDL.
spark.sql("insert into ddbtable values ('name10','value10')")