
Commit 13dca89

Merge pull request #7 from huydx/master

Directly use Configuration instead of getting it from a Job in the BasicSaveProtoBuf example.

2 parents: fb07383 + bd3975b

File tree: 1 file changed (+2, -3)

src/main/scala/com/oreilly/learningsparkexamples/scala/BasicSaveProtoBuf.scala

Lines changed: 2 additions & 3 deletions

@@ -11,15 +11,14 @@ import org.apache.spark.SparkContext._
 import org.apache.hadoop.io.Text
 import com.twitter.elephantbird.mapreduce.io.ProtobufWritable
 import com.twitter.elephantbird.mapreduce.output.LzoProtobufBlockOutputFormat
-import org.apache.hadoop.mapreduce.Job
+import org.apache.hadoop.conf.Configuration
 
 object BasicSaveProtoBuf {
   def main(args: Array[String]) {
     val master = args(0)
     val outputFile = args(1)
     val sc = new SparkContext(master, "BasicSaveProtoBuf", System.getenv("SPARK_HOME"))
-    val job = new Job()
-    val conf = job.getConfiguration
+    val conf = new Configuration()
     LzoProtobufBlockOutputFormat.setClassConf(classOf[Places.Venue], conf);
     val dnaLounge = Places.Venue.newBuilder()
     dnaLounge.setId(1);
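
The hunk ends before the write call. For context, here is a minimal sketch of how a Configuration prepared this way is typically passed to Spark's Hadoop output API; the RDD wiring, the key values, and the surrounding names (sc, outputFile) are illustrative assumptions drawn from the imports shown above, not lines from this commit.

// Sketch only: assumes the imports shown in the hunk, plus an existing
// SparkContext `sc`, the generated Places.Venue protobuf class, and an
// `outputFile` path. None of this is part of the diff itself.
val conf = new Configuration()
// Record which protobuf class LzoProtobufBlockOutputFormat should serialize.
LzoProtobufBlockOutputFormat.setClassConf(classOf[Places.Venue], conf)

val venue = Places.Venue.newBuilder().setId(1).build()

// Wrap each message in a ProtobufWritable inside the closure so the driver
// does not have to ship a non-serializable writable to the executors.
val records = sc.parallelize(Seq(venue)).map { pb =>
  val writable = ProtobufWritable.newInstance(classOf[Places.Venue])
  writable.set(pb)
  (new Text(pb.getId.toString), writable)
}

// The same Configuration carries the setClassConf() setting into the output job.
records.saveAsNewAPIHadoopFile(
  outputFile,
  classOf[Text],
  classOf[ProtobufWritable[Places.Venue]],
  classOf[LzoProtobufBlockOutputFormat[ProtobufWritable[Places.Venue]]],
  conf)

Creating the Configuration directly avoids constructing a throwaway Job object whose only purpose was to hand back its configuration.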
