
I am trying to write a MapReduce program that takes CSV as input and writes it out in ORC format, but it fails with a NullPointerException. Below are the job code and the exception thrown while converting the CSV to ORC.

The MapReduce job:

/** driver code **/

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.io.orc.OrcNewOutputFormat;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class RunORC extends Configured implements Tool {

    public static void main(String[] args) throws Exception {
        int res = ToolRunner.run(new Configuration(), new RunORC(), args);
        System.exit(res);
    }

    public int run(String[] arg) throws Exception {
        Configuration conf = getConf();

        // Set ORC configuration parameters
        conf.set("orc.create.index", "true");

        Job job = Job.getInstance(conf);
        job.setJarByClass(RunORC.class);
        job.setJobName("ORC Output");

        // Map-only job: the mapper writes ORC rows directly
        job.setMapperClass(ORCMapper.class);
        // job.setReducerClass(OrcReducer.class);
        // job.setNumReduceTasks(Integer.parseInt(arg[2]));
        job.setNumReduceTasks(0);

        job.setMapOutputKeyClass(NullWritable.class);
        job.setMapOutputValueClass(Writable.class);

        // job.setOutputKeyClass(NullWritable.class);
        // job.setOutputValueClass(Writable.class);
        job.setOutputFormatClass(OrcNewOutputFormat.class);

        FileInputFormat.addInputPath(job, new Path(arg[0]));
        Path output = new Path(arg[1]);

        // OrcNewOutputFormat.setCompressOutput(job, true);
        OrcNewOutputFormat.setOutputPath(job, output);

        return job.waitForCompletion(true) ? 0 : 1;
    }
}

/** stack trace I am getting after running the job **/

java.lang.Exception: java.lang.NullPointerException 
    at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:406) 
Caused by: java.lang.NullPointerException 
    at org.apache.hadoop.hive.ql.io.orc.WriterImpl.createTreeWriter(WriterImpl.java:1584) 
    at org.apache.hadoop.hive.ql.io.orc.WriterImpl.<init>(WriterImpl.java:176) 
    at org.apache.hadoop.hive.ql.io.orc.OrcFile.createWriter(OrcFile.java:369) 
    at org.apache.hadoop.hive.ql.io.orc.OrcNewOutputFormat$OrcRecordWriter.write(OrcNewOutputFormat.java:51) 
    at org.apache.hadoop.hive.ql.io.orc.OrcNewOutputFormat$OrcRecordWriter.write(OrcNewOutputFormat.java:37) 
    at org.apache.hadoop.mapred.MapTask$NewDirectOutputCollector.write(MapTask.java:558) 
    at org.apache.hadoop.mapreduce.task.TaskInputOutputContextImpl.write(TaskInputOutputContextImpl.java:85) 
    at org.apache.hadoop.mapreduce.lib.map.WrappedMapper$Context.write(WrappedMapper.java:106) 
    at ORCMapper.map(ORCMapper.java:22) 
    at ORCMapper.map(ORCMapper.java:1) 
    at org.apache.hadoop.mapreduce.Mapper.run(Mapper.java:140) 
    at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:672) 
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:330) 
    at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:268) 
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) 
    at java.util.concurrent.FutureTask.run(FutureTask.java:262) 
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) 
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) 
    at java.lang.Thread.run(Thread.java:745) 

/** code for the mapper **/

import java.io.IOException;

import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Mapper;

public class ORCMapper extends Mapper<LongWritable, Text, NullWritable, Writable> {

    private final OrcSerde serde = new OrcSerde();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // Note: serialize is called with a null ObjectInspector here
        Writable row = serde.serialize(value, null);
        context.write(NullWritable.get(), row);
    }
}
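
The trace shows the NullPointerException surfacing in WriterImpl.createTreeWriter, which is reached through OrcSerde.serialize: the mapper passes null as the ObjectInspector, so the ORC writer has no schema from which to build its column writers. A minimal sketch of a mapper that supplies an inspector, assuming a single-column struct<line:string> schema (the class name ORCMapperWithInspector, the column name line, and the schema itself are illustrative assumptions, not from the original post):

import java.io.IOException;
import java.util.Arrays;

import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Mapper;

public class ORCMapperWithInspector extends Mapper<LongWritable, Text, NullWritable, Writable> {

    private final OrcSerde serde = new OrcSerde();

    // Assumed one-column schema; widen to struct<col1:string,col2:int,...> for real CSV rows
    private final TypeInfo typeInfo =
            TypeInfoUtils.getTypeInfoFromTypeString("struct<line:string>");
    private final ObjectInspector inspector =
            TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(typeInfo);

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        // The standard Java struct inspector expects the row as a List of field values
        Writable row = serde.serialize(Arrays.asList(value.toString()), inspector);
        context.write(NullWritable.get(), row);
    }
}

The driver would then reference this class in job.setMapperClass(...); for a real CSV input, the line would be split into fields and the struct type string widened to match.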
