Note that there are some explanatory texts on larger screens.

plurals
  1. PO
    primarykey
    data
    text
    <p>I think as you are trying to output NULL as key from the map so you can use NullWritable. So your code would be something as below:-</p> <pre><code>import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.LongWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.GenericOptionsParser; public class ParseDataToDB { public static final String SEPARATOR_FIELD = new String(new char[] {1}); public static final String SEPARATOR_ARRAY_VALUE = new String(new char[] {2}); public static class MyMapper extends Mapper&lt;LongWritable, Text, NullWritable, Text&gt; { //private final static IntWritable one = new IntWritable(1); private Text word = new Text(); private ArrayList&lt;String&gt; bazValues = new ArrayList&lt;String&gt;(); public void map(LongWritable key, Text value, OutputCollector&lt;NullWritable, Text&gt; context) throws IOException { String line = value.toString(); StringTokenizer tokenizer = new StringTokenizer(line); while(tokenizer.hasMoreTokens()){ word.set(tokenizer.nextToken()); if(word.find("extract") &gt; -1) { System.out.println("in herer"); bazValues.add(line); } } // Build up the array values as a delimited string. StringBuilder bazValueBuilder = new StringBuilder(); int i = 0; for (String bazValue : bazValues) { bazValueBuilder.append(bazValue); ++i; if (i &lt; bazValues.size()) { bazValueBuilder.append(SEPARATOR_ARRAY_VALUE); } } // Build up the column values / fields as a delimited string. 
String hiveRow = new String(); hiveRow += "fooValue"; hiveRow += SEPARATOR_FIELD; hiveRow += "barValue"; hiveRow += SEPARATOR_FIELD; hiveRow += bazValueBuilder.toString(); System.out.println("in herer hiveRow" + hiveRow); // Emit a null key and a Text object containing the delimited fields context.collect(NullWritable.get(), new Text(hiveRow)); } } public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException { Configuration conf = new Configuration(); String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs(); Job job = new Job(conf, "MyTest"); job.setJarByClass(ParseDataToDB.class); job.setMapperClass(MyMapper.class); job.setMapOutputKeyClass(NullWritable.class); job.setMapOutputValueClass(Text.class); job.setOutputKeyClass(NullWritable.class); job.setOutputValueClass(Text.class); FileInputFormat.addInputPath(job, new Path(otherArgs[0])); FileOutputFormat.setOutputPath(job, new Path(otherArgs[1])); System.exit(job.waitForCompletion(true) ? 0 : 1); } </code></pre> <p>}</p>
    singulars
    1. This table or related slice is empty.
    1. This table or related slice is empty.
    plurals
    1. This table or related slice is empty.
    1. This table or related slice is empty.
    1. This table or related slice is empty.
    1. This table or related slice is empty.
    1. This table or related slice is empty.
    1. This table or related slice is empty.
 

Querying!

 
Guidance

SQuiL has stopped working due to an internal error.

If you are curious, you may find further information in the browser console, which is accessible through the developer tools (F12).

Reload