diff options
author | lresende <lresende@13f79535-47bb-0310-9956-ffa450edef68> | 2008-08-15 16:38:03 +0000 |
---|---|---|
committer | lresende <lresende@13f79535-47bb-0310-9956-ffa450edef68> | 2008-08-15 16:38:03 +0000 |
commit | 2732a927f48aba526c48000bd9fb065117abfd55 (patch) | |
tree | 9e37dfa36d37504a352bf42956828e1ec951e316 /sandbox/ctrezzo/HadoopTest | |
parent | 7c66d5dff2273acf4d32da658d6ac5d59b9f20a7 (diff) |
Refactoring to move src to its own project module
git-svn-id: http://svn.us.apache.org/repos/asf/tuscany@686275 13f79535-47bb-0310-9956-ffa450edef68
Diffstat (limited to 'sandbox/ctrezzo/HadoopTest')
-rw-r--r-- | sandbox/ctrezzo/HadoopTest/file01.txt | 1 | ||||
-rw-r--r-- | sandbox/ctrezzo/HadoopTest/src/Test.java | 50 | ||||
-rw-r--r-- | sandbox/ctrezzo/HadoopTest/src/Test2.java | 70 | ||||
-rw-r--r-- | sandbox/ctrezzo/HadoopTest/src/org/apache/hadoop/myExamples/WordCount.java | 157 | ||||
-rw-r--r-- | sandbox/ctrezzo/HadoopTest/src/services/WordCount.java | 31 | ||||
-rw-r--r-- | sandbox/ctrezzo/HadoopTest/src/services/WordCountImpl.java | 69 |
6 files changed, 378 insertions, 0 deletions
diff --git a/sandbox/ctrezzo/HadoopTest/file01.txt b/sandbox/ctrezzo/HadoopTest/file01.txt new file mode 100644 index 0000000000..3171a0c052 --- /dev/null +++ b/sandbox/ctrezzo/HadoopTest/file01.txt @@ -0,0 +1 @@ +Hello World Bye World
\ No newline at end of file diff --git a/sandbox/ctrezzo/HadoopTest/src/Test.java b/sandbox/ctrezzo/HadoopTest/src/Test.java new file mode 100644 index 0000000000..d15f84bb79 --- /dev/null +++ b/sandbox/ctrezzo/HadoopTest/src/Test.java @@ -0,0 +1,50 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.hadoop.util.RunJar; + +public class Test { + + /** + * @param args + * + * The following should be entered as parameters for the main method: + * Jar File, Name of Main Class, Input File, Output directory + * + * Example: + * wordcount.jar org.apache.hadoop.examples.WordCount /file01.txt /output1 + * + * The jar file has to be in the same directory as the executable. The input/output paths + * refer to either a local directory, or an HDFS directory, depending on which job tracker + * is used. + * + * Also, all Hadoop jar files must be on the class path, as well as the Hadoop conf directory. 
+ */ + public static void main(String[] args) { + + try { + RunJar.main(args); + } + catch(Throwable e) { + System.out.println(e); + } + + } + +} diff --git a/sandbox/ctrezzo/HadoopTest/src/Test2.java b/sandbox/ctrezzo/HadoopTest/src/Test2.java new file mode 100644 index 0000000000..2142b3e0d2 --- /dev/null +++ b/sandbox/ctrezzo/HadoopTest/src/Test2.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.JobClient; +import org.apache.hadoop.mapred.JobConf; + + +public class Test2 { + + /* + * This submits a Map-Reduce job without using runJar, or the main method in WordCount. 
+ */ + public static void main(String[] args) { + + //this path is a HDFS path + Path inputPath = new Path("/file01.txt"); + //this path is a HDFS path + Path outputPath = new Path("/output7"); + + org.apache.hadoop.examples.WordCount myCount = new org.apache.hadoop.examples.WordCount(); + Configuration conf = new Configuration(); + + myCount.setConf(conf); + + JobConf mapredConf = new JobConf(myCount.getConf(), org.apache.hadoop.examples.WordCount.class); + mapredConf.setJobName("wordcount"); + + // the keys are words (strings) + mapredConf.setOutputKeyClass(Text.class); + // the values are counts (ints) + mapredConf.setOutputValueClass(IntWritable.class); + + mapredConf.setMapperClass(org.apache.hadoop.examples.WordCount.MapClass.class); + mapredConf.setCombinerClass(org.apache.hadoop.examples.WordCount.Reduce.class); + mapredConf.setReducerClass(org.apache.hadoop.examples.WordCount.Reduce.class); + + mapredConf.setInputPath(inputPath); + mapredConf.setOutputPath(outputPath); + + try { + JobClient.runJob(mapredConf); + } + catch(Exception e) { + System.out.println("ERROR: " + e); + } + } + +} + + diff --git a/sandbox/ctrezzo/HadoopTest/src/org/apache/hadoop/myExamples/WordCount.java b/sandbox/ctrezzo/HadoopTest/src/org/apache/hadoop/myExamples/WordCount.java new file mode 100644 index 0000000000..54b43905af --- /dev/null +++ b/sandbox/ctrezzo/HadoopTest/src/org/apache/hadoop/myExamples/WordCount.java @@ -0,0 +1,157 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.myExamples; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Iterator; +import java.util.List; +import java.util.StringTokenizer; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.conf.Configured; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.LongWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.JobClient; +import org.apache.hadoop.mapred.JobConf; +import org.apache.hadoop.mapred.MapReduceBase; +import org.apache.hadoop.mapred.Mapper; +import org.apache.hadoop.mapred.OutputCollector; +import org.apache.hadoop.mapred.Reducer; +import org.apache.hadoop.mapred.Reporter; +import org.apache.hadoop.util.Tool; +import org.apache.hadoop.util.ToolRunner; + +/** + * This is an example Hadoop Map/Reduce application. + * It reads the text input files, breaks each line into words + * and counts them. The output is a locally sorted list of words and the + * count of how often they occurred. + * + * To run: bin/hadoop jar build/hadoop-examples.jar wordcount + * [-m <i>maps</i>] [-r <i>reduces</i>] <i>in-dir</i> <i>out-dir</i> + */ +public class WordCount extends Configured implements Tool { + + /** + * Counts the words in each line. + * For each line of input, break the line into words and emit them as + * (<b>word</b>, <b>1</b>). 
+ */ + public static class MapClass extends MapReduceBase + implements Mapper<LongWritable, Text, Text, IntWritable> { + + private final static IntWritable one = new IntWritable(1); + private Text word = new Text(); + + public void map(LongWritable key, Text value, + OutputCollector<Text, IntWritable> output, + Reporter reporter) throws IOException { + String line = value.toString(); + StringTokenizer itr = new StringTokenizer(line); + while (itr.hasMoreTokens()) { + word.set(itr.nextToken()); + output.collect(word, one); + } + } + } + + /** + * A reducer class that just emits the sum of the input values. + */ + public static class Reduce extends MapReduceBase + implements Reducer<Text, IntWritable, Text, IntWritable> { + + public void reduce(Text key, Iterator<IntWritable> values, + OutputCollector<Text, IntWritable> output, + Reporter reporter) throws IOException { + int sum = 0; + while (values.hasNext()) { + sum += values.next().get(); + } + output.collect(key, new IntWritable(sum)); + } + } + + static int printUsage() { + System.out.println("wordcount [-m <maps>] [-r <reduces>] <input> <output>"); + ToolRunner.printGenericCommandUsage(System.out); + return -1; + } + + /** + * The main driver for word count map/reduce program. + * Invoke this method to submit the map/reduce job. + * @throws IOException When there is communication problems with the + * job tracker. 
+ */ + public int run(String[] args) throws Exception { + JobConf conf = new JobConf(getConf(), WordCount.class); + conf.setJobName("wordcount"); + + // the keys are words (strings) + conf.setOutputKeyClass(Text.class); + // the values are counts (ints) + conf.setOutputValueClass(IntWritable.class); + + conf.setMapperClass(MapClass.class); + conf.setCombinerClass(Reduce.class); + conf.setReducerClass(Reduce.class); + + List<String> other_args = new ArrayList<String>(); + for(int i=0; i < args.length; ++i) { + try { + if ("-m".equals(args[i])) { + conf.setNumMapTasks(Integer.parseInt(args[++i])); + } else if ("-r".equals(args[i])) { + conf.setNumReduceTasks(Integer.parseInt(args[++i])); + } else { + other_args.add(args[i]); + } + } catch (NumberFormatException except) { + System.out.println("ERROR: Integer expected instead of " + args[i]); + return printUsage(); + } catch (ArrayIndexOutOfBoundsException except) { + System.out.println("ERROR: Required parameter missing from " + + args[i-1]); + return printUsage(); + } + } + // Make sure there are exactly 2 parameters left. + if (other_args.size() != 2) { + System.out.println("ERROR: Wrong number of parameters: " + + other_args.size() + " instead of 2."); + return printUsage(); + } + conf.setInputPath(new Path(other_args.get(0))); + conf.setOutputPath(new Path(other_args.get(1))); + + JobClient.runJob(conf); + return 0; + } + + + public static void main(String[] args) throws Exception { + int res = ToolRunner.run(new Configuration(), new WordCount(), args); + System.exit(res); + } + +} diff --git a/sandbox/ctrezzo/HadoopTest/src/services/WordCount.java b/sandbox/ctrezzo/HadoopTest/src/services/WordCount.java new file mode 100644 index 0000000000..a055efe76c --- /dev/null +++ b/sandbox/ctrezzo/HadoopTest/src/services/WordCount.java @@ -0,0 +1,31 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package services; +import org.osoa.sca.annotations.Remotable; + +@Remotable +public interface WordCount { + + /* + * Runs the Map-Reduce job. + */ + void runJob(); + +} diff --git a/sandbox/ctrezzo/HadoopTest/src/services/WordCountImpl.java b/sandbox/ctrezzo/HadoopTest/src/services/WordCountImpl.java new file mode 100644 index 0000000000..0575506cc4 --- /dev/null +++ b/sandbox/ctrezzo/HadoopTest/src/services/WordCountImpl.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package services; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.io.IntWritable; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.mapred.JobClient; +import org.apache.hadoop.mapred.JobConf; +import org.osoa.sca.annotations.Property; + +public class WordCountImpl implements services.WordCount { + + //this path is a HDFS path + @Property + Path inputPath = new Path("/file01.txt"); + + //this path is a HDFS path + @Property + Path outputPath = new Path("/output1"); + + public void runJob() { + + org.apache.hadoop.examples.WordCount myCount = new org.apache.hadoop.examples.WordCount(); + Configuration conf = new Configuration(); + + myCount.setConf(conf); + + JobConf mapredConf = new JobConf(myCount.getConf(), org.apache.hadoop.examples.WordCount.class); + mapredConf.setJobName("wordcount"); + + // the keys are words (strings) + mapredConf.setOutputKeyClass(Text.class); + // the values are counts (ints) + mapredConf.setOutputValueClass(IntWritable.class); + + mapredConf.setMapperClass(org.apache.hadoop.examples.WordCount.MapClass.class); + mapredConf.setCombinerClass(org.apache.hadoop.examples.WordCount.Reduce.class); + mapredConf.setReducerClass(org.apache.hadoop.examples.WordCount.Reduce.class); + + mapredConf.setInputPath(inputPath); + mapredConf.setOutputPath(outputPath); + + try { + JobClient.runJob(mapredConf); + } + catch(Exception e) { + System.out.println("ERROR: " + e); + } + } +} |