Diffstat (limited to 'sandbox/ctrezzo/src/Test2.java')
-rw-r--r--  sandbox/ctrezzo/src/Test2.java | 70
1 file changed, 0 insertions(+), 70 deletions(-)
diff --git a/sandbox/ctrezzo/src/Test2.java b/sandbox/ctrezzo/src/Test2.java
deleted file mode 100644
index 2142b3e0d2..0000000000
--- a/sandbox/ctrezzo/src/Test2.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IntWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-
-
-public class Test2 {
-
- /*
- * This submits a Map-Reduce job without using runJar, or the main method in WordCount.
- */
- public static void main(String[] args) {
-
- //this path is a HDFS path
- Path inputPath = new Path("/file01.txt");
- //this path is a HDFS path
- Path outputPath = new Path("/output7");
-
- org.apache.hadoop.examples.WordCount myCount = new org.apache.hadoop.examples.WordCount();
- Configuration conf = new Configuration();
-
- myCount.setConf(conf);
-
- JobConf mapredConf = new JobConf(myCount.getConf(), org.apache.hadoop.examples.WordCount.class);
- mapredConf.setJobName("wordcount");
-
- // the keys are words (strings)
- mapredConf.setOutputKeyClass(Text.class);
- // the values are counts (ints)
- mapredConf.setOutputValueClass(IntWritable.class);
-
- mapredConf.setMapperClass(org.apache.hadoop.examples.WordCount.MapClass.class);
- mapredConf.setCombinerClass(org.apache.hadoop.examples.WordCount.Reduce.class);
- mapredConf.setReducerClass(org.apache.hadoop.examples.WordCount.Reduce.class);
-
- mapredConf.setInputPath(inputPath);
- mapredConf.setOutputPath(outputPath);
-
- try {
- JobClient.runJob(mapredConf);
- }
- catch(Exception e) {
- System.out.println("ERROR: " + e);
- }
- }
-
-}
-
-
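
Note: the deleted Test2.java submits the examples WordCount job through the old org.apache.hadoop.mapred API (JobConf/JobClient, WordCount.MapClass/Reduce). For reference only, below is a minimal sketch of the same programmatic submission against the newer org.apache.hadoop.mapreduce API; it assumes a Hadoop release whose examples WordCount exposes TokenizerMapper and IntSumReducer inner classes, and the class name Test2New is made up for illustration, not taken from this diff.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class Test2New {

    /*
     * Submits the example WordCount job programmatically, without runJar or
     * WordCount's own main method, mirroring the intent of the deleted Test2.java.
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "wordcount");
        job.setJarByClass(Test2New.class);

        // Assumed inner classes of the examples WordCount on newer releases;
        // the deleted file used the older MapClass/Reduce names.
        job.setMapperClass(org.apache.hadoop.examples.WordCount.TokenizerMapper.class);
        job.setCombinerClass(org.apache.hadoop.examples.WordCount.IntSumReducer.class);
        job.setReducerClass(org.apache.hadoop.examples.WordCount.IntSumReducer.class);

        // The keys are words (strings), the values are counts (ints).
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // HDFS paths, same placeholders as in the deleted file.
        FileInputFormat.addInputPath(job, new Path("/file01.txt"));
        FileOutputFormat.setOutputPath(job, new Path("/output7"));

        // Block until the job finishes and propagate success or failure.
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}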