path: root/sandbox/ctrezzo/HadoopTest/src/Test2.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * 
 *   http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.    
 */

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;


public class Test2 {
	
	/*
	 * Submits a MapReduce job programmatically, without going through
	 * RunJar or the main method of WordCount.
	 */
	public static void main(String[] args) {
		
	    // input file on HDFS
	    Path inputPath = new Path("/file01.txt");
	    // output directory on HDFS; the job fails if it already exists
	    Path outputPath = new Path("/output7");
	    
	    org.apache.hadoop.examples.WordCount myCount = new org.apache.hadoop.examples.WordCount();
		Configuration conf = new Configuration();
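		// a fresh Configuration loads hadoop-default.xml / hadoop-site.xml
		// from the classpath; setConf below hands it to WordCount, which
		// extends Configured and so can carry it via getConf()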
		
		myCount.setConf(conf);
		
		JobConf mapredConf = new JobConf(myCount.getConf(), org.apache.hadoop.examples.WordCount.class);
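	    // the WordCount class argument identifies the jar to ship to the
	    // cluster, so the task JVMs can load the mapper and reducer classes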
	    mapredConf.setJobName("wordcount");
	 
	    // the keys are words (strings)
	    mapredConf.setOutputKeyClass(Text.class);
	    // the values are counts (ints)
	    mapredConf.setOutputValueClass(IntWritable.class);
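	    // these also type the intermediate map outputs, unless overridden
	    // with setMapOutputKeyClass / setMapOutputValueClass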
	    
	    mapredConf.setMapperClass(org.apache.hadoop.examples.WordCount.MapClass.class);        
	    mapredConf.setCombinerClass(org.apache.hadoop.examples.WordCount.Reduce.class);
	    mapredConf.setReducerClass(org.apache.hadoop.examples.WordCount.Reduce.class);
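	    // reusing Reduce as the combiner pre-aggregates counts on the map
	    // side, shrinking the data shuffled to the reducers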
	    
	    mapredConf.setInputPath(inputPath);
	    mapredConf.setOutputPath(outputPath);
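	    // note: setInputPath/setOutputPath are the old JobConf API; later
	    // releases moved these to FileInputFormat.setInputPaths() and
	    // FileOutputFormat.setOutputPath()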
	    
	    try {
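	    	// runJob submits the job and blocks until it finishes, printing
	    	// progress to stdout; it throws an exception if the job fails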
	    	JobClient.runJob(mapredConf);
	    }
	    catch (Exception e) {
	    	// report failure on stderr with the full stack trace
	    	System.err.println("ERROR: " + e);
	    	e.printStackTrace();
	    }
	}
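	
	/*
	 * A minimal sketch (not in the original) of non-blocking submission with
	 * the same old mapred API, assuming a JobConf configured as in main():
	 * submitJob returns a RunningJob handle immediately instead of blocking.
	 */
	public static void submitAsync(JobConf mapredConf) throws Exception {
		JobClient client = new JobClient(mapredConf);
		// returns as soon as the job is submitted
		org.apache.hadoop.mapred.RunningJob running = client.submitJob(mapredConf);
		// block only when we choose to, then check the outcome
		running.waitForCompletion();
		System.out.println("job successful: " + running.isSuccessful());
	}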
	
}