buildVocab.java
package docsim;
import java.io.*;
import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.jobcontrol.*;
import org.apache.hadoop.util.*;
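/**
 * Builds a term-frequency vocabulary with Hadoop's old mapred API: the
 * input XML is sliced into <body>...</body> records, each record is
 * tokenized, and total counts per word are emitted, WordCount-style.
 * Optional DistributedCache files supply regex skip patterns (-skip)
 * and a docid-to-name map (-docmap).
 *
 * Sketch of the assumed invocation (jar name is illustrative):
 *   hadoop jar docsim.jar docsim.buildVocab <input> [-skip patterns] [-docmap docid_map.txt]
 */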
public class buildVocab extends Configured implements Tool {

  public static class Map extends MapReduceBase implements Mapper<LongWritable, Text, Text, IntWritable> {

    static enum Counters { INPUT_WORDS };

    private final static IntWritable one = new IntWritable(1);
    private Text word = new Text();
    private boolean caseSensitive = false;
    private Set<String> patternsToSkip = new HashSet<String>();
    // Numeric document id -> document name, loaded from the cached docid map.
    private HashMap<Integer, String> docIdMap = new HashMap<Integer, String>();
    private long numRecords = 0;
    private String inputFile;
    public void configure(JobConf job) {
      caseSensitive = job.getBoolean("docsim.case.sensitive", true);
      inputFile = job.get("map.input.file");

      if (job.getBoolean("docsim.skip.patterns", true)) {
        Path[] cachedFiles = new Path[0];
        try {
          cachedFiles = DistributedCache.getLocalCacheFiles(job);
        } catch (IOException ioe) {
          System.err.println("Caught exception while getting cached files: " + StringUtils.stringifyException(ioe));
        }
        // Assumption: any cached file other than the docid map holds skip patterns.
        for (Path cachedFile : cachedFiles) {
          if (!cachedFile.getName().matches("docid_map.txt"))
            parseSkipFile(cachedFile);
        }
      }

      if (job.getBoolean("docsim.docids.map", true)) {
        Path[] cachedFiles = new Path[0];
        try {
          cachedFiles = DistributedCache.getLocalCacheFiles(job);
        } catch (IOException ioe) {
          System.err.println("Caught exception while getting cached files: " + StringUtils.stringifyException(ioe));
        }
        for (Path cachedFile : cachedFiles) {
          if (cachedFile.getName().matches("docid_map.txt"))
            loadDocIdMap(cachedFile);
        }
      }
    }
    // Reads one regex pattern per line from the cached patterns file.
    private void parseSkipFile(Path patternsFile) {
      try {
        BufferedReader fis = new BufferedReader(new FileReader(patternsFile.toString()));
        String pattern = null;
        while ((pattern = fis.readLine()) != null) {
          patternsToSkip.add(pattern);
        }
        fis.close();
      } catch (IOException ioe) {
        System.err.println("Caught exception while parsing the cached file '" + patternsFile + "' : " + StringUtils.stringifyException(ioe));
      }
    }
    // Loads "<numeric id> <document name>" pairs from the cached docid map.
    private void loadDocIdMap(Path docIdsFile) {
      try {
        BufferedReader fis = new BufferedReader(new FileReader(docIdsFile.toString()));
        String line = null;
        while ((line = fis.readLine()) != null) {
          StringTokenizer st = new StringTokenizer(line);
          Integer key = Integer.parseInt(st.nextToken());
          String value = st.nextToken();
          docIdMap.put(key, value);
        }
        fis.close();
      } catch (IOException ioe) {
        System.err.println("Caught exception while parsing the cached file '" + docIdsFile + "' : " + StringUtils.stringifyException(ioe));
      }
    }
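    /**
     * Emits (word, 1) for each token in a record, optionally lower-casing
     * the text first and stripping every cached skip pattern via replaceAll.
     */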
    public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
      String line = (caseSensitive) ? value.toString() : value.toString().toLowerCase();

      // Remove every skip pattern before tokenizing.
      for (String pattern : patternsToSkip) {
        line = line.replaceAll(pattern, "");
      }

      StringTokenizer tokenizer = new StringTokenizer(line);
      while (tokenizer.hasMoreTokens()) {
        word.set(tokenizer.nextToken());
        output.collect(word, one);
        reporter.incrCounter(Counters.INPUT_WORDS, 1);
      }

      if ((++numRecords % 100) == 0) {
        reporter.setStatus("Finished processing " + numRecords + " records from the input file: " + inputFile);
      }
    }
  }
  public static class Reduce extends MapReduceBase implements Reducer<Text, IntWritable, Text, IntWritable> {
    public void reduce(Text key, Iterator<IntWritable> values, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
      int sum = 0;
      while (values.hasNext()) {
        sum += values.next().get();
      }
      output.collect(key, new IntWritable(sum));
    }
  }
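  /**
   * Configures and submits the job. XmlInputFormat (assumed to be the
   * repo's XML record reader) slices the input into <body>...</body>
   * records; Reduce doubles as the combiner; -skip and -docmap ship
   * their files to the tasks through the DistributedCache.
   */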
  public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(getConf(), buildVocab.class);
    conf.setJobName("vocab");

    conf.setOutputKeyClass(Text.class);
    conf.setOutputValueClass(IntWritable.class);

    conf.setMapperClass(Map.class);
    conf.setCombinerClass(Reduce.class);
    conf.setReducerClass(Reduce.class);

    // Each map record is the text between <body> and </body> tags.
    conf.set(XmlInputFormat.START_TAG_KEY, "<body>");
    conf.set(XmlInputFormat.END_TAG_KEY, "</body>");
    conf.setInputFormat(XmlInputFormat.class);
    conf.setOutputFormat(TextOutputFormat.class);

    conf.setNumMapTasks(20);
    conf.setNumReduceTasks(20);

    List<String> other_args = new ArrayList<String>();
    for (int i = 0; i < args.length; ++i) {
      if ("-skip".equals(args[i])) {
        DistributedCache.addCacheFile(new Path(args[++i]).toUri(), conf);
        conf.setBoolean("docsim.skip.patterns", true);
      } else if ("-docmap".equals(args[i])) {
        DistributedCache.addCacheFile(new Path(args[++i]).toUri(), conf);
        conf.setBoolean("docsim.docids.map", true);
      } else {
        other_args.add(args[i]);
      }
    }

    FileInputFormat.setInputPaths(conf, new Path(other_args.get(0)));
    FileOutputFormat.setOutputPath(conf, new Path(other_args.get(1)));

    JobClient.runJob(conf);
    return 0;
  }
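  /**
   * Entry point: args[0] is the input path and the output is fixed to
   * vocab.txt; any remaining flags are passed through to run().
   */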
  public static void main(String[] args) throws Exception {
    String inpath = args[0];
    String outpath = "vocab.txt";
    // Pass trailing flags (e.g. -skip, -docmap) through so run() can see them.
    List<String> jobArgs = new ArrayList<String>();
    jobArgs.add(inpath);
    jobArgs.add(outpath);
    for (int i = 1; i < args.length; ++i) {
      jobArgs.add(args[i]);
    }
    int res = ToolRunner.run(new Configuration(), new buildVocab(), jobArgs.toArray(new String[0]));
    System.exit(res);
  }
}