/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.mapreduce.lib.chain;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.chain.Chain.ChainBlockingQueue;

import java.io.IOException;

/**
 * The ChainReducer class allows chaining multiple Mapper classes after a
 * Reducer within the Reducer task.
 *
 * <p>
 * For each record output by the Reducer, the Mapper classes are invoked in a
 * chained (or piped) fashion. The output of the Reducer becomes the input of
 * the first Mapper, the output of the first Mapper becomes the input of the
 * second, and so on until the last Mapper; the output of the last Mapper is
 * written to the task's output.
 * </p>
 * <p>
 * The key functionality of this feature is that the Mappers in the chain do
 * not need to be aware that they are executed after the Reducer or in a chain.
 * This enables having reusable specialized Mappers that can be combined to
 * perform composite operations within a single task.
 * </p>
 * <p>
 * Special care has to be taken when creating chains that the key/values output
 * by a Mapper are valid for the following Mapper in the chain. It is assumed
 * all Mappers and the Reducer in the chain use matching output and input key
 * and value classes, as no conversion is done by the chaining code.
 * </p>
 * <p>
 * Using the ChainMapper and the ChainReducer classes it is possible to compose
 * Map/Reduce jobs that look like <code>[MAP+ / REDUCE MAP*]</code>. An
 * immediate benefit of this pattern is a dramatic reduction in disk IO.
 * </p>
 * <p>
 * IMPORTANT: There is no need to specify the output key/value classes for the
 * ChainReducer, this is done by the setReducer or the addMapper for the last
 * element in the chain.
 * </p>
 * ChainReducer usage pattern:
 *
 * <pre>
 * ...
 * Job job = new Job(conf);
 * ...
 *
 * Configuration reduceConf = new Configuration(false);
 * ...
 * ChainReducer.setReducer(job, XReduce.class, LongWritable.class, Text.class,
 *   Text.class, Text.class, reduceConf);
 *
 * ChainReducer.addMapper(job, CMap.class, Text.class, Text.class,
 *   LongWritable.class, Text.class, null);
 *
 * ChainReducer.addMapper(job, DMap.class, LongWritable.class, Text.class,
 *   LongWritable.class, LongWritable.class, null);
 *
 * ...
 *
 * job.waitForCompletion(true);
 * ...
 * </pre>
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class ChainReducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> extends
    Reducer<KEYIN, VALUEIN, KEYOUT, VALUEOUT> {

  /**
   * Sets the {@link Reducer} class to the chain job.
   *
   * <p>
   * The key and values are passed from one element of the chain to the next,
   * by value. For the added Reducer, the configuration given for it,
   * <code>reducerConf</code>, has precedence over the job's Configuration.
   * This precedence is in effect when the task is running.
   * </p>
   * <p>
   * IMPORTANT: There is no need to specify the output key/value classes for
   * the ChainReducer, this is done by the setReducer or the addMapper for the
   * last element in the chain.
   * </p>
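   * <p>
   * A minimal illustrative call might look like the following; the reducer and
   * key/value classes are examples only, not part of this API:
   * </p>
   * <pre>
   * Configuration reduceConf = new Configuration(false);
   * ChainReducer.setReducer(job, MyReducer.class, LongWritable.class,
   *     Text.class, Text.class, Text.class, reduceConf);
   * </pre>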
   *
   * @param job
   *          the job
   * @param klass
   *          the Reducer class to add.
   * @param inputKeyClass
   *          reducer input key class.
   * @param inputValueClass
   *          reducer input value class.
   * @param outputKeyClass
   *          reducer output key class.
   * @param outputValueClass
   *          reducer output value class.
   * @param reducerConf
   *          a configuration for the Reducer class. It is recommended to use a
   *          Configuration without default values using the
   *          <code>Configuration(boolean loadDefaults)</code> constructor with
   *          FALSE.
   */
  public static void setReducer(Job job, Class<? extends Reducer> klass,
      Class<?> inputKeyClass, Class<?> inputValueClass,
      Class<?> outputKeyClass, Class<?> outputValueClass,
      Configuration reducerConf) {
    job.setReducerClass(ChainReducer.class);
    job.setOutputKeyClass(outputKeyClass);
    job.setOutputValueClass(outputValueClass);
    Chain.setReducer(job, klass, inputKeyClass, inputValueClass,
        outputKeyClass, outputValueClass, reducerConf);
  }

  /**
   * Adds a {@link Mapper} class to the chain reducer.
   *
   * <p>
   * The key and values are passed from one element of the chain to the next,
   * by value. For the added Mapper, the configuration given for it,
   * <code>mapperConf</code>, has precedence over the job's Configuration. This
   * precedence is in effect when the task is running.
   * </p>
   * <p>
   * IMPORTANT: There is no need to specify the output key/value classes for
   * the ChainReducer, this is done by the addMapper for the last mapper in the
   * chain.
   * </p>
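   * <p>
   * A minimal illustrative call might look like the following; the mapper and
   * key/value classes are examples only, not part of this API:
   * </p>
   * <pre>
   * ChainReducer.addMapper(job, MyMapper.class, Text.class, Text.class,
   *     LongWritable.class, Text.class, new Configuration(false));
   * </pre>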
   *
   * @param job
   *          The job.
   * @param klass
   *          the Mapper class to add.
   * @param inputKeyClass
   *          mapper input key class.
   * @param inputValueClass
   *          mapper input value class.
   * @param outputKeyClass
   *          mapper output key class.
   * @param outputValueClass
   *          mapper output value class.
   * @param mapperConf
   *          a configuration for the Mapper class. It is recommended to use a
   *          Configuration without default values using the
   *          <code>Configuration(boolean loadDefaults)</code> constructor with
   *          FALSE.
   */
  public static void addMapper(Job job, Class<? extends Mapper> klass,
      Class<?> inputKeyClass, Class<?> inputValueClass,
      Class<?> outputKeyClass, Class<?> outputValueClass,
      Configuration mapperConf) throws IOException {
    job.setOutputKeyClass(outputKeyClass);
    job.setOutputValueClass(outputValueClass);
    Chain.addMapper(false, job, klass, inputKeyClass, inputValueClass,
        outputKeyClass, outputValueClass, mapperConf);
  }

  private Chain chain;

  protected void setup(Context context) {
    // false indicates this chain runs in the reducer task, not the map task
    chain = new Chain(false);
    chain.setup(context.getConfiguration());
  }

  public void run(Context context) throws IOException, InterruptedException {
    setup(context);

    // if no reducer is set, just do nothing
    if (chain.getReducer() == null) {
      return;
    }
    int numMappers = chain.getAllMappers().size();
    // if there are no mappers in chain, run the reducer
    if (numMappers == 0) {
      chain.runReducer(context);
      return;
    }

    // add reducer and all mappers with proper context
    ChainBlockingQueue<Chain.KeyValuePair<?, ?>> inputqueue;
    ChainBlockingQueue<Chain.KeyValuePair<?, ?>> outputqueue;
    // add reducer
    outputqueue = chain.createBlockingQueue();
    chain.addReducer(context, outputqueue);
    // add all mappers except last one
    for (int i = 0; i < numMappers - 1; i++) {
      inputqueue = outputqueue;
      outputqueue = chain.createBlockingQueue();
      chain.addMapper(inputqueue, outputqueue, context, i);
    }
    // add last mapper
    chain.addMapper(outputqueue, context, numMappers - 1);

    // start all threads
    chain.startAllThreads();

    // wait for all threads
    chain.joinAllThreads();
  }
}