import java.io.IOException;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.mahout.common.StringTuple;
/**
 * Mapper that fans each (label, feature) tf-idf weight out under three
 * aggregation keys — per-feature sum, per-label sum, and the grand total —
 * so the reducer can accumulate the weight sums needed by the Bayes trainer.
 */
public class BayesWeightSummerMapper extends MapReduceBase implements
    Mapper<StringTuple,DoubleWritable,StringTuple,DoubleWritable> {

  /**
   * Re-emits the incoming tf-idf weight under three summation keys.
   *
   * @param key      input tuple; element 1 is the label and element 2 the
   *                 feature (element 0 appears to be a type marker — TODO
   *                 confirm against the upstream job that writes these keys)
   * @param value    tf-idf weight for the (label, feature) pair
   * @param output   collector receiving one record per aggregation key
   * @param reporter used to surface the current key as task status
   * @throws IOException if the collector fails to write
   */
  @Override
  public void map(StringTuple key,
                  DoubleWritable value,
                  OutputCollector<StringTuple,DoubleWritable> output,
                  Reporter reporter) throws IOException {
    reporter.setStatus("Bayes Weight Summer Mapper: " + key);

    String label = key.stringAt(1);
    String feature = key.stringAt(2);

    // Sigma_j: weight of this feature summed over all labels.
    output.collect(sumKey(BayesConstants.FEATURE_SUM, feature), value);
    // Sigma_k: weight of this label summed over all features.
    output.collect(sumKey(BayesConstants.LABEL_SUM, label), value);
    // Sigma_k Sigma_j: grand total over all labels and all features.
    output.collect(sumKey(BayesConstants.TOTAL_SUM, null), value);
  }

  /** Builds an aggregation key from a sum marker plus an optional entry. */
  private static StringTuple sumKey(String marker, String entry) {
    StringTuple tuple = new StringTuple(marker);
    if (entry != null) {
      tuple.add(entry);
    }
    return tuple;
  }
}