(Repost) Steps to run a C++ program on Hadoop

Step 1. Write the following program and save it as wordcount-simple.cpp:

#include "hadoop/Pipes.hh"
#include "hadoop/TemplateFactory.hh"
#include "hadoop/StringUtils.hh"

const std::string WORDCOUNT = "WORDCOUNT";
const std::string INPUT_WORDS = "INPUT_WORDS";
const std::string OUTPUT_WORDS = "OUTPUT_WORDS";

class WordCountMap: public HadoopPipes::Mapper { // the Mapper class
public:
  HadoopPipes::TaskContext::Counter* inputWords;

  WordCountMap(HadoopPipes::TaskContext& context) {
    inputWords = context.getCounter(WORDCOUNT, INPUT_WORDS);
  }

  void map(HadoopPipes::MapContext& context) {
    std::vector<std::string> words =
        HadoopUtils::splitString(context.getInputValue(), " "); // split the line into words on spaces
    for (unsigned int i = 0; i < words.size(); ++i) {
      context.emit(words[i], "1"); // emit each word as the key, with "1" as the value
    }
    context.incrementCounter(inputWords, words.size()); // update the INPUT_WORDS counter (counter updates also report progress to the framework)
  }
};

class WordCountReduce: public HadoopPipes::Reducer { // the Reducer class
public:
  HadoopPipes::TaskContext::Counter* outputWords;

  WordCountReduce(HadoopPipes::TaskContext& context) {
    outputWords = context.getCounter(WORDCOUNT, OUTPUT_WORDS);
  }

  void reduce(HadoopPipes::ReduceContext& context) {
    int sum = 0;
    while (context.nextValue()) {
      sum += HadoopUtils::toInt(context.getInputValue()); // add up the counts for this word
    }
    context.emit(context.getInputKey(), HadoopUtils::toString(sum)); // emit the word and its total count
    context.incrementCounter(outputWords, 1); // update the OUTPUT_WORDS counter
  }
};

int main(int argc, char *argv[]) {
  // run the map/reduce task, with the factory wiring up our Mapper and Reducer
  return HadoopPipes::runTask(
      HadoopPipes::TemplateFactory<WordCountMap, WordCountReduce>());
}
Step 2. Create a Makefile:
CC = g++
HADOOP_INSTALL = $(HADOOP_HOME)
PLATFORM = Linux-i386-32
CPPFLAGS = -O2 -m32 -I$(HADOOP_INSTALL)/c++/$(PLATFORM)/include
LDFLAGS += -lcrypto -lhadooppipes -lhadooputils -lpthread

wordcount-simple: wordcount-simple.cpp
	$(CC) $(CPPFLAGS) $< -Wall -L$(HADOOP_INSTALL)/c++/$(PLATFORM)/lib $(LDFLAGS) -o $@
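Note that the command line under the wordcount-simple target must be indented with a real tab, and that these paths assume the 32-bit C++ libraries bundled with Hadoop. On a 64-bit machine the bundled libraries usually live under c++/Linux-amd64-64 instead, in which case you would likely change:

PLATFORM = Linux-amd64-64
CPPFLAGS = -O2 -I$(HADOOP_INSTALL)/c++/$(PLATFORM)/include

(i.e. drop the -m32 flag).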
Step 3. Run make to build the executable.
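For example:

make

A wordcount-simple binary should now exist in the current directory.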
Step 4. Upload the compiled executable to HDFS:
hadoop dfs -copyFromLocal ./wordcount-simple /home
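You can verify the upload with:

hadoop dfs -ls /home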
Step 5. Create the configuration file word.xml:
<?xml version="1.0"?>
<configuration>
  <property>
    <!-- path of the executable on HDFS -->
    <name>hadoop.pipes.executable</name>
    <value>/home/wordcount-simple</value>
  </property>
  <property>
    <name>hadoop.pipes.java.recordreader</name>
    <value>true</value>
  </property>
  <property>
    <name>hadoop.pipes.java.recordwriter</name>
    <value>true</value>
  </property>
</configuration>
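Setting hadoop.pipes.java.recordreader and hadoop.pipes.java.recordwriter to true keeps record reading and writing on the Java side, which is what you want for plain text input and output. As an alternative to hardcoding the executable path in the XML, the hadoop pipes command also accepts a -program flag (assuming your Hadoop version supports it):

hadoop pipes -conf ./word.xml -program /home/wordcount-simple -input /home/hello.txt -output /home/result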

Step 6. Create a local file hello.txt with the content: hello world
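For example:

echo "hello world" > hello.txt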

Step 7. Upload hello.txt to HDFS:
hadoop dfs -copyFromLocal ./hello.txt /home
Step 8. Run the program:
hadoop pipes -conf ./word.xml -input /home/hello.txt -output /home/result
(The /home/result directory must not exist beforehand; HDFS will create it automatically to hold the results.)
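Once the job finishes, you can inspect the output; assuming a single reducer, which writes a part-00000 file by default:

hadoop dfs -cat /home/result/part-00000

For the input above this should print:

hello	1
world	1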