<!-- Workflow-DEF-NAME -->
<workflow-app xmlns='uri:oozie:workflow:0.3' name='shell-wf'>
    <!-- shell1: node-name -->
    <start to='shell1'/>
    <action name='shell1'>
        <shell xmlns="uri:oozie:shell-action:0.1">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <configuration>
                <property>
                    <name>mapred.job.queue.name</name>
                    <value>${queueName}</value>
                </property>
            </configuration>
            <!-- ${EXEC} is the shell script/command to run, e.g. mkdir -->
            <exec>${EXEC}</exec>
            <!-- each <argument> element is one argument passed to exec -->
            <argument>A</argument>
            <argument>B</argument>
            <!-- <file> names the shell script already uploaded to HDFS, e.g. test.sh;
                 it copies the executable to the compute node's current working directory
                 (the part after '#' is the symlink name created there) -->
            <file>${EXEC}#${EXEC}</file>
        </shell>
        <!-- "end" and "fail" below are both node-names -->
        <ok to="end"/>
        <error to="fail"/>
    </action>
    <!-- Example of fork and join used as a pair: start -->
    <fork name="[FORK-NODE-NAME]">
        <path start="[NODE-NAME]"/>
        <!-- ... additional <path> elements as needed ... -->
        <path start="[NODE-NAME]"/>
    </fork>
    <join name="[JOIN-NODE-NAME]" to="[NODE-NAME]"/>
    <!-- Example of fork and join used as a pair: end -->
    <kill name="fail">
        <message>Script failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
    <end name='end'/>
</workflow-app>
编写job.properties
# 这里写的是workflow.xml和job.properties在hdfs的哪个目录下
# Execute is expected to be in the Workflow directory.
oozie.wf.application.path=hdfs://localhost:8020/user/kamrul/workflows/script
#Shell Script to run
EXEC=script.sh
#CPP executable. Executable should be binary compatible to the compute node OS.
#EXEC=hello
#Perl script
#EXEC=script.pl
#jobTracker实际上就是resourceManager的地址
jobTracker=localhost:8021
nameNode=hdfs://localhost:8020
queueName=default