Oozie 调用 Spark
1.workflow.xml
<workflow-app name="Spark-example2" xmlns="uri:oozie:workflow:0.5">
    <start to="SparkOozieAction1"/>
    <action name="SparkOozieAction1">
        <spark xmlns="uri:oozie:spark-action:0.1">
            <job-tracker>${jobTracker}</job-tracker>
            <name-node>${nameNode}</name-node>
            <configuration>
                <property>
                    <!-- YARN queue the action is submitted to -->
                    <name>mapred.job.queue.name</name>
                    <value>${queueName}</value>
                </property>
            </configuration>
            <!-- Run on YARN in cluster deploy mode -->
            <master>yarn-cluster</master>
            <mode>cluster</mode>
            <name>Spark Example1</name>
            <!-- Main class and application jar (resolved from oozie.libpath) -->
            <class>com.ocn.itv.rinse.ErrorCollectRinse</class>
            <jar>ocn-itv-spark-3.0.3-rc1.jar</jar>
            <!-- Extra spark-submit options, supplied via job.properties -->
            <spark-opts>${sparkopts}</spark-opts>
            <arg>${input1}</arg>
        </spark>
        <ok to="end"/>
        <error to="kill"/>
    </action>
    <kill name="kill">
        <message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
    </kill>
    <end name="end"/>
</workflow-app>
job.properties
# HDFS NameNode endpoint (filesystem port)
nameNode=hdfs://hgdp-001:8020
# YARN ResourceManager endpoint
jobTracker=hgdp-001:8032
# Oozie submission queue
queueName=default
# Date argument passed to the Spark job
input1=2017-05-09
# Custom HDFS base path
hdfspath=user/root
# Project root directory name
examplesRoot=ocn-itv-oozie
# Load the Oozie system share lib (provides the Spark action jars)
oozie.use.system.libpath=true
# Options passed through to spark-submit
sparkopts=--executor-memory 1G
# User-defined lib directory holding the application jars
oozie.libpath=${nameNode}/${hdfspath}/${examplesRoot}/lib/
# HDFS location of workflow.xml
oozie.wf.application.path=${nameNode}/${hdfspath}/${examplesRoot}/wf/wf2/
文章结构紧凑,层次分明,逻辑严密,让人一读即懂。