
hadoop - Cannot stop an Oozie logic bomb


I am working on a Hadoop cluster that uses Oozie as its job scheduler. One of my teammates made a mistake and ran the following Oozie workflow on the cluster:

<?xml version="1.0" encoding="UTF-8"?>
<!-- An adapter ... -->
<workflow-app xmlns="uri:oozie:workflow:0.5" name="My-Workflow">
    <global>
        <job-tracker>${wf:conf('oozie.job.tracker')}</job-tracker>
        <name-node>${wf:conf('oozie.name.node')}</name-node>
        <configuration>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${wf:conf('queueName')}</value>
            </property>
        </configuration>
    </global>

    <start to="compute-start"/>

    <fork name="compute-start">
        <path start="ci_mr-workflow"/>
        <path start="ci_mr_rem-workflow"/>
        <path start="ci_reg-workflow"/>
        <path start="ci_reg_read-workflow"/>
        <path start="ci_sp_geo-workflow"/>
        <path start="ci_sp_mtr_hist-workflow"/>
        <path start="cm_c_mr_bloc-workflow"/>
    </fork>

    <!-- Sub Workflows -->

    <action name="ci_mr-workflow">
        <sub-workflow>
            <app-path>${wf:conf('app.dir')}/oozie/ci_mr-workflow.xml</app-path>
            <propagate-configuration/>
        </sub-workflow>
        <ok to="compute-end"/>
        <error to="fail"/>
    </action>

    <action name="ci_mr_rem-workflow">
        <sub-workflow>
            <app-path>${wf:conf('app.dir')}/oozie/ci_mr_rem-workflow.xml</app-path>
            <propagate-configuration/>
        </sub-workflow>
        <ok to="compute-end"/>
        <error to="fail"/>
    </action>

    <action name="ci_reg-workflow">
        <sub-workflow>
            <app-path>${wf:conf('app.dir')}/oozie/ci_reg-workflow.xml</app-path>
            <propagate-configuration/>
        </sub-workflow>
        <ok to="compute-end"/>
        <error to="fail"/>
    </action>

    <action name="ci_reg_read-workflow">
        <sub-workflow>
            <app-path>${wf:conf('app.dir')}/oozie/ci_reg_read-workflow.xml</app-path>
            <propagate-configuration/>
        </sub-workflow>
        <ok to="compute-end"/>
        <error to="fail"/>
    </action>

    <action name="ci_sp_geo-workflow">
        <sub-workflow>
            <app-path>${wf:conf('app.dir')}/oozie/ci_sp_geo-workflow.xml</app-path>
            <propagate-configuration/>
        </sub-workflow>
        <ok to="compute-end"/>
        <error to="fail"/>
    </action>

    <action name="ci_sp_mtr_hist-workflow">
        <sub-workflow>
            <app-path>${wf:conf('app.dir')}/oozie/ci_sp_mtr_hist-workflow.xml</app-path>
            <propagate-configuration/>
        </sub-workflow>
        <ok to="compute-end"/>
        <error to="fail"/>
    </action>

    <action name="cm_c_mr_bloc-workflow">
        <sub-workflow>
            <app-path>${wf:conf('app.dir')}/oozie/cm_c_mr_bloc-workflow.xml</app-path>
            <propagate-configuration/>
        </sub-workflow>
        <ok to="compute-end"/>
        <error to="fail"/>
    </action>

    <join name="compute-end" to="end"/>

    <kill name="fail">
        <message>Job failed, error message: ${wf:errorMessage(wf:lastErrorNode())}</message>
    </kill>

    <end name="end"/>
</workflow-app>

And here is an example of one of the sub-workflows:
<?xml version="1.0" encoding="UTF-8"?>
<workflow-app xmlns="uri:oozie:workflow:0.5" name="My_Subworkflow">
    <global>
        <job-tracker>${wf:conf('oozie.job.tracker')}</job-tracker>
        <name-node>${wf:conf('oozie.name.node')}</name-node>
        <configuration>
            <property>
                <name>oozie.launcher.mapred.job.queue.name</name>
                <value>${wf:conf('queueName')}</value>
            </property>
        </configuration>
    </global>

    <start to="oracle-compute-ci_mr_rem"/>

    <action name="oracle-compute-ci_mr_rem">
        <sub-workflow>
            <app-path>${wf:conf('app.dir')}/oozie/_Oracle-workflow.xml</app-path>
            <propagate-configuration/>
            <configuration>
                <property>
                    <name>computeName</name>
                    <value>ci_mr_rem</value>
                </property>
                <property>
                    <name>computeTargetName</name>
                    <value>ci_mr_rem</value>
                </property>
                <property>
                    <name>computeFullPath</name>
                    <value>ci_mr_rem</value>
                </property>
                <property>
                    <name>computeType</name>
                    <value>Objects</value>
                </property>
                <property>
                    <name>computeSourceType</name>
                    <value>SourceType</value>
                </property>
                <property>
                    <name>computeSourceName</name>
                    <value>SourceName</value>
                </property>
            </configuration>
        </sub-workflow>
        <ok to="compute-end"/>
        <error to="fail"/>
    </action>

    <join name="compute-end" to="end"/>

    <kill name="fail">
        <message>Job failed, error message: ${wf:errorMessage(wf:lastErrorNode())}</message>
    </kill>

    <end name="end"/>
</workflow-app>

This workflow has turned into a logic bomb: it keeps duplicating itself (and its sub-workflows) endlessly... I suspect the "fork" action copies the parent workflow into the sub-workflows (much like forking a process in programming).

I quickly deleted the workflow files and directories and ran the command:
oozie jobs -kill -filter user=my_user

But that did not stop the process; the jobs are still there and keep duplicating themselves.
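
For reference, the same cleanup can also be attempted job by job with the standard Oozie CLI. This is only a sketch: it assumes OOZIE_URL is exported (otherwise each command needs -oozie http://<oozie-host>:11000/oozie), that job ids contain the default "-oozie-" system id, and it reuses my_user from the filter above.

# List every RUNNING workflow owned by my_user, then kill each one by id.
oozie jobs -jobtype wf -filter 'user=my_user;status=RUNNING' -len 1000 \
  | awk '/-oozie-/ {print $1}' \
  | xargs -r -n 1 oozie job -kill
# Coordinators and bundles are separate job types and need their own pass
# (e.g. -jobtype coordinator), since a live coordinator can keep materializing new workflow runs.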

Any help would be appreciated.

Best Answer

Restart the cluster. The Oozie jobs will not restart.
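
If the intent is to bounce just the Oozie service rather than the whole cluster, a plain tarball install ships an oozied.sh control script. The sketch below assumes such an install with OOZIE_HOME pointing at it; managed distributions such as CDH or HDP restart Oozie through their own service manager instead.

# Restart the Oozie server on a stock tarball install (paths are illustrative).
$OOZIE_HOME/bin/oozied.sh stop
$OOZIE_HOME/bin/oozied.sh start

# Afterwards, confirm the server is healthy and that no runaway jobs came back:
oozie admin -status
oozie jobs -filter 'user=my_user;status=RUNNING' -len 10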

Regarding "hadoop - Cannot stop an Oozie logic bomb", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/53812043/
