
Usage and code examples of the org.apache.helix.task.WorkflowConfig.getJobDag() method

Reposted. Author: 知者. Updated: 2024-03-24 02:17:05

This article collects Java code examples of org.apache.helix.task.WorkflowConfig.getJobDag() and shows how the method is used in practice. The examples are taken from selected open-source projects published on GitHub/Stack Overflow/Maven and are intended as practical references. Details of WorkflowConfig.getJobDag() are as follows:
Package: org.apache.helix.task
Class: WorkflowConfig
Method: getJobDag

WorkflowConfig.getJobDag overview

getJobDag() returns the workflow's JobDag, the directed acyclic graph of the jobs that make up the workflow. As the examples below show, the returned JobDag exposes the set of job names (getAllNodes()), the parent/child dependency maps (getParentsToChildren() and getChildrenToParents()), and the number of jobs in the DAG (size()).
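
Before the sourced examples, here is a minimal sketch of the pattern they all share: obtain the WorkflowConfig through a TaskDriver, read its JobDag, and optionally combine it with the WorkflowContext to inspect job states. The TaskDriver instance and the workflow name "MyWorkflow" are assumptions made for illustration; they are not part of the sourced examples.

import java.util.Set;

import org.apache.helix.task.JobDag;
import org.apache.helix.task.TaskDriver;
import org.apache.helix.task.TaskState;
import org.apache.helix.task.WorkflowConfig;
import org.apache.helix.task.WorkflowContext;

public class JobDagInspector {
 // Sketch only: 'driver' is assumed to be a TaskDriver that is already connected
 // to the cluster, and "MyWorkflow" is a hypothetical workflow name.
 public static void printJobDag(TaskDriver driver) {
  String workflowName = "MyWorkflow";
  WorkflowConfig cfg = driver.getWorkflowConfig(workflowName);
  if (cfg == null) {
   System.out.println("Workflow " + workflowName + " is not found");
   return;
  }
  JobDag dag = cfg.getJobDag();

  // All (namespaced) job names contained in the workflow.
  Set<String> jobs = dag.getAllNodes();
  System.out.println("Jobs: " + jobs + " (count: " + dag.size() + ")");

  // Dependency edges of the DAG, as used by verifyJobNotInQueue below.
  System.out.println("Child -> parents: " + dag.getChildrenToParents());
  System.out.println("Parent -> children: " + dag.getParentsToChildren());

  // Combine with the WorkflowContext to count jobs that are not yet completed,
  // mirroring getInCompleteJobCount below.
  WorkflowContext ctx = driver.getWorkflowContext(workflowName);
  if (ctx != null) {
   int notCompleted = 0;
   for (String job : jobs) {
    if (ctx.getJobState(job) != TaskState.COMPLETED) {
     notCompleted++;
    }
   }
   System.out.println(notCompleted + " job(s) not yet completed");
  }
 }
}

In practice the TaskDriver is usually constructed from an existing HelixManager connection to the cluster.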

Code examples

Code example source: org.apache.helix/helix-core (identical in apache/helix)

private void removeWorkflowFromZK(String workflow) {
 Set<String> jobSet = new HashSet<>();
 // Note that even if WorkflowConfig is null, the workflow still needs to be removed as long as WorkflowContext exists
 WorkflowConfig wCfg = TaskUtil.getWorkflowConfig(_accessor, workflow);
 if (wCfg != null) {
  jobSet.addAll(wCfg.getJobDag().getAllNodes());
 }
 boolean success = TaskUtil.removeWorkflow(_accessor, _propertyStore, workflow, jobSet);
 if (!success) {
  LOG.info("Failed to delete the workflow " + workflow);
  throw new HelixException("Failed to delete the workflow " + workflow);
 }
}

Code example source: org.apache.helix/helix-core

/**
 * Count the number of jobs in a workflow that are not in final state.
 * @param workflowCfg
 * @param workflowCtx
 * @return
 */
public static int getInCompleteJobCount(WorkflowConfig workflowCfg, WorkflowContext workflowCtx) {
 int inCompleteCount = 0;
 for (String jobName : workflowCfg.getJobDag().getAllNodes()) {
  TaskState jobState = workflowCtx.getJobState(jobName);
  if (jobState == TaskState.IN_PROGRESS || jobState == TaskState.STOPPED) {
   ++inCompleteCount;
  }
 }
 return inCompleteCount;
}

Code example source: apache/helix (unlike the helix-core variant above, this one also counts STOPPING jobs as incomplete)

/**
 * Count the number of jobs in a workflow that are not in final state.
 * @param workflowCfg
 * @param workflowCtx
 * @return
 */
public static int getInCompleteJobCount(WorkflowConfig workflowCfg, WorkflowContext workflowCtx) {
 int inCompleteCount = 0;
 for (String jobName : workflowCfg.getJobDag().getAllNodes()) {
  TaskState jobState = workflowCtx.getJobState(jobName);
  if (jobState == TaskState.IN_PROGRESS || jobState == TaskState.STOPPED
    || jobState == TaskState.STOPPING) {
   ++inCompleteCount;
  }
 }
 return inCompleteCount;
}

Code example source: apache/helix (identical in org.apache.helix/helix-core)

/**
 * Checks if the workflow has been stopped.
 * @param ctx Workflow context containing task states
 * @param cfg Workflow config containing set of tasks
 * @return returns true if all tasks are {@link TaskState#STOPPED}, false otherwise.
 */
private static boolean isWorkflowStopped(WorkflowContext ctx, WorkflowConfig cfg) {
 for (String job : cfg.getJobDag().getAllNodes()) {
  if (ctx.getJobState(job) != TaskState.STOPPED && ctx.getJobState(job) != null) {
   return false;
  }
 }
 return true;
}

Code example source: org.apache.helix/helix-core (identical in apache/helix)

private Set<String> getInstancesAssignedToOtherJobs(String currentJobName,
  WorkflowConfig workflowCfg, ClusterDataCache cache) {
 Set<String> ret = new HashSet<String>();
 for (String jobName : workflowCfg.getJobDag().getAllNodes()) {
  if (jobName.equals(currentJobName)) {
   continue;
  }
  JobContext jobContext = cache.getJobContext(jobName);
  if (jobContext == null) {
   continue;
  }
  for (int partition : jobContext.getPartitionSet()) {
   TaskPartitionState partitionState = jobContext.getPartitionState(partition);
   if (partitionState == TaskPartitionState.INIT ||
     partitionState == TaskPartitionState.RUNNING) {
    ret.add(jobContext.getAssignedParticipant(partition));
   }
  }
 }
 return ret;
}

Code example source: org.apache.helix/helix-core (identical in apache/helix)

/**
 * Checks if the workflow has completed.
 * @param ctx Workflow context containing job states
 * @param cfg Workflow config containing set of jobs
 * @return returns true if all tasks are {@link TaskState#COMPLETED}, false otherwise.
 */
private static boolean isWorkflowComplete(WorkflowContext ctx, WorkflowConfig cfg) {
 if (!cfg.isTerminable()) {
  return false;
 }
 for (String job : cfg.getJobDag().getAllNodes()) {
  if (ctx.getJobState(job) != TaskState.COMPLETED) {
   return false;
  }
 }
 return true;
}

Code example source: apache/helix

private void verifyJobNotInQueue(String queueName, String namedSpacedJobName) {
 WorkflowConfig workflowCfg = _driver.getWorkflowConfig(queueName);
 JobDag dag = workflowCfg.getJobDag();
 Assert.assertFalse(dag.getAllNodes().contains(namedSpacedJobName));
 Assert.assertFalse(dag.getChildrenToParents().containsKey(namedSpacedJobName));
 Assert.assertFalse(dag.getParentsToChildren().containsKey(namedSpacedJobName));
}

Code example source: org.apache.helix/helix-core (identical in apache/helix)

/**
 * Checks if the workflow has been stopped.
 * @param ctx Workflow context containing task states
 * @param cfg Workflow config containing set of tasks
 * @return returns true if all tasks are {@link TaskState#STOPPED}, false otherwise.
 */
protected boolean isWorkflowStopped(WorkflowContext ctx, WorkflowConfig cfg) {
 for (String job : cfg.getJobDag().getAllNodes()) {
  TaskState jobState = ctx.getJobState(job);
  if (jobState != null
    && (jobState.equals(TaskState.IN_PROGRESS) || jobState.equals(TaskState.STOPPING))) {
   return false;
  }
 }
 return true;
}

Code example source: apache/helix

@GET
public Response getJobs(@PathParam("clusterId") String clusterId,
  @PathParam("workflowName") String workflowName) {
 TaskDriver driver = getTaskDriver(clusterId);
 WorkflowConfig workflowConfig = driver.getWorkflowConfig(workflowName);
 ObjectNode root = JsonNodeFactory.instance.objectNode();
 if (workflowConfig == null) {
  return badRequest(String.format("Workflow %s is not found!", workflowName));
 }
 Set<String> jobs = workflowConfig.getJobDag().getAllNodes();
 root.put(Properties.id.name(), JobProperties.Jobs.name());
 ArrayNode jobsNode = root.putArray(JobProperties.Jobs.name());
 if (jobs != null) {
  jobsNode.addAll((ArrayNode) OBJECT_MAPPER.valueToTree(jobs));
 }
 return JSONRepresentation(root);
}

Code example source: apache/helix (identical in org.apache.helix/helix-core)

public WorkflowConfig(WorkflowConfig cfg, String workflowId) {
 this(workflowId, cfg.getJobDag(), cfg.getParallelJobs(), cfg.getTargetState(), cfg.getExpiry(),
   cfg.getFailureThreshold(), cfg.isTerminable(), cfg.getScheduleConfig(), cfg.getCapacity(),
   cfg.getWorkflowType(), cfg.isJobQueue(), cfg.getJobTypes(), cfg.getJobPurgeInterval(),
   cfg.isAllowOverlapJobAssignment(), cfg.getTimeout());
}

Code example source: org.apache.helix/helix-core (identical in apache/helix)

public Builder(WorkflowConfig workflowConfig) {
 _workflowId = workflowConfig.getWorkflowId();
 _taskDag = workflowConfig.getJobDag();
 _parallelJobs = workflowConfig.getParallelJobs();
 _targetState = workflowConfig.getTargetState();
 _expiry = workflowConfig.getExpiry();
 _isTerminable = workflowConfig.isTerminable();
 _scheduleConfig = workflowConfig.getScheduleConfig();
 _capacity = workflowConfig.getCapacity();
 _failureThreshold = workflowConfig.getFailureThreshold();
 _workflowType = workflowConfig.getWorkflowType();
 _isJobQueue = workflowConfig.isJobQueue();
 _jobTypes = workflowConfig.getJobTypes();
 _jobPurgeInterval = workflowConfig.getJobPurgeInterval();
 _allowOverlapJobAssignment = workflowConfig.isAllowOverlapJobAssignment();
 _timeout = workflowConfig.getTimeout();
}

Code example source: apache/helix

@Test
public void testJobQueueCleanUp() throws InterruptedException {
 String queueName = TestHelper.getTestMethodName();
 JobQueue.Builder builder = TaskTestUtil.buildJobQueue(queueName);
 JobConfig.Builder jobBuilder =
   new JobConfig.Builder().setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
     .setCommand(MockTask.TASK_COMMAND).setMaxAttemptsPerTask(2)
     .setJobCommandConfigMap(ImmutableMap.of(MockTask.SUCCESS_COUNT_BEFORE_FAIL, "2"));
 for (int i = 0; i < 5; i++) {
  builder.enqueueJob("JOB" + i, jobBuilder);
 }
 _driver.start(builder.build());
 _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, "JOB" + 4),
   TaskState.FAILED);
 _driver.cleanupQueue(queueName);
 Assert.assertEquals(_driver.getWorkflowConfig(queueName).getJobDag().size(), 0);
}

Code example source: apache/helix

@Test public void testJobQueueNotCleanupRunningJobs() throws InterruptedException {
 String queueName = TestHelper.getTestMethodName();
 JobQueue.Builder builder = TaskTestUtil.buildJobQueue(queueName);
 JobConfig.Builder jobBuilder =
   new JobConfig.Builder().setTargetResource(WorkflowGenerator.DEFAULT_TGT_DB)
     .setCommand(MockTask.TASK_COMMAND).setMaxAttemptsPerTask(2);
 for (int i = 0; i < 3; i++) {
  builder.enqueueJob("JOB" + i, jobBuilder);
 }
 builder.enqueueJob("JOB" + 3,
   jobBuilder.setJobCommandConfigMap(ImmutableMap.of(MockTask.JOB_DELAY, "1000000")));
 builder.enqueueJob("JOB" + 4, jobBuilder);
 _driver.start(builder.build());
 _driver.pollForJobState(queueName, TaskUtil.getNamespacedJobName(queueName, "JOB" + 3),
   TaskState.IN_PROGRESS);
 _driver.cleanupQueue(queueName);
 Assert.assertEquals(_driver.getWorkflowConfig(queueName).getJobDag().size(), 2);
}
