gpt4 book ai didi

co.cask.cdap.api.workflow.WorkflowSpecification类的使用及代码示例

转载 作者:知者 更新时间:2024-03-26 04:51:05 27 4
gpt4 key购买 nike

本文整理了Java中co.cask.cdap.api.workflow.WorkflowSpecification类的一些代码示例,展示了WorkflowSpecification类的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。WorkflowSpecification类的具体详情如下:
包路径:co.cask.cdap.api.workflow.WorkflowSpecification
类名称:WorkflowSpecification

WorkflowSpecification介绍

[英]Specification for a Workflow
[中]工作流规范

代码示例

代码示例来源:origin: caskdata/cdap

@Override
public WorkflowSpecification deserialize(JsonElement json, Type typeOfT,
                                         JsonDeserializationContext context) throws JsonParseException {
  // Reads back the properties written by the matching serializer, in the same order.
  JsonObject obj = json.getAsJsonObject();

  String className = obj.get("className").getAsString();
  String name = obj.get("name").getAsString();
  String description = obj.get("description").getAsString();
  Map<String, Plugin> plugins = deserializeMap(obj.get("plugins"), context, Plugin.class);
  Map<String, String> properties = deserializeMap(obj.get("properties"), context, String.class);
  List<WorkflowNode> nodes = deserializeList(obj.get("nodes"), context, WorkflowNode.class);
  Map<String, DatasetCreationSpec> localDatasetSpecs =
    deserializeMap(obj.get("localDatasetSpecs"), context, DatasetCreationSpec.class);

  // Note the constructor takes plugins last, while it is serialized before properties.
  return new WorkflowSpecification(className, name, description, properties, nodes, localDatasetSpecs, plugins);
}
}

代码示例来源:origin: co.cask.cdap/cdap-proto

@Override
public JsonElement serialize(WorkflowSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  // Property insertion order is preserved by JsonObject, so the emitted order
  // below is the order in the serialized JSON.
  JsonObject out = new JsonObject();
  out.add("className", new JsonPrimitive(src.getClassName()));
  out.add("name", new JsonPrimitive(src.getName()));
  out.add("description", new JsonPrimitive(src.getDescription()));
  out.add("plugins", serializeMap(src.getPlugins(), context, Plugin.class));
  out.add("properties", serializeMap(src.getProperties(), context, String.class));
  out.add("nodes", serializeList(src.getNodes(), context, WorkflowNode.class));
  out.add("localDatasetSpecs", serializeMap(src.getLocalDatasetSpecs(), context, DatasetCreationSpec.class));
  return out;
}

代码示例来源:origin: cdapio/cdap

// Returns the ids of all non-fork nodes of this workflow, or an empty set for
// any other program type.
private Iterable<String> getWorkflowNodes() {
  if (ProgramType.WORKFLOW != programId.getType()) {
    return ImmutableSet.of();
  }
  Preconditions.checkArgument(programSpec instanceof WorkflowSpecification,
                              "Expected programSpec %s to be of type WorkflowSpecification", programSpec);
  WorkflowSpecification workflowSpec = (WorkflowSpecification) this.programSpec;
  Set<String> nodeNames = new HashSet<>();
  for (Map.Entry<String, WorkflowNode> entry : workflowSpec.getNodeIdMap().entrySet()) {
    // Fork nodes have integers as node ids. Ignore them in system metadata.
    if (entry.getValue().getType() != WorkflowNodeType.FORK) {
      nodeNames.add(entry.getKey());
    }
  }
  return nodeNames;
}
}

代码示例来源:origin: cdapio/cdap

@Override
public String toString() {
  // Builds the same "WorkflowSpecification{...}" string as simple concatenation,
  // using a StringBuilder to avoid repeated intermediate strings.
  StringBuilder sb = new StringBuilder("WorkflowSpecification{");
  sb.append("className='").append(getClassName()).append('\'');
  sb.append(", name='").append(getName()).append('\'');
  sb.append(", description='").append(getDescription()).append('\'');
  sb.append(", plugins=").append(getPlugins());
  sb.append(", properties=").append(properties);
  sb.append(", nodes=").append(nodes);
  sb.append(", nodeIdMap=").append(nodeIdMap);
  sb.append(", localDatasetSpecs=").append(localDatasetSpecs);
  return sb.append('}').toString();
}
}

代码示例来源:origin: cdapio/cdap

@Override
protected void run() throws Exception {
  // Drives the workflow: mark RUNNING, execute every node in order, then mark
  // COMPLETED unless the run was aborted.
  String workflowName = workflowSpec.getName();
  String runId = workflowRunId.getRun();
  LOG.info("Starting workflow execution for '{}' with Run id '{}'", workflowName, runId);
  LOG.trace("Workflow specification is {}", workflowSpec);
  workflowContext.setState(new ProgramState(ProgramStatus.RUNNING, null));
  executeAll(workflowSpec.getNodes().iterator(), program.getApplicationSpecification(),
             new InstantiatorFactory(false), program.getClassLoader(), basicWorkflowToken);
  // NOTE(review): a null runningThread appears to indicate the run was stopped;
  // only a live run transitions to COMPLETED — confirm where runningThread is cleared.
  if (runningThread != null) {
    workflowContext.setState(new ProgramState(ProgramStatus.COMPLETED, null));
  }
  LOG.info("Workflow '{}' with run id '{}' completed", workflowName, runId);
}

代码示例来源:origin: cdapio/cdap

// Verifies that every node in nodeList has a unique id across the whole
// workflow, recursing into each node via verifyWorkflowNode.
// Throws RuntimeException on the first duplicate node id encountered.
private void verifyWorkflowNodeList(ApplicationSpecification appSpec, WorkflowSpecification workflowSpec,
                                    List<WorkflowNode> nodeList, Set<String> existingNodeNames) {
  for (WorkflowNode node : nodeList) {
    // Set.add returns false when the id was already present, collapsing the
    // original contains() + add() pair into a single lookup.
    if (!existingNodeNames.add(node.getNodeId())) {
      throw new RuntimeException(String.format("Node '%s' already exists in workflow '%s'.", node.getNodeId(),
                                               workflowSpec.getName()));
    }
    verifyWorkflowNode(appSpec, workflowSpec, node, existingNodeNames);
  }
}

代码示例来源:origin: cdapio/cdap

SchedulableProgramType.SPARK);
Iterable<ScheduleProgramInfo> programInfos = spec.getNodeIdMap().values().stream()
 .filter(WorkflowActionNode.class::isInstance)
 .map(WorkflowActionNode.class::cast)
                     findDriverResources(spec.getNodes(), runnablesResources));
launchConfig.addRunnable(spec.getName(), new WorkflowTwillRunnable(spec.getName()), 1,
             runtimeArgs, defaultResources, 0);

代码示例来源:origin: co.cask.cdap/cdap-app-fabric

@DELETE
@Path("/apps/{app-id}/workflows/{workflow-id}/runs/{run-id}/localdatasets")
public void deleteWorkflowLocalDatasets(HttpRequest request, HttpResponder responder,
                                        @PathParam("namespace-id") String namespaceId,
                                        @PathParam("app-id") String applicationId,
                                        @PathParam("workflow-id") String workflowId,
                                        @PathParam("run-id") String runId) throws NotFoundException {
  // Deletes the run-scoped local datasets of the given workflow run.
  // Deletion is best-effort: failures are collected and reported together.
  WorkflowSpecification workflowSpec = getWorkflowSpecForValidRun(namespaceId, applicationId, workflowId, runId);
  Set<String> errorOnDelete = new HashSet<>();
  for (Map.Entry<String, DatasetCreationSpec> localDatasetEntry : workflowSpec.getLocalDatasetSpecs().entrySet()) {
    // Local dataset instances are named "<dataset>.<runId>".
    String mappedDatasetName = localDatasetEntry.getKey() + "." + runId;
    // try best to delete the local datasets.
    try {
      datasetFramework.deleteInstance(new DatasetId(namespaceId, mappedDatasetName));
    } catch (InstanceNotFoundException e) {
      // Dataset instance is already deleted, so this is a no-op.
    } catch (Throwable t) {
      errorOnDelete.add(mappedDatasetName);
      // Fix: pass the Throwable as the last argument so SLF4J logs the full
      // stack trace; the original logged only t.getMessage(), losing the trace.
      LOG.error("Failed to delete the Workflow local dataset {}", mappedDatasetName, t);
    }
  }
  if (errorOnDelete.isEmpty()) {
    responder.sendStatus(HttpResponseStatus.OK);
    return;
  }
  String errorMessage = "Failed to delete Workflow local datasets - " + Joiner.on(",").join(errorOnDelete);
  throw new RuntimeException(errorMessage);
}

代码示例来源:origin: cdapio/cdap

// Instantiates the Workflow implementation named in the spec via reflection,
// injects metrics, and — when the workflow implements ProgramLifecycle — runs
// its initialize() under the resolved transaction control before persisting
// the initial workflow token.
@SuppressWarnings("unchecked")
private Workflow initializeWorkflow() throws Exception {
 // Load the user's workflow class with the program classloader so that
 // user-provided dependencies resolve.
 Class<?> clz = Class.forName(workflowSpec.getClassName(), true, program.getClassLoader());
 if (!Workflow.class.isAssignableFrom(clz)) {
  throw new IllegalStateException(String.format("%s is not Workflow.", clz));
 }
 // The cast is safe: the isAssignableFrom check above guarantees it.
 Class<? extends Workflow> workflowClass = (Class<? extends Workflow>) clz;
 final Workflow workflow = new InstantiatorFactory(false).get(TypeToken.of(workflowClass)).create();
 // set metrics
 Reflections.visit(workflow, workflow.getClass(), new MetricsFieldSetter(workflowContext.getMetrics()));
 if (!(workflow instanceof ProgramLifecycle)) {
  // No lifecycle hooks to run; the bare instance is ready.
  return workflow;
 }
 // Resolve whether initialize() runs in an implicit transaction or manages
 // transactions itself, based on its declared transaction control.
 final TransactionControl txControl =
  Transactions.getTransactionControl(workflowContext.getDefaultTxControl(), Workflow.class,
                    workflow, "initialize", WorkflowContext.class);
 // Attribute token writes made during initialize() to the workflow node itself.
 basicWorkflowToken.setCurrentNode(workflowSpec.getName());
 workflowContext.setState(new ProgramState(ProgramStatus.INITIALIZING, null));
 workflowContext.initializeProgram((ProgramLifecycle) workflow, txControl, false);
 // Persist the token produced by initialize() before any node executes.
 workflowStateWriter.setWorkflowToken(workflowRunId, basicWorkflowToken);
 return workflow;
}

代码示例来源:origin: cdapio/cdap

// Validates the workflow's node tree, rejecting duplicate node ids.
private void verifyWorkflowSpecifications(ApplicationSpecification appSpec, WorkflowSpecification workflowSpec) {
  // Node ids seen so far across the entire workflow, shared by the recursion.
  Set<String> seenNodeNames = new HashSet<>();
  verifyWorkflowNodeList(appSpec, workflowSpec, workflowSpec.getNodes(), seenNodeNames);
}

代码示例来源:origin: cdapio/cdap

private void verifyWorkflowWithLocalDatasetSpecification(ApplicationSpecification appSpec) {
 WorkflowSpecification spec = appSpec.getWorkflows().get("WorkflowWithLocalDatasets");
 List<WorkflowNode> nodes = spec.getNodes();
 Assert.assertTrue(nodes.size() == 2);
                                      "SP1")));
 Map<String, DatasetCreationSpec> localDatasetSpecs = spec.getLocalDatasetSpecs();
 Assert.assertEquals(5, localDatasetSpecs.size());

代码示例来源:origin: co.cask.cdap/cdap-data-pipeline

workflowContext.getWorkflowSpecification().getProperty(Constants.CONNECTOR_DATASETS), STAGE_DATASET_MAP);

代码示例来源:origin: cdapio/cdap

// Verifies a fork node: it must declare at least a non-null branch list, and
// every branch is validated recursively against the shared set of node names.
private void verifyWorkflowFork(ApplicationSpecification appSpec, WorkflowSpecification workflowSpec,
                                WorkflowNode node, Set<String> existingNodeNames) {
  WorkflowForkNode forkNode = (WorkflowForkNode) node;
  // Use Guava's lazy message-template overload so the failure message is only
  // formatted when the check fails; the original called String.format eagerly
  // on every invocation.
  Preconditions.checkNotNull(forkNode.getBranches(),
                             "Fork is added in the Workflow '%s' without any branches", workflowSpec.getName());
  for (List<WorkflowNode> branch : forkNode.getBranches()) {
    verifyWorkflowNodeList(appSpec, workflowSpec, branch, existingNodeNames);
  }
}

代码示例来源:origin: co.cask.cdap/cdap-app-fabric

SchedulableProgramType.SPARK);
Iterable<ScheduleProgramInfo> programInfos = spec.getNodeIdMap().values().stream()
 .filter(WorkflowActionNode.class::isInstance)
 .map(WorkflowActionNode.class::cast)
                     findDriverResources(spec.getNodes(), runnablesResources));
launchConfig.addRunnable(spec.getName(), new WorkflowTwillRunnable(spec.getName()), 1,
             runtimeArgs, defaultResources, 0);

代码示例来源:origin: co.cask.cdap/cdap-app-fabric

@Override
protected void run() throws Exception {
  // Drives the workflow: mark RUNNING, execute every node in order, then mark
  // COMPLETED unless the run was aborted.
  String workflowName = workflowSpec.getName();
  String runId = workflowRunId.getRun();
  LOG.info("Starting workflow execution for '{}' with Run id '{}'", workflowName, runId);
  LOG.trace("Workflow specification is {}", workflowSpec);
  workflowContext.setState(new ProgramState(ProgramStatus.RUNNING, null));
  executeAll(workflowSpec.getNodes().iterator(), program.getApplicationSpecification(),
             new InstantiatorFactory(false), program.getClassLoader(), basicWorkflowToken);
  // NOTE(review): a null runningThread appears to indicate the run was stopped;
  // only a live run transitions to COMPLETED — confirm where runningThread is cleared.
  if (runningThread != null) {
    workflowContext.setState(new ProgramState(ProgramStatus.COMPLETED, null));
  }
  LOG.info("Workflow '{}' with run id '{}' completed", workflowName, runId);
}

代码示例来源:origin: cdapio/cdap

@DELETE
@Path("/apps/{app-id}/workflows/{workflow-id}/runs/{run-id}/localdatasets")
public void deleteWorkflowLocalDatasets(HttpRequest request, HttpResponder responder,
                                        @PathParam("namespace-id") String namespaceId,
                                        @PathParam("app-id") String applicationId,
                                        @PathParam("workflow-id") String workflowId,
                                        @PathParam("run-id") String runId) throws NotFoundException {
  // Deletes the run-scoped local datasets of the given workflow run.
  // Deletion is best-effort: failures are collected and reported together.
  WorkflowSpecification workflowSpec = getWorkflowSpecForValidRun(namespaceId, applicationId, workflowId, runId);
  Set<String> errorOnDelete = new HashSet<>();
  for (Map.Entry<String, DatasetCreationSpec> localDatasetEntry : workflowSpec.getLocalDatasetSpecs().entrySet()) {
    // Local dataset instances are named "<dataset>.<runId>".
    String mappedDatasetName = localDatasetEntry.getKey() + "." + runId;
    // try best to delete the local datasets.
    try {
      datasetFramework.deleteInstance(new DatasetId(namespaceId, mappedDatasetName));
    } catch (InstanceNotFoundException e) {
      // Dataset instance is already deleted, so this is a no-op.
    } catch (Throwable t) {
      errorOnDelete.add(mappedDatasetName);
      // Fix: pass the Throwable as the last argument so SLF4J logs the full
      // stack trace; the original logged only t.getMessage(), losing the trace.
      LOG.error("Failed to delete the Workflow local dataset {}", mappedDatasetName, t);
    }
  }
  if (errorOnDelete.isEmpty()) {
    responder.sendStatus(HttpResponseStatus.OK);
    return;
  }
  String errorMessage = "Failed to delete Workflow local datasets - " + Joiner.on(",").join(errorOnDelete);
  throw new RuntimeException(errorMessage);
}

代码示例来源:origin: co.cask.cdap/cdap-app-fabric

// Instantiates the Workflow implementation named in the spec via reflection,
// injects metrics, and — when the workflow implements ProgramLifecycle — runs
// its initialize() under the resolved transaction control before persisting
// the initial workflow token.
@SuppressWarnings("unchecked")
private Workflow initializeWorkflow() throws Exception {
 // Load the user's workflow class with the program classloader so that
 // user-provided dependencies resolve.
 Class<?> clz = Class.forName(workflowSpec.getClassName(), true, program.getClassLoader());
 if (!Workflow.class.isAssignableFrom(clz)) {
  throw new IllegalStateException(String.format("%s is not Workflow.", clz));
 }
 // The cast is safe: the isAssignableFrom check above guarantees it.
 Class<? extends Workflow> workflowClass = (Class<? extends Workflow>) clz;
 final Workflow workflow = new InstantiatorFactory(false).get(TypeToken.of(workflowClass)).create();
 // set metrics
 Reflections.visit(workflow, workflow.getClass(), new MetricsFieldSetter(workflowContext.getMetrics()));
 if (!(workflow instanceof ProgramLifecycle)) {
  // No lifecycle hooks to run; the bare instance is ready.
  return workflow;
 }
 // Resolve whether initialize() runs in an implicit transaction or manages
 // transactions itself, based on its declared transaction control.
 final TransactionControl txControl =
  Transactions.getTransactionControl(workflowContext.getDefaultTxControl(), Workflow.class,
                    workflow, "initialize", WorkflowContext.class);
 // Attribute token writes made during initialize() to the workflow node itself.
 basicWorkflowToken.setCurrentNode(workflowSpec.getName());
 workflowContext.setState(new ProgramState(ProgramStatus.INITIALIZING, null));
 workflowContext.initializeProgram((ProgramLifecycle) workflow, txControl, false);
 // Persist the token produced by initialize() before any node executes.
 workflowStateWriter.setWorkflowToken(workflowRunId, basicWorkflowToken);
 return workflow;
}

代码示例来源:origin: co.cask.cdap/cdap-app-fabric

// Validates the workflow's node tree, rejecting duplicate node ids.
private void verifyWorkflowSpecifications(ApplicationSpecification appSpec, WorkflowSpecification workflowSpec) {
  // Node ids seen so far across the entire workflow, shared by the recursion.
  Set<String> seenNodeNames = new HashSet<>();
  verifyWorkflowNodeList(appSpec, workflowSpec, workflowSpec.getNodes(), seenNodeNames);
}

代码示例来源:origin: co.cask.cdap/cdap-etl-batch

// Initializes the pipeline workflow: rebuilds the BatchPipelineSpec from the
// "pipeline.spec" workflow property and instantiates every ending (post-run)
// action plugin along with its stage spec.
@TransactionPolicy(TransactionControl.EXPLICIT)
@Override
public void initialize(WorkflowContext context) throws Exception {
 super.initialize(context);
 // LinkedHashMap preserves the declared order of post-actions.
 postActions = new LinkedHashMap<>();
 // The pipeline spec is stored as a JSON string in the workflow properties.
 BatchPipelineSpec batchPipelineSpec =
  GSON.fromJson(context.getWorkflowSpecification().getProperty("pipeline.spec"), BatchPipelineSpec.class);
 // Macro evaluator resolves macros from the workflow token and runtime
 // arguments at plugin-instantiation time.
 MacroEvaluator macroEvaluator = new DefaultMacroEvaluator(new BasicArguments(context.getToken(),
                                        context.getRuntimeArguments()),
                              context.getLogicalStartTime(), context,
                              context.getNamespace());
 postActionSpecs = new HashMap<>();
 for (ActionSpec actionSpec : batchPipelineSpec.getEndingActions()) {
  String name = actionSpec.getName();
  // Instantiate the post-action plugin by stage name, evaluating macros.
  postActions.put(name, (PostAction) context.newPluginInstance(name, macroEvaluator));
  // Record a stage spec for the action, inheriting pipeline-level flags.
  postActionSpecs.put(name, StageSpec.builder(name, actionSpec.getPluginSpec())
   .setProcessTimingEnabled(batchPipelineSpec.isProcessTimingEnabled())
   .setStageLoggingEnabled(batchPipelineSpec.isStageLoggingEnabled())
   .build());
 }
}

代码示例来源:origin: caskdata/cdap

@Override
public JsonElement serialize(WorkflowSpecification src, Type typeOfSrc, JsonSerializationContext context) {
  // Property insertion order is preserved by JsonObject, so the emitted order
  // below is the order in the serialized JSON.
  JsonObject out = new JsonObject();
  out.add("className", new JsonPrimitive(src.getClassName()));
  out.add("name", new JsonPrimitive(src.getName()));
  out.add("description", new JsonPrimitive(src.getDescription()));
  out.add("plugins", serializeMap(src.getPlugins(), context, Plugin.class));
  out.add("properties", serializeMap(src.getProperties(), context, String.class));
  out.add("nodes", serializeList(src.getNodes(), context, WorkflowNode.class));
  out.add("localDatasetSpecs", serializeMap(src.getLocalDatasetSpecs(), context, DatasetCreationSpec.class));
  return out;
}

27 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com