Mit Sammlungen den Überblick behalten
Sie können Inhalte basierend auf Ihren Einstellungen speichern und kategorisieren.
Im Gegensatz zu standardmäßigen Workflows, die eine zuvor erstellte Workflow-Vorlagenressource instanziieren, verwenden Inline-Workflows eine YAML-Datei oder eine eingebettete WorkflowTemplate-Definition zum Ausführen eines Workflows.
zoneUri: Geben Sie eine Zone innerhalb der Region des Clusters an, z. B. "us-central1-b", oder lassen Sie das Feld leer (""), um die Auto-Zone-Platzierung von Dataproc zu verwenden.
clusterName: Clustername
HTTP-Methode und URL:
POST https://dataproc.googleapis.com/v1/projects/project-id/regions/region/workflowTemplates:instantiateInline
Das Erstellen von Inline-Workflows wird in der Google Cloud Console derzeit nicht unterstützt. Workflow-Vorlagen und instanziierte Workflows können auf der Dataproc-Seite Workflows aufgerufen werden.
import("context"
"fmt"
"io"
dataproc"cloud.google.com/go/dataproc/apiv1"
"google.golang.org/api/option"
dataprocpb"google.golang.org/genproto/googleapis/cloud/dataproc/v1"
)funcinstantiateInlineWorkflowTemplate(wio.Writer,projectID,regionstring)error{// projectID := "your-project-id"
// region := "us-central1"
ctx:=context.Background()// Create the cluster client.endpoint:=region+"-dataproc.googleapis.com:443"
workflowTemplateClient,err:=dataproc.NewWorkflowTemplateClient(ctx,option.WithEndpoint(endpoint))iferr!=nil{returnfmt.Errorf("dataproc.NewWorkflowTemplateClient:%v",err)}deferworkflowTemplateClient.Close()// Create jobs for the workflow.teragenJob:=&dataprocpb.OrderedJob{JobType:&dataprocpb.OrderedJob_HadoopJob{HadoopJob:&dataprocpb.HadoopJob{Driver:&dataprocpb.HadoopJob_MainJarFileUri{MainJarFileUri:"file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",},Args:[]string{"teragen","1000","hdfs:///gen/",},},},StepId:"teragen",}terasortJob:=&dataprocpb.OrderedJob{JobType:&dataprocpb.OrderedJob_HadoopJob{HadoopJob:&dataprocpb.HadoopJob{Driver:&dataprocpb.HadoopJob_MainJarFileUri{MainJarFileUri:"file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",},Args:[]string{"terasort","hdfs:///gen/","hdfs:///sort/",},},},StepId:"terasort",PrerequisiteStepIds:[]string{"teragen",},}// Create the cluster placement.clusterPlacement:=&dataprocpb.WorkflowTemplatePlacement{Placement:&dataprocpb.WorkflowTemplatePlacement_ManagedCluster{ManagedCluster:&dataprocpb.ManagedCluster{ClusterName:"my-managed-cluster",Config:&dataprocpb.ClusterConfig{GceClusterConfig:&dataprocpb.GceClusterConfig{// Leave "ZoneUri" empty for "Auto Zone Placement"
// ZoneUri: ""
ZoneUri:"us-central1-a",},},},},}// Create the Instantiate Inline Workflow Template Request.req:=&dataprocpb.InstantiateInlineWorkflowTemplateRequest{Parent:fmt.Sprintf("projects/%s/regions/%s",projectID,region),Template:&dataprocpb.WorkflowTemplate{Jobs:[]*dataprocpb.OrderedJob{teragenJob,terasortJob,},Placement:clusterPlacement,},}// Create the cluster.op,err:=workflowTemplateClient.InstantiateInlineWorkflowTemplate(ctx,req)iferr!=nil{returnfmt.Errorf("InstantiateInlineWorkflowTemplate:%v",err)}iferr:=op.Wait(ctx);err!=nil{returnfmt.Errorf("InstantiateInlineWorkflowTemplate.Wait:%v",err)}// Output a success message.fmt.Fprintf(w,"Workflowcreatedsuccessfully.")returnnil}
importcom.google.api.gax.longrunning.OperationFuture;importcom.google.cloud.dataproc.v1.ClusterConfig;importcom.google.cloud.dataproc.v1.GceClusterConfig;importcom.google.cloud.dataproc.v1.HadoopJob;importcom.google.cloud.dataproc.v1.ManagedCluster;importcom.google.cloud.dataproc.v1.OrderedJob;importcom.google.cloud.dataproc.v1.RegionName;importcom.google.cloud.dataproc.v1.WorkflowMetadata;importcom.google.cloud.dataproc.v1.WorkflowTemplate;importcom.google.cloud.dataproc.v1.WorkflowTemplatePlacement;importcom.google.cloud.dataproc.v1.WorkflowTemplateServiceClient;importcom.google.cloud.dataproc.v1.WorkflowTemplateServiceSettings;importcom.google.protobuf.Empty;importjava.io.IOException;importjava.util.concurrent.ExecutionException;publicclassInstantiateInlineWorkflowTemplate{publicstaticvoidinstantiateInlineWorkflowTemplate()throwsIOException,InterruptedException{// TODO(developer): Replace these variables before running the sample.StringprojectId="your-project-id";Stringregion="your-project-region";instantiateInlineWorkflowTemplate(projectId,region);}publicstaticvoidinstantiateInlineWorkflowTemplate(StringprojectId,Stringregion)throwsIOException,InterruptedException{StringmyEndpoint=String.format("%s-dataproc.googleapis.com:443",region);// Configure the settings for the workflow template service client.WorkflowTemplateServiceSettingsworkflowTemplateServiceSettings=WorkflowTemplateServiceSettings.newBuilder().setEndpoint(myEndpoint).build();// Create a workflow template service client with the configured settings. The client only// needs to be created once and can be reused for multiple requests. 
Using a try-with-resources// closes the client, but this can also be done manually with the .close() method.try(WorkflowTemplateServiceClientworkflowTemplateServiceClient=WorkflowTemplateServiceClient.create(workflowTemplateServiceSettings)){// Configure the jobs within the workflow.HadoopJobteragenHadoopJob=HadoopJob.newBuilder().setMainJarFileUri("file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar").addArgs("teragen").addArgs("1000").addArgs("hdfs:///gen/").build();OrderedJobteragen=OrderedJob.newBuilder().setHadoopJob(teragenHadoopJob).setStepId("teragen").build();HadoopJobterasortHadoopJob=HadoopJob.newBuilder().setMainJarFileUri("file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar").addArgs("terasort").addArgs("hdfs:///gen/").addArgs("hdfs:///sort/").build();OrderedJobterasort=OrderedJob.newBuilder().setHadoopJob(terasortHadoopJob).addPrerequisiteStepIds("teragen").setStepId("terasort").build();// Configure the cluster placement for the workflow.// Leave "ZoneUri" empty for "Auto Zone Placement".// GceClusterConfig gceClusterConfig =// GceClusterConfig.newBuilder().setZoneUri("").build();GceClusterConfiggceClusterConfig=GceClusterConfig.newBuilder().setZoneUri("us-central1-a").build();ClusterConfigclusterConfig=ClusterConfig.newBuilder().setGceClusterConfig(gceClusterConfig).build();ManagedClustermanagedCluster=ManagedCluster.newBuilder().setClusterName("my-managed-cluster").setConfig(clusterConfig).build();WorkflowTemplatePlacementworkflowTemplatePlacement=WorkflowTemplatePlacement.newBuilder().setManagedCluster(managedCluster).build();// Create the inline workflow template.WorkflowTemplateworkflowTemplate=WorkflowTemplate.newBuilder().addJobs(teragen).addJobs(terasort).setPlacement(workflowTemplatePlacement).build();// Submit the instantiated inline workflow template 
request.Stringparent=RegionName.format(projectId,region);OperationFuture<Empty,WorkflowMetadata>instantiateInlineWorkflowTemplateAsync=workflowTemplateServiceClient.instantiateInlineWorkflowTemplateAsync(parent,workflowTemplate);instantiateInlineWorkflowTemplateAsync.get();// Print out a success message.System.out.printf("Workflowransuccessfully.");}catch(ExecutionExceptione){System.err.println(String.format("Errorrunningworkflow:%s",e.getMessage()));}}}
constdataproc=require('@google-cloud/dataproc');// TODO(developer): Uncomment and set the following variables// projectId = 'YOUR_PROJECT_ID'
// region = 'YOUR_REGION'
// Create a client with the endpoint set to the desired regionconstclient=newdataproc.v1.WorkflowTemplateServiceClient({apiEndpoint:`${region}-dataproc.googleapis.com`,projectId:projectId,});asyncfunctioninstantiateInlineWorkflowTemplate(){// Create the formatted parent.constparent=client.regionPath(projectId,region);// Create the templateconsttemplate={jobs:[{hadoopJob:{mainJarFileUri:'file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar',args:['teragen','1000','hdfs:///gen/'],},stepId:'teragen',},{hadoopJob:{mainJarFileUri:'file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar',args:['terasort','hdfs:///gen/', 'hdfs:///sort/'],},stepId:'terasort',prerequisiteStepIds:['teragen'],},],placement:{managedCluster:{clusterName:'my-managed-cluster',config:{gceClusterConfig:{// Leave 'zoneUri' empty for 'Auto Zone Placement'
// zoneUri: ''
zoneUri:'us-central1-a',},},},},};constrequest={parent:parent,template:template,};// Submit the request to instantiate the workflow from an inline template.const[operation]=awaitclient.instantiateInlineWorkflowTemplate(request);awaitoperation.promise();// Output a success messageconsole.log('Workflowransuccessfully.');