From f3237b9e027a417a6f42bd1878d1029e6da2d44d Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 24 May 2022 09:57:07 +0000 Subject: [PATCH 1/2] fix: fixes for dynamic routing and streaming descriptors Use gapic-generator-typescript v2.14.5. PiperOrigin-RevId: 450616838 Source-Link: https://github.com/googleapis/googleapis/commit/7a47b72791e0b84d78beca4c2b26bec42ce31572 Source-Link: https://github.com/googleapis/googleapis-gen/commit/42cc6331bae0b99f61b8e01ae15b05211716c4f9 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDJjYzYzMzFiYWUwYjk5ZjYxYjhlMDFhZTE1YjA1MjExNzE2YzRmOSJ9 --- owl-bot-staging/v1/.eslintignore | 7 + owl-bot-staging/v1/.eslintrc.json | 3 + owl-bot-staging/v1/.gitignore | 14 + owl-bot-staging/v1/.jsdoc.js | 55 + owl-bot-staging/v1/.mocharc.js | 33 + owl-bot-staging/v1/.prettierrc.js | 22 + owl-bot-staging/v1/README.md | 1 + owl-bot-staging/v1/linkinator.config.json | 16 + owl-bot-staging/v1/package.json | 68 + .../dataproc/v1/autoscaling_policies.proto | 366 ++++ .../google/cloud/dataproc/v1/batches.proto | 372 ++++ .../google/cloud/dataproc/v1/clusters.proto | 1280 ++++++++++++ .../google/cloud/dataproc/v1/jobs.proto | 924 +++++++++ .../google/cloud/dataproc/v1/operations.proto | 118 ++ .../google/cloud/dataproc/v1/shared.proto | 341 ++++ .../dataproc/v1/workflow_templates.proto | 807 ++++++++ ...olicy_service.create_autoscaling_policy.js | 70 + ...olicy_service.delete_autoscaling_policy.js | 65 + ...g_policy_service.get_autoscaling_policy.js | 65 + ...olicy_service.list_autoscaling_policies.js | 77 + ...olicy_service.update_autoscaling_policy.js | 58 + .../v1/batch_controller.create_batch.js | 83 + .../v1/batch_controller.delete_batch.js | 58 + .../v1/batch_controller.get_batch.js | 58 + .../v1/batch_controller.list_batches.js | 71 + .../v1/cluster_controller.create_cluster.js | 86 + .../v1/cluster_controller.delete_cluster.js | 88 + .../v1/cluster_controller.diagnose_cluster.js | 70 + .../v1/cluster_controller.get_cluster.js | 69 + 
.../v1/cluster_controller.list_clusters.js | 92 + .../v1/cluster_controller.start_cluster.js | 88 + .../v1/cluster_controller.stop_cluster.js | 88 + .../v1/cluster_controller.update_cluster.js | 151 ++ .../generated/v1/job_controller.cancel_job.js | 69 + .../generated/v1/job_controller.delete_job.js | 69 + .../generated/v1/job_controller.get_job.js | 69 + .../generated/v1/job_controller.list_jobs.js | 99 + .../generated/v1/job_controller.submit_job.js | 82 + .../job_controller.submit_job_as_operation.js | 83 + .../generated/v1/job_controller.update_job.js | 84 + ...pet_metadata.google.cloud.dataproc.v1.json | 1535 ++++++++++++++ ...mplate_service.create_workflow_template.js | 70 + ...mplate_service.delete_workflow_template.js | 71 + ..._template_service.get_workflow_template.js | 71 + ...ce.instantiate_inline_workflow_template.js | 81 + ...e_service.instantiate_workflow_template.js | 89 + ...emplate_service.list_workflow_templates.js | 76 + ...mplate_service.update_workflow_template.js | 59 + owl-bot-staging/v1/src/index.ts | 33 + .../v1/autoscaling_policy_service_client.ts | 1163 +++++++++++ ...oscaling_policy_service_client_config.json | 51 + ...autoscaling_policy_service_proto_list.json | 9 + .../v1/src/v1/batch_controller_client.ts | 1100 +++++++++++ .../v1/batch_controller_client_config.json | 42 + .../src/v1/batch_controller_proto_list.json | 9 + .../v1/src/v1/cluster_controller_client.ts | 1755 +++++++++++++++++ .../v1/cluster_controller_client_config.json | 72 + .../src/v1/cluster_controller_proto_list.json | 9 + owl-bot-staging/v1/src/v1/gapic_metadata.json | 409 ++++ owl-bot-staging/v1/src/v1/index.ts | 23 + .../v1/src/v1/job_controller_client.ts | 1382 +++++++++++++ .../src/v1/job_controller_client_config.json | 69 + .../v1/src/v1/job_controller_proto_list.json | 9 + .../v1/workflow_template_service_client.ts | 1478 ++++++++++++++ ...rkflow_template_service_client_config.json | 69 + .../workflow_template_service_proto_list.json | 9 + 
.../system-test/fixtures/sample/src/index.js | 31 + .../system-test/fixtures/sample/src/index.ts | 56 + owl-bot-staging/v1/system-test/install.ts | 49 + .../gapic_autoscaling_policy_service_v1.ts | 1098 +++++++++++ .../v1/test/gapic_batch_controller_v1.ts | 1060 ++++++++++ .../v1/test/gapic_cluster_controller_v1.ts | 1720 ++++++++++++++++ .../v1/test/gapic_job_controller_v1.ts | 1330 +++++++++++++ .../gapic_workflow_template_service_v1.ts | 1402 +++++++++++++ owl-bot-staging/v1/tsconfig.json | 19 + owl-bot-staging/v1/webpack.config.js | 64 + 76 files changed, 22891 insertions(+) create mode 100644 owl-bot-staging/v1/.eslintignore create mode 100644 owl-bot-staging/v1/.eslintrc.json create mode 100644 owl-bot-staging/v1/.gitignore create mode 100644 owl-bot-staging/v1/.jsdoc.js create mode 100644 owl-bot-staging/v1/.mocharc.js create mode 100644 owl-bot-staging/v1/.prettierrc.js create mode 100644 owl-bot-staging/v1/README.md create mode 100644 owl-bot-staging/v1/linkinator.config.json create mode 100644 owl-bot-staging/v1/package.json create mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/autoscaling_policies.proto create mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/batches.proto create mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/clusters.proto create mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/jobs.proto create mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/operations.proto create mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/shared.proto create mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/workflow_templates.proto create mode 100644 owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.create_autoscaling_policy.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.delete_autoscaling_policy.js create mode 100644 
owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.get_autoscaling_policy.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.list_autoscaling_policies.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.update_autoscaling_policy.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/batch_controller.create_batch.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/batch_controller.delete_batch.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/batch_controller.get_batch.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/batch_controller.list_batches.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.create_cluster.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.delete_cluster.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.diagnose_cluster.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.get_cluster.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.list_clusters.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.start_cluster.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.stop_cluster.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.update_cluster.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/job_controller.cancel_job.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/job_controller.delete_job.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/job_controller.get_job.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/job_controller.list_jobs.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job_as_operation.js create mode 
100644 owl-bot-staging/v1/samples/generated/v1/job_controller.update_job.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/snippet_metadata.google.cloud.dataproc.v1.json create mode 100644 owl-bot-staging/v1/samples/generated/v1/workflow_template_service.create_workflow_template.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/workflow_template_service.delete_workflow_template.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/workflow_template_service.get_workflow_template.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_inline_workflow_template.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_workflow_template.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/workflow_template_service.list_workflow_templates.js create mode 100644 owl-bot-staging/v1/samples/generated/v1/workflow_template_service.update_workflow_template.js create mode 100644 owl-bot-staging/v1/src/index.ts create mode 100644 owl-bot-staging/v1/src/v1/autoscaling_policy_service_client.ts create mode 100644 owl-bot-staging/v1/src/v1/autoscaling_policy_service_client_config.json create mode 100644 owl-bot-staging/v1/src/v1/autoscaling_policy_service_proto_list.json create mode 100644 owl-bot-staging/v1/src/v1/batch_controller_client.ts create mode 100644 owl-bot-staging/v1/src/v1/batch_controller_client_config.json create mode 100644 owl-bot-staging/v1/src/v1/batch_controller_proto_list.json create mode 100644 owl-bot-staging/v1/src/v1/cluster_controller_client.ts create mode 100644 owl-bot-staging/v1/src/v1/cluster_controller_client_config.json create mode 100644 owl-bot-staging/v1/src/v1/cluster_controller_proto_list.json create mode 100644 owl-bot-staging/v1/src/v1/gapic_metadata.json create mode 100644 owl-bot-staging/v1/src/v1/index.ts create mode 100644 owl-bot-staging/v1/src/v1/job_controller_client.ts create mode 100644 
owl-bot-staging/v1/src/v1/job_controller_client_config.json create mode 100644 owl-bot-staging/v1/src/v1/job_controller_proto_list.json create mode 100644 owl-bot-staging/v1/src/v1/workflow_template_service_client.ts create mode 100644 owl-bot-staging/v1/src/v1/workflow_template_service_client_config.json create mode 100644 owl-bot-staging/v1/src/v1/workflow_template_service_proto_list.json create mode 100644 owl-bot-staging/v1/system-test/fixtures/sample/src/index.js create mode 100644 owl-bot-staging/v1/system-test/fixtures/sample/src/index.ts create mode 100644 owl-bot-staging/v1/system-test/install.ts create mode 100644 owl-bot-staging/v1/test/gapic_autoscaling_policy_service_v1.ts create mode 100644 owl-bot-staging/v1/test/gapic_batch_controller_v1.ts create mode 100644 owl-bot-staging/v1/test/gapic_cluster_controller_v1.ts create mode 100644 owl-bot-staging/v1/test/gapic_job_controller_v1.ts create mode 100644 owl-bot-staging/v1/test/gapic_workflow_template_service_v1.ts create mode 100644 owl-bot-staging/v1/tsconfig.json create mode 100644 owl-bot-staging/v1/webpack.config.js diff --git a/owl-bot-staging/v1/.eslintignore b/owl-bot-staging/v1/.eslintignore new file mode 100644 index 00000000..cfc348ec --- /dev/null +++ b/owl-bot-staging/v1/.eslintignore @@ -0,0 +1,7 @@ +**/node_modules +**/.coverage +build/ +docs/ +protos/ +system-test/ +samples/generated/ diff --git a/owl-bot-staging/v1/.eslintrc.json b/owl-bot-staging/v1/.eslintrc.json new file mode 100644 index 00000000..78215349 --- /dev/null +++ b/owl-bot-staging/v1/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "./node_modules/gts" +} diff --git a/owl-bot-staging/v1/.gitignore b/owl-bot-staging/v1/.gitignore new file mode 100644 index 00000000..5d32b237 --- /dev/null +++ b/owl-bot-staging/v1/.gitignore @@ -0,0 +1,14 @@ +**/*.log +**/node_modules +.coverage +coverage +.nyc_output +docs/ +out/ +build/ +system-test/secrets.js +system-test/*key.json +*.lock +.DS_Store +package-lock.json +__pycache__ diff 
--git a/owl-bot-staging/v1/.jsdoc.js b/owl-bot-staging/v1/.jsdoc.js new file mode 100644 index 00000000..2fa0c393 --- /dev/null +++ b/owl-bot-staging/v1/.jsdoc.js @@ -0,0 +1,55 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +'use strict'; + +module.exports = { + opts: { + readme: './README.md', + package: './package.json', + template: './node_modules/jsdoc-fresh', + recurse: true, + verbose: true, + destination: './docs/' + }, + plugins: [ + 'plugins/markdown', + 'jsdoc-region-tag' + ], + source: { + excludePattern: '(^|\\/|\\\\)[._]', + include: [ + 'build/src', + 'protos' + ], + includePattern: '\\.js$' + }, + templates: { + copyright: 'Copyright 2022 Google LLC', + includeDate: false, + sourceFiles: false, + systemName: '@google-cloud/dataproc', + theme: 'lumen', + default: { + outputSourceFiles: false + } + }, + markdown: { + idInHeadings: true + } +}; diff --git a/owl-bot-staging/v1/.mocharc.js b/owl-bot-staging/v1/.mocharc.js new file mode 100644 index 00000000..481c522b --- /dev/null +++ b/owl-bot-staging/v1/.mocharc.js @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +const config = { + "enable-source-maps": true, + "throw-deprecation": true, + "timeout": 10000 +} +if (process.env.MOCHA_THROW_DEPRECATION === 'false') { + delete config['throw-deprecation']; +} +if (process.env.MOCHA_REPORTER) { + config.reporter = process.env.MOCHA_REPORTER; +} +if (process.env.MOCHA_REPORTER_OUTPUT) { + config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; +} +module.exports = config diff --git a/owl-bot-staging/v1/.prettierrc.js b/owl-bot-staging/v1/.prettierrc.js new file mode 100644 index 00000000..494e1478 --- /dev/null +++ b/owl-bot-staging/v1/.prettierrc.js @@ -0,0 +1,22 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +module.exports = { + ...require('gts/.prettierrc.json') +} diff --git a/owl-bot-staging/v1/README.md b/owl-bot-staging/v1/README.md new file mode 100644 index 00000000..3e8aa9df --- /dev/null +++ b/owl-bot-staging/v1/README.md @@ -0,0 +1 @@ +Dataproc: Nodejs Client diff --git a/owl-bot-staging/v1/linkinator.config.json b/owl-bot-staging/v1/linkinator.config.json new file mode 100644 index 00000000..befd23c8 --- /dev/null +++ b/owl-bot-staging/v1/linkinator.config.json @@ -0,0 +1,16 @@ +{ + "recurse": true, + "skip": [ + "https://codecov.io/gh/googleapis/", + "www.googleapis.com", + "img.shields.io", + "https://console.cloud.google.com/cloudshell", + "https://support.google.com" + ], + "silent": true, + "concurrency": 5, + "retry": true, + "retryErrors": true, + "retryErrorsCount": 5, + "retryErrorsJitter": 3000 +} diff --git a/owl-bot-staging/v1/package.json b/owl-bot-staging/v1/package.json new file mode 100644 index 00000000..5ab1b932 --- /dev/null +++ b/owl-bot-staging/v1/package.json @@ -0,0 +1,68 @@ +{ + "name": "@google-cloud/dataproc", + "version": "0.1.0", + "description": "Dataproc client for Node.js", + "repository": "googleapis/nodejs-dataproc", + "license": "Apache-2.0", + "author": "Google LLC", + "main": "build/src/index.js", + "files": [ + "build/src", + "build/protos" + ], + "keywords": [ + "google apis client", + "google api client", + "google apis", + "google api", + "google", + "google cloud platform", + "google cloud", + "cloud", + "google dataproc", + "dataproc", + "autoscaling policy service", + "batch controller", + "cluster controller", + "job controller", + "workflow template service" + ], + "scripts": { + "clean": "gts clean", + "compile": "tsc -p . 
&& cp -r protos build/", + "compile-protos": "compileProtos src", + "docs": "jsdoc -c .jsdoc.js", + "predocs-test": "npm run docs", + "docs-test": "linkinator docs", + "fix": "gts fix", + "lint": "gts check", + "prepare": "npm run compile-protos && npm run compile", + "system-test": "c8 mocha build/system-test", + "test": "c8 mocha build/test" + }, + "dependencies": { + "google-gax": "^2.29.4" + }, + "devDependencies": { + "@types/mocha": "^9.1.0", + "@types/node": "^16.0.0", + "@types/sinon": "^10.0.8", + "c8": "^7.11.0", + "gts": "^3.1.0", + "jsdoc": "^3.6.7", + "jsdoc-fresh": "^1.1.1", + "jsdoc-region-tag": "^1.3.1", + "linkinator": "^3.0.0", + "mocha": "^9.1.4", + "null-loader": "^4.0.1", + "pack-n-play": "^1.0.0-2", + "sinon": "^13.0.0", + "ts-loader": "^9.2.6", + "typescript": "^4.5.5", + "webpack": "^5.67.0", + "webpack-cli": "^4.9.1" + }, + "engines": { + "node": ">=v10.24.0" + } +} diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/autoscaling_policies.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/autoscaling_policies.proto new file mode 100644 index 00000000..18b2f7df --- /dev/null +++ b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/autoscaling_policies.proto @@ -0,0 +1,366 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.dataproc.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/empty.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; +option java_multiple_files = true; +option java_outer_classname = "AutoscalingPoliciesProto"; +option java_package = "com.google.cloud.dataproc.v1"; +option (google.api.resource_definition) = { + type: "dataproc.googleapis.com/Region" + pattern: "projects/{project}/regions/{region}" +}; + +// The API interface for managing autoscaling policies in the +// Dataproc API. +service AutoscalingPolicyService { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates new autoscaling policy. + rpc CreateAutoscalingPolicy(CreateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { + option (google.api.http) = { + post: "/v1/{parent=projects/*/locations/*}/autoscalingPolicies" + body: "policy" + additional_bindings { + post: "/v1/{parent=projects/*/regions/*}/autoscalingPolicies" + body: "policy" + } + }; + option (google.api.method_signature) = "parent,policy"; + } + + // Updates (replaces) autoscaling policy. + // + // Disabled check for update_mask, because all updates will be full + // replacements. + rpc UpdateAutoscalingPolicy(UpdateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { + option (google.api.http) = { + put: "/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}" + body: "policy" + additional_bindings { + put: "/v1/{policy.name=projects/*/regions/*/autoscalingPolicies/*}" + body: "policy" + } + }; + option (google.api.method_signature) = "policy"; + } + + // Retrieves autoscaling policy. 
+ rpc GetAutoscalingPolicy(GetAutoscalingPolicyRequest) returns (AutoscalingPolicy) { + option (google.api.http) = { + get: "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" + additional_bindings { + get: "/v1/{name=projects/*/regions/*/autoscalingPolicies/*}" + } + }; + option (google.api.method_signature) = "name"; + } + + // Lists autoscaling policies in the project. + rpc ListAutoscalingPolicies(ListAutoscalingPoliciesRequest) returns (ListAutoscalingPoliciesResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*/locations/*}/autoscalingPolicies" + additional_bindings { + get: "/v1/{parent=projects/*/regions/*}/autoscalingPolicies" + } + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes an autoscaling policy. It is an error to delete an autoscaling + // policy that is in use by one or more clusters. + rpc DeleteAutoscalingPolicy(DeleteAutoscalingPolicyRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" + additional_bindings { + delete: "/v1/{name=projects/*/regions/*/autoscalingPolicies/*}" + } + }; + option (google.api.method_signature) = "name"; + } +} + +// Describes an autoscaling policy for Dataproc cluster autoscaler. +message AutoscalingPolicy { + option (google.api.resource) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + pattern: "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}" + pattern: "projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}" + }; + + // Required. The policy id. + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). Cannot begin or end with underscore + // or hyphen. Must consist of between 3 and 50 characters. + // + string id = 1; + + // Output only. The "resource name" of the autoscaling policy, as described + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.autoscalingPolicies`, the resource name of the + // policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies`, the resource name of the + // policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Autoscaling algorithm for policy. + oneof algorithm { + BasicAutoscalingAlgorithm basic_algorithm = 3 [(google.api.field_behavior) = REQUIRED]; + } + + // Required. Describes how the autoscaler will operate for primary workers. + InstanceGroupAutoscalingPolicyConfig worker_config = 4 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Describes how the autoscaler will operate for secondary workers. + InstanceGroupAutoscalingPolicyConfig secondary_worker_config = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The labels to associate with this autoscaling policy. + // Label **keys** must contain 1 to 63 characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // Label **values** may be empty, but, if present, must contain 1 to 63 + // characters, and must conform to [RFC + // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be + // associated with an autoscaling policy. + map labels = 6 [(google.api.field_behavior) = OPTIONAL]; +} + +// Basic algorithm for autoscaling. +message BasicAutoscalingAlgorithm { + oneof config { + // Required. YARN autoscaling configuration. + BasicYarnAutoscalingConfig yarn_config = 1 [(google.api.field_behavior) = REQUIRED]; + } + + // Optional. Duration between scaling events. A scaling period starts after + // the update operation from the previous event has completed. + // + // Bounds: [2m, 1d]. Default: 2m. 
+ google.protobuf.Duration cooldown_period = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// Basic autoscaling configurations for YARN. +message BasicYarnAutoscalingConfig { + // Required. Timeout for YARN graceful decommissioning of Node Managers. + // Specifies the duration to wait for jobs to complete before forcefully + // removing workers (and potentially interrupting jobs). Only applicable to + // downscaling operations. + // + // Bounds: [0s, 1d]. + google.protobuf.Duration graceful_decommission_timeout = 5 [(google.api.field_behavior) = REQUIRED]; + + // Required. Fraction of average YARN pending memory in the last cooldown period + // for which to add workers. A scale-up factor of 1.0 will result in scaling + // up so that there is no pending memory remaining after the update (more + // aggressive scaling). A scale-up factor closer to 0 will result in a smaller + // magnitude of scaling up (less aggressive scaling). + // See [How autoscaling + // works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works) + // for more information. + // + // Bounds: [0.0, 1.0]. + double scale_up_factor = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. Fraction of average YARN pending memory in the last cooldown period + // for which to remove workers. A scale-down factor of 1 will result in + // scaling down so that there is no available memory remaining after the + // update (more aggressive scaling). A scale-down factor of 0 disables + // removing workers, which can be beneficial for autoscaling a single job. + // See [How autoscaling + // works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works) + // for more information. + // + // Bounds: [0.0, 1.0]. + double scale_down_factor = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Minimum scale-up threshold as a fraction of total cluster size + // before scaling occurs. 
For example, in a 20-worker cluster, a threshold of + // 0.1 means the autoscaler must recommend at least a 2-worker scale-up for + // the cluster to scale. A threshold of 0 means the autoscaler will scale up + // on any recommended change. + // + // Bounds: [0.0, 1.0]. Default: 0.0. + double scale_up_min_worker_fraction = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Minimum scale-down threshold as a fraction of total cluster size + // before scaling occurs. For example, in a 20-worker cluster, a threshold of + // 0.1 means the autoscaler must recommend at least a 2 worker scale-down for + // the cluster to scale. A threshold of 0 means the autoscaler will scale down + // on any recommended change. + // + // Bounds: [0.0, 1.0]. Default: 0.0. + double scale_down_min_worker_fraction = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Configuration for the size bounds of an instance group, including its +// proportional size to other groups. +message InstanceGroupAutoscalingPolicyConfig { + // Optional. Minimum number of instances for this group. + // + // Primary workers - Bounds: [2, max_instances]. Default: 2. + // Secondary workers - Bounds: [0, max_instances]. Default: 0. + int32 min_instances = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Maximum number of instances for this group. Required for primary + // workers. Note that by default, clusters will not use secondary workers. + // Required for secondary workers if the minimum secondary instances is set. + // + // Primary workers - Bounds: [min_instances, ). + // Secondary workers - Bounds: [min_instances, ). Default: 0. + int32 max_instances = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Weight for the instance group, which is used to determine the + // fraction of total workers in the cluster from this instance group. 
+ // For example, if primary workers have weight 2, and secondary workers have + // weight 1, the cluster will have approximately 2 primary workers for each + // secondary worker. + // + // The cluster may not reach the specified balance if constrained + // by min/max bounds or other autoscaling settings. For example, if + // `max_instances` for secondary workers is 0, then only primary workers will + // be added. The cluster can also be out of balance when created. + // + // If weight is not set on any instance group, the cluster will default to + // equal weight for all groups: the cluster will attempt to maintain an equal + // number of workers in each group within the configured size bounds for each + // group. If weight is set for one group only, the cluster will default to + // zero weight on the unset group. For example if weight is set only on + // primary workers, the cluster will use primary workers only and no + // secondary workers. + int32 weight = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to create an autoscaling policy. +message CreateAutoscalingPolicyRequest { + // Required. The "resource name" of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.create`, the resource name + // of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.autoscalingPolicies.create`, the resource name + // of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; + + // Required. The autoscaling policy to create. + AutoscalingPolicy policy = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to fetch an autoscaling policy. +message GetAutoscalingPolicyRequest { + // Required. 
The "resource name" of the autoscaling policy, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.get`, the resource name + // of the policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies.get`, the resource name + // of the policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; +} + +// A request to update an autoscaling policy. +message UpdateAutoscalingPolicyRequest { + // Required. The updated autoscaling policy. + AutoscalingPolicy policy = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to delete an autoscaling policy. +// +// Autoscaling policies in use by one or more clusters will not be deleted. +message DeleteAutoscalingPolicyRequest { + // Required. The "resource name" of the autoscaling policy, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.delete`, the resource name + // of the policy has the following format: + // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + // + // * For `projects.locations.autoscalingPolicies.delete`, the resource name + // of the policy has the following format: + // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; +} + +// A request to list autoscaling policies in a project. +message ListAutoscalingPoliciesRequest { + // Required. 
The "resource name" of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.autoscalingPolicies.list`, the resource name + // of the region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.autoscalingPolicies.list`, the resource name + // of the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/AutoscalingPolicy" + } + ]; + + // Optional. The maximum number of results to return in each response. + // Must be less than or equal to 1000. Defaults to 100. + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The page token, returned by a previous call, to request the + // next page of results. + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// A response to a request to list autoscaling policies in a project. +message ListAutoscalingPoliciesResponse { + // Output only. Autoscaling policies list. + repeated AutoscalingPolicy policies = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. This token is included in the response if there are more + // results to fetch. + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; +} diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/batches.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/batches.proto new file mode 100644 index 00000000..eafb4e35 --- /dev/null +++ b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/batches.proto @@ -0,0 +1,372 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.dataproc.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/dataproc/v1/shared.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; +option java_multiple_files = true; +option java_outer_classname = "BatchesProto"; +option java_package = "com.google.cloud.dataproc.v1"; + +// The BatchController provides methods to manage batch workloads. +service BatchController { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a batch workload that executes asynchronously. + rpc CreateBatch(CreateBatchRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/{parent=projects/*/locations/*}/batches" + body: "batch" + }; + option (google.api.method_signature) = "parent,batch,batch_id"; + option (google.longrunning.operation_info) = { + response_type: "Batch" + metadata_type: "google.cloud.dataproc.v1.BatchOperationMetadata" + }; + } + + // Gets the batch workload resource representation. 
+ rpc GetBatch(GetBatchRequest) returns (Batch) { + option (google.api.http) = { + get: "/v1/{name=projects/*/locations/*/batches/*}" + }; + option (google.api.method_signature) = "name"; + } + + // Lists batch workloads. + rpc ListBatches(ListBatchesRequest) returns (ListBatchesResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*/locations/*}/batches" + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes the batch workload resource. If the batch is not in terminal state, + // the delete fails and the response returns `FAILED_PRECONDITION`. + rpc DeleteBatch(DeleteBatchRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=projects/*/locations/*/batches/*}" + }; + option (google.api.method_signature) = "name"; + } +} + +// A request to create a batch workload. +message CreateBatchRequest { + // Required. The parent resource where this batch will be created. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/Batch" + } + ]; + + // Required. The batch to create. + Batch batch = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The ID to use for the batch, which will become the final component of + // the batch's resource name. + // + // This value must be 4-63 characters. Valid characters are `/[a-z][0-9]-/`. + string batch_id = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A unique ID used to identify the request. If the service + // receives two + // [CreateBatchRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateBatchRequest)s + // with the same request_id, the second request is ignored and the + // Operation that corresponds to the first Batch created and stored + // in the backend is returned. 
+ // + // Recommendation: Set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The value must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to get the resource representation for a batch workload. +message GetBatchRequest { + // Required. The name of the batch to retrieve. + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/Batch" + } + ]; +} + +// A request to list batch workloads in a project. +message ListBatchesRequest { + // Required. The parent, which owns this collection of batches. + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/Batch" + } + ]; + + // Optional. The maximum number of batches to return in each response. + // The service may return fewer than this value. + // The default page size is 20; the maximum page size is 1000. + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A page token received from a previous `ListBatches` call. + // Provide this token to retrieve the subsequent page. + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// A list of batch workloads. +message ListBatchesResponse { + // The batches from the specified collection. + repeated Batch batches = 1; + + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} + +// A request to delete a batch workload. +message DeleteBatchRequest { + // Required. The name of the batch resource to delete. 
+ string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/Batch" + } + ]; +} + +// A representation of a batch workload in the service. +message Batch { + option (google.api.resource) = { + type: "dataproc.googleapis.com/Batch" + pattern: "projects/{project}/locations/{location}/batches/{batch}" + }; + + // Historical state information. + message StateHistory { + // Output only. The state of the batch at this point in history. + State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Details about the state at this point in history. + string state_message = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The time when the batch entered the historical state. + google.protobuf.Timestamp state_start_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + } + + // The batch state. + enum State { + // The batch state is unknown. + STATE_UNSPECIFIED = 0; + + // The batch is created before running. + PENDING = 1; + + // The batch is running. + RUNNING = 2; + + // The batch is cancelling. + CANCELLING = 3; + + // The batch cancellation was successful. + CANCELLED = 4; + + // The batch completed successfully. + SUCCEEDED = 5; + + // The batch is no longer running due to an error. + FAILED = 6; + } + + // Output only. The resource name of the batch. + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. A batch UUID (Unique Universal Identifier). The service + // generates this value when it creates the batch. + string uuid = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The time when the batch was created. + google.protobuf.Timestamp create_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // The application/framework-specific portion of the batch configuration. + oneof batch_config { + // Optional. PySpark batch config. 
+ PySparkBatch pyspark_batch = 4 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. Spark batch config.
+ SparkBatch spark_batch = 5 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. SparkR batch config.
+ SparkRBatch spark_r_batch = 6 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. SparkSql batch config.
+ SparkSqlBatch spark_sql_batch = 7 [(google.api.field_behavior) = OPTIONAL];
+ }
+
+ // Output only. Runtime information about batch execution.
+ RuntimeInfo runtime_info = 8 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. The state of the batch.
+ State state = 9 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. Batch state details, such as a failure
+ // description if the state is `FAILED`.
+ string state_message = 10 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. The time when the batch entered a current state.
+ google.protobuf.Timestamp state_time = 11 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. The email address of the user who created the batch.
+ string creator = 12 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Optional. The labels to associate with this batch.
+ // Label **keys** must contain 1 to 63 characters, and must conform to
+ // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+ // Label **values** may be empty, but, if present, must contain 1 to 63
+ // characters, and must conform to [RFC
+ // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
+ // associated with a batch.
+ map<string, string> labels = 13 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. Runtime configuration for the batch execution.
+ RuntimeConfig runtime_config = 14 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. Environment configuration for the batch execution.
+ EnvironmentConfig environment_config = 15 [(google.api.field_behavior) = OPTIONAL];
+
+ // Output only. The resource name of the operation associated with this batch.
+ string operation = 16 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Historical state information for the batch. + repeated StateHistory state_history = 17 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// A configuration for running an +// [Apache +// PySpark](https://spark.apache.org/docs/latest/api/python/getting_started/quickstart.html) +// batch workload. +message PySparkBatch { + // Required. The HCFS URI of the main Python file to use as the Spark driver. Must + // be a .py file. + string main_python_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The arguments to pass to the driver. Do not include arguments + // that can be set as batch properties, such as `--conf`, since a collision + // can occur that causes an incorrect batch submission. + repeated string args = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. HCFS file URIs of Python files to pass to the PySpark + // framework. Supported file types: `.py`, `.egg`, and `.zip`. + repeated string python_file_uris = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. HCFS URIs of jar files to add to the classpath of the + // Spark driver and tasks. + repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. HCFS URIs of files to be placed in the working directory of + // each executor. + repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. HCFS URIs of archives to be extracted into the working directory + // of each executor. Supported file types: + // `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`. + repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; +} + +// A configuration for running an [Apache Spark](http://spark.apache.org/) +// batch workload. +message SparkBatch { + // The specification of the main method to call to drive the Spark + // workload. Specify either the jar file that contains the main class or the + // main class name. 
To pass both a main jar and a main class in that jar, add + // the jar to `jar_file_uris`, and then specify the main class + // name in `main_class`. + oneof driver { + // Optional. The HCFS URI of the jar file that contains the main class. + string main_jar_file_uri = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The name of the driver main class. The jar file that contains the class + // must be in the classpath or specified in `jar_file_uris`. + string main_class = 2 [(google.api.field_behavior) = OPTIONAL]; + } + + // Optional. The arguments to pass to the driver. Do not include arguments + // that can be set as batch properties, such as `--conf`, since a collision + // can occur that causes an incorrect batch submission. + repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. HCFS URIs of jar files to add to the classpath of the + // Spark driver and tasks. + repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. HCFS URIs of files to be placed in the working directory of + // each executor. + repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. HCFS URIs of archives to be extracted into the working directory + // of each executor. Supported file types: + // `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`. + repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; +} + +// A configuration for running an +// [Apache SparkR](https://spark.apache.org/docs/latest/sparkr.html) +// batch workload. +message SparkRBatch { + // Required. The HCFS URI of the main R file to use as the driver. + // Must be a `.R` or `.r` file. + string main_r_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The arguments to pass to the Spark driver. Do not include arguments + // that can be set as batch properties, such as `--conf`, since a collision + // can occur that causes an incorrect batch submission. 
+ repeated string args = 2 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. HCFS URIs of files to be placed in the working directory of
+ // each executor.
+ repeated string file_uris = 3 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. HCFS URIs of archives to be extracted into the working directory
+ // of each executor. Supported file types:
+ // `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`.
+ repeated string archive_uris = 4 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// A configuration for running
+// [Apache Spark SQL](http://spark.apache.org/sql/) queries as a batch workload.
+message SparkSqlBatch {
+ // Required. The HCFS URI of the script that contains Spark SQL queries to execute.
+ string query_file_uri = 1 [(google.api.field_behavior) = REQUIRED];
+
+ // Optional. Mapping of query variable names to values (equivalent to the
+ // Spark SQL command: `SET name="value";`).
+ map<string, string> query_variables = 2 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
+ repeated string jar_file_uris = 3 [(google.api.field_behavior) = OPTIONAL];
+}
diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/clusters.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/clusters.proto
new file mode 100644
index 00000000..2b650c13
--- /dev/null
+++ b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/clusters.proto
@@ -0,0 +1,1280 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.dataproc.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/dataproc/v1/shared.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/field_mask.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; +option java_multiple_files = true; +option java_outer_classname = "ClustersProto"; +option java_package = "com.google.cloud.dataproc.v1"; +option (google.api.resource_definition) = { + type: "container.googleapis.com/Cluster" + pattern: "projects/{project}/locations/{location}/clusters/{cluster}" +}; +option (google.api.resource_definition) = { + type: "metastore.googleapis.com/Service" + pattern: "projects/{project}/locations/{location}/services/{service}" +}; + +// The ClusterControllerService provides methods to manage clusters +// of Compute Engine instances. +service ClusterController { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates a cluster in a project. The returned + // [Operation.metadata][google.longrunning.Operation.metadata] will be + // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). 
+ rpc CreateCluster(CreateClusterRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/projects/{project_id}/regions/{region}/clusters" + body: "cluster" + }; + option (google.api.method_signature) = "project_id,region,cluster"; + option (google.longrunning.operation_info) = { + response_type: "Cluster" + metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" + }; + } + + // Updates a cluster in a project. The returned + // [Operation.metadata][google.longrunning.Operation.metadata] will be + // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + // The cluster must be in a [`RUNNING`][google.cloud.dataproc.v1.ClusterStatus.State] state or an error + // is returned. + rpc UpdateCluster(UpdateClusterRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + patch: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + body: "cluster" + }; + option (google.api.method_signature) = "project_id,region,cluster_name,cluster,update_mask"; + option (google.longrunning.operation_info) = { + response_type: "Cluster" + metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" + }; + } + + // Stops a cluster in a project. + rpc StopCluster(StopClusterRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:stop" + body: "*" + }; + option (google.longrunning.operation_info) = { + response_type: "Cluster" + metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" + }; + } + + // Starts a cluster in a project. 
+ rpc StartCluster(StartClusterRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:start" + body: "*" + }; + option (google.longrunning.operation_info) = { + response_type: "Cluster" + metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" + }; + } + + // Deletes a cluster in a project. The returned + // [Operation.metadata][google.longrunning.Operation.metadata] will be + // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + rpc DeleteCluster(DeleteClusterRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + delete: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + }; + option (google.api.method_signature) = "project_id,region,cluster_name"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" + }; + } + + // Gets the resource representation for a cluster in a project. + rpc GetCluster(GetClusterRequest) returns (Cluster) { + option (google.api.http) = { + get: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" + }; + option (google.api.method_signature) = "project_id,region,cluster_name"; + } + + // Lists all regions/{region}/clusters in a project alphabetically. + rpc ListClusters(ListClustersRequest) returns (ListClustersResponse) { + option (google.api.http) = { + get: "/v1/projects/{project_id}/regions/{region}/clusters" + }; + option (google.api.method_signature) = "project_id,region"; + option (google.api.method_signature) = "project_id,region,filter"; + } + + // Gets cluster diagnostic information. 
The returned + // [Operation.metadata][google.longrunning.Operation.metadata] will be + // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + // After the operation completes, + // [Operation.response][google.longrunning.Operation.response] + // contains + // [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults). + rpc DiagnoseCluster(DiagnoseClusterRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose" + body: "*" + }; + option (google.api.method_signature) = "project_id,region,cluster_name"; + option (google.longrunning.operation_info) = { + response_type: "DiagnoseClusterResults" + metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" + }; + } +} + +// Describes the identifying information, config, and status of +// a Dataproc cluster +message Cluster { + // Required. The Google Cloud Platform project ID that the cluster belongs to. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The cluster name. Cluster names within a project must be + // unique. Names of deleted clusters can be reused. + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The cluster config for a cluster of Compute Engine Instances. + // Note that Dataproc may set default values, and values may change + // when clusters are updated. + ClusterConfig config = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The virtual cluster config, used when creating a Dataproc cluster that + // does not directly control the underlying compute resources, for example, + // when creating a [Dataproc-on-GKE + // cluster](https://cloud.google.com/dataproc/docs/concepts/jobs/dataproc-gke#create-a-dataproc-on-gke-cluster). 
+ // Note that Dataproc may set default values, and values may change when
+ // clusters are updated. Exactly one of config or virtualClusterConfig must be
+ // specified.
+ VirtualClusterConfig virtual_cluster_config = 10 [(google.api.field_behavior) = OPTIONAL];
+
+ // Optional. The labels to associate with this cluster.
+ // Label **keys** must contain 1 to 63 characters, and must conform to
+ // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt).
+ // Label **values** may be empty, but, if present, must contain 1 to 63
+ // characters, and must conform to [RFC
+ // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be
+ // associated with a cluster.
+ map<string, string> labels = 8 [(google.api.field_behavior) = OPTIONAL];
+
+ // Output only. Cluster status.
+ ClusterStatus status = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. The previous cluster status.
+ repeated ClusterStatus status_history = 7 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. A cluster UUID (Unique Universal Identifier). Dataproc
+ // generates this value when it creates the cluster.
+ string cluster_uuid = 6 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Output only. Contains cluster daemon metrics such as HDFS and YARN stats.
+ //
+ // **Beta Feature**: This report is available for testing purposes only. It
+ // may be changed before final release.
+ ClusterMetrics metrics = 9 [(google.api.field_behavior) = OUTPUT_ONLY];
+}
+
+// The cluster config.
+message ClusterConfig {
+ // Optional. A Cloud Storage bucket used to stage job
+ // dependencies, config files, and job driver console output.
+ // If you do not specify a staging bucket, Cloud + // Dataproc will determine a Cloud Storage location (US, + // ASIA, or EU) for your cluster's staging bucket according to the + // Compute Engine zone where your cluster is deployed, and then create + // and manage this project-level, per-location bucket (see + // [Dataproc staging and temp + // buckets](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)). + // **This field requires a Cloud Storage bucket name, not a `gs://...` URI to + // a Cloud Storage bucket.** + string config_bucket = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data, + // such as Spark and MapReduce history files. + // If you do not specify a temp bucket, + // Dataproc will determine a Cloud Storage location (US, + // ASIA, or EU) for your cluster's temp bucket according to the + // Compute Engine zone where your cluster is deployed, and then create + // and manage this project-level, per-location bucket. The default bucket has + // a TTL of 90 days, but you can use any TTL (or none) if you specify a + // bucket (see + // [Dataproc staging and temp + // buckets](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)). + // **This field requires a Cloud Storage bucket name, not a `gs://...` URI to + // a Cloud Storage bucket.** + string temp_bucket = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The shared Compute Engine config settings for + // all instances in a cluster. + GceClusterConfig gce_cluster_config = 8 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Compute Engine config settings for + // the cluster's master instance. + InstanceGroupConfig master_config = 9 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Compute Engine config settings for + // the cluster's worker instances. 
+ InstanceGroupConfig worker_config = 10 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Compute Engine config settings for + // a cluster's secondary worker instances + InstanceGroupConfig secondary_worker_config = 12 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The config settings for cluster software. + SoftwareConfig software_config = 13 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Commands to execute on each node after config is + // completed. By default, executables are run on master and all worker nodes. + // You can test a node's `role` metadata to run an executable on + // a master or worker node, as shown below using `curl` (you can also use + // `wget`): + // + // ROLE=$(curl -H Metadata-Flavor:Google + // http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) + // if [[ "${ROLE}" == 'Master' ]]; then + // ... master specific actions ... + // else + // ... worker specific actions ... + // fi + repeated NodeInitializationAction initialization_actions = 11 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Encryption settings for the cluster. + EncryptionConfig encryption_config = 15 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Autoscaling config for the policy associated with the cluster. + // Cluster does not autoscale if this field is unset. + AutoscalingConfig autoscaling_config = 18 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Security settings for the cluster. + SecurityConfig security_config = 16 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Lifecycle setting for the cluster. + LifecycleConfig lifecycle_config = 17 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Port/endpoint configuration for this cluster + EndpointConfig endpoint_config = 19 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Metastore configuration. 
+ MetastoreConfig metastore_config = 20 [(google.api.field_behavior) = OPTIONAL]; +} + +// Dataproc cluster config for a cluster that does not directly control the +// underlying compute resources, such as a [Dataproc-on-GKE +// cluster](https://cloud.google.com/dataproc/docs/concepts/jobs/dataproc-gke#create-a-dataproc-on-gke-cluster). +message VirtualClusterConfig { + // Optional. A Storage bucket used to stage job + // dependencies, config files, and job driver console output. + // If you do not specify a staging bucket, Cloud + // Dataproc will determine a Cloud Storage location (US, + // ASIA, or EU) for your cluster's staging bucket according to the + // Compute Engine zone where your cluster is deployed, and then create + // and manage this project-level, per-location bucket (see + // [Dataproc staging and temp + // buckets](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)). + // **This field requires a Cloud Storage bucket name, not a `gs://...` URI to + // a Cloud Storage bucket.** + string staging_bucket = 1 [(google.api.field_behavior) = OPTIONAL]; + + oneof infrastructure_config { + // Required. The configuration for running the Dataproc cluster on Kubernetes. + KubernetesClusterConfig kubernetes_cluster_config = 6 [(google.api.field_behavior) = REQUIRED]; + } + + // Optional. Configuration of auxiliary services used by this cluster. + AuxiliaryServicesConfig auxiliary_services_config = 7 [(google.api.field_behavior) = OPTIONAL]; +} + +// Auxiliary services configuration for a Cluster. +message AuxiliaryServicesConfig { + // Optional. The Hive Metastore configuration for this workload. + MetastoreConfig metastore_config = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Spark History Server configuration for the workload. 
+ SparkHistoryServerConfig spark_history_server_config = 2 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// Endpoint config for this cluster
+message EndpointConfig {
+ // Output only. The map of port descriptions to URLs. Will only be populated
+ // if enable_http_port_access is true.
+ map<string, string> http_ports = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+ // Optional. If true, enable http access to specific ports on the cluster
+ // from external sources. Defaults to false.
+ bool enable_http_port_access = 2 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// Autoscaling Policy config associated with the cluster.
+message AutoscalingConfig {
+ // Optional. The autoscaling policy used by the cluster.
+ //
+ // Only resource names including projectid and location (region) are valid.
+ // Examples:
+ //
+ // * `https://www.googleapis.com/compute/v1/projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]`
+ // * `projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]`
+ //
+ // Note that the policy must be in the same project and Dataproc region.
+ string policy_uri = 1 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// Encryption settings for the cluster.
+message EncryptionConfig {
+ // Optional. The Cloud KMS key name to use for PD disk encryption for all
+ // instances in the cluster.
+ string gce_pd_kms_key_name = 1 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// Common config settings for resources of Compute Engine cluster
+// instances, applicable to all instances in the cluster.
+message GceClusterConfig {
+ // `PrivateIpv6GoogleAccess` controls whether and how Dataproc cluster nodes
+ // can communicate with Google Services through gRPC over IPv6.
+ // These values are directly mapped to corresponding values in the
+ // [Compute Engine Instance
+ // fields](https://cloud.google.com/compute/docs/reference/rest/v1/instances).
+ enum PrivateIpv6GoogleAccess { + // If unspecified, Compute Engine default behavior will apply, which + // is the same as [INHERIT_FROM_SUBNETWORK][google.cloud.dataproc.v1.GceClusterConfig.PrivateIpv6GoogleAccess.INHERIT_FROM_SUBNETWORK]. + PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED = 0; + + // Private access to and from Google Services configuration + // inherited from the subnetwork configuration. This is the + // default Compute Engine behavior. + INHERIT_FROM_SUBNETWORK = 1; + + // Enables outbound private IPv6 access to Google Services from the Dataproc + // cluster. + OUTBOUND = 2; + + // Enables bidirectional private IPv6 access between Google Services and the + // Dataproc cluster. + BIDIRECTIONAL = 3; + } + + // Optional. The zone where the Compute Engine cluster will be located. + // On a create request, it is required in the "global" region. If omitted + // in a non-global Dataproc region, the service will pick a zone in the + // corresponding Compute Engine region. On a get request, zone will + // always be present. + // + // A full URL, partial URI, or short name are valid. Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]` + // * `projects/[project_id]/zones/[zone]` + // * `us-central1-f` + string zone_uri = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Compute Engine network to be used for machine + // communications. Cannot be specified with subnetwork_uri. If neither + // `network_uri` nor `subnetwork_uri` is specified, the "default" network of + // the project is used, if it exists. Cannot be a "Custom Subnet Network" (see + // [Using Subnetworks](https://cloud.google.com/compute/docs/subnetworks) for + // more information). + // + // A full URL, partial URI, or short name are valid. 
Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/global/default` + // * `projects/[project_id]/regions/global/default` + // * `default` + string network_uri = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Compute Engine subnetwork to be used for machine + // communications. Cannot be specified with network_uri. + // + // A full URL, partial URI, or short name are valid. Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0` + // * `projects/[project_id]/regions/us-east1/subnetworks/sub0` + // * `sub0` + string subnetwork_uri = 6 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If true, all instances in the cluster will only have internal IP + // addresses. By default, clusters are not restricted to internal IP + // addresses, and will have ephemeral external IP addresses assigned to each + // instance. This `internal_ip_only` restriction can only be enabled for + // subnetwork enabled networks, and all off-cluster dependencies must be + // configured to be accessible without external IP addresses. + bool internal_ip_only = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The type of IPv6 access for a cluster. + PrivateIpv6GoogleAccess private_ipv6_google_access = 12 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The [Dataproc service + // account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc) + // (also see [VM Data Plane + // identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity)) + // used by Dataproc cluster VM instances to access Google Cloud Platform + // services. + // + // If not specified, the + // [Compute Engine default service + // account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) + // is used. 
+  string service_account = 8 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The URIs of service account scopes to be included in
+  // Compute Engine instances. The following base set of scopes is always
+  // included:
+  //
+  // * https://www.googleapis.com/auth/cloud.useraccounts.readonly
+  // * https://www.googleapis.com/auth/devstorage.read_write
+  // * https://www.googleapis.com/auth/logging.write
+  //
+  // If no scopes are specified, the following defaults are also provided:
+  //
+  // * https://www.googleapis.com/auth/bigquery
+  // * https://www.googleapis.com/auth/bigtable.admin.table
+  // * https://www.googleapis.com/auth/bigtable.data
+  // * https://www.googleapis.com/auth/devstorage.full_control
+  repeated string service_account_scopes = 3 [(google.api.field_behavior) = OPTIONAL];
+
+  // The Compute Engine tags to add to all instances (see [Tagging
+  // instances](https://cloud.google.com/compute/docs/label-or-tag-resources#tags)).
+  repeated string tags = 4;
+
+  // The Compute Engine metadata entries to add to all instances (see
+  // [Project and instance
+  // metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)).
+  map<string, string> metadata = 5;
+
+  // Optional. Reservation Affinity for consuming Zonal reservation.
+  ReservationAffinity reservation_affinity = 11 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. Node Group Affinity for sole-tenant clusters.
+  NodeGroupAffinity node_group_affinity = 13 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. Shielded Instance Config for clusters using [Compute Engine Shielded
+  // VMs](https://cloud.google.com/security/shielded-cloud/shielded-vm).
+  ShieldedInstanceConfig shielded_instance_config = 14 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. Confidential Instance Config for clusters using [Confidential
+  // VMs](https://cloud.google.com/compute/confidential-vm/docs).
+ ConfidentialInstanceConfig confidential_instance_config = 15 [(google.api.field_behavior) = OPTIONAL]; +} + +// Node Group Affinity for clusters using sole-tenant node groups. +message NodeGroupAffinity { + // Required. The URI of a + // sole-tenant [node group + // resource](https://cloud.google.com/compute/docs/reference/rest/v1/nodeGroups) + // that the cluster will be created on. + // + // A full URL, partial URI, or node group name are valid. Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1` + // * `projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1` + // * `node-group-1` + string node_group_uri = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// Shielded Instance Config for clusters using [Compute Engine Shielded +// VMs](https://cloud.google.com/security/shielded-cloud/shielded-vm). +message ShieldedInstanceConfig { + // Optional. Defines whether instances have Secure Boot enabled. + bool enable_secure_boot = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Defines whether instances have the vTPM enabled. + bool enable_vtpm = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Defines whether instances have integrity monitoring enabled. + bool enable_integrity_monitoring = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// Confidential Instance Config for clusters using [Confidential +// VMs](https://cloud.google.com/compute/confidential-vm/docs) +message ConfidentialInstanceConfig { + // Optional. Defines whether the instance should have confidential compute enabled. + bool enable_confidential_compute = 1 [(google.api.field_behavior) = OPTIONAL]; +} + +// The config settings for Compute Engine resources in +// an instance group, such as a master or worker group. +message InstanceGroupConfig { + // Controls the use of + // [preemptible instances] + // (https://cloud.google.com/compute/docs/instances/preemptible) + // within the group. 
+ enum Preemptibility { + // Preemptibility is unspecified, the system will choose the + // appropriate setting for each instance group. + PREEMPTIBILITY_UNSPECIFIED = 0; + + // Instances are non-preemptible. + // + // This option is allowed for all instance groups and is the only valid + // value for Master and Worker instance groups. + NON_PREEMPTIBLE = 1; + + // Instances are preemptible. + // + // This option is allowed only for secondary worker groups. + PREEMPTIBLE = 2; + } + + // Optional. The number of VM instances in the instance group. + // For [HA + // cluster](/dataproc/docs/concepts/configuring-clusters/high-availability) + // [master_config](#FIELDS.master_config) groups, **must be set to 3**. + // For standard cluster [master_config](#FIELDS.master_config) groups, + // **must be set to 1**. + int32 num_instances = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Output only. The list of instance names. Dataproc derives the names + // from `cluster_name`, `num_instances`, and the instance group. + repeated string instance_names = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. The Compute Engine image resource used for cluster instances. + // + // The URI can represent an image or image family. + // + // Image examples: + // + // * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id]` + // * `projects/[project_id]/global/images/[image-id]` + // * `image-id` + // + // Image family examples. Dataproc will use the most recent + // image from the family: + // + // * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name]` + // * `projects/[project_id]/global/images/family/[custom-image-family-name]` + // + // If the URI is unspecified, it will be inferred from + // `SoftwareConfig.image_version` or the system default. + string image_uri = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. 
The Compute Engine machine type used for cluster instances. + // + // A full URL, partial URI, or short name are valid. Examples: + // + // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` + // * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` + // * `n1-standard-2` + // + // **Auto Zone Exception**: If you are using the Dataproc + // [Auto Zone + // Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) + // feature, you must use the short name of the machine type + // resource, for example, `n1-standard-2`. + string machine_type_uri = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Disk option config settings. + DiskConfig disk_config = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Output only. Specifies that this instance group contains preemptible + // instances. + bool is_preemptible = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. Specifies the preemptibility of the instance group. + // + // The default value for master and worker groups is + // `NON_PREEMPTIBLE`. This default cannot be changed. + // + // The default value for secondary instances is + // `PREEMPTIBLE`. + Preemptibility preemptibility = 10 [(google.api.field_behavior) = OPTIONAL]; + + // Output only. The config for Compute Engine Instance Group + // Manager that manages this group. + // This is only used for preemptible instance groups. + ManagedGroupConfig managed_group_config = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. The Compute Engine accelerator configuration for these + // instances. + repeated AcceleratorConfig accelerators = 8 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Specifies the minimum cpu platform for the Instance Group. + // See [Dataproc -> Minimum CPU + // Platform](https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). 
+ string min_cpu_platform = 9 [(google.api.field_behavior) = OPTIONAL]; +} + +// Specifies the resources used to actively manage an instance group. +message ManagedGroupConfig { + // Output only. The name of the Instance Template used for the Managed + // Instance Group. + string instance_template_name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The name of the Instance Group Manager for this group. + string instance_group_manager_name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Specifies the type and number of accelerator cards attached to the instances +// of an instance. See [GPUs on Compute +// Engine](https://cloud.google.com/compute/docs/gpus/). +message AcceleratorConfig { + // Full URL, partial URI, or short name of the accelerator type resource to + // expose to this instance. See + // [Compute Engine + // AcceleratorTypes](https://cloud.google.com/compute/docs/reference/beta/acceleratorTypes). + // + // Examples: + // + // * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` + // * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` + // * `nvidia-tesla-k80` + // + // **Auto Zone Exception**: If you are using the Dataproc + // [Auto Zone + // Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) + // feature, you must use the short name of the accelerator type + // resource, for example, `nvidia-tesla-k80`. + string accelerator_type_uri = 1; + + // The number of the accelerator cards of this type exposed to this instance. + int32 accelerator_count = 2; +} + +// Specifies the config of disk options for a group of VM instances. +message DiskConfig { + // Optional. Type of the boot disk (default is "pd-standard"). 
+ // Valid values: "pd-balanced" (Persistent Disk Balanced Solid State Drive), + // "pd-ssd" (Persistent Disk Solid State Drive), + // or "pd-standard" (Persistent Disk Hard Disk Drive). + // See [Disk types](https://cloud.google.com/compute/docs/disks#disk-types). + string boot_disk_type = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Size in GB of the boot disk (default is 500GB). + int32 boot_disk_size_gb = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Number of attached SSDs, from 0 to 4 (default is 0). + // If SSDs are not attached, the boot disk is used to store runtime logs and + // [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. + // If one or more SSDs are attached, this runtime bulk + // data is spread across them, and the boot disk contains only basic + // config and installed binaries. + int32 num_local_ssds = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Interface type of local SSDs (default is "scsi"). + // Valid values: "scsi" (Small Computer System Interface), + // "nvme" (Non-Volatile Memory Express). + // See [local SSD + // performance](https://cloud.google.com/compute/docs/disks/local-ssd#performance). + string local_ssd_interface = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Specifies an executable to run on a fully configured node and a +// timeout period for executable completion. +message NodeInitializationAction { + // Required. Cloud Storage URI of executable file. + string executable_file = 1 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Amount of time executable has to complete. Default is + // 10 minutes (see JSON representation of + // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). + // + // Cluster creation fails with an explanatory error message (the + // name of the executable that caused the error and the exceeded timeout + // period) if the executable is not completed at end of the timeout period. 
+ google.protobuf.Duration execution_timeout = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// The status of a cluster and its instances. +message ClusterStatus { + // The cluster state. + enum State { + // The cluster state is unknown. + UNKNOWN = 0; + + // The cluster is being created and set up. It is not ready for use. + CREATING = 1; + + // The cluster is currently running and healthy. It is ready for use. + // + // **Note:** The cluster state changes from "creating" to "running" status + // after the master node(s), first two primary worker nodes (and the last + // primary worker node if primary workers > 2) are running. + RUNNING = 2; + + // The cluster encountered an error. It is not ready for use. + ERROR = 3; + + // The cluster has encountered an error while being updated. Jobs can + // be submitted to the cluster, but the cluster cannot be updated. + ERROR_DUE_TO_UPDATE = 9; + + // The cluster is being deleted. It cannot be used. + DELETING = 4; + + // The cluster is being updated. It continues to accept and process jobs. + UPDATING = 5; + + // The cluster is being stopped. It cannot be used. + STOPPING = 6; + + // The cluster is currently stopped. It is not ready for use. + STOPPED = 7; + + // The cluster is being started. It is not ready for use. + STARTING = 8; + } + + // The cluster substate. + enum Substate { + // The cluster substate is unknown. + UNSPECIFIED = 0; + + // The cluster is known to be in an unhealthy state + // (for example, critical daemons are not running or HDFS capacity is + // exhausted). + // + // Applies to RUNNING state. + UNHEALTHY = 1; + + // The agent-reported status is out of date (may occur if + // Dataproc loses communication with Agent). + // + // Applies to RUNNING state. + STALE_STATUS = 2; + } + + // Output only. The cluster's state. + State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. Output only. Details of cluster's state. 
+ string detail = 2 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; + + // Output only. Time when this state was entered (see JSON representation of + // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)). + google.protobuf.Timestamp state_start_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Additional state information that includes + // status reported by the agent. + Substate substate = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Security related configuration, including encryption, Kerberos, etc. +message SecurityConfig { + // Optional. Kerberos related configuration. + KerberosConfig kerberos_config = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Identity related configuration, including service account based + // secure multi-tenancy user mappings. + IdentityConfig identity_config = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// Specifies Kerberos related configuration. +message KerberosConfig { + // Optional. Flag to indicate whether to Kerberize the cluster (default: false). Set + // this field to true to enable Kerberos on a cluster. + bool enable_kerberos = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the root + // principal password. + string root_principal_password_uri = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The uri of the KMS key used to encrypt various sensitive + // files. + string kms_key_uri = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of the keystore file used for SSL + // encryption. If not provided, Dataproc will provide a self-signed + // certificate. + string keystore_uri = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of the truststore file used for SSL + // encryption. If not provided, Dataproc will provide a self-signed + // certificate. 
+ string truststore_uri = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the + // password to the user provided keystore. For the self-signed certificate, + // this password is generated by Dataproc. + string keystore_password_uri = 6 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the + // password to the user provided key. For the self-signed certificate, this + // password is generated by Dataproc. + string key_password_uri = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the + // password to the user provided truststore. For the self-signed certificate, + // this password is generated by Dataproc. + string truststore_password_uri = 8 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The remote realm the Dataproc on-cluster KDC will trust, should + // the user enable cross realm trust. + string cross_realm_trust_realm = 9 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The KDC (IP or hostname) for the remote trusted realm in a cross + // realm trust relationship. + string cross_realm_trust_kdc = 10 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The admin server (IP or hostname) for the remote trusted realm in + // a cross realm trust relationship. + string cross_realm_trust_admin_server = 11 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the + // shared password between the on-cluster Kerberos realm and the remote + // trusted realm, in a cross realm trust relationship. + string cross_realm_trust_shared_password_uri = 12 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud Storage URI of a KMS encrypted file containing the + // master key of the KDC database. 
+  string kdc_db_key_uri = 13 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The lifetime of the ticket granting ticket, in hours.
+  // If not specified, or user specifies 0, then default value 10
+  // will be used.
+  int32 tgt_lifetime_hours = 14 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The name of the on-cluster Kerberos realm.
+  // If not specified, the uppercased domain of hostnames will be the realm.
+  string realm = 15 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// Identity related configuration, including service account based
+// secure multi-tenancy user mappings.
+message IdentityConfig {
+  // Required. Map of user to service account.
+  map<string, string> user_service_account_mapping = 1 [(google.api.field_behavior) = REQUIRED];
+}
+
+// Specifies the selection and config of software inside the cluster.
+message SoftwareConfig {
+  // Optional. The version of software inside the cluster. It must be one of the
+  // supported [Dataproc
+  // Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions),
+  // such as "1.2" (including a subminor version, such as "1.2.29"), or the
+  // ["preview"
+  // version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions).
+  // If unspecified, it defaults to the latest Debian version.
+  string image_version = 1 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The properties to set on daemon config files.
+  //
+  // Property keys are specified in `prefix:property` format, for example
+  // `core:hadoop.tmp.dir`.
The following are supported prefixes + // and their mappings: + // + // * capacity-scheduler: `capacity-scheduler.xml` + // * core: `core-site.xml` + // * distcp: `distcp-default.xml` + // * hdfs: `hdfs-site.xml` + // * hive: `hive-site.xml` + // * mapred: `mapred-site.xml` + // * pig: `pig.properties` + // * spark: `spark-defaults.conf` + // * yarn: `yarn-site.xml` + // + // For more information, see [Cluster + // properties](https://cloud.google.com/dataproc/docs/concepts/cluster-properties). + map properties = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The set of components to activate on the cluster. + repeated Component optional_components = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// Specifies the cluster auto-delete schedule configuration. +message LifecycleConfig { + // Optional. The duration to keep the cluster alive while idling (when no jobs + // are running). Passing this threshold will cause the cluster to be + // deleted. Minimum value is 5 minutes; maximum value is 14 days (see JSON + // representation of + // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). + google.protobuf.Duration idle_delete_ttl = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Either the exact time the cluster should be deleted at or + // the cluster maximum age. + oneof ttl { + // Optional. The time when cluster will be auto-deleted (see JSON representation of + // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)). + google.protobuf.Timestamp auto_delete_time = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The lifetime duration of cluster. The cluster will be + // auto-deleted at the end of this period. Minimum value is 10 minutes; + // maximum value is 14 days (see JSON representation of + // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). 
+    google.protobuf.Duration auto_delete_ttl = 3 [(google.api.field_behavior) = OPTIONAL];
+  }
+
+  // Output only. The time when cluster became idle (most recent job finished)
+  // and became eligible for deletion due to idleness (see JSON representation
+  // of
+  // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)).
+  google.protobuf.Timestamp idle_start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY];
+}
+
+// Specifies a Metastore configuration.
+message MetastoreConfig {
+  // Required. Resource name of an existing Dataproc Metastore service.
+  //
+  // Example:
+  //
+  // * `projects/[project_id]/locations/[dataproc_region]/services/[service-name]`
+  string dataproc_metastore_service = 1 [
+    (google.api.field_behavior) = REQUIRED,
+    (google.api.resource_reference) = {
+      type: "metastore.googleapis.com/Service"
+    }
+  ];
+}
+
+// Contains cluster daemon metrics, such as HDFS and YARN stats.
+//
+// **Beta Feature**: This report is available for testing purposes only. It may
+// be changed before final release.
+message ClusterMetrics {
+  // The HDFS metrics.
+  map<string, int64> hdfs_metrics = 1;
+
+  // The YARN metrics.
+  map<string, int64> yarn_metrics = 2;
+}
+
+// A request to create a cluster.
+message CreateClusterRequest {
+  // Required. The ID of the Google Cloud Platform project that the cluster
+  // belongs to.
+  string project_id = 1 [(google.api.field_behavior) = REQUIRED];
+
+  // Required. The Dataproc region in which to handle the request.
+  string region = 3 [(google.api.field_behavior) = REQUIRED];
+
+  // Required. The cluster to create.
+  Cluster cluster = 2 [(google.api.field_behavior) = REQUIRED];
+
+  // Optional. A unique ID used to identify the request.
If the server receives two + // [CreateClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateClusterRequest)s + // with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend + // is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The ID must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Failure action when primary worker creation fails. + FailureAction action_on_failed_primary_workers = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to update a cluster. +message UpdateClusterRequest { + // Required. The ID of the Google Cloud Platform project the + // cluster belongs to. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Dataproc region in which to handle the request. + string region = 5 [(google.api.field_behavior) = REQUIRED]; + + // Required. The cluster name. + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The changes to the cluster. + Cluster cluster = 3 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Timeout for graceful YARN decomissioning. Graceful + // decommissioning allows removing nodes from the cluster without + // interrupting jobs in progress. Timeout specifies how long to wait for jobs + // in progress to finish before forcefully removing nodes (and potentially + // interrupting jobs). Default timeout is 0 (for forceful decommission), and + // the maximum allowed timeout is 1 day. (see JSON representation of + // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). 
+  //
+  // Only supported on Dataproc image versions 1.2 and higher.
+  google.protobuf.Duration graceful_decommission_timeout = 6 [(google.api.field_behavior) = OPTIONAL];
+
+  // Required. Specifies the path, relative to `Cluster`, of
+  // the field to update. For example, to change the number of workers
+  // in a cluster to 5, the `update_mask` parameter would be
+  // specified as `config.worker_config.num_instances`,
+  // and the `PATCH` request body would specify the new value, as follows:
+  //
+  //     {
+  //       "config":{
+  //         "workerConfig":{
+  //           "numInstances":"5"
+  //         }
+  //       }
+  //     }
+  // Similarly, to change the number of preemptible workers in a cluster to 5,
+  // the `update_mask` parameter would be
+  // `config.secondary_worker_config.num_instances`, and the `PATCH` request
+  // body would be set as follows:
+  //
+  //     {
+  //       "config":{
+  //         "secondaryWorkerConfig":{
+  //           "numInstances":"5"
+  //         }
+  //       }
+  //     }
+  // <strong>Note:</strong> Currently, only the following fields can be updated:
+  //
+  //  <table>
+  //  <tbody>
+  //  <tr>
+  //  <td><strong>Mask</strong></td>
+  //  <td><strong>Purpose</strong></td>
+  //  </tr>
+  //  <tr>
+  //  <td><strong><em>labels</em></strong></td>
+  //  <td>Update labels</td>
+  //  </tr>
+  //  <tr>
+  //  <td><strong><em>config.worker_config.num_instances</em></strong></td>
+  //  <td>Resize primary worker group</td>
+  //  </tr>
+  //  <tr>
+  //  <td><strong><em>config.secondary_worker_config.num_instances</em></strong></td>
+  //  <td>Resize secondary worker group</td>
+  //  </tr>
+  //  <tr>
+  //  <td>config.autoscaling_config.policy_uri</td><td>Use, stop using, or
+  //  change autoscaling policies</td>
+  //  </tr>
+  //  </tbody>
+  //  </table>
+ google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; + + // Optional. A unique ID used to identify the request. If the server + // receives two + // [UpdateClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.UpdateClusterRequest)s + // with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the + // backend is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The ID must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 7 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to stop a cluster. +message StopClusterRequest { + // Required. The ID of the Google Cloud Platform project the + // cluster belongs to. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Dataproc region in which to handle the request. + string region = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The cluster name. + string cluster_name = 3 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Specifying the `cluster_uuid` means the RPC will fail + // (with error NOT_FOUND) if a cluster with the specified UUID does not exist. + string cluster_uuid = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A unique ID used to identify the request. If the server + // receives two + // [StopClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StopClusterRequest)s + // with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the + // backend is returned. 
+ // + // Recommendation: Set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The ID must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to start a cluster. +message StartClusterRequest { + // Required. The ID of the Google Cloud Platform project the + // cluster belongs to. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Dataproc region in which to handle the request. + string region = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The cluster name. + string cluster_name = 3 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Specifying the `cluster_uuid` means the RPC will fail + // (with error NOT_FOUND) if a cluster with the specified UUID does not exist. + string cluster_uuid = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A unique ID used to identify the request. If the server + // receives two + // [StartClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StartClusterRequest)s + // with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the + // backend is returned. + // + // Recommendation: Set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The ID must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to delete a cluster. +message DeleteClusterRequest { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. 
+ string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Dataproc region in which to handle the request. + string region = 3 [(google.api.field_behavior) = REQUIRED]; + + // Required. The cluster name. + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Specifying the `cluster_uuid` means the RPC should fail + // (with error NOT_FOUND) if cluster with specified UUID does not exist. + string cluster_uuid = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A unique ID used to identify the request. If the server + // receives two + // [DeleteClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.DeleteClusterRequest)s + // with the same id, then the second request will be ignored and the + // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the + // backend is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The ID must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; +} + +// Request to get the resource representation for a cluster in a project. +message GetClusterRequest { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Dataproc region in which to handle the request. + string region = 3 [(google.api.field_behavior) = REQUIRED]; + + // Required. The cluster name. + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to list the clusters in a project. +message ListClustersRequest { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. 
+ string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Dataproc region in which to handle the request. + string region = 4 [(google.api.field_behavior) = REQUIRED]; + + // Optional. A filter constraining the clusters to list. Filters are + // case-sensitive and have the following syntax: + // + // field = value [AND [field = value]] ... + // + // where **field** is one of `status.state`, `clusterName`, or `labels.[KEY]`, + // and `[KEY]` is a label key. **value** can be `*` to match all values. + // `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, + // `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` + // contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` + // contains the `DELETING` and `ERROR` states. + // `clusterName` is the name of the cluster provided at creation time. + // Only the logical `AND` operator is supported; space-separated items are + // treated as having an implicit `AND` operator. + // + // Example filter: + // + // status.state = ACTIVE AND clusterName = mycluster + // AND labels.env = staging AND labels.starred = * + string filter = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The standard List page size. + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The standard List page token. + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// The list of all clusters in a project. +message ListClustersResponse { + // Output only. The clusters in the project. + repeated Cluster clusters = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. This token is included in the response if there are more + // results to fetch. To fetch additional results, provide this value as the + // `page_token` in a subsequent `ListClustersRequest`. + string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// A request to collect cluster diagnostic information. 
+message DiagnoseClusterRequest { + // Required. The ID of the Google Cloud Platform project that the cluster + // belongs to. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Dataproc region in which to handle the request. + string region = 3 [(google.api.field_behavior) = REQUIRED]; + + // Required. The cluster name. + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// The location of diagnostic output. +message DiagnoseClusterResults { + // Output only. The Cloud Storage URI of the diagnostic output. + // The output report is a plain text file with a summary of collected + // diagnostics. + string output_uri = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Reservation Affinity for consuming Zonal reservation. +message ReservationAffinity { + // Indicates whether to consume capacity from an reservation or not. + enum Type { + TYPE_UNSPECIFIED = 0; + + // Do not consume from any allocated capacity. + NO_RESERVATION = 1; + + // Consume any reservation available. + ANY_RESERVATION = 2; + + // Must consume from a specific reservation. Must specify key value fields + // for specifying the reservations. + SPECIFIC_RESERVATION = 3; + } + + // Optional. Type of reservation to consume + Type consume_reservation_type = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Corresponds to the label key of reservation resource. + string key = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Corresponds to the label values of reservation resource. 
+ repeated string values = 3 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/jobs.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/jobs.proto new file mode 100644 index 00000000..823a12cf --- /dev/null +++ b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/jobs.proto @@ -0,0 +1,924 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.dataproc.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; +option java_multiple_files = true; +option java_outer_classname = "JobsProto"; +option java_package = "com.google.cloud.dataproc.v1"; + +// The JobController provides methods to manage jobs. +service JobController { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Submits a job to a cluster. 
+ rpc SubmitJob(SubmitJobRequest) returns (Job) { + option (google.api.http) = { + post: "/v1/projects/{project_id}/regions/{region}/jobs:submit" + body: "*" + }; + option (google.api.method_signature) = "project_id,region,job"; + } + + // Submits job to a cluster. + rpc SubmitJobAsOperation(SubmitJobRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/projects/{project_id}/regions/{region}/jobs:submitAsOperation" + body: "*" + }; + option (google.api.method_signature) = "project_id, region, job"; + option (google.longrunning.operation_info) = { + response_type: "Job" + metadata_type: "JobMetadata" + }; + } + + // Gets the resource representation for a job in a project. + rpc GetJob(GetJobRequest) returns (Job) { + option (google.api.http) = { + get: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" + }; + option (google.api.method_signature) = "project_id,region,job_id"; + } + + // Lists regions/{region}/jobs in a project. + rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { + option (google.api.http) = { + get: "/v1/projects/{project_id}/regions/{region}/jobs" + }; + option (google.api.method_signature) = "project_id,region"; + option (google.api.method_signature) = "project_id,region,filter"; + } + + // Updates a job in a project. + rpc UpdateJob(UpdateJobRequest) returns (Job) { + option (google.api.http) = { + patch: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" + body: "job" + }; + } + + // Starts a job cancellation request. To access the job resource + // after cancellation, call + // [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) + // or + // [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). 
+  rpc CancelJob(CancelJobRequest) returns (Job) {
+    option (google.api.http) = {
+      post: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel"
+      body: "*"
+    };
+    option (google.api.method_signature) = "project_id,region,job_id";
+  }
+
+  // Deletes the job from the project. If the job is active, the delete fails,
+  // and the response returns `FAILED_PRECONDITION`.
+  rpc DeleteJob(DeleteJobRequest) returns (google.protobuf.Empty) {
+    option (google.api.http) = {
+      delete: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}"
+    };
+    option (google.api.method_signature) = "project_id,region,job_id";
+  }
+}
+
+// The runtime logging config of the job.
+message LoggingConfig {
+  // The Log4j level for job execution. When running an
+  // [Apache Hive](https://hive.apache.org/) job, Cloud
+  // Dataproc configures the Hive client to an equivalent verbosity level.
+  enum Level {
+    // Level is unspecified. Use default level for log4j.
+    LEVEL_UNSPECIFIED = 0;
+
+    // Use ALL level for log4j.
+    ALL = 1;
+
+    // Use TRACE level for log4j.
+    TRACE = 2;
+
+    // Use DEBUG level for log4j.
+    DEBUG = 3;
+
+    // Use INFO level for log4j.
+    INFO = 4;
+
+    // Use WARN level for log4j.
+    WARN = 5;
+
+    // Use ERROR level for log4j.
+    ERROR = 6;
+
+    // Use FATAL level for log4j.
+    FATAL = 7;
+
+    // Turn off log4j.
+    OFF = 8;
+  }
+
+  // The per-package log levels for the driver. This may include
+  // "root" package name to configure rootLogger.
+  // Examples:
+  //   'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG'
+  map<string, Level> driver_log_levels = 2;
+}
+
+// A Dataproc job for running
+// [Apache Hadoop
+// MapReduce](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html)
+// jobs on [Apache Hadoop
+// YARN](https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html).
+message HadoopJob {
+  // Required. Indicates the location of the driver's main class.
Specify + // either the jar file that contains the main class or the main class name. + // To specify both, add the jar file to `jar_file_uris`, and then specify + // the main class name in this property. + oneof driver { + // The HCFS URI of the jar file containing the main class. + // Examples: + // 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' + // 'hdfs:/tmp/test-samples/custom-wordcount.jar' + // 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar' + string main_jar_file_uri = 1; + + // The name of the driver's main class. The jar file containing the class + // must be in the default CLASSPATH or specified in `jar_file_uris`. + string main_class = 2; + } + + // Optional. The arguments to pass to the driver. Do not + // include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as + // job properties, since a collision may occur that causes an incorrect job + // submission. + repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Jar file URIs to add to the CLASSPATHs of the + // Hadoop driver and tasks. + repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied + // to the working directory of Hadoop drivers and distributed tasks. Useful + // for naively parallel tasks. + repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. HCFS URIs of archives to be extracted in the working directory of + // Hadoop drivers and tasks. Supported file types: + // .jar, .tar, .tar.gz, .tgz, or .zip. + repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A mapping of property names to values, used to configure Hadoop. + // Properties that conflict with values set by the Dataproc API may be + // overwritten. Can include properties set in /etc/hadoop/conf/*-site and + // classes in user code. 
+  map<string, string> properties = 7 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The runtime log config for job execution.
+  LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// A Dataproc job for running [Apache Spark](http://spark.apache.org/)
+// applications on YARN.
+message SparkJob {
+  // Required. The specification of the main method to call to drive the job.
+  // Specify either the jar file that contains the main class or the main class
+  // name. To pass both a main jar and a main class in that jar, add the jar to
+  // `CommonJob.jar_file_uris`, and then specify the main class name in
+  // `main_class`.
+  oneof driver {
+    // The HCFS URI of the jar file that contains the main class.
+    string main_jar_file_uri = 1;
+
+    // The name of the driver's main class. The jar file that contains the class
+    // must be in the default CLASSPATH or specified in `jar_file_uris`.
+    string main_class = 2;
+  }
+
+  // Optional. The arguments to pass to the driver. Do not include arguments,
+  // such as `--conf`, that can be set as job properties, since a collision may
+  // occur that causes an incorrect job submission.
+  repeated string args = 3 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
+  // Spark driver and tasks.
+  repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. HCFS URIs of files to be placed in the working directory of
+  // each executor. Useful for naively parallel tasks.
+  repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. HCFS URIs of archives to be extracted into the working directory
+  // of each executor. Supported file types:
+  // .jar, .tar, .tar.gz, .tgz, and .zip.
+  repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. A mapping of property names to values, used to configure Spark.
+  // Properties that conflict with values set by the Dataproc API may be
+  // overwritten. Can include properties set in
+  // /etc/spark/conf/spark-defaults.conf and classes in user code.
+  map<string, string> properties = 7 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The runtime log config for job execution.
+  LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// A Dataproc job for running
+// [Apache
+// PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html)
+// applications on YARN.
+message PySparkJob {
+  // Required. The HCFS URI of the main Python file to use as the driver. Must
+  // be a .py file.
+  string main_python_file_uri = 1 [(google.api.field_behavior) = REQUIRED];
+
+  // Optional. The arguments to pass to the driver. Do not include arguments,
+  // such as `--conf`, that can be set as job properties, since a collision may
+  // occur that causes an incorrect job submission.
+  repeated string args = 2 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. HCFS file URIs of Python files to pass to the PySpark
+  // framework. Supported file types: .py, .egg, and .zip.
+  repeated string python_file_uris = 3 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the
+  // Python driver and tasks.
+  repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. HCFS URIs of files to be placed in the working directory of
+  // each executor. Useful for naively parallel tasks.
+  repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. HCFS URIs of archives to be extracted into the working directory
+  // of each executor. Supported file types:
+  // .jar, .tar, .tar.gz, .tgz, and .zip.
+  repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. A mapping of property names to values, used to configure PySpark.
+  // Properties that conflict with values set by the Dataproc API may be
+  // overwritten. Can include properties set in
+  // /etc/spark/conf/spark-defaults.conf and classes in user code.
+  map<string, string> properties = 7 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The runtime log config for job execution.
+  LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// A list of queries to run on a cluster.
+message QueryList {
+  // Required. The queries to execute. You do not need to end a query expression
+  // with a semicolon. Multiple queries can be specified in one
+  // string by separating each with a semicolon. Here is an example of a
+  // Dataproc API snippet that uses a QueryList to specify a HiveJob:
+  //
+  //     "hiveJob": {
+  //       "queryList": {
+  //         "queries": [
+  //           "query1",
+  //           "query2",
+  //           "query3;query4",
+  //         ]
+  //       }
+  //     }
+  repeated string queries = 1 [(google.api.field_behavior) = REQUIRED];
+}
+
+// A Dataproc job for running [Apache Hive](https://hive.apache.org/)
+// queries on YARN.
+message HiveJob {
+  // Required. The sequence of Hive queries to execute, specified as either
+  // an HCFS file URI or a list of queries.
+  oneof queries {
+    // The HCFS URI of the script that contains Hive queries.
+    string query_file_uri = 1;
+
+    // A list of queries.
+    QueryList query_list = 2;
+  }
+
+  // Optional. Whether to continue executing queries if a query fails.
+  // The default value is `false`. Setting to `true` can be useful when
+  // executing independent parallel queries.
+  bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. Mapping of query variable names to values (equivalent to the
+  // Hive command: `SET name="value";`).
+  map<string, string> script_variables = 4 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. A mapping of property names and values, used to configure Hive.
+  // Properties that conflict with values set by the Dataproc API may be
+  // overwritten.
Can include properties set in /etc/hadoop/conf/*-site.xml,
+  // /etc/hive/conf/hive-site.xml, and classes in user code.
+  map<string, string> properties = 5 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. HCFS URIs of jar files to add to the CLASSPATH of the
+  // Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes
+  // and UDFs.
+  repeated string jar_file_uris = 6 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// A Dataproc job for running [Apache Spark
+// SQL](http://spark.apache.org/sql/) queries.
+message SparkSqlJob {
+  // Required. The sequence of Spark SQL queries to execute, specified as
+  // either an HCFS file URI or as a list of queries.
+  oneof queries {
+    // The HCFS URI of the script that contains SQL queries.
+    string query_file_uri = 1;
+
+    // A list of queries.
+    QueryList query_list = 2;
+  }
+
+  // Optional. Mapping of query variable names to values (equivalent to the
+  // Spark SQL command: SET `name="value";`).
+  map<string, string> script_variables = 3 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. A mapping of property names to values, used to configure
+  // Spark SQL's SparkConf. Properties that conflict with values set by the
+  // Dataproc API may be overwritten.
+  map<string, string> properties = 4 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH.
+  repeated string jar_file_uris = 56 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The runtime log config for job execution.
+  LoggingConfig logging_config = 6 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// A Dataproc job for running [Apache Pig](https://pig.apache.org/)
+// queries on YARN.
+message PigJob {
+  // Required. The sequence of Pig queries to execute, specified as an HCFS
+  // file URI or a list of queries.
+  oneof queries {
+    // The HCFS URI of the script that contains the Pig queries.
+    string query_file_uri = 1;
+
+    // A list of queries.
+    QueryList query_list = 2;
+  }
+
+  // Optional.
Whether to continue executing queries if a query fails.
+  // The default value is `false`. Setting to `true` can be useful when
+  // executing independent parallel queries.
+  bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. Mapping of query variable names to values (equivalent to the Pig
+  // command: `name=[value]`).
+  map<string, string> script_variables = 4 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. A mapping of property names to values, used to configure Pig.
+  // Properties that conflict with values set by the Dataproc API may be
+  // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml,
+  // /etc/pig/conf/pig.properties, and classes in user code.
+  map<string, string> properties = 5 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. HCFS URIs of jar files to add to the CLASSPATH of
+  // the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs.
+  repeated string jar_file_uris = 6 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The runtime log config for job execution.
+  LoggingConfig logging_config = 7 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// A Dataproc job for running
+// [Apache SparkR](https://spark.apache.org/docs/latest/sparkr.html)
+// applications on YARN.
+message SparkRJob {
+  // Required. The HCFS URI of the main R file to use as the driver.
+  // Must be a .R file.
+  string main_r_file_uri = 1 [(google.api.field_behavior) = REQUIRED];
+
+  // Optional. The arguments to pass to the driver. Do not include arguments,
+  // such as `--conf`, that can be set as job properties, since a collision may
+  // occur that causes an incorrect job submission.
+  repeated string args = 2 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. HCFS URIs of files to be placed in the working directory of
+  // each executor. Useful for naively parallel tasks.
+  repeated string file_uris = 3 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional.
HCFS URIs of archives to be extracted into the working directory
+  // of each executor. Supported file types:
+  // .jar, .tar, .tar.gz, .tgz, and .zip.
+  repeated string archive_uris = 4 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. A mapping of property names to values, used to configure SparkR.
+  // Properties that conflict with values set by the Dataproc API may be
+  // overwritten. Can include properties set in
+  // /etc/spark/conf/spark-defaults.conf and classes in user code.
+  map<string, string> properties = 5 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The runtime log config for job execution.
+  LoggingConfig logging_config = 6 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// A Dataproc job for running [Presto](https://prestosql.io/) queries.
+// **IMPORTANT**: The [Dataproc Presto Optional
+// Component](https://cloud.google.com/dataproc/docs/concepts/components/presto)
+// must be enabled when the cluster is created to submit a Presto job to the
+// cluster.
+message PrestoJob {
+  // Required. The sequence of Presto queries to execute, specified as
+  // either an HCFS file URI or as a list of queries.
+  oneof queries {
+    // The HCFS URI of the script that contains SQL queries.
+    string query_file_uri = 1;
+
+    // A list of queries.
+    QueryList query_list = 2;
+  }
+
+  // Optional. Whether to continue executing queries if a query fails.
+  // The default value is `false`. Setting to `true` can be useful when
+  // executing independent parallel queries.
+  bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The format in which query output will be displayed. See the
+  // Presto documentation for supported output formats
+  string output_format = 4 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. Presto client tags to attach to this query
+  repeated string client_tags = 5 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. A mapping of property names to values.
Used to set Presto
+  // [session properties](https://prestodb.io/docs/current/sql/set-session.html)
+  // Equivalent to using the --session flag in the Presto CLI
+  map<string, string> properties = 6 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The runtime log config for job execution.
+  LoggingConfig logging_config = 7 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// Dataproc job config.
+message JobPlacement {
+  // Required. The name of the cluster where the job will be submitted.
+  string cluster_name = 1 [(google.api.field_behavior) = REQUIRED];
+
+  // Output only. A cluster UUID generated by the Dataproc service when
+  // the job is submitted.
+  string cluster_uuid = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+  // Optional. Cluster labels to identify a cluster where the job will be submitted.
+  map<string, string> cluster_labels = 3 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// Dataproc job status.
+message JobStatus {
+  // The job state.
+  enum State {
+    // The job state is unknown.
+    STATE_UNSPECIFIED = 0;
+
+    // The job is pending; it has been submitted, but is not yet running.
+    PENDING = 1;
+
+    // Job has been received by the service and completed initial setup;
+    // it will soon be submitted to the cluster.
+    SETUP_DONE = 8;
+
+    // The job is running on the cluster.
+    RUNNING = 2;
+
+    // A CancelJob request has been received, but is pending.
+    CANCEL_PENDING = 3;
+
+    // Transient in-flight resources have been canceled, and the request to
+    // cancel the running job has been issued to the cluster.
+    CANCEL_STARTED = 7;
+
+    // The job cancellation was successful.
+    CANCELLED = 4;
+
+    // The job has completed successfully.
+    DONE = 5;
+
+    // The job has completed, but encountered an error.
+    ERROR = 6;
+
+    // Job attempt has failed. The detail field contains failure details for
+    // this attempt.
+    //
+    // Applies to restartable jobs only.
+    ATTEMPT_FAILURE = 9;
+  }
+
+  // The job substate.
+  enum Substate {
+    // The job substate is unknown.
+ UNSPECIFIED = 0; + + // The Job is submitted to the agent. + // + // Applies to RUNNING state. + SUBMITTED = 1; + + // The Job has been received and is awaiting execution (it may be waiting + // for a condition to be met). See the "details" field for the reason for + // the delay. + // + // Applies to RUNNING state. + QUEUED = 2; + + // The agent-reported status is out of date, which may be caused by a + // loss of communication between the agent and Dataproc. If the + // agent does not send a timely update, the job will fail. + // + // Applies to RUNNING state. + STALE_STATUS = 3; + } + + // Output only. A state message specifying the overall job state. + State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. Output only. Job state details, such as an error + // description if the state is ERROR. + string details = 2 [ + (google.api.field_behavior) = OUTPUT_ONLY, + (google.api.field_behavior) = OPTIONAL + ]; + + // Output only. The time when this state was entered. + google.protobuf.Timestamp state_start_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Additional state information, which includes + // status reported by the agent. + Substate substate = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Encapsulates the full scoping used to reference a job. +message JobReference { + // Optional. The ID of the Google Cloud Platform project that the job belongs to. If + // specified, must match the request project ID. + string project_id = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The job ID, which must be unique within the project. + // + // The ID must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), or hyphens (-). The maximum length is 100 characters. + // + // If not specified by the caller, the job ID will be provided by the server. + string job_id = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A YARN application created by a job. 
Application information is a subset of +// org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto. +// +// **Beta Feature**: This report is available for testing purposes only. It may +// be changed before final release. +message YarnApplication { + // The application state, corresponding to + // YarnProtos.YarnApplicationStateProto. + enum State { + // Status is unspecified. + STATE_UNSPECIFIED = 0; + + // Status is NEW. + NEW = 1; + + // Status is NEW_SAVING. + NEW_SAVING = 2; + + // Status is SUBMITTED. + SUBMITTED = 3; + + // Status is ACCEPTED. + ACCEPTED = 4; + + // Status is RUNNING. + RUNNING = 5; + + // Status is FINISHED. + FINISHED = 6; + + // Status is FAILED. + FAILED = 7; + + // Status is KILLED. + KILLED = 8; + } + + // Required. The application name. + string name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The application state. + State state = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The numerical progress of the application, from 1 to 100. + float progress = 3 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The HTTP URL of the ApplicationMaster, HistoryServer, or + // TimelineServer that provides application-specific information. The URL uses + // the internal hostname, and requires a proxy server for resolution and, + // possibly, access. + string tracking_url = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// A Dataproc job resource. +message Job { + // Optional. The fully qualified reference to the job, which can be used to + // obtain the equivalent REST path of the job resource. If this property + // is not specified when a job is created, the server generates a + // job_id. + JobReference reference = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. Job information, including how, when, and where to + // run the job. + JobPlacement placement = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The application/framework-specific portion of the job. 
+ oneof type_job { + // Optional. Job is a Hadoop job. + HadoopJob hadoop_job = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a Spark job. + SparkJob spark_job = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a PySpark job. + PySparkJob pyspark_job = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a Hive job. + HiveJob hive_job = 6 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a Pig job. + PigJob pig_job = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a SparkR job. + SparkRJob spark_r_job = 21 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a SparkSql job. + SparkSqlJob spark_sql_job = 12 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a Presto job. + PrestoJob presto_job = 23 [(google.api.field_behavior) = OPTIONAL]; + } + + // Output only. The job status. Additional application-specific + // status information may be contained in the type_job + // and yarn_applications fields. + JobStatus status = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The previous job status. + repeated JobStatus status_history = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The collection of YARN applications spun up by this job. + // + // **Beta** Feature: This report is available for testing purposes only. It + // may be changed before final release. + repeated YarnApplication yarn_applications = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. A URI pointing to the location of the stdout of the job's + // driver program. + string driver_output_resource_uri = 17 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. If present, the location of miscellaneous control files + // which may be used as part of job setup and handling. If not present, + // control files may be placed in the same location as `driver_output_uri`. 
+ string driver_control_files_uri = 15 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. The labels to associate with this job. + // Label **keys** must contain 1 to 63 characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // Label **values** may be empty, but, if present, must contain 1 to 63 + // characters, and must conform to [RFC + // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be + // associated with a job. + map labels = 18 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job scheduling configuration. + JobScheduling scheduling = 20 [(google.api.field_behavior) = OPTIONAL]; + + // Output only. A UUID that uniquely identifies a job within the project + // over time. This is in contrast to a user-settable reference.job_id that + // may be reused over time. + string job_uuid = 22 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Indicates whether the job is completed. If the value is `false`, + // the job is still in progress. If `true`, the job is completed, and + // `status.state` field will indicate if it was successful, failed, + // or cancelled. + bool done = 24 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Job scheduling options. +message JobScheduling { + // Optional. Maximum number of times per hour a driver may be restarted as + // a result of driver exiting with non-zero code before job is + // reported failed. + // + // A job may be reported as thrashing if driver exits with non-zero code + // 4 times within 10 minute window. + // + // Maximum value is 10. + // + // **Note:** Currently, this restartable job option is + // not supported in Dataproc + // [workflow + // template](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#adding_jobs_to_a_template) + // jobs. + int32 max_failures_per_hour = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. 
Maximum number of times in total a driver may be restarted as a result of + // driver exiting with non-zero code before job is reported failed. + // Maximum value is 240. + // + // **Note:** Currently, this restartable job option is + // not supported in Dataproc + // [workflow + // template](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#adding_jobs_to_a_template) + // jobs. + int32 max_failures_total = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to submit a job. +message SubmitJobRequest { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Dataproc region in which to handle the request. + string region = 3 [(google.api.field_behavior) = REQUIRED]; + + // Required. The job resource. + Job job = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. A unique id used to identify the request. If the server + // receives two + // [SubmitJobRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.SubmitJobRequest)s + // with the same id, then the second request will be ignored and the + // first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend + // is returned. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Job Operation metadata. +message JobMetadata { + // Output only. The job id. + string job_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Most recent job status. + JobStatus status = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Operation type. 
+ string operation_type = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Job submission time. + google.protobuf.Timestamp start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// A request to get the resource representation for a job in a project. +message GetJobRequest { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Dataproc region in which to handle the request. + string region = 3 [(google.api.field_behavior) = REQUIRED]; + + // Required. The job ID. + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to list jobs in a project. +message ListJobsRequest { + // A matcher that specifies categories of job states. + enum JobStateMatcher { + // Match all jobs, regardless of state. + ALL = 0; + + // Only match jobs in non-terminal states: PENDING, RUNNING, or + // CANCEL_PENDING. + ACTIVE = 1; + + // Only match jobs in terminal states: CANCELLED, DONE, or ERROR. + NON_ACTIVE = 2; + } + + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Dataproc region in which to handle the request. + string region = 6 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The number of results to return in each response. + int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The page token, returned by a previous call, to request the + // next page of results. + string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. If set, the returned jobs list includes only jobs that were + // submitted to the named cluster. + string cluster_name = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Specifies enumerated categories of jobs to list. + // (default = match ALL jobs). 
+ // + // If `filter` is provided, `jobStateMatcher` will be ignored. + JobStateMatcher job_state_matcher = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A filter constraining the jobs to list. Filters are + // case-sensitive and have the following syntax: + // + // [field = value] AND [field [= value]] ... + // + // where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label + // key. **value** can be `*` to match all values. + // `status.state` can be either `ACTIVE` or `NON_ACTIVE`. + // Only the logical `AND` operator is supported; space-separated items are + // treated as having an implicit `AND` operator. + // + // Example filter: + // + // status.state = ACTIVE AND labels.env = staging AND labels.starred = * + string filter = 7 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to update a job. +message UpdateJobRequest { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Dataproc region in which to handle the request. + string region = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The job ID. + string job_id = 3 [(google.api.field_behavior) = REQUIRED]; + + // Required. The changes to the job. + Job job = 4 [(google.api.field_behavior) = REQUIRED]; + + // Required. Specifies the path, relative to Job, of + // the field to update. For example, to update the labels of a Job the + // update_mask parameter would be specified as + // labels, and the `PATCH` request body would specify the new + // value. Note: Currently, labels is the only + // field that can be updated. + google.protobuf.FieldMask update_mask = 5 [(google.api.field_behavior) = REQUIRED]; +} + +// A list of jobs in a project. +message ListJobsResponse { + // Output only. Jobs list. + repeated Job jobs = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. 
This token is included in the response if there are more results + // to fetch. To fetch additional results, provide this value as the + // `page_token` in a subsequent ListJobsRequest. + string next_page_token = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to cancel a job. +message CancelJobRequest { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Dataproc region in which to handle the request. + string region = 3 [(google.api.field_behavior) = REQUIRED]; + + // Required. The job ID. + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to delete a job. +message DeleteJobRequest { + // Required. The ID of the Google Cloud Platform project that the job + // belongs to. + string project_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Dataproc region in which to handle the request. + string region = 3 [(google.api.field_behavior) = REQUIRED]; + + // Required. The job ID. + string job_id = 2 [(google.api.field_behavior) = REQUIRED]; +} diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/operations.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/operations.proto new file mode 100644 index 00000000..e12bd299 --- /dev/null +++ b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/operations.proto @@ -0,0 +1,118 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.dataproc.v1; + +import "google/api/field_behavior.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; +option java_multiple_files = true; +option java_outer_classname = "OperationsProto"; +option java_package = "com.google.cloud.dataproc.v1"; + +// Metadata describing the Batch operation. +message BatchOperationMetadata { + // Operation type for Batch resources + enum BatchOperationType { + // Batch operation type is unknown. + BATCH_OPERATION_TYPE_UNSPECIFIED = 0; + + // Batch operation type. + BATCH = 1; + } + + // Name of the batch for the operation. + string batch = 1; + + // Batch UUID for the operation. + string batch_uuid = 2; + + // The time when the operation was created. + google.protobuf.Timestamp create_time = 3; + + // The time when the operation finished. + google.protobuf.Timestamp done_time = 4; + + // The operation type. + BatchOperationType operation_type = 6; + + // Short description of the operation. + string description = 7; + + // Labels associated with the operation. + map labels = 8; + + // Warnings encountered during operation execution. + repeated string warnings = 9; +} + +// The status of the operation. +message ClusterOperationStatus { + // The operation state. + enum State { + // Unused. + UNKNOWN = 0; + + // The operation has been created. + PENDING = 1; + + // The operation is running. + RUNNING = 2; + + // The operation is done; either cancelled or completed. + DONE = 3; + } + + // Output only. A message containing the operation state. + State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. A message containing the detailed operation state. + string inner_state = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. 
A message containing any operation metadata details. + string details = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The time this state was entered. + google.protobuf.Timestamp state_start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// Metadata describing the operation. +message ClusterOperationMetadata { + // Output only. Name of the cluster for the operation. + string cluster_name = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Cluster UUID for the operation. + string cluster_uuid = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Current operation status. + ClusterOperationStatus status = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The previous operation status. + repeated ClusterOperationStatus status_history = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The operation type. + string operation_type = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Short description of operation. + string description = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Labels associated with the operation + map labels = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Errors encountered during operation execution. + repeated string warnings = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; +} diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/shared.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/shared.proto new file mode 100644 index 00000000..18796915 --- /dev/null +++ b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/shared.proto @@ -0,0 +1,341 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.cloud.dataproc.v1; + +import "google/api/field_behavior.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; +option java_multiple_files = true; +option java_outer_classname = "SharedProto"; +option java_package = "com.google.cloud.dataproc.v1"; + +// Runtime configuration for a workload. +message RuntimeConfig { + // Optional. Version of the batch runtime. + string version = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Optional custom container image for the job runtime environment. If + // not specified, a default container image will be used. + string container_image = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A mapping of property names to values, which are used to configure workload + // execution. + map properties = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// Environment configuration for a workload. +message EnvironmentConfig { + // Optional. Execution configuration for a workload. + ExecutionConfig execution_config = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Peripherals configuration that workload has access to. + PeripheralsConfig peripherals_config = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// Execution configuration for a workload. +message ExecutionConfig { + // Optional. Service account that used to execute workload. + string service_account = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Network configuration for workload execution. + oneof network { + // Optional. 
Network URI to connect workload to. + string network_uri = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Subnetwork URI to connect workload to. + string subnetwork_uri = 5 [(google.api.field_behavior) = OPTIONAL]; + } + + // Optional. Tags used for network traffic control. + repeated string network_tags = 6 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The Cloud KMS key to use for encryption. + string kms_key = 7 [(google.api.field_behavior) = OPTIONAL]; +} + +// Spark History Server configuration for the workload. +message SparkHistoryServerConfig { + // Optional. Resource name of an existing Dataproc Cluster to act as a Spark History + // Server for the workload. + // + // Example: + // + // * `projects/[project_id]/regions/[region]/clusters/[cluster_name]` + string dataproc_cluster = 1 [ + (google.api.field_behavior) = OPTIONAL + ]; +} + +// Auxiliary services configuration for a workload. +message PeripheralsConfig { + // Optional. Resource name of an existing Dataproc Metastore service. + // + // Example: + // + // * `projects/[project_id]/locations/[region]/services/[service_id]` + string metastore_service = 1 [ + (google.api.field_behavior) = OPTIONAL + ]; + + // Optional. The Spark History Server configuration for the workload. + SparkHistoryServerConfig spark_history_server_config = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// Runtime information about workload execution. +message RuntimeInfo { + // Output only. Map of remote access endpoints (such as web interfaces and APIs) to their + // URIs. + map endpoints = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. A URI pointing to the location of the stdout and stderr of the workload. + string output_uri = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. A URI pointing to the location of the diagnostics tarball. + string diagnostic_output_uri = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// The cluster's GKE config. 
+message GkeClusterConfig { + // Optional. A target GKE cluster to deploy to. It must be in the same project and + // region as the Dataproc cluster (the GKE cluster can be zonal or regional). + // Format: 'projects/{project}/locations/{location}/clusters/{cluster_id}' + string gke_cluster_target = 2 [ + (google.api.field_behavior) = OPTIONAL + ]; + + // Optional. GKE NodePools where workloads will be scheduled. At least one node pool + // must be assigned the 'default' role. Each role can be given to only a + // single NodePoolTarget. All NodePools must have the same location settings. + // If a nodePoolTarget is not specified, Dataproc constructs a default + // nodePoolTarget. + repeated GkeNodePoolTarget node_pool_target = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// The configuration for running the Dataproc cluster on Kubernetes. +message KubernetesClusterConfig { + // Optional. A namespace within the Kubernetes cluster to deploy into. If this namespace + // does not exist, it is created. If it exists, Dataproc + // verifies that another Dataproc VirtualCluster is not installed + // into it. If not specified, the name of the Dataproc Cluster is used. + string kubernetes_namespace = 1 [(google.api.field_behavior) = OPTIONAL]; + + oneof config { + // Required. The configuration for running the Dataproc cluster on GKE. + GkeClusterConfig gke_cluster_config = 2 [(google.api.field_behavior) = REQUIRED]; + } + + // Optional. The software configuration for this Dataproc cluster running on Kubernetes. + KubernetesSoftwareConfig kubernetes_software_config = 3 [(google.api.field_behavior) = OPTIONAL]; +} + +// The software configuration for this Dataproc cluster running on Kubernetes. +message KubernetesSoftwareConfig { + // The components that should be installed in this Dataproc cluster. The key + // must be a string from the KubernetesComponent enumeration. The value is + // the version of the software to be installed. 
+ // At least one entry must be specified. + map component_version = 1; + + // The properties to set on daemon config files. + // + // Property keys are specified in `prefix:property` format, for example + // `spark:spark.kubernetes.container.image`. The following are supported + // prefixes and their mappings: + // + // * spark: `spark-defaults.conf` + // + // For more information, see [Cluster + // properties](https://cloud.google.com/dataproc/docs/concepts/cluster-properties). + map properties = 2; +} + +// GKE NodePools that Dataproc workloads run on. +message GkeNodePoolTarget { + // `Role` specifies whose tasks will run on the NodePool. The roles can be + // specific to workloads. Exactly one GkeNodePoolTarget within the + // VirtualCluster must have 'default' role, which is used to run all workloads + // that are not associated with a NodePool. + enum Role { + // Role is unspecified. + ROLE_UNSPECIFIED = 0; + + // Any roles that are not directly assigned to a NodePool run on the + // `default` role's NodePool. + DEFAULT = 1; + + // Run controllers and webhooks. + CONTROLLER = 2; + + // Run spark driver. + SPARK_DRIVER = 3; + + // Run spark executors. + SPARK_EXECUTOR = 4; + } + + // Required. The target GKE NodePool. + // Format: + // 'projects/{project}/locations/{location}/clusters/{cluster}/nodePools/{node_pool}' + string node_pool = 1 [ + (google.api.field_behavior) = REQUIRED + ]; + + // Required. The types of role for a GKE NodePool + repeated Role roles = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The configuration for the GKE NodePool. + // + // If specified, Dataproc attempts to create a NodePool with the + // specified shape. If one with the same name already exists, it is + // verified against all specified fields. If a field differs, the + // virtual cluster creation will fail. + // + // If omitted, any NodePool with the specified name is used. 
If a
+  // NodePool with the specified name does not exist, Dataproc creates a NodePool
+  // with default values.
+  GkeNodePoolConfig node_pool_config = 3 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// The configuration of a GKE NodePool used by a [Dataproc-on-GKE
+// cluster](https://cloud.google.com/dataproc/docs/concepts/jobs/dataproc-gke#create-a-dataproc-on-gke-cluster).
+message GkeNodePoolConfig {
+  // Parameters that describe cluster nodes.
+  message GkeNodeConfig {
+    // Optional. The name of a Compute Engine [machine
+    // type](https://cloud.google.com/compute/docs/machine-types).
+    string machine_type = 1 [(google.api.field_behavior) = OPTIONAL];
+
+    // Optional. Whether the nodes are created as [preemptible VM
+    // instances](https://cloud.google.com/compute/docs/instances/preemptible).
+    bool preemptible = 10 [(google.api.field_behavior) = OPTIONAL];
+
+    // Optional. The number of local SSD disks to attach to the node, which is limited by
+    // the maximum number of disks allowable per zone (see [Adding Local
+    // SSDs](https://cloud.google.com/compute/docs/disks/local-ssd)).
+    int32 local_ssd_count = 7 [(google.api.field_behavior) = OPTIONAL];
+
+    // Optional. A list of [hardware
+    // accelerators](https://cloud.google.com/compute/docs/gpus) to attach to
+    // each node.
+    repeated GkeNodePoolAcceleratorConfig accelerators = 11 [(google.api.field_behavior) = OPTIONAL];
+
+    // Optional. [Minimum CPU
+    // platform](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform)
+    // to be used by this instance. The instance may be scheduled on the
+    // specified or a newer CPU platform. Specify the friendly names of CPU
+    // platforms, such as "Intel Haswell" or "Intel Sandy Bridge".
+    string min_cpu_platform = 13 [(google.api.field_behavior) = OPTIONAL];
+  }
+
+  // A GkeNodePoolAcceleratorConfig represents a Hardware Accelerator request
+  // for a NodePool. 
+  message GkeNodePoolAcceleratorConfig {
+    // The number of accelerator cards exposed to an instance.
+    int64 accelerator_count = 1;
+
+    // The accelerator type resource name (see GPUs on Compute Engine).
+    string accelerator_type = 2;
+  }
+
+  // GkeNodePoolAutoscaling contains information the cluster autoscaler needs to
+  // adjust the size of the node pool to the current cluster usage.
+  message GkeNodePoolAutoscalingConfig {
+    // The minimum number of nodes in the NodePool. Must be >= 0 and <=
+    // max_node_count.
+    int32 min_node_count = 2;
+
+    // The maximum number of nodes in the NodePool. Must be >= min_node_count.
+    // **Note:** Quota must be sufficient to scale up the cluster.
+    int32 max_node_count = 3;
+  }
+
+  // Optional. The node pool configuration.
+  GkeNodeConfig config = 2 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The list of Compute Engine
+  // [zones](https://cloud.google.com/compute/docs/zones#available) where
+  // NodePool's nodes will be located.
+  //
+  // **Note:** Currently, only one zone may be specified.
+  //
+  // If a location is not specified during NodePool creation, Dataproc will
+  // choose a location.
+  repeated string locations = 13 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The autoscaler configuration for this NodePool. The autoscaler is enabled
+  // only when a valid configuration is present.
+  GkeNodePoolAutoscalingConfig autoscaling = 4 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// Cluster components that can be activated.
+enum Component {
+  // Unspecified component. Specifying this will cause Cluster creation to fail.
+  COMPONENT_UNSPECIFIED = 0;
+
+  // The Anaconda python distribution. The Anaconda component is not supported
+  // in the Dataproc
+  // 2.0
+  // image. The 2.0 image is pre-installed with Miniconda.
+  ANACONDA = 5;
+
+  // Docker
+  DOCKER = 13;
+
+  // The Druid query engine. (alpha)
+  DRUID = 9;
+
+  // Flink
+  FLINK = 14;
+
+  // HBase. 
(beta) + HBASE = 11; + + // The Hive Web HCatalog (the REST service for accessing HCatalog). + HIVE_WEBHCAT = 3; + + // The Jupyter Notebook. + JUPYTER = 1; + + // The Presto query engine. + PRESTO = 6; + + // The Ranger service. + RANGER = 12; + + // The Solr service. + SOLR = 10; + + // The Zeppelin notebook. + ZEPPELIN = 4; + + // The Zookeeper service. + ZOOKEEPER = 8; +} + +// Actions in response to failure of a resource associated with a cluster. +enum FailureAction { + // When FailureAction is unspecified, failure action defaults to NO_ACTION. + FAILURE_ACTION_UNSPECIFIED = 0; + + // Take no action on failure to create a cluster resource. NO_ACTION is the + // default. + NO_ACTION = 1; + + // Delete the failed cluster resource. + DELETE = 2; +} diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/workflow_templates.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/workflow_templates.proto new file mode 100644 index 00000000..416ba26d --- /dev/null +++ b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/workflow_templates.proto @@ -0,0 +1,807 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.cloud.dataproc.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/cloud/dataproc/v1/clusters.proto"; +import "google/cloud/dataproc/v1/jobs.proto"; +import "google/longrunning/operations.proto"; +import "google/protobuf/duration.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/timestamp.proto"; + +option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; +option java_multiple_files = true; +option java_outer_classname = "WorkflowTemplatesProto"; +option java_package = "com.google.cloud.dataproc.v1"; + +// The API interface for managing Workflow Templates in the +// Dataproc API. +service WorkflowTemplateService { + option (google.api.default_host) = "dataproc.googleapis.com"; + option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; + + // Creates new workflow template. + rpc CreateWorkflowTemplate(CreateWorkflowTemplateRequest) returns (WorkflowTemplate) { + option (google.api.http) = { + post: "/v1/{parent=projects/*/locations/*}/workflowTemplates" + body: "template" + additional_bindings { + post: "/v1/{parent=projects/*/regions/*}/workflowTemplates" + body: "template" + } + }; + option (google.api.method_signature) = "parent,template"; + } + + // Retrieves the latest workflow template. + // + // Can retrieve previously instantiated template by specifying optional + // version parameter. + rpc GetWorkflowTemplate(GetWorkflowTemplateRequest) returns (WorkflowTemplate) { + option (google.api.http) = { + get: "/v1/{name=projects/*/locations/*/workflowTemplates/*}" + additional_bindings { + get: "/v1/{name=projects/*/regions/*/workflowTemplates/*}" + } + }; + option (google.api.method_signature) = "name"; + } + + // Instantiates a template and begins execution. 
+ // + // The returned Operation can be used to track execution of + // workflow by polling + // [operations.get][google.longrunning.Operations.GetOperation]. + // The Operation will complete when entire workflow is finished. + // + // The running workflow can be aborted via + // [operations.cancel][google.longrunning.Operations.CancelOperation]. + // This will cause any inflight jobs to be cancelled and workflow-owned + // clusters to be deleted. + // + // The [Operation.metadata][google.longrunning.Operation.metadata] will be + // [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + // Also see [Using + // WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). + // + // On successful completion, + // [Operation.response][google.longrunning.Operation.response] will be + // [Empty][google.protobuf.Empty]. + rpc InstantiateWorkflowTemplate(InstantiateWorkflowTemplateRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate" + body: "*" + additional_bindings { + post: "/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate" + body: "*" + } + }; + option (google.api.method_signature) = "name"; + option (google.api.method_signature) = "name,parameters"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "WorkflowMetadata" + }; + } + + // Instantiates a template and begins execution. + // + // This method is equivalent to executing the sequence + // [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], + // [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. 
+ // + // The returned Operation can be used to track execution of + // workflow by polling + // [operations.get][google.longrunning.Operations.GetOperation]. + // The Operation will complete when entire workflow is finished. + // + // The running workflow can be aborted via + // [operations.cancel][google.longrunning.Operations.CancelOperation]. + // This will cause any inflight jobs to be cancelled and workflow-owned + // clusters to be deleted. + // + // The [Operation.metadata][google.longrunning.Operation.metadata] will be + // [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + // Also see [Using + // WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). + // + // On successful completion, + // [Operation.response][google.longrunning.Operation.response] will be + // [Empty][google.protobuf.Empty]. + rpc InstantiateInlineWorkflowTemplate(InstantiateInlineWorkflowTemplateRequest) returns (google.longrunning.Operation) { + option (google.api.http) = { + post: "/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline" + body: "template" + additional_bindings { + post: "/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline" + body: "template" + } + }; + option (google.api.method_signature) = "parent,template"; + option (google.longrunning.operation_info) = { + response_type: "google.protobuf.Empty" + metadata_type: "WorkflowMetadata" + }; + } + + // Updates (replaces) workflow template. The updated template + // must contain version that matches the current server version. 
+ rpc UpdateWorkflowTemplate(UpdateWorkflowTemplateRequest) returns (WorkflowTemplate) { + option (google.api.http) = { + put: "/v1/{template.name=projects/*/locations/*/workflowTemplates/*}" + body: "template" + additional_bindings { + put: "/v1/{template.name=projects/*/regions/*/workflowTemplates/*}" + body: "template" + } + }; + option (google.api.method_signature) = "template"; + } + + // Lists workflows that match the specified filter in the request. + rpc ListWorkflowTemplates(ListWorkflowTemplatesRequest) returns (ListWorkflowTemplatesResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*/locations/*}/workflowTemplates" + additional_bindings { + get: "/v1/{parent=projects/*/regions/*}/workflowTemplates" + } + }; + option (google.api.method_signature) = "parent"; + } + + // Deletes a workflow template. It does not cancel in-progress workflows. + rpc DeleteWorkflowTemplate(DeleteWorkflowTemplateRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=projects/*/locations/*/workflowTemplates/*}" + additional_bindings { + delete: "/v1/{name=projects/*/regions/*/workflowTemplates/*}" + } + }; + option (google.api.method_signature) = "name"; + } +} + +// A Dataproc workflow template resource. +message WorkflowTemplate { + option (google.api.resource) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + pattern: "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}" + pattern: "projects/{project}/locations/{location}/workflowTemplates/{workflow_template}" + history: ORIGINALLY_SINGLE_PATTERN + }; + + string id = 2 [(google.api.field_behavior) = REQUIRED]; + + // Output only. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. Used to perform a consistent read-modify-write. + // + // This field should be left blank for a `CreateWorkflowTemplate` request. It + // is required for an `UpdateWorkflowTemplate` request, and must match the + // current server version. A typical update template flow would fetch the + // current template with a `GetWorkflowTemplate` request, which will return + // the current template with the `version` field filled in with the + // current server version. The user updates other fields in the template, + // then returns it as part of the `UpdateWorkflowTemplate` request. + int32 version = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Output only. The time template was created. + google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The time template was last updated. + google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Optional. The labels to associate with this template. These labels + // will be propagated to all jobs and clusters created by the workflow + // instance. + // + // Label **keys** must contain 1 to 63 characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // + // Label **values** may be empty, but, if present, must contain 1 to 63 + // characters, and must conform to + // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). + // + // No more than 32 labels can be associated with a template. 
+ map labels = 6 [(google.api.field_behavior) = OPTIONAL]; + + // Required. WorkflowTemplate scheduling information. + WorkflowTemplatePlacement placement = 7 [(google.api.field_behavior) = REQUIRED]; + + // Required. The Directed Acyclic Graph of Jobs to submit. + repeated OrderedJob jobs = 8 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Template parameters whose values are substituted into the + // template. Values for parameters must be provided when the template is + // instantiated. + repeated TemplateParameter parameters = 9 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Timeout duration for the DAG of jobs, expressed in seconds (see + // [JSON representation of + // duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). + // The timeout duration must be from 10 minutes ("600s") to 24 hours + // ("86400s"). The timer begins when the first job is submitted. If the + // workflow is running at the end of the timeout period, any remaining jobs + // are cancelled, the workflow is ended, and if the workflow was running on a + // [managed + // cluster](/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), + // the cluster is deleted. + google.protobuf.Duration dag_timeout = 10 [(google.api.field_behavior) = OPTIONAL]; +} + +// Specifies workflow execution target. +// +// Either `managed_cluster` or `cluster_selector` is required. +message WorkflowTemplatePlacement { + // Required. Specifies where workflow executes; either on a managed + // cluster or an existing cluster chosen by labels. + oneof placement { + // A cluster that is managed by the workflow. + ManagedCluster managed_cluster = 1; + + // Optional. A selector that chooses target cluster for jobs based + // on metadata. + // + // The selector is evaluated at the time each job is submitted. + ClusterSelector cluster_selector = 2; + } +} + +// Cluster that is managed by the workflow. +message ManagedCluster { + // Required. 
The cluster name prefix. A unique cluster name will be formed by + // appending a random suffix. + // + // The name must contain only lower-case letters (a-z), numbers (0-9), + // and hyphens (-). Must begin with a letter. Cannot begin or end with + // hyphen. Must consist of between 2 and 35 characters. + string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; + + // Required. The cluster configuration. + ClusterConfig config = 3 [(google.api.field_behavior) = REQUIRED]; + + // Optional. The labels to associate with this cluster. + // + // Label keys must be between 1 and 63 characters long, and must conform to + // the following PCRE regular expression: + // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} + // + // Label values must be between 1 and 63 characters long, and must conform to + // the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} + // + // No more than 32 labels can be associated with a given cluster. + map labels = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// A selector that chooses target cluster for jobs based on metadata. +message ClusterSelector { + // Optional. The zone where workflow process executes. This parameter does not + // affect the selection of the cluster. + // + // If unspecified, the zone of the first cluster matching the selector + // is used. + string zone = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Required. The cluster labels. Cluster must have all labels + // to match. + map cluster_labels = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A job executed by the workflow. +message OrderedJob { + // Required. The step id. The id must be unique among all jobs + // within the template. + // + // The step id is used as prefix for job id, as job + // `goog-dataproc-workflow-step-id` label, and in + // [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other + // steps. 
+ // + // The id must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). Cannot begin or end with underscore + // or hyphen. Must consist of between 3 and 50 characters. + string step_id = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The job definition. + oneof job_type { + // Optional. Job is a Hadoop job. + HadoopJob hadoop_job = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a Spark job. + SparkJob spark_job = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a PySpark job. + PySparkJob pyspark_job = 4 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a Hive job. + HiveJob hive_job = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a Pig job. + PigJob pig_job = 6 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a SparkR job. + SparkRJob spark_r_job = 11 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a SparkSql job. + SparkSqlJob spark_sql_job = 7 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job is a Presto job. + PrestoJob presto_job = 12 [(google.api.field_behavior) = OPTIONAL]; + } + + // Optional. The labels to associate with this job. + // + // Label keys must be between 1 and 63 characters long, and must conform to + // the following regular expression: + // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} + // + // Label values must be between 1 and 63 characters long, and must conform to + // the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} + // + // No more than 32 labels can be associated with a given job. + map labels = 8 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Job scheduling configuration. + JobScheduling scheduling = 9 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. The optional list of prerequisite job step_ids. + // If not specified, the job will start at the beginning of workflow. 
+ repeated string prerequisite_step_ids = 10 [(google.api.field_behavior) = OPTIONAL]; +} + +// A configurable parameter that replaces one or more fields in the template. +// Parameterizable fields: +// - Labels +// - File uris +// - Job properties +// - Job arguments +// - Script variables +// - Main class (in HadoopJob and SparkJob) +// - Zone (in ClusterSelector) +message TemplateParameter { + // Required. Parameter name. + // The parameter name is used as the key, and paired with the + // parameter value, which are passed to the template when the template + // is instantiated. + // The name must contain only capital letters (A-Z), numbers (0-9), and + // underscores (_), and must not start with a number. The maximum length is + // 40 characters. + string name = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. Paths to all fields that the parameter replaces. + // A field is allowed to appear in at most one parameter's list of field + // paths. + // + // A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. + // For example, a field path that references the zone field of a workflow + // template's cluster selector would be specified as + // `placement.clusterSelector.zone`. 
+ // + // Also, field paths can reference fields using the following syntax: + // + // * Values in maps can be referenced by key: + // * labels['key'] + // * placement.clusterSelector.clusterLabels['key'] + // * placement.managedCluster.labels['key'] + // * placement.clusterSelector.clusterLabels['key'] + // * jobs['step-id'].labels['key'] + // + // * Jobs in the jobs list can be referenced by step-id: + // * jobs['step-id'].hadoopJob.mainJarFileUri + // * jobs['step-id'].hiveJob.queryFileUri + // * jobs['step-id'].pySparkJob.mainPythonFileUri + // * jobs['step-id'].hadoopJob.jarFileUris[0] + // * jobs['step-id'].hadoopJob.archiveUris[0] + // * jobs['step-id'].hadoopJob.fileUris[0] + // * jobs['step-id'].pySparkJob.pythonFileUris[0] + // + // * Items in repeated fields can be referenced by a zero-based index: + // * jobs['step-id'].sparkJob.args[0] + // + // * Other examples: + // * jobs['step-id'].hadoopJob.properties['key'] + // * jobs['step-id'].hadoopJob.args[0] + // * jobs['step-id'].hiveJob.scriptVariables['key'] + // * jobs['step-id'].hadoopJob.mainJarFileUri + // * placement.clusterSelector.zone + // + // It may not be possible to parameterize maps and repeated fields in their + // entirety since only individual map values and individual items in repeated + // fields can be referenced. For example, the following field paths are + // invalid: + // + // - placement.clusterSelector.clusterLabels + // - jobs['step-id'].sparkJob.args + repeated string fields = 2 [(google.api.field_behavior) = REQUIRED]; + + // Optional. Brief description of the parameter. + // Must not exceed 1024 characters. + string description = 3 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Validation rules to be applied to this parameter's value. + ParameterValidation validation = 4 [(google.api.field_behavior) = OPTIONAL]; +} + +// Configuration for parameter validation. +message ParameterValidation { + // Required. The type of validation to be performed. 
+ oneof validation_type { + // Validation based on regular expressions. + RegexValidation regex = 1; + + // Validation based on a list of allowed values. + ValueValidation values = 2; + } +} + +// Validation based on regular expressions. +message RegexValidation { + // Required. RE2 regular expressions used to validate the parameter's value. + // The value must match the regex in its entirety (substring + // matches are not sufficient). + repeated string regexes = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// Validation based on a list of allowed values. +message ValueValidation { + // Required. List of allowed values for the parameter. + repeated string values = 1 [(google.api.field_behavior) = REQUIRED]; +} + +// A Dataproc workflow template resource. +message WorkflowMetadata { + // The operation state. + enum State { + // Unused. + UNKNOWN = 0; + + // The operation has been created. + PENDING = 1; + + // The operation is running. + RUNNING = 2; + + // The operation is done; either cancelled or completed. + DONE = 3; + } + + // Output only. The resource name of the workflow template as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string template = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The version of template at the time of + // workflow instantiation. + int32 version = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The create cluster operation metadata. + ClusterOperation create_cluster = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The workflow graph. 
+ WorkflowGraph graph = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The delete cluster operation metadata. + ClusterOperation delete_cluster = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The workflow state. + State state = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The name of the target cluster. + string cluster_name = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Map from parameter names to values that were used for those parameters. + map parameters = 8; + + // Output only. Workflow start time. + google.protobuf.Timestamp start_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Workflow end time. + google.protobuf.Timestamp end_time = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The UUID of target cluster. + string cluster_uuid = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The timeout duration for the DAG of jobs, expressed in seconds (see + // [JSON representation of + // duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). + google.protobuf.Duration dag_timeout = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. DAG start time, only set for workflows with [dag_timeout][google.cloud.dataproc.v1.WorkflowMetadata.dag_timeout] when DAG + // begins. + google.protobuf.Timestamp dag_start_time = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. DAG end time, only set for workflows with [dag_timeout][google.cloud.dataproc.v1.WorkflowMetadata.dag_timeout] when DAG ends. + google.protobuf.Timestamp dag_end_time = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// The cluster operation triggered by a workflow. +message ClusterOperation { + // Output only. The id of the cluster operation. + string operation_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Error, if operation failed. 
+ string error = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Indicates the operation is done. + bool done = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// The workflow graph. +message WorkflowGraph { + // Output only. The workflow nodes. + repeated WorkflowNode nodes = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// The workflow node. +message WorkflowNode { + // The workflow node state. + enum NodeState { + // State is unspecified. + NODE_STATE_UNSPECIFIED = 0; + + // The node is awaiting prerequisite node to finish. + BLOCKED = 1; + + // The node is runnable but not running. + RUNNABLE = 2; + + // The node is running. + RUNNING = 3; + + // The node completed successfully. + COMPLETED = 4; + + // The node failed. A node can be marked FAILED because + // its ancestor or peer failed. + FAILED = 5; + } + + // Output only. The name of the node. + string step_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. Node's prerequisite nodes. + repeated string prerequisite_step_ids = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The job id; populated after the node enters RUNNING state. + string job_id = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The node state. + NodeState state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // Output only. The error detail. + string error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; +} + +// A request to create a workflow template. +message CreateWorkflowTemplateRequest { + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.workflowTemplates.create`, the resource name of the + // region has the following format: + // `projects/{project_id}/regions/{region}` + // + // * For `projects.locations.workflowTemplates.create`, the resource name of + // the location has the following format: + // `projects/{project_id}/locations/{location}` + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + child_type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; + + // Required. The Dataproc workflow template to create. + WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// A request to fetch a workflow template. +message GetWorkflowTemplateRequest { + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. + // + // * For `projects.regions.workflowTemplates.get`, the resource name of the + // template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.get`, the resource name of the + // template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; + + // Optional. The version of workflow template to retrieve. Only previously + // instantiated versions can be retrieved. + // + // If unspecified, retrieves the current version. + int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to instantiate a workflow template. +message InstantiateWorkflowTemplateRequest { + // Required. The resource name of the workflow template, as described + // in https://cloud.google.com/apis/design/resource_names. 
+ // + // * For `projects.regions.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; + + // Optional. The version of workflow template to instantiate. If specified, + // the workflow will be instantiated only if the current version of + // the workflow template has the supplied version. + // + // This option cannot be used to instantiate a previous version of + // workflow template. + int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. A tag that prevents multiple concurrent workflow + // instances with the same tag from running. This mitigates risk of + // concurrent instances started due to retries. + // + // It is recommended to always set this value to a + // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + // + // The tag must contain only letters (a-z, A-Z), numbers (0-9), + // underscores (_), and hyphens (-). The maximum length is 40 characters. + string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; + + // Optional. Map from parameter names to values that should be used for those + // parameters. Values may not exceed 1000 characters. + map parameters = 6 [(google.api.field_behavior) = OPTIONAL]; +} + +// A request to instantiate an inline workflow template. +message InstantiateInlineWorkflowTemplateRequest { + // Required. The resource name of the region or location, as described + // in https://cloud.google.com/apis/design/resource_names. 
+  //
+  // * For `projects.regions.workflowTemplates.instantiateInline`, the resource
+  //   name of the region has the following format:
+  //   `projects/{project_id}/regions/{region}`
+  //
+  // * For `projects.locations.workflowTemplates.instantiateInline`, the
+  //   resource name of the location has the following format:
+  //   `projects/{project_id}/locations/{location}`
+  string parent = 1 [
+    (google.api.field_behavior) = REQUIRED,
+    (google.api.resource_reference) = {
+      child_type: "dataproc.googleapis.com/WorkflowTemplate"
+    }
+  ];
+
+  // Required. The workflow template to instantiate.
+  WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED];
+
+  // Optional. A tag that prevents multiple concurrent workflow
+  // instances with the same tag from running. This mitigates risk of
+  // concurrent instances started due to retries.
+  //
+  // It is recommended to always set this value to a
+  // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+  //
+  // The tag must contain only letters (a-z, A-Z), numbers (0-9),
+  // underscores (_), and hyphens (-). The maximum length is 40 characters.
+  string request_id = 3 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// A request to update a workflow template.
+message UpdateWorkflowTemplateRequest {
+  // Required. The updated workflow template.
+  //
+  // The `template.version` field must match the current version.
+  WorkflowTemplate template = 1 [(google.api.field_behavior) = REQUIRED];
+}
+
+// A request to list workflow templates in a project.
+message ListWorkflowTemplatesRequest {
+  // Required. The resource name of the region or location, as described
+  // in https://cloud.google.com/apis/design/resource_names.
+  //
+  // * For `projects.regions.workflowTemplates.list`, the resource
+  //   name of the region has the following format:
+  //   `projects/{project_id}/regions/{region}`
+  //
+  // * For `projects.locations.workflowTemplates.list`, the
+  //   resource name of the location has the following format:
+  //   `projects/{project_id}/locations/{location}`
+  string parent = 1 [
+    (google.api.field_behavior) = REQUIRED,
+    (google.api.resource_reference) = {
+      child_type: "dataproc.googleapis.com/WorkflowTemplate"
+    }
+  ];
+
+  // Optional. The maximum number of results to return in each response.
+  int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL];
+
+  // Optional. The page token, returned by a previous call, to request the
+  // next page of results.
+  string page_token = 3 [(google.api.field_behavior) = OPTIONAL];
+}
+
+// A response to a request to list workflow templates in a project.
+message ListWorkflowTemplatesResponse {
+  // Output only. WorkflowTemplates list.
+  repeated WorkflowTemplate templates = 1 [(google.api.field_behavior) = OUTPUT_ONLY];
+
+  // Output only. This token is included in the response if there are more
+  // results to fetch. To fetch additional results, provide this value as the
+  // page_token in a subsequent ListWorkflowTemplatesRequest.
+  string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
+}
+
+// A request to delete a workflow template.
+//
+// Currently started workflows will remain running.
+message DeleteWorkflowTemplateRequest {
+  // Required. The resource name of the workflow template, as described
+  // in https://cloud.google.com/apis/design/resource_names.
+ // + // * For `projects.regions.workflowTemplates.delete`, the resource name + // of the template has the following format: + // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + // + // * For `projects.locations.workflowTemplates.instantiate`, the resource name + // of the template has the following format: + // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "dataproc.googleapis.com/WorkflowTemplate" + } + ]; + + // Optional. The version of workflow template to delete. If specified, + // will only delete the template if the current server version matches + // specified version. + int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; +} diff --git a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.create_autoscaling_policy.js b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.create_autoscaling_policy.js new file mode 100644 index 00000000..0d0ad0fc --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.create_autoscaling_policy.js @@ -0,0 +1,70 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(parent, policy) { + // [START dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The "resource name" of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. + * * For `projects.regions.autoscalingPolicies.create`, the resource name + * of the region has the following format: + * `projects/{project_id}/regions/{region}` + * * For `projects.locations.autoscalingPolicies.create`, the resource name + * of the location has the following format: + * `projects/{project_id}/locations/{location}` + */ + // const parent = 'abc123' + /** + * Required. The autoscaling policy to create. + */ + // const policy = {} + + // Imports the Dataproc library + const {AutoscalingPolicyServiceClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new AutoscalingPolicyServiceClient(); + + async function callCreateAutoscalingPolicy() { + // Construct request + const request = { + parent, + policy, + }; + + // Run request + const response = await dataprocClient.createAutoscalingPolicy(request); + console.log(response); + } + + callCreateAutoscalingPolicy(); + // [END dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.delete_autoscaling_policy.js b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.delete_autoscaling_policy.js new file mode 100644 index 00000000..86e57f8b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.delete_autoscaling_policy.js @@ -0,0 +1,65 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, 
Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The "resource name" of the autoscaling policy, as described + * in https://cloud.google.com/apis/design/resource_names. 
+ * * For `projects.regions.autoscalingPolicies.delete`, the resource name + * of the policy has the following format: + * `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + * * For `projects.locations.autoscalingPolicies.delete`, the resource name + * of the policy has the following format: + * `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + */ + // const name = 'abc123' + + // Imports the Dataproc library + const {AutoscalingPolicyServiceClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new AutoscalingPolicyServiceClient(); + + async function callDeleteAutoscalingPolicy() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await dataprocClient.deleteAutoscalingPolicy(request); + console.log(response); + } + + callDeleteAutoscalingPolicy(); + // [END dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.get_autoscaling_policy.js b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.get_autoscaling_policy.js new file mode 100644 index 00000000..76a64c9e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.get_autoscaling_policy.js @@ -0,0 +1,65 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The "resource name" of the autoscaling policy, as described + * in https://cloud.google.com/apis/design/resource_names. + * * For `projects.regions.autoscalingPolicies.get`, the resource name + * of the policy has the following format: + * `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + * * For `projects.locations.autoscalingPolicies.get`, the resource name + * of the policy has the following format: + * `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + */ + // const name = 'abc123' + + // Imports the Dataproc library + const {AutoscalingPolicyServiceClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new AutoscalingPolicyServiceClient(); + + async function callGetAutoscalingPolicy() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await dataprocClient.getAutoscalingPolicy(request); + console.log(response); + } + + callGetAutoscalingPolicy(); + // [END dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.list_autoscaling_policies.js b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.list_autoscaling_policies.js new file 
mode 100644 index 00000000..991caee2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.list_autoscaling_policies.js @@ -0,0 +1,77 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The "resource name" of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. + * * For `projects.regions.autoscalingPolicies.list`, the resource name + * of the region has the following format: + * `projects/{project_id}/regions/{region}` + * * For `projects.locations.autoscalingPolicies.list`, the resource name + * of the location has the following format: + * `projects/{project_id}/locations/{location}` + */ + // const parent = 'abc123' + /** + * Optional. The maximum number of results to return in each response. + * Must be less than or equal to 1000. Defaults to 100. + */ + // const pageSize = 1234 + /** + * Optional. The page token, returned by a previous call, to request the + * next page of results. 
+ */ + // const pageToken = 'abc123' + + // Imports the Dataproc library + const {AutoscalingPolicyServiceClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new AutoscalingPolicyServiceClient(); + + async function callListAutoscalingPolicies() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await dataprocClient.listAutoscalingPoliciesAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListAutoscalingPolicies(); + // [END dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.update_autoscaling_policy.js b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.update_autoscaling_policy.js new file mode 100644 index 00000000..95b50b8c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.update_autoscaling_policy.js @@ -0,0 +1,58 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(policy) { + // [START dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The updated autoscaling policy. + */ + // const policy = {} + + // Imports the Dataproc library + const {AutoscalingPolicyServiceClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new AutoscalingPolicyServiceClient(); + + async function callUpdateAutoscalingPolicy() { + // Construct request + const request = { + policy, + }; + + // Run request + const response = await dataprocClient.updateAutoscalingPolicy(request); + console.log(response); + } + + callUpdateAutoscalingPolicy(); + // [END dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/batch_controller.create_batch.js b/owl-bot-staging/v1/samples/generated/v1/batch_controller.create_batch.js new file mode 100644 index 00000000..398e78f6 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/batch_controller.create_batch.js @@ -0,0 +1,83 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, batch) { + // [START dataproc_v1_generated_BatchController_CreateBatch_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The parent resource where this batch will be created. + */ + // const parent = 'abc123' + /** + * Required. The batch to create. + */ + // const batch = {} + /** + * Optional. The ID to use for the batch, which will become the final component of + * the batch's resource name. + * This value must be 4-63 characters. Valid characters are `/[a-z][0-9]-/`. + */ + // const batchId = 'abc123' + /** + * Optional. A unique ID used to identify the request. If the service + * receives two + * CreateBatchRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateBatchRequest)s + * with the same request_id, the second request is ignored and the + * Operation that corresponds to the first Batch created and stored + * in the backend is returned. + * Recommendation: Set this value to a + * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). + * The value must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. 
+ */ + // const requestId = 'abc123' + + // Imports the Dataproc library + const {BatchControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new BatchControllerClient(); + + async function callCreateBatch() { + // Construct request + const request = { + parent, + batch, + }; + + // Run request + const [operation] = await dataprocClient.createBatch(request); + const [response] = await operation.promise(); + console.log(response); + } + + callCreateBatch(); + // [END dataproc_v1_generated_BatchController_CreateBatch_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/batch_controller.delete_batch.js b/owl-bot-staging/v1/samples/generated/v1/batch_controller.delete_batch.js new file mode 100644 index 00000000..66c9596e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/batch_controller.delete_batch.js @@ -0,0 +1,58 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(name) { + // [START dataproc_v1_generated_BatchController_DeleteBatch_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the batch resource to delete. + */ + // const name = 'abc123' + + // Imports the Dataproc library + const {BatchControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new BatchControllerClient(); + + async function callDeleteBatch() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await dataprocClient.deleteBatch(request); + console.log(response); + } + + callDeleteBatch(); + // [END dataproc_v1_generated_BatchController_DeleteBatch_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/batch_controller.get_batch.js b/owl-bot-staging/v1/samples/generated/v1/batch_controller.get_batch.js new file mode 100644 index 00000000..2cb9bb34 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/batch_controller.get_batch.js @@ -0,0 +1,58 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START dataproc_v1_generated_BatchController_GetBatch_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The name of the batch to retrieve. + */ + // const name = 'abc123' + + // Imports the Dataproc library + const {BatchControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new BatchControllerClient(); + + async function callGetBatch() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await dataprocClient.getBatch(request); + console.log(response); + } + + callGetBatch(); + // [END dataproc_v1_generated_BatchController_GetBatch_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/batch_controller.list_batches.js b/owl-bot-staging/v1/samples/generated/v1/batch_controller.list_batches.js new file mode 100644 index 00000000..4170a038 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/batch_controller.list_batches.js @@ -0,0 +1,71 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent) { + // [START dataproc_v1_generated_BatchController_ListBatches_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The parent, which owns this collection of batches. + */ + // const parent = 'abc123' + /** + * Optional. The maximum number of batches to return in each response. + * The service may return fewer than this value. + * The default page size is 20; the maximum page size is 1000. + */ + // const pageSize = 1234 + /** + * Optional. A page token received from a previous `ListBatches` call. + * Provide this token to retrieve the subsequent page. + */ + // const pageToken = 'abc123' + + // Imports the Dataproc library + const {BatchControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new BatchControllerClient(); + + async function callListBatches() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await dataprocClient.listBatchesAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListBatches(); + // [END dataproc_v1_generated_BatchController_ListBatches_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.create_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.create_cluster.js new file mode 100644 index 00000000..d0623a49 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.create_cluster.js @@ -0,0 +1,86 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 
(the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(projectId, region, cluster) { + // [START dataproc_v1_generated_ClusterController_CreateCluster_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project that the cluster + * belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Required. The cluster to create. + */ + // const cluster = {} + /** + * Optional. A unique ID used to identify the request. If the server receives two + * CreateClusterRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateClusterRequest)s + * with the same id, then the second request will be ignored and the + * first google.longrunning.Operation created and stored in the backend + * is returned. + * It is recommended to always set this value to a + * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). + * The ID must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters.
+ */ + // const requestId = 'abc123' + /** + * Optional. Failure action when primary worker creation fails. + */ + // const actionOnFailedPrimaryWorkers = {} + + // Imports the Dataproc library + const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new ClusterControllerClient(); + + async function callCreateCluster() { + // Construct request + const request = { + projectId, + region, + cluster, + }; + + // Run request + const [operation] = await dataprocClient.createCluster(request); + const [response] = await operation.promise(); + console.log(response); + } + + callCreateCluster(); + // [END dataproc_v1_generated_ClusterController_CreateCluster_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.delete_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.delete_cluster.js new file mode 100644 index 00000000..a832f7d9 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.delete_cluster.js @@ -0,0 +1,88 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(projectId, region, clusterName) { + // [START dataproc_v1_generated_ClusterController_DeleteCluster_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project that the cluster + * belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Required. The cluster name. + */ + // const clusterName = 'abc123' + /** + * Optional. Specifying the `cluster_uuid` means the RPC should fail + * (with error NOT_FOUND) if cluster with specified UUID does not exist. + */ + // const clusterUuid = 'abc123' + /** + * Optional. A unique ID used to identify the request. If the server + * receives two + * DeleteClusterRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.DeleteClusterRequest)s + * with the same id, then the second request will be ignored and the + * first google.longrunning.Operation created and stored in the + * backend is returned. + * It is recommended to always set this value to a + * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). + * The ID must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters.
+ */ + // const requestId = 'abc123' + + // Imports the Dataproc library + const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new ClusterControllerClient(); + + async function callDeleteCluster() { + // Construct request + const request = { + projectId, + region, + clusterName, + }; + + // Run request + const [operation] = await dataprocClient.deleteCluster(request); + const [response] = await operation.promise(); + console.log(response); + } + + callDeleteCluster(); + // [END dataproc_v1_generated_ClusterController_DeleteCluster_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.diagnose_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.diagnose_cluster.js new file mode 100644 index 00000000..11c39954 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.diagnose_cluster.js @@ -0,0 +1,70 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(projectId, region, clusterName) { + // [START dataproc_v1_generated_ClusterController_DiagnoseCluster_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project that the cluster + * belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Required. The cluster name. + */ + // const clusterName = 'abc123' + + // Imports the Dataproc library + const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new ClusterControllerClient(); + + async function callDiagnoseCluster() { + // Construct request + const request = { + projectId, + region, + clusterName, + }; + + // Run request + const [operation] = await dataprocClient.diagnoseCluster(request); + const [response] = await operation.promise(); + console.log(response); + } + + callDiagnoseCluster(); + // [END dataproc_v1_generated_ClusterController_DiagnoseCluster_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.get_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.get_cluster.js new file mode 100644 index 00000000..b6886881 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.get_cluster.js @@ -0,0 +1,69 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(projectId, region, clusterName) { + // [START dataproc_v1_generated_ClusterController_GetCluster_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project that the cluster + * belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Required. The cluster name. 
+ */ + // const clusterName = 'abc123' + + // Imports the Dataproc library + const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new ClusterControllerClient(); + + async function callGetCluster() { + // Construct request + const request = { + projectId, + region, + clusterName, + }; + + // Run request + const response = await dataprocClient.getCluster(request); + console.log(response); + } + + callGetCluster(); + // [END dataproc_v1_generated_ClusterController_GetCluster_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.list_clusters.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.list_clusters.js new file mode 100644 index 00000000..00e7a70a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.list_clusters.js @@ -0,0 +1,92 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(projectId, region) { + // [START dataproc_v1_generated_ClusterController_ListClusters_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project that the cluster + * belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Optional. A filter constraining the clusters to list. Filters are + * case-sensitive and have the following syntax: + * field = value AND field = value ... + * where **field** is one of `status.state`, `clusterName`, or `labels.KEY`, + * and `[KEY]` is a label key. **value** can be `*` to match all values. + * `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, + * `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` + * contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` + * contains the `DELETING` and `ERROR` states. + * `clusterName` is the name of the cluster provided at creation time. + * Only the logical `AND` operator is supported; space-separated items are + * treated as having an implicit `AND` operator. + * Example filter: + * status.state = ACTIVE AND clusterName = mycluster + * AND labels.env = staging AND labels.starred = * + */ + // const filter = 'abc123' + /** + * Optional. The standard List page size. + */ + // const pageSize = 1234 + /** + * Optional. The standard List page token. 
+ */ + // const pageToken = 'abc123' + + // Imports the Dataproc library + const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new ClusterControllerClient(); + + async function callListClusters() { + // Construct request + const request = { + projectId, + region, + }; + + // Run request + const iterable = await dataprocClient.listClustersAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListClusters(); + // [END dataproc_v1_generated_ClusterController_ListClusters_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.start_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.start_cluster.js new file mode 100644 index 00000000..9da8f138 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.start_cluster.js @@ -0,0 +1,88 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(projectId, region, clusterName) { + // [START dataproc_v1_generated_ClusterController_StartCluster_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project the + * cluster belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Required. The cluster name. + */ + // const clusterName = 'abc123' + /** + * Optional. Specifying the `cluster_uuid` means the RPC will fail + * (with error NOT_FOUND) if a cluster with the specified UUID does not exist. + */ + // const clusterUuid = 'abc123' + /** + * Optional. A unique ID used to identify the request. If the server + * receives two + * StartClusterRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StartClusterRequest)s + * with the same id, then the second request will be ignored and the + * first google.longrunning.Operation google.longrunning.Operation created and stored in the + * backend is returned. + * Recommendation: Set this value to a + * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). + * The ID must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. 
+ */ + // const requestId = 'abc123' + + // Imports the Dataproc library + const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new ClusterControllerClient(); + + async function callStartCluster() { + // Construct request + const request = { + projectId, + region, + clusterName, + }; + + // Run request + const [operation] = await dataprocClient.startCluster(request); + const [response] = await operation.promise(); + console.log(response); + } + + callStartCluster(); + // [END dataproc_v1_generated_ClusterController_StartCluster_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.stop_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.stop_cluster.js new file mode 100644 index 00000000..5fe7e713 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.stop_cluster.js @@ -0,0 +1,88 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(projectId, region, clusterName) { + // [START dataproc_v1_generated_ClusterController_StopCluster_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project the + * cluster belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Required. The cluster name. + */ + // const clusterName = 'abc123' + /** + * Optional. Specifying the `cluster_uuid` means the RPC will fail + * (with error NOT_FOUND) if a cluster with the specified UUID does not exist. + */ + // const clusterUuid = 'abc123' + /** + * Optional. A unique ID used to identify the request. If the server + * receives two + * StopClusterRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StopClusterRequest)s + * with the same id, then the second request will be ignored and the + * first google.longrunning.Operation google.longrunning.Operation created and stored in the + * backend is returned. + * Recommendation: Set this value to a + * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). + * The ID must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. 
+ */ + // const requestId = 'abc123' + + // Imports the Dataproc library + const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new ClusterControllerClient(); + + async function callStopCluster() { + // Construct request + const request = { + projectId, + region, + clusterName, + }; + + // Run request + const [operation] = await dataprocClient.stopCluster(request); + const [response] = await operation.promise(); + console.log(response); + } + + callStopCluster(); + // [END dataproc_v1_generated_ClusterController_StopCluster_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.update_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.update_cluster.js new file mode 100644 index 00000000..89efb902 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.update_cluster.js @@ -0,0 +1,151 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(projectId, region, clusterName, cluster, updateMask) { + // [START dataproc_v1_generated_ClusterController_UpdateCluster_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project the + * cluster belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Required. The cluster name. + */ + // const clusterName = 'abc123' + /** + * Required. The changes to the cluster. + */ + // const cluster = {} + /** + * Optional. Timeout for graceful YARN decomissioning. Graceful + * decommissioning allows removing nodes from the cluster without + * interrupting jobs in progress. Timeout specifies how long to wait for jobs + * in progress to finish before forcefully removing nodes (and potentially + * interrupting jobs). Default timeout is 0 (for forceful decommission), and + * the maximum allowed timeout is 1 day. (see JSON representation of + * Duration (https://developers.google.com/protocol-buffers/docs/proto3#json)). + * Only supported on Dataproc image versions 1.2 and higher. + */ + // const gracefulDecommissionTimeout = {} + /** + * Required. Specifies the path, relative to `Cluster`, of + * the field to update. 
For example, to change the number of workers + * in a cluster to 5, the `update_mask` parameter would be + * specified as `config.worker_config.num_instances`, + * and the `PATCH` request body would specify the new value, as follows: + * { + * "config":{ + * "workerConfig":{ + * "numInstances":"5" + * } + * } + * } + * Similarly, to change the number of preemptible workers in a cluster to 5, + * the `update_mask` parameter would be + * `config.secondary_worker_config.num_instances`, and the `PATCH` request + * body would be set as follows: + * { + * "config":{ + * "secondaryWorkerConfig":{ + * "numInstances":"5" + * } + * } + * } + * Note: Currently, only the following fields can be updated: + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
MaskPurpose
labelsUpdate labels
config.worker_config.num_instancesResize primary worker group
config.secondary_worker_config.num_instancesResize secondary worker group
config.autoscaling_config.policy_uriUse, stop using, or + * change autoscaling policies
+ */ + // const updateMask = {} + /** + * Optional. A unique ID used to identify the request. If the server + * receives two + * UpdateClusterRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.UpdateClusterRequest)s + * with the same id, then the second request will be ignored and the + * first google.longrunning.Operation google.longrunning.Operation created and stored in the + * backend is returned. + * It is recommended to always set this value to a + * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). + * The ID must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. + */ + // const requestId = 'abc123' + + // Imports the Dataproc library + const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new ClusterControllerClient(); + + async function callUpdateCluster() { + // Construct request + const request = { + projectId, + region, + clusterName, + cluster, + updateMask, + }; + + // Run request + const [operation] = await dataprocClient.updateCluster(request); + const [response] = await operation.promise(); + console.log(response); + } + + callUpdateCluster(); + // [END dataproc_v1_generated_ClusterController_UpdateCluster_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.cancel_job.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.cancel_job.js new file mode 100644 index 00000000..a51f789c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/job_controller.cancel_job.js @@ -0,0 +1,69 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(projectId, region, jobId) { + // [START dataproc_v1_generated_JobController_CancelJob_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Required. The job ID. 
+ */ + // const jobId = 'abc123' + + // Imports the Dataproc library + const {JobControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new JobControllerClient(); + + async function callCancelJob() { + // Construct request + const request = { + projectId, + region, + jobId, + }; + + // Run request + const response = await dataprocClient.cancelJob(request); + console.log(response); + } + + callCancelJob(); + // [END dataproc_v1_generated_JobController_CancelJob_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.delete_job.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.delete_job.js new file mode 100644 index 00000000..4f794f47 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/job_controller.delete_job.js @@ -0,0 +1,69 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(projectId, region, jobId) { + // [START dataproc_v1_generated_JobController_DeleteJob_async] + /** + * TODO(developer): Uncomment these variables before running the sample. 
+ */ + /** + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Required. The job ID. + */ + // const jobId = 'abc123' + + // Imports the Dataproc library + const {JobControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new JobControllerClient(); + + async function callDeleteJob() { + // Construct request + const request = { + projectId, + region, + jobId, + }; + + // Run request + const response = await dataprocClient.deleteJob(request); + console.log(response); + } + + callDeleteJob(); + // [END dataproc_v1_generated_JobController_DeleteJob_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.get_job.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.get_job.js new file mode 100644 index 00000000..20135e2a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/job_controller.get_job.js @@ -0,0 +1,69 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(projectId, region, jobId) { + // [START dataproc_v1_generated_JobController_GetJob_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Required. The job ID. + */ + // const jobId = 'abc123' + + // Imports the Dataproc library + const {JobControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new JobControllerClient(); + + async function callGetJob() { + // Construct request + const request = { + projectId, + region, + jobId, + }; + + // Run request + const response = await dataprocClient.getJob(request); + console.log(response); + } + + callGetJob(); + // [END dataproc_v1_generated_JobController_GetJob_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.list_jobs.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.list_jobs.js new file mode 100644 index 00000000..fd0ddc7b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/job_controller.list_jobs.js @@ -0,0 +1,99 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(projectId, region) { + // [START dataproc_v1_generated_JobController_ListJobs_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Optional. The number of results to return in each response. + */ + // const pageSize = 1234 + /** + * Optional. The page token, returned by a previous call, to request the + * next page of results. + */ + // const pageToken = 'abc123' + /** + * Optional. If set, the returned jobs list includes only jobs that were + * submitted to the named cluster. + */ + // const clusterName = 'abc123' + /** + * Optional. Specifies enumerated categories of jobs to list. + * (default = match ALL jobs). + * If `filter` is provided, `jobStateMatcher` will be ignored. + */ + // const jobStateMatcher = {} + /** + * Optional. A filter constraining the jobs to list. Filters are + * case-sensitive and have the following syntax: + * field = value AND field = value ... + * where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label + * key. **value** can be `*` to match all values. 
+ * `status.state` can be either `ACTIVE` or `NON_ACTIVE`. + * Only the logical `AND` operator is supported; space-separated items are + * treated as having an implicit `AND` operator. + * Example filter: + * status.state = ACTIVE AND labels.env = staging AND labels.starred = * + */ + // const filter = 'abc123' + + // Imports the Dataproc library + const {JobControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new JobControllerClient(); + + async function callListJobs() { + // Construct request + const request = { + projectId, + region, + }; + + // Run request + const iterable = await dataprocClient.listJobsAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListJobs(); + // [END dataproc_v1_generated_JobController_ListJobs_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job.js new file mode 100644 index 00000000..bc03f366 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job.js @@ -0,0 +1,82 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(projectId, region, job) { + // [START dataproc_v1_generated_JobController_SubmitJob_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Required. The job resource. + */ + // const job = {} + /** + * Optional. A unique id used to identify the request. If the server + * receives two + * SubmitJobRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.SubmitJobRequest)s + * with the same id, then the second request will be ignored and the + * first Job google.cloud.dataproc.v1.Job created and stored in the backend + * is returned. + * It is recommended to always set this value to a + * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). + * The id must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. 
+ */ + // const requestId = 'abc123' + + // Imports the Dataproc library + const {JobControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new JobControllerClient(); + + async function callSubmitJob() { + // Construct request + const request = { + projectId, + region, + job, + }; + + // Run request + const response = await dataprocClient.submitJob(request); + console.log(response); + } + + callSubmitJob(); + // [END dataproc_v1_generated_JobController_SubmitJob_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job_as_operation.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job_as_operation.js new file mode 100644 index 00000000..8f613395 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job_as_operation.js @@ -0,0 +1,83 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(projectId, region, job) { + // [START dataproc_v1_generated_JobController_SubmitJobAsOperation_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Required. The job resource. + */ + // const job = {} + /** + * Optional. A unique id used to identify the request. If the server + * receives two + * SubmitJobRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.SubmitJobRequest)s + * with the same id, then the second request will be ignored and the + * first Job google.cloud.dataproc.v1.Job created and stored in the backend + * is returned. + * It is recommended to always set this value to a + * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). + * The id must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. 
+ */ + // const requestId = 'abc123' + + // Imports the Dataproc library + const {JobControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new JobControllerClient(); + + async function callSubmitJobAsOperation() { + // Construct request + const request = { + projectId, + region, + job, + }; + + // Run request + const [operation] = await dataprocClient.submitJobAsOperation(request); + const [response] = await operation.promise(); + console.log(response); + } + + callSubmitJobAsOperation(); + // [END dataproc_v1_generated_JobController_SubmitJobAsOperation_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.update_job.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.update_job.js new file mode 100644 index 00000000..0ba0c750 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/job_controller.update_job.js @@ -0,0 +1,84 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(projectId, region, jobId, job, updateMask) { + // [START dataproc_v1_generated_JobController_UpdateJob_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + */ + // const projectId = 'abc123' + /** + * Required. The Dataproc region in which to handle the request. + */ + // const region = 'us-central1' + /** + * Required. The job ID. + */ + // const jobId = 'abc123' + /** + * Required. The changes to the job. + */ + // const job = {} + /** + * Required. Specifies the path, relative to Job, of + * the field to update. For example, to update the labels of a Job the + * update_mask parameter would be specified as + * labels, and the `PATCH` request body would specify the new + * value. Note: Currently, labels is the only + * field that can be updated. + */ + // const updateMask = {} + + // Imports the Dataproc library + const {JobControllerClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new JobControllerClient(); + + async function callUpdateJob() { + // Construct request + const request = { + projectId, + region, + jobId, + job, + updateMask, + }; + + // Run request + const response = await dataprocClient.updateJob(request); + console.log(response); + } + + callUpdateJob(); + // [END dataproc_v1_generated_JobController_UpdateJob_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/snippet_metadata.google.cloud.dataproc.v1.json b/owl-bot-staging/v1/samples/generated/v1/snippet_metadata.google.cloud.dataproc.v1.json new file mode 100644 index 00000000..f5e99f6f --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/snippet_metadata.google.cloud.dataproc.v1.json @@ -0,0 +1,1535 @@ +{ + 
"clientLibrary": { + "name": "nodejs-dataproc", + "version": "0.1.0", + "language": "TYPESCRIPT", + "apis": [ + { + "id": "google.cloud.dataproc.v1", + "version": "v1" + } + ] + }, + "snippets": [ + { + "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_async", + "title": "dataproc createAutoscalingPolicy Sample", + "origin": "API_DEFINITION", + "description": " Creates new autoscaling policy.", + "canonical": true, + "file": "autoscaling_policy_service.create_autoscaling_policy.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 62, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateAutoscalingPolicy", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.CreateAutoscalingPolicy", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "policy", + "type": ".google.cloud.dataproc.v1.AutoscalingPolicy" + } + ], + "resultType": ".google.cloud.dataproc.v1.AutoscalingPolicy", + "client": { + "shortName": "AutoscalingPolicyServiceClient", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyServiceClient" + }, + "method": { + "shortName": "CreateAutoscalingPolicy", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.CreateAutoscalingPolicy", + "service": { + "shortName": "AutoscalingPolicyService", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_async", + "title": "dataproc updateAutoscalingPolicy Sample", + "origin": "API_DEFINITION", + "description": " Updates (replaces) autoscaling policy. 
Disabled check for update_mask, because all updates will be full replacements.", + "canonical": true, + "file": "autoscaling_policy_service.update_autoscaling_policy.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 50, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "UpdateAutoscalingPolicy", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.UpdateAutoscalingPolicy", + "async": true, + "parameters": [ + { + "name": "policy", + "type": ".google.cloud.dataproc.v1.AutoscalingPolicy" + } + ], + "resultType": ".google.cloud.dataproc.v1.AutoscalingPolicy", + "client": { + "shortName": "AutoscalingPolicyServiceClient", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyServiceClient" + }, + "method": { + "shortName": "UpdateAutoscalingPolicy", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.UpdateAutoscalingPolicy", + "service": { + "shortName": "AutoscalingPolicyService", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_async", + "title": "dataproc getAutoscalingPolicy Sample", + "origin": "API_DEFINITION", + "description": " Retrieves autoscaling policy.", + "canonical": true, + "file": "autoscaling_policy_service.get_autoscaling_policy.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 57, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetAutoscalingPolicy", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.GetAutoscalingPolicy", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.dataproc.v1.AutoscalingPolicy", + "client": { + "shortName": "AutoscalingPolicyServiceClient", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyServiceClient" + }, + "method": { + "shortName": "GetAutoscalingPolicy", + "fullName": 
"google.cloud.dataproc.v1.AutoscalingPolicyService.GetAutoscalingPolicy", + "service": { + "shortName": "AutoscalingPolicyService", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_async", + "title": "dataproc listAutoscalingPolicies Sample", + "origin": "API_DEFINITION", + "description": " Lists autoscaling policies in the project.", + "canonical": true, + "file": "autoscaling_policy_service.list_autoscaling_policies.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 69, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListAutoscalingPolicies", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.ListAutoscalingPolicies", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.dataproc.v1.ListAutoscalingPoliciesResponse", + "client": { + "shortName": "AutoscalingPolicyServiceClient", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyServiceClient" + }, + "method": { + "shortName": "ListAutoscalingPolicies", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.ListAutoscalingPolicies", + "service": { + "shortName": "AutoscalingPolicyService", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_async", + "title": "dataproc deleteAutoscalingPolicy Sample", + "origin": "API_DEFINITION", + "description": " Deletes an autoscaling policy. 
It is an error to delete an autoscaling policy that is in use by one or more clusters.", + "canonical": true, + "file": "autoscaling_policy_service.delete_autoscaling_policy.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 57, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteAutoscalingPolicy", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.DeleteAutoscalingPolicy", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "AutoscalingPolicyServiceClient", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyServiceClient" + }, + "method": { + "shortName": "DeleteAutoscalingPolicy", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.DeleteAutoscalingPolicy", + "service": { + "shortName": "AutoscalingPolicyService", + "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_BatchController_CreateBatch_async", + "title": "dataproc createBatch Sample", + "origin": "API_DEFINITION", + "description": " Creates a batch workload that executes asynchronously.", + "canonical": true, + "file": "batch_controller.create_batch.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 75, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateBatch", + "fullName": "google.cloud.dataproc.v1.BatchController.CreateBatch", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "batch", + "type": ".google.cloud.dataproc.v1.Batch" + }, + { + "name": "batch_id", + "type": "TYPE_STRING" + }, + { + "name": "request_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.longrunning.Operation", + "client": { + "shortName": "BatchControllerClient", + "fullName": "google.cloud.dataproc.v1.BatchControllerClient" + }, + "method": { + "shortName": "CreateBatch", 
+ "fullName": "google.cloud.dataproc.v1.BatchController.CreateBatch", + "service": { + "shortName": "BatchController", + "fullName": "google.cloud.dataproc.v1.BatchController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_BatchController_GetBatch_async", + "title": "dataproc getBatch Sample", + "origin": "API_DEFINITION", + "description": " Gets the batch workload resource representation.", + "canonical": true, + "file": "batch_controller.get_batch.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 50, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetBatch", + "fullName": "google.cloud.dataproc.v1.BatchController.GetBatch", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.dataproc.v1.Batch", + "client": { + "shortName": "BatchControllerClient", + "fullName": "google.cloud.dataproc.v1.BatchControllerClient" + }, + "method": { + "shortName": "GetBatch", + "fullName": "google.cloud.dataproc.v1.BatchController.GetBatch", + "service": { + "shortName": "BatchController", + "fullName": "google.cloud.dataproc.v1.BatchController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_BatchController_ListBatches_async", + "title": "dataproc listBatches Sample", + "origin": "API_DEFINITION", + "description": " Lists batch workloads.", + "canonical": true, + "file": "batch_controller.list_batches.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 63, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListBatches", + "fullName": "google.cloud.dataproc.v1.BatchController.ListBatches", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.dataproc.v1.ListBatchesResponse", + "client": { + "shortName": "BatchControllerClient", + 
"fullName": "google.cloud.dataproc.v1.BatchControllerClient" + }, + "method": { + "shortName": "ListBatches", + "fullName": "google.cloud.dataproc.v1.BatchController.ListBatches", + "service": { + "shortName": "BatchController", + "fullName": "google.cloud.dataproc.v1.BatchController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_BatchController_DeleteBatch_async", + "title": "dataproc deleteBatch Sample", + "origin": "API_DEFINITION", + "description": " Deletes the batch workload resource. If the batch is not in terminal state, the delete fails and the response returns `FAILED_PRECONDITION`.", + "canonical": true, + "file": "batch_controller.delete_batch.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 50, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteBatch", + "fullName": "google.cloud.dataproc.v1.BatchController.DeleteBatch", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "BatchControllerClient", + "fullName": "google.cloud.dataproc.v1.BatchControllerClient" + }, + "method": { + "shortName": "DeleteBatch", + "fullName": "google.cloud.dataproc.v1.BatchController.DeleteBatch", + "service": { + "shortName": "BatchController", + "fullName": "google.cloud.dataproc.v1.BatchController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_ClusterController_CreateCluster_async", + "title": "dataproc createCluster Sample", + "origin": "API_DEFINITION", + "description": " Creates a cluster in a project. 
The returned [Operation.metadata][google.longrunning.Operation.metadata] will be [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).", + "canonical": true, + "file": "cluster_controller.create_cluster.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 78, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.CreateCluster", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": "cluster", + "type": ".google.cloud.dataproc.v1.Cluster" + }, + { + "name": "request_id", + "type": "TYPE_STRING" + }, + { + "name": "action_on_failed_primary_workers", + "type": ".google.cloud.dataproc.v1.FailureAction" + } + ], + "resultType": ".google.longrunning.Operation", + "client": { + "shortName": "ClusterControllerClient", + "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" + }, + "method": { + "shortName": "CreateCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.CreateCluster", + "service": { + "shortName": "ClusterController", + "fullName": "google.cloud.dataproc.v1.ClusterController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_ClusterController_UpdateCluster_async", + "title": "dataproc updateCluster Sample", + "origin": "API_DEFINITION", + "description": " Updates a cluster in a project. The returned [Operation.metadata][google.longrunning.Operation.metadata] will be [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). 
The cluster must be in a [`RUNNING`][google.cloud.dataproc.v1.ClusterStatus.State] state or an error is returned.", + "canonical": true, + "file": "cluster_controller.update_cluster.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 143, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "UpdateCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.UpdateCluster", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": "cluster_name", + "type": "TYPE_STRING" + }, + { + "name": "cluster", + "type": ".google.cloud.dataproc.v1.Cluster" + }, + { + "name": "graceful_decommission_timeout", + "type": ".google.protobuf.Duration" + }, + { + "name": "update_mask", + "type": ".google.protobuf.FieldMask" + }, + { + "name": "request_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.longrunning.Operation", + "client": { + "shortName": "ClusterControllerClient", + "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" + }, + "method": { + "shortName": "UpdateCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.UpdateCluster", + "service": { + "shortName": "ClusterController", + "fullName": "google.cloud.dataproc.v1.ClusterController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_ClusterController_StopCluster_async", + "title": "dataproc stopCluster Sample", + "origin": "API_DEFINITION", + "description": " Stops a cluster in a project.", + "canonical": true, + "file": "cluster_controller.stop_cluster.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 80, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StopCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.StopCluster", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": 
"cluster_name", + "type": "TYPE_STRING" + }, + { + "name": "cluster_uuid", + "type": "TYPE_STRING" + }, + { + "name": "request_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.longrunning.Operation", + "client": { + "shortName": "ClusterControllerClient", + "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" + }, + "method": { + "shortName": "StopCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.StopCluster", + "service": { + "shortName": "ClusterController", + "fullName": "google.cloud.dataproc.v1.ClusterController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_ClusterController_StartCluster_async", + "title": "dataproc startCluster Sample", + "origin": "API_DEFINITION", + "description": " Starts a cluster in a project.", + "canonical": true, + "file": "cluster_controller.start_cluster.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 80, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "StartCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.StartCluster", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": "cluster_name", + "type": "TYPE_STRING" + }, + { + "name": "cluster_uuid", + "type": "TYPE_STRING" + }, + { + "name": "request_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.longrunning.Operation", + "client": { + "shortName": "ClusterControllerClient", + "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" + }, + "method": { + "shortName": "StartCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.StartCluster", + "service": { + "shortName": "ClusterController", + "fullName": "google.cloud.dataproc.v1.ClusterController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_ClusterController_DeleteCluster_async", + "title": "dataproc deleteCluster Sample", + "origin": "API_DEFINITION", + 
"description": " Deletes a cluster in a project. The returned [Operation.metadata][google.longrunning.Operation.metadata] will be [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).", + "canonical": true, + "file": "cluster_controller.delete_cluster.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 80, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.DeleteCluster", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": "cluster_name", + "type": "TYPE_STRING" + }, + { + "name": "cluster_uuid", + "type": "TYPE_STRING" + }, + { + "name": "request_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.longrunning.Operation", + "client": { + "shortName": "ClusterControllerClient", + "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" + }, + "method": { + "shortName": "DeleteCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.DeleteCluster", + "service": { + "shortName": "ClusterController", + "fullName": "google.cloud.dataproc.v1.ClusterController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_ClusterController_GetCluster_async", + "title": "dataproc getCluster Sample", + "origin": "API_DEFINITION", + "description": " Gets the resource representation for a cluster in a project.", + "canonical": true, + "file": "cluster_controller.get_cluster.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 61, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.GetCluster", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": 
"cluster_name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.dataproc.v1.Cluster", + "client": { + "shortName": "ClusterControllerClient", + "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" + }, + "method": { + "shortName": "GetCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.GetCluster", + "service": { + "shortName": "ClusterController", + "fullName": "google.cloud.dataproc.v1.ClusterController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_ClusterController_ListClusters_async", + "title": "dataproc listClusters Sample", + "origin": "API_DEFINITION", + "description": " Lists all regions/{region}/clusters in a project alphabetically.", + "canonical": true, + "file": "cluster_controller.list_clusters.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 84, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListClusters", + "fullName": "google.cloud.dataproc.v1.ClusterController.ListClusters", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": "filter", + "type": "TYPE_STRING" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.dataproc.v1.ListClustersResponse", + "client": { + "shortName": "ClusterControllerClient", + "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" + }, + "method": { + "shortName": "ListClusters", + "fullName": "google.cloud.dataproc.v1.ClusterController.ListClusters", + "service": { + "shortName": "ClusterController", + "fullName": "google.cloud.dataproc.v1.ClusterController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_ClusterController_DiagnoseCluster_async", + "title": "dataproc diagnoseCluster Sample", + "origin": "API_DEFINITION", + "description": " Gets cluster diagnostic information. 
The returned [Operation.metadata][google.longrunning.Operation.metadata] will be [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). After the operation completes, [Operation.response][google.longrunning.Operation.response] contains [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).", + "canonical": true, + "file": "cluster_controller.diagnose_cluster.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 62, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DiagnoseCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.DiagnoseCluster", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": "cluster_name", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.longrunning.Operation", + "client": { + "shortName": "ClusterControllerClient", + "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" + }, + "method": { + "shortName": "DiagnoseCluster", + "fullName": "google.cloud.dataproc.v1.ClusterController.DiagnoseCluster", + "service": { + "shortName": "ClusterController", + "fullName": "google.cloud.dataproc.v1.ClusterController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_JobController_SubmitJob_async", + "title": "dataproc submitJob Sample", + "origin": "API_DEFINITION", + "description": " Submits a job to a cluster.", + "canonical": true, + "file": "job_controller.submit_job.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 74, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "SubmitJob", + "fullName": "google.cloud.dataproc.v1.JobController.SubmitJob", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, 
+ { + "name": "job", + "type": ".google.cloud.dataproc.v1.Job" + }, + { + "name": "request_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.dataproc.v1.Job", + "client": { + "shortName": "JobControllerClient", + "fullName": "google.cloud.dataproc.v1.JobControllerClient" + }, + "method": { + "shortName": "SubmitJob", + "fullName": "google.cloud.dataproc.v1.JobController.SubmitJob", + "service": { + "shortName": "JobController", + "fullName": "google.cloud.dataproc.v1.JobController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_JobController_SubmitJobAsOperation_async", + "title": "dataproc submitJobAsOperation Sample", + "origin": "API_DEFINITION", + "description": " Submits job to a cluster.", + "canonical": true, + "file": "job_controller.submit_job_as_operation.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 75, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "SubmitJobAsOperation", + "fullName": "google.cloud.dataproc.v1.JobController.SubmitJobAsOperation", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": "job", + "type": ".google.cloud.dataproc.v1.Job" + }, + { + "name": "request_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.longrunning.Operation", + "client": { + "shortName": "JobControllerClient", + "fullName": "google.cloud.dataproc.v1.JobControllerClient" + }, + "method": { + "shortName": "SubmitJobAsOperation", + "fullName": "google.cloud.dataproc.v1.JobController.SubmitJobAsOperation", + "service": { + "shortName": "JobController", + "fullName": "google.cloud.dataproc.v1.JobController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_JobController_GetJob_async", + "title": "dataproc getJob Sample", + "origin": "API_DEFINITION", + "description": " Gets the resource representation for a job in a project.", + "canonical": true, + "file": 
"job_controller.get_job.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 61, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetJob", + "fullName": "google.cloud.dataproc.v1.JobController.GetJob", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": "job_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.dataproc.v1.Job", + "client": { + "shortName": "JobControllerClient", + "fullName": "google.cloud.dataproc.v1.JobControllerClient" + }, + "method": { + "shortName": "GetJob", + "fullName": "google.cloud.dataproc.v1.JobController.GetJob", + "service": { + "shortName": "JobController", + "fullName": "google.cloud.dataproc.v1.JobController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_JobController_ListJobs_async", + "title": "dataproc listJobs Sample", + "origin": "API_DEFINITION", + "description": " Lists regions/{region}/jobs in a project.", + "canonical": true, + "file": "job_controller.list_jobs.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 91, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListJobs", + "fullName": "google.cloud.dataproc.v1.JobController.ListJobs", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + }, + { + "name": "cluster_name", + "type": "TYPE_STRING" + }, + { + "name": "job_state_matcher", + "type": ".google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcher" + }, + { + "name": "filter", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.dataproc.v1.ListJobsResponse", + "client": { + "shortName": "JobControllerClient", + "fullName": "google.cloud.dataproc.v1.JobControllerClient" + }, + "method": { + 
"shortName": "ListJobs", + "fullName": "google.cloud.dataproc.v1.JobController.ListJobs", + "service": { + "shortName": "JobController", + "fullName": "google.cloud.dataproc.v1.JobController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_JobController_UpdateJob_async", + "title": "dataproc updateJob Sample", + "origin": "API_DEFINITION", + "description": " Updates a job in a project.", + "canonical": true, + "file": "job_controller.update_job.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 76, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "UpdateJob", + "fullName": "google.cloud.dataproc.v1.JobController.UpdateJob", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": "job_id", + "type": "TYPE_STRING" + }, + { + "name": "job", + "type": ".google.cloud.dataproc.v1.Job" + }, + { + "name": "update_mask", + "type": ".google.protobuf.FieldMask" + } + ], + "resultType": ".google.cloud.dataproc.v1.Job", + "client": { + "shortName": "JobControllerClient", + "fullName": "google.cloud.dataproc.v1.JobControllerClient" + }, + "method": { + "shortName": "UpdateJob", + "fullName": "google.cloud.dataproc.v1.JobController.UpdateJob", + "service": { + "shortName": "JobController", + "fullName": "google.cloud.dataproc.v1.JobController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_JobController_CancelJob_async", + "title": "dataproc cancelJob Sample", + "origin": "API_DEFINITION", + "description": " Starts a job cancellation request. 
To access the job resource after cancellation, call [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) or [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).", + "canonical": true, + "file": "job_controller.cancel_job.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 61, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CancelJob", + "fullName": "google.cloud.dataproc.v1.JobController.CancelJob", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": "job_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.dataproc.v1.Job", + "client": { + "shortName": "JobControllerClient", + "fullName": "google.cloud.dataproc.v1.JobControllerClient" + }, + "method": { + "shortName": "CancelJob", + "fullName": "google.cloud.dataproc.v1.JobController.CancelJob", + "service": { + "shortName": "JobController", + "fullName": "google.cloud.dataproc.v1.JobController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_JobController_DeleteJob_async", + "title": "dataproc deleteJob Sample", + "origin": "API_DEFINITION", + "description": " Deletes the job from the project. 
If the job is active, the delete fails, and the response returns `FAILED_PRECONDITION`.", + "canonical": true, + "file": "job_controller.delete_job.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 61, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteJob", + "fullName": "google.cloud.dataproc.v1.JobController.DeleteJob", + "async": true, + "parameters": [ + { + "name": "project_id", + "type": "TYPE_STRING" + }, + { + "name": "region", + "type": "TYPE_STRING" + }, + { + "name": "job_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "JobControllerClient", + "fullName": "google.cloud.dataproc.v1.JobControllerClient" + }, + "method": { + "shortName": "DeleteJob", + "fullName": "google.cloud.dataproc.v1.JobController.DeleteJob", + "service": { + "shortName": "JobController", + "fullName": "google.cloud.dataproc.v1.JobController" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_async", + "title": "dataproc createWorkflowTemplate Sample", + "origin": "API_DEFINITION", + "description": " Creates new workflow template.", + "canonical": true, + "file": "workflow_template_service.create_workflow_template.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 62, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "CreateWorkflowTemplate", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "template", + "type": ".google.cloud.dataproc.v1.WorkflowTemplate" + } + ], + "resultType": ".google.cloud.dataproc.v1.WorkflowTemplate", + "client": { + "shortName": "WorkflowTemplateServiceClient", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateServiceClient" + }, + "method": { + "shortName": "CreateWorkflowTemplate", + "fullName": 
"google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate", + "service": { + "shortName": "WorkflowTemplateService", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_async", + "title": "dataproc getWorkflowTemplate Sample", + "origin": "API_DEFINITION", + "description": " Retrieves the latest workflow template. Can retrieve previously instantiated template by specifying optional version parameter.", + "canonical": true, + "file": "workflow_template_service.get_workflow_template.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 63, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "GetWorkflowTemplate", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.GetWorkflowTemplate", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "version", + "type": "TYPE_INT32" + } + ], + "resultType": ".google.cloud.dataproc.v1.WorkflowTemplate", + "client": { + "shortName": "WorkflowTemplateServiceClient", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateServiceClient" + }, + "method": { + "shortName": "GetWorkflowTemplate", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.GetWorkflowTemplate", + "service": { + "shortName": "WorkflowTemplateService", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async", + "title": "dataproc instantiateWorkflowTemplate Sample", + "origin": "API_DEFINITION", + "description": " Instantiates a template and begins execution. The returned Operation can be used to track execution of workflow by polling [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when entire workflow is finished. 
The running workflow can be aborted via [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. The [Operation.metadata][google.longrunning.Operation.metadata] will be [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). Also see [Using WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). On successful completion, [Operation.response][google.longrunning.Operation.response] will be [Empty][google.protobuf.Empty].", + "canonical": true, + "file": "workflow_template_service.instantiate_workflow_template.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 81, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "InstantiateWorkflowTemplate", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "version", + "type": "TYPE_INT32" + }, + { + "name": "request_id", + "type": "TYPE_STRING" + }, + { + "name": "parameters", + "type": "TYPE_MESSAGE[]" + } + ], + "resultType": ".google.longrunning.Operation", + "client": { + "shortName": "WorkflowTemplateServiceClient", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateServiceClient" + }, + "method": { + "shortName": "InstantiateWorkflowTemplate", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate", + "service": { + "shortName": "WorkflowTemplateService", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async", + "title": "dataproc instantiateInlineWorkflowTemplate Sample", + "origin": "API_DEFINITION", + "description": " Instantiates a template and begins 
execution. This method is equivalent to executing the sequence [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. The returned Operation can be used to track execution of workflow by polling [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when entire workflow is finished. The running workflow can be aborted via [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. The [Operation.metadata][google.longrunning.Operation.metadata] will be [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). Also see [Using WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). 
On successful completion, [Operation.response][google.longrunning.Operation.response] will be [Empty][google.protobuf.Empty].", + "canonical": true, + "file": "workflow_template_service.instantiate_inline_workflow_template.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 73, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "InstantiateInlineWorkflowTemplate", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateInlineWorkflowTemplate", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "template", + "type": ".google.cloud.dataproc.v1.WorkflowTemplate" + }, + { + "name": "request_id", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.longrunning.Operation", + "client": { + "shortName": "WorkflowTemplateServiceClient", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateServiceClient" + }, + "method": { + "shortName": "InstantiateInlineWorkflowTemplate", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateInlineWorkflowTemplate", + "service": { + "shortName": "WorkflowTemplateService", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_async", + "title": "dataproc updateWorkflowTemplate Sample", + "origin": "API_DEFINITION", + "description": " Updates (replaces) workflow template. 
The updated template must contain version that matches the current server version.", + "canonical": true, + "file": "workflow_template_service.update_workflow_template.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 51, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "UpdateWorkflowTemplate", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.UpdateWorkflowTemplate", + "async": true, + "parameters": [ + { + "name": "template", + "type": ".google.cloud.dataproc.v1.WorkflowTemplate" + } + ], + "resultType": ".google.cloud.dataproc.v1.WorkflowTemplate", + "client": { + "shortName": "WorkflowTemplateServiceClient", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateServiceClient" + }, + "method": { + "shortName": "UpdateWorkflowTemplate", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.UpdateWorkflowTemplate", + "service": { + "shortName": "WorkflowTemplateService", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_async", + "title": "dataproc listWorkflowTemplates Sample", + "origin": "API_DEFINITION", + "description": " Lists workflows that match the specified filter in the request.", + "canonical": true, + "file": "workflow_template_service.list_workflow_templates.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 68, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "ListWorkflowTemplates", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.ListWorkflowTemplates", + "async": true, + "parameters": [ + { + "name": "parent", + "type": "TYPE_STRING" + }, + { + "name": "page_size", + "type": "TYPE_INT32" + }, + { + "name": "page_token", + "type": "TYPE_STRING" + } + ], + "resultType": ".google.cloud.dataproc.v1.ListWorkflowTemplatesResponse", + "client": { + "shortName": "WorkflowTemplateServiceClient", + "fullName": 
"google.cloud.dataproc.v1.WorkflowTemplateServiceClient" + }, + "method": { + "shortName": "ListWorkflowTemplates", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.ListWorkflowTemplates", + "service": { + "shortName": "WorkflowTemplateService", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" + } + } + } + }, + { + "regionTag": "dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async", + "title": "dataproc deleteWorkflowTemplate Sample", + "origin": "API_DEFINITION", + "description": " Deletes a workflow template. It does not cancel in-progress workflows.", + "canonical": true, + "file": "workflow_template_service.delete_workflow_template.js", + "language": "JAVASCRIPT", + "segments": [ + { + "start": 25, + "end": 63, + "type": "FULL" + } + ], + "clientMethod": { + "shortName": "DeleteWorkflowTemplate", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate", + "async": true, + "parameters": [ + { + "name": "name", + "type": "TYPE_STRING" + }, + { + "name": "version", + "type": "TYPE_INT32" + } + ], + "resultType": ".google.protobuf.Empty", + "client": { + "shortName": "WorkflowTemplateServiceClient", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateServiceClient" + }, + "method": { + "shortName": "DeleteWorkflowTemplate", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate", + "service": { + "shortName": "WorkflowTemplateService", + "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" + } + } + } + } + ] +} diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.create_workflow_template.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.create_workflow_template.js new file mode 100644 index 00000000..facd28fd --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.create_workflow_template.js @@ -0,0 +1,70 @@ +// Copyright 2022 Google LLC +// +// Licensed under 
the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(parent, template) { + // [START dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The resource name of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. + * * For `projects.regions.workflowTemplates.create`, the resource name of the + * region has the following format: + * `projects/{project_id}/regions/{region}` + * * For `projects.locations.workflowTemplates.create`, the resource name of + * the location has the following format: + * `projects/{project_id}/locations/{location}` + */ + // const parent = 'abc123' + /** + * Required. The Dataproc workflow template to create. 
+ */ + // const template = {} + + // Imports the Dataproc library + const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new WorkflowTemplateServiceClient(); + + async function callCreateWorkflowTemplate() { + // Construct request + const request = { + parent, + template, + }; + + // Run request + const response = await dataprocClient.createWorkflowTemplate(request); + console.log(response); + } + + callCreateWorkflowTemplate(); + // [END dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.delete_workflow_template.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.delete_workflow_template.js new file mode 100644 index 00000000..9a9dec08 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.delete_workflow_template.js @@ -0,0 +1,71 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(name) { + // [START dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The resource name of the workflow template, as described + * in https://cloud.google.com/apis/design/resource_names. + * * For `projects.regions.workflowTemplates.delete`, the resource name + * of the template has the following format: + * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + * * For `projects.locations.workflowTemplates.instantiate`, the resource name + * of the template has the following format: + * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + */ + // const name = 'abc123' + /** + * Optional. The version of workflow template to delete. If specified, + * will only delete the template if the current server version matches + * specified version. + */ + // const version = 1234 + + // Imports the Dataproc library + const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new WorkflowTemplateServiceClient(); + + async function callDeleteWorkflowTemplate() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await dataprocClient.deleteWorkflowTemplate(request); + console.log(response); + } + + callDeleteWorkflowTemplate(); + // [END dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.get_workflow_template.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.get_workflow_template.js new file mode 100644 index 00000000..0013a450 --- /dev/null +++ 
b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.get_workflow_template.js @@ -0,0 +1,71 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The resource name of the workflow template, as described + * in https://cloud.google.com/apis/design/resource_names. + * * For `projects.regions.workflowTemplates.get`, the resource name of the + * template has the following format: + * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + * * For `projects.locations.workflowTemplates.get`, the resource name of the + * template has the following format: + * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + */ + // const name = 'abc123' + /** + * Optional. The version of workflow template to retrieve. Only previously + * instantiated versions can be retrieved. + * If unspecified, retrieves the current version. 
+ */ + // const version = 1234 + + // Imports the Dataproc library + const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new WorkflowTemplateServiceClient(); + + async function callGetWorkflowTemplate() { + // Construct request + const request = { + name, + }; + + // Run request + const response = await dataprocClient.getWorkflowTemplate(request); + console.log(response); + } + + callGetWorkflowTemplate(); + // [END dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_inline_workflow_template.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_inline_workflow_template.js new file mode 100644 index 00000000..6aec41bc --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_inline_workflow_template.js @@ -0,0 +1,81 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(parent, template) { + // [START dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The resource name of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. + * * For `projects.regions.workflowTemplates,instantiateinline`, the resource + * name of the region has the following format: + * `projects/{project_id}/regions/{region}` + * * For `projects.locations.workflowTemplates.instantiateinline`, the + * resource name of the location has the following format: + * `projects/{project_id}/locations/{location}` + */ + // const parent = 'abc123' + /** + * Required. The workflow template to instantiate. + */ + // const template = {} + /** + * Optional. A tag that prevents multiple concurrent workflow + * instances with the same tag from running. This mitigates risk of + * concurrent instances started due to retries. + * It is recommended to always set this value to a + * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). + * The tag must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. 
+ */ + // const requestId = 'abc123' + + // Imports the Dataproc library + const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new WorkflowTemplateServiceClient(); + + async function callInstantiateInlineWorkflowTemplate() { + // Construct request + const request = { + parent, + template, + }; + + // Run request + const [operation] = await dataprocClient.instantiateInlineWorkflowTemplate(request); + const [response] = await operation.promise(); + console.log(response); + } + + callInstantiateInlineWorkflowTemplate(); + // [END dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_workflow_template.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_workflow_template.js new file mode 100644 index 00000000..ee97d2a0 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_workflow_template.js @@ -0,0 +1,89 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(name) { + // [START dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The resource name of the workflow template, as described + * in https://cloud.google.com/apis/design/resource_names. + * * For `projects.regions.workflowTemplates.instantiate`, the resource name + * of the template has the following format: + * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + * * For `projects.locations.workflowTemplates.instantiate`, the resource name + * of the template has the following format: + * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + */ + // const name = 'abc123' + /** + * Optional. The version of workflow template to instantiate. If specified, + * the workflow will be instantiated only if the current version of + * the workflow template has the supplied version. + * This option cannot be used to instantiate a previous version of + * workflow template. + */ + // const version = 1234 + /** + * Optional. A tag that prevents multiple concurrent workflow + * instances with the same tag from running. This mitigates risk of + * concurrent instances started due to retries. + * It is recommended to always set this value to a + * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). + * The tag must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. + */ + // const requestId = 'abc123' + /** + * Optional. Map from parameter names to values that should be used for those + * parameters. Values may not exceed 1000 characters. 
+ */ + // const parameters = 1234 + + // Imports the Dataproc library + const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new WorkflowTemplateServiceClient(); + + async function callInstantiateWorkflowTemplate() { + // Construct request + const request = { + name, + }; + + // Run request + const [operation] = await dataprocClient.instantiateWorkflowTemplate(request); + const [response] = await operation.promise(); + console.log(response); + } + + callInstantiateWorkflowTemplate(); + // [END dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.list_workflow_templates.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.list_workflow_templates.js new file mode 100644 index 00000000..0facd90a --- /dev/null +++ b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.list_workflow_templates.js @@ -0,0 +1,76 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + + + +'use strict'; + +function main(parent) { + // [START dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The resource name of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. + * * For `projects.regions.workflowTemplates,list`, the resource + * name of the region has the following format: + * `projects/{project_id}/regions/{region}` + * * For `projects.locations.workflowTemplates.list`, the + * resource name of the location has the following format: + * `projects/{project_id}/locations/{location}` + */ + // const parent = 'abc123' + /** + * Optional. The maximum number of results to return in each response. + */ + // const pageSize = 1234 + /** + * Optional. The page token, returned by a previous call, to request the + * next page of results. + */ + // const pageToken = 'abc123' + + // Imports the Dataproc library + const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new WorkflowTemplateServiceClient(); + + async function callListWorkflowTemplates() { + // Construct request + const request = { + parent, + }; + + // Run request + const iterable = await dataprocClient.listWorkflowTemplatesAsync(request); + for await (const response of iterable) { + console.log(response); + } + } + + callListWorkflowTemplates(); + // [END dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.update_workflow_template.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.update_workflow_template.js new file mode 100644 index 00000000..c961fda8 --- /dev/null +++ 
b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.update_workflow_template.js @@ -0,0 +1,59 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + + +'use strict'; + +function main(template) { + // [START dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_async] + /** + * TODO(developer): Uncomment these variables before running the sample. + */ + /** + * Required. The updated workflow template. + * The `template.version` field must match the current version. 
+ */ + // const template = {} + + // Imports the Dataproc library + const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; + + // Instantiates a client + const dataprocClient = new WorkflowTemplateServiceClient(); + + async function callUpdateWorkflowTemplate() { + // Construct request + const request = { + template, + }; + + // Run request + const response = await dataprocClient.updateWorkflowTemplate(request); + console.log(response); + } + + callUpdateWorkflowTemplate(); + // [END dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_async] +} + +process.on('unhandledRejection', err => { + console.error(err.message); + process.exitCode = 1; +}); +main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/src/index.ts b/owl-bot-staging/v1/src/index.ts new file mode 100644 index 00000000..a4342783 --- /dev/null +++ b/owl-bot-staging/v1/src/index.ts @@ -0,0 +1,33 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import * as v1 from './v1'; +const AutoscalingPolicyServiceClient = v1.AutoscalingPolicyServiceClient; +type AutoscalingPolicyServiceClient = v1.AutoscalingPolicyServiceClient; +const BatchControllerClient = v1.BatchControllerClient; +type BatchControllerClient = v1.BatchControllerClient; +const ClusterControllerClient = v1.ClusterControllerClient; +type ClusterControllerClient = v1.ClusterControllerClient; +const JobControllerClient = v1.JobControllerClient; +type JobControllerClient = v1.JobControllerClient; +const WorkflowTemplateServiceClient = v1.WorkflowTemplateServiceClient; +type WorkflowTemplateServiceClient = v1.WorkflowTemplateServiceClient; +export {v1, AutoscalingPolicyServiceClient, BatchControllerClient, ClusterControllerClient, JobControllerClient, WorkflowTemplateServiceClient}; +export default {v1, AutoscalingPolicyServiceClient, BatchControllerClient, ClusterControllerClient, JobControllerClient, WorkflowTemplateServiceClient}; +import * as protos from '../protos/protos'; +export {protos} diff --git a/owl-bot-staging/v1/src/v1/autoscaling_policy_service_client.ts b/owl-bot-staging/v1/src/v1/autoscaling_policy_service_client.ts new file mode 100644 index 00000000..79c04832 --- /dev/null +++ b/owl-bot-staging/v1/src/v1/autoscaling_policy_service_client.ts @@ -0,0 +1,1163 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import * as gax from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; + +import { Transform } from 'stream'; +import { RequestType } from 'google-gax/build/src/apitypes'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v1/autoscaling_policy_service_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './autoscaling_policy_service_client_config.json'; + +const version = require('../../../package.json').version; + +/** + * The API interface for managing autoscaling policies in the + * Dataproc API. + * @class + * @memberof v1 + */ +export class AutoscalingPolicyServiceClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + autoscalingPolicyServiceStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of AutoscalingPolicyServiceClient. + * + * @param {object} [options] - The configuration object. 
+ * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP fallback mode. + * In fallback mode, a special browser-compatible transport implementation is used + * instead of gRPC transport. In browser context (if the `window` object is defined) + * the fallback mode is enabled automatically; set `options.fallback` to `false` + * if you need to override this behavior. 
+ */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof AutoscalingPolicyServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. 
+ const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + batchPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/batches/{batch}' + ), + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + projectPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}' + ), + projectLocationAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}' + ), + projectLocationWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflowTemplates/{workflow_template}' + ), + projectRegionAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}' + ), + projectRegionWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/regions/{region}/workflowTemplates/{workflow_template}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). 
Denote the keys used for pagination and results. + this.descriptors.page = { + listAutoscalingPolicies: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'policies') + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.dataproc.v1.AutoscalingPolicyService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.autoscalingPolicyServiceStub) { + return this.autoscalingPolicyServiceStub; + } + + // Put together the "service stub" for + // google.cloud.dataproc.v1.AutoscalingPolicyService. + this.autoscalingPolicyServiceStub = this._gaxGrpc.createStub( + this._opts.fallback ? 
+ (this._protos as protobuf.Root).lookupService('google.cloud.dataproc.v1.AutoscalingPolicyService') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.dataproc.v1.AutoscalingPolicyService, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const autoscalingPolicyServiceStubMethods = + ['createAutoscalingPolicy', 'updateAutoscalingPolicy', 'getAutoscalingPolicy', 'listAutoscalingPolicies', 'deleteAutoscalingPolicy']; + for (const methodName of autoscalingPolicyServiceStubMethods) { + const callPromise = this.autoscalingPolicyServiceStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.autoscalingPolicyServiceStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'dataproc.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'dataproc.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. 
+ * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback?: Callback): + Promise|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Creates new autoscaling policy. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The "resource name" of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. + * + * * For `projects.regions.autoscalingPolicies.create`, the resource name + * of the region has the following format: + * `projects/{project_id}/regions/{region}` + * + * * For `projects.locations.autoscalingPolicies.create`, the resource name + * of the location has the following format: + * `projects/{project_id}/locations/{location}` + * @param {google.cloud.dataproc.v1.AutoscalingPolicy} request.policy + * Required. The autoscaling policy to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [AutoscalingPolicy]{@link google.cloud.dataproc.v1.AutoscalingPolicy}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/autoscaling_policy_service.create_autoscaling_policy.js + * region_tag:dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_async + */ + createAutoscalingPolicy( + request?: protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest|undefined, {}|undefined + ]>; + createAutoscalingPolicy( + request: protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>): void; + createAutoscalingPolicy( + request: protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest, + callback: Callback< + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>): void; + createAutoscalingPolicy( + request?: protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = 
options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createAutoscalingPolicy(request, options, callback); + } +/** + * Updates (replaces) autoscaling policy. + * + * Disabled check for update_mask, because all updates will be full + * replacements. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.dataproc.v1.AutoscalingPolicy} request.policy + * Required. The updated autoscaling policy. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [AutoscalingPolicy]{@link google.cloud.dataproc.v1.AutoscalingPolicy}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/autoscaling_policy_service.update_autoscaling_policy.js + * region_tag:dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_async + */ + updateAutoscalingPolicy( + request?: protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest|undefined, {}|undefined + ]>; + updateAutoscalingPolicy( + request: protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>): void; + updateAutoscalingPolicy( + request: protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest, + callback: Callback< + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>): void; + updateAutoscalingPolicy( + request?: protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = 
options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'policy.name': request.policy!.name || '', + }); + this.initialize(); + return this.innerApiCalls.updateAutoscalingPolicy(request, options, callback); + } +/** + * Retrieves autoscaling policy. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The "resource name" of the autoscaling policy, as described + * in https://cloud.google.com/apis/design/resource_names. + * + * * For `projects.regions.autoscalingPolicies.get`, the resource name + * of the policy has the following format: + * `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + * + * * For `projects.locations.autoscalingPolicies.get`, the resource name + * of the policy has the following format: + * `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [AutoscalingPolicy]{@link google.cloud.dataproc.v1.AutoscalingPolicy}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/autoscaling_policy_service.get_autoscaling_policy.js + * region_tag:dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_async + */ + getAutoscalingPolicy( + request?: protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest|undefined, {}|undefined + ]>; + getAutoscalingPolicy( + request: protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>): void; + getAutoscalingPolicy( + request: protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest, + callback: Callback< + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>): void; + getAutoscalingPolicy( + request?: protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.dataproc.v1.IAutoscalingPolicy, + protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = 
options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getAutoscalingPolicy(request, options, callback); + } +/** + * Deletes an autoscaling policy. It is an error to delete an autoscaling + * policy that is in use by one or more clusters. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The "resource name" of the autoscaling policy, as described + * in https://cloud.google.com/apis/design/resource_names. + * + * * For `projects.regions.autoscalingPolicies.delete`, the resource name + * of the policy has the following format: + * `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` + * + * * For `projects.locations.autoscalingPolicies.delete`, the resource name + * of the policy has the following format: + * `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/autoscaling_policy_service.delete_autoscaling_policy.js + * region_tag:dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_async + */ + deleteAutoscalingPolicy( + request?: protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest|undefined, {}|undefined + ]>; + deleteAutoscalingPolicy( + request: protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>): void; + deleteAutoscalingPolicy( + request: protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>): void; + deleteAutoscalingPolicy( + request?: protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; 
+ options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteAutoscalingPolicy(request, options, callback); + } + + /** + * Lists autoscaling policies in the project. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The "resource name" of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. + * + * * For `projects.regions.autoscalingPolicies.list`, the resource name + * of the region has the following format: + * `projects/{project_id}/regions/{region}` + * + * * For `projects.locations.autoscalingPolicies.list`, the resource name + * of the location has the following format: + * `projects/{project_id}/locations/{location}` + * @param {number} [request.pageSize] + * Optional. The maximum number of results to return in each response. + * Must be less than or equal to 1000. Defaults to 100. + * @param {string} [request.pageToken] + * Optional. The page token, returned by a previous call, to request the + * next page of results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [AutoscalingPolicy]{@link google.cloud.dataproc.v1.AutoscalingPolicy}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listAutoscalingPoliciesAsync()` + * method described below for async iteration which you can stop as needed. 
+ * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listAutoscalingPolicies( + request?: protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IAutoscalingPolicy[], + protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest|null, + protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesResponse + ]>; + listAutoscalingPolicies( + request: protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, + protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesResponse|null|undefined, + protos.google.cloud.dataproc.v1.IAutoscalingPolicy>): void; + listAutoscalingPolicies( + request: protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, + callback: PaginationCallback< + protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, + protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesResponse|null|undefined, + protos.google.cloud.dataproc.v1.IAutoscalingPolicy>): void; + listAutoscalingPolicies( + request?: protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, + protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesResponse|null|undefined, + protos.google.cloud.dataproc.v1.IAutoscalingPolicy>, + callback?: PaginationCallback< + protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, + protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesResponse|null|undefined, + protos.google.cloud.dataproc.v1.IAutoscalingPolicy>): + Promise<[ + protos.google.cloud.dataproc.v1.IAutoscalingPolicy[], + protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest|null, + 
protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listAutoscalingPolicies(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The "resource name" of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. + * + * * For `projects.regions.autoscalingPolicies.list`, the resource name + * of the region has the following format: + * `projects/{project_id}/regions/{region}` + * + * * For `projects.locations.autoscalingPolicies.list`, the resource name + * of the location has the following format: + * `projects/{project_id}/locations/{location}` + * @param {number} [request.pageSize] + * Optional. The maximum number of results to return in each response. + * Must be less than or equal to 1000. Defaults to 100. + * @param {string} [request.pageToken] + * Optional. The page token, returned by a previous call, to request the + * next page of results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
+ * @returns {Stream} + * An object stream which emits an object representing [AutoscalingPolicy]{@link google.cloud.dataproc.v1.AutoscalingPolicy} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listAutoscalingPoliciesAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listAutoscalingPoliciesStream( + request?: protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listAutoscalingPolicies']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listAutoscalingPolicies.createStream( + this.innerApiCalls.listAutoscalingPolicies as gax.GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listAutoscalingPolicies`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The "resource name" of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. 
+ * + * * For `projects.regions.autoscalingPolicies.list`, the resource name + * of the region has the following format: + * `projects/{project_id}/regions/{region}` + * + * * For `projects.locations.autoscalingPolicies.list`, the resource name + * of the location has the following format: + * `projects/{project_id}/locations/{location}` + * @param {number} [request.pageSize] + * Optional. The maximum number of results to return in each response. + * Must be less than or equal to 1000. Defaults to 100. + * @param {string} [request.pageToken] + * Optional. The page token, returned by a previous call, to request the + * next page of results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [AutoscalingPolicy]{@link google.cloud.dataproc.v1.AutoscalingPolicy}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. 
+ * @example include:samples/generated/v1/autoscaling_policy_service.list_autoscaling_policies.js + * region_tag:dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_async + */ + listAutoscalingPoliciesAsync( + request?: protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listAutoscalingPolicies']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listAutoscalingPolicies.asyncIterate( + this.innerApiCalls['listAutoscalingPolicies'] as GaxCall, + request as unknown as RequestType, + callSettings + ) as AsyncIterable; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified batch resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} batch + * @returns {string} Resource name string. + */ + batchPath(project:string,location:string,batch:string) { + return this.pathTemplates.batchPathTemplate.render({ + project: project, + location: location, + batch: batch, + }); + } + + /** + * Parse the project from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the project. + */ + matchProjectFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).project; + } + + /** + * Parse the location from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the location. 
+ */ + matchLocationFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).location; + } + + /** + * Parse the batch from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the batch. + */ + matchBatchFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).batch; + } + + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project:string,location:string) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. + */ + matchProjectFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + + /** + * Return a fully-qualified project resource name string. + * + * @param {string} project + * @returns {string} Resource name string. + */ + projectPath(project:string) { + return this.pathTemplates.projectPathTemplate.render({ + project: project, + }); + } + + /** + * Parse the project from Project resource. + * + * @param {string} projectName + * A fully-qualified path representing Project resource. + * @returns {string} A string representing the project. 
+ */ + matchProjectFromProjectName(projectName: string) { + return this.pathTemplates.projectPathTemplate.match(projectName).project; + } + + /** + * Return a fully-qualified projectLocationAutoscalingPolicy resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} autoscaling_policy + * @returns {string} Resource name string. + */ + projectLocationAutoscalingPolicyPath(project:string,location:string,autoscalingPolicy:string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render({ + project: project, + location: location, + autoscaling_policy: autoscalingPolicy, + }); + } + + /** + * Parse the project from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).project; + } + + /** + * Parse the location from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the location. + */ + matchLocationFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).location; + } + + /** + * Parse the autoscaling_policy from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the autoscaling_policy. 
+ */ + matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).autoscaling_policy; + } + + /** + * Return a fully-qualified projectLocationWorkflowTemplate resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow_template + * @returns {string} Resource name string. + */ + projectLocationWorkflowTemplatePath(project:string,location:string,workflowTemplate:string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render({ + project: project, + location: location, + workflow_template: workflowTemplate, + }); + } + + /** + * Parse the project from ProjectLocationWorkflowTemplate resource. + * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).project; + } + + /** + * Parse the location from ProjectLocationWorkflowTemplate resource. + * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the location. + */ + matchLocationFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).location; + } + + /** + * Parse the workflow_template from ProjectLocationWorkflowTemplate resource. 
+ * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the workflow_template. + */ + matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).workflow_template; + } + + /** + * Return a fully-qualified projectRegionAutoscalingPolicy resource name string. + * + * @param {string} project + * @param {string} region + * @param {string} autoscaling_policy + * @returns {string} Resource name string. + */ + projectRegionAutoscalingPolicyPath(project:string,region:string,autoscalingPolicy:string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render({ + project: project, + region: region, + autoscaling_policy: autoscalingPolicy, + }); + } + + /** + * Parse the project from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).project; + } + + /** + * Parse the region from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the region. 
+ */ + matchRegionFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).region; + } + + /** + * Parse the autoscaling_policy from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the autoscaling_policy. + */ + matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).autoscaling_policy; + } + + /** + * Return a fully-qualified projectRegionWorkflowTemplate resource name string. + * + * @param {string} project + * @param {string} region + * @param {string} workflow_template + * @returns {string} Resource name string. + */ + projectRegionWorkflowTemplatePath(project:string,region:string,workflowTemplate:string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render({ + project: project, + region: region, + workflow_template: workflowTemplate, + }); + } + + /** + * Parse the project from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).project; + } + + /** + * Parse the region from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. 
+ * @returns {string} A string representing the region. + */ + matchRegionFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).region; + } + + /** + * Parse the workflow_template from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. + * @returns {string} A string representing the workflow_template. + */ + matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).workflow_template; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
+ */ + close(): Promise { + if (this.autoscalingPolicyServiceStub && !this._terminated) { + return this.autoscalingPolicyServiceStub.then(stub => { + this._terminated = true; + stub.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v1/src/v1/autoscaling_policy_service_client_config.json b/owl-bot-staging/v1/src/v1/autoscaling_policy_service_client_config.json new file mode 100644 index 00000000..09bd8922 --- /dev/null +++ b/owl-bot-staging/v1/src/v1/autoscaling_policy_service_client_config.json @@ -0,0 +1,51 @@ +{ + "interfaces": { + "google.cloud.dataproc.v1.AutoscalingPolicyService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateAutoscalingPolicy": { + "timeout_millis": 600000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "UpdateAutoscalingPolicy": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "GetAutoscalingPolicy": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "ListAutoscalingPolicies": { + "timeout_millis": 600000, + "retry_codes_name": "idempotent", + "retry_params_name": "default" + }, + "DeleteAutoscalingPolicy": { + "timeout_millis": 600000, + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/owl-bot-staging/v1/src/v1/autoscaling_policy_service_proto_list.json b/owl-bot-staging/v1/src/v1/autoscaling_policy_service_proto_list.json new file mode 100644 index 00000000..b26a9be7 --- /dev/null +++ 
b/owl-bot-staging/v1/src/v1/autoscaling_policy_service_proto_list.json @@ -0,0 +1,9 @@ +[ + "../../protos/google/cloud/dataproc/v1/autoscaling_policies.proto", + "../../protos/google/cloud/dataproc/v1/batches.proto", + "../../protos/google/cloud/dataproc/v1/clusters.proto", + "../../protos/google/cloud/dataproc/v1/jobs.proto", + "../../protos/google/cloud/dataproc/v1/operations.proto", + "../../protos/google/cloud/dataproc/v1/shared.proto", + "../../protos/google/cloud/dataproc/v1/workflow_templates.proto" +] diff --git a/owl-bot-staging/v1/src/v1/batch_controller_client.ts b/owl-bot-staging/v1/src/v1/batch_controller_client.ts new file mode 100644 index 00000000..3c4c4168 --- /dev/null +++ b/owl-bot-staging/v1/src/v1/batch_controller_client.ts @@ -0,0 +1,1100 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +/* global window */ +import * as gax from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions, LROperation, PaginationCallback, GaxCall} from 'google-gax'; + +import { Transform } from 'stream'; +import { RequestType } from 'google-gax/build/src/apitypes'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v1/batch_controller_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './batch_controller_client_config.json'; +import { operationsProtos } from 'google-gax'; +const version = require('../../../package.json').version; + +/** + * The BatchController provides methods to manage batch workloads. + * @class + * @memberof v1 + */ +export class BatchControllerClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + operationsClient: gax.OperationsClient; + batchControllerStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of BatchControllerClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). 
+ * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP fallback mode. + * In fallback mode, a special browser-compatible transport implementation is used + * instead of gRPC transport. In browser context (if the `window` object is defined) + * the fallback mode is enabled automatically; set `options.fallback` to `false` + * if you need to override this behavior. + */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. 
+ const staticMembers = this.constructor as typeof BatchControllerClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. 
+ const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + batchPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/batches/{batch}' + ), + locationPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}' + ), + projectPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}' + ), + projectLocationAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}' + ), + projectLocationWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflowTemplates/{workflow_template}' + ), + projectRegionAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}' + ), + projectRegionWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/regions/{region}/workflowTemplates/{workflow_template}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). 
Denote the keys used for pagination and results. + this.descriptors.page = { + listBatches: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'batches') + }; + + const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos); + + // This API contains "long-running operations", which return a + // an Operation object that allows for tracking of the operation, + // rather than holding a request open. + + this.operationsClient = this._gaxModule.lro({ + auth: this.auth, + grpc: 'grpc' in this._gaxGrpc ? this._gaxGrpc.grpc : undefined + }).operationsClient(opts); + const createBatchResponse = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.Batch') as gax.protobuf.Type; + const createBatchMetadata = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.BatchOperationMetadata') as gax.protobuf.Type; + + this.descriptors.longrunning = { + createBatch: new this._gaxModule.LongrunningDescriptor( + this.operationsClient, + createBatchResponse.decode.bind(createBatchResponse), + createBatchMetadata.decode.bind(createBatchMetadata)) + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.dataproc.v1.BatchController', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. 
+ * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.batchControllerStub) { + return this.batchControllerStub; + } + + // Put together the "service stub" for + // google.cloud.dataproc.v1.BatchController. + this.batchControllerStub = this._gaxGrpc.createStub( + this._opts.fallback ? + (this._protos as protobuf.Root).lookupService('google.cloud.dataproc.v1.BatchController') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.dataproc.v1.BatchController, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const batchControllerStubMethods = + ['createBatch', 'getBatch', 'listBatches', 'deleteBatch']; + for (const methodName of batchControllerStubMethods) { + const callPromise = this.batchControllerStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + this.descriptors.longrunning[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.batchControllerStub; + } + + /** + * The DNS address for this API service. 
+ * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'dataproc.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'dataproc.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback?: Callback): + Promise|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Gets the batch workload resource representation. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The name of the batch to retrieve. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Batch]{@link google.cloud.dataproc.v1.Batch}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/batch_controller.get_batch.js + * region_tag:dataproc_v1_generated_BatchController_GetBatch_async + */ + getBatch( + request?: protos.google.cloud.dataproc.v1.IGetBatchRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IBatch, + protos.google.cloud.dataproc.v1.IGetBatchRequest|undefined, {}|undefined + ]>; + getBatch( + request: protos.google.cloud.dataproc.v1.IGetBatchRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.dataproc.v1.IBatch, + protos.google.cloud.dataproc.v1.IGetBatchRequest|null|undefined, + {}|null|undefined>): void; + getBatch( + request: protos.google.cloud.dataproc.v1.IGetBatchRequest, + callback: Callback< + protos.google.cloud.dataproc.v1.IBatch, + protos.google.cloud.dataproc.v1.IGetBatchRequest|null|undefined, + {}|null|undefined>): void; + getBatch( + request?: protos.google.cloud.dataproc.v1.IGetBatchRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.dataproc.v1.IBatch, + protos.google.cloud.dataproc.v1.IGetBatchRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.dataproc.v1.IBatch, + protos.google.cloud.dataproc.v1.IGetBatchRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.dataproc.v1.IBatch, + protos.google.cloud.dataproc.v1.IGetBatchRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getBatch(request, options, 
callback); + } +/** + * Deletes the batch workload resource. If the batch is not in terminal state, + * the delete fails and the response returns `FAILED_PRECONDITION`. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The name of the batch resource to delete. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v1/batch_controller.delete_batch.js + * region_tag:dataproc_v1_generated_BatchController_DeleteBatch_async + */ + deleteBatch( + request?: protos.google.cloud.dataproc.v1.IDeleteBatchRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteBatchRequest|undefined, {}|undefined + ]>; + deleteBatch( + request: protos.google.cloud.dataproc.v1.IDeleteBatchRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteBatchRequest|null|undefined, + {}|null|undefined>): void; + deleteBatch( + request: protos.google.cloud.dataproc.v1.IDeleteBatchRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteBatchRequest|null|undefined, + {}|null|undefined>): void; + deleteBatch( + request?: protos.google.cloud.dataproc.v1.IDeleteBatchRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteBatchRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + 
protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteBatchRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteBatchRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteBatch(request, options, callback); + } + +/** + * Creates a batch workload that executes asynchronously. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent resource where this batch will be created. + * @param {google.cloud.dataproc.v1.Batch} request.batch + * Required. The batch to create. + * @param {string} [request.batchId] + * Optional. The ID to use for the batch, which will become the final component of + * the batch's resource name. + * + * This value must be 4-63 characters. Valid characters are `/{@link 0-9|a-z}-/`. + * @param {string} [request.requestId] + * Optional. A unique ID used to identify the request. If the service + * receives two + * [CreateBatchRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateBatchRequest)s + * with the same request_id, the second request is ignored and the + * Operation that corresponds to the first Batch created and stored + * in the backend is returned. + * + * Recommendation: Set this value to a + * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). 
+ * + * The value must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing + * a long running operation. Its `promise()` method returns a promise + * you can `await` for. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/batch_controller.create_batch.js + * region_tag:dataproc_v1_generated_BatchController_CreateBatch_async + */ + createBatch( + request?: protos.google.cloud.dataproc.v1.ICreateBatchRequest, + options?: CallOptions): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>; + createBatch( + request: protos.google.cloud.dataproc.v1.ICreateBatchRequest, + options: CallOptions, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + createBatch( + request: protos.google.cloud.dataproc.v1.ICreateBatchRequest, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + createBatch( + request?: protos.google.cloud.dataproc.v1.ICreateBatchRequest, + optionsOrCallback?: CallOptions|Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>, + callback?: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof 
optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createBatch(request, options, callback); + } +/** + * Check the status of the long running operation returned by `createBatch()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/batch_controller.create_batch.js + * region_tag:dataproc_v1_generated_BatchController_CreateBatch_async + */ + async checkCreateBatchProgress(name: string): Promise>{ + const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.createBatch, gax.createDefaultBackoffSettings()); + return decodeOperation as LROperation; + } + /** + * Lists batch workloads. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent, which owns this collection of batches. + * @param {number} [request.pageSize] + * Optional. The maximum number of batches to return in each response. + * The service may return fewer than this value. + * The default page size is 20; the maximum page size is 1000. 
+ * @param {string} [request.pageToken] + * Optional. A page token received from a previous `ListBatches` call. + * Provide this token to retrieve the subsequent page. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [Batch]{@link google.cloud.dataproc.v1.Batch}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listBatchesAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listBatches( + request?: protos.google.cloud.dataproc.v1.IListBatchesRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IBatch[], + protos.google.cloud.dataproc.v1.IListBatchesRequest|null, + protos.google.cloud.dataproc.v1.IListBatchesResponse + ]>; + listBatches( + request: protos.google.cloud.dataproc.v1.IListBatchesRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.dataproc.v1.IListBatchesRequest, + protos.google.cloud.dataproc.v1.IListBatchesResponse|null|undefined, + protos.google.cloud.dataproc.v1.IBatch>): void; + listBatches( + request: protos.google.cloud.dataproc.v1.IListBatchesRequest, + callback: PaginationCallback< + protos.google.cloud.dataproc.v1.IListBatchesRequest, + protos.google.cloud.dataproc.v1.IListBatchesResponse|null|undefined, + protos.google.cloud.dataproc.v1.IBatch>): void; + listBatches( + request?: protos.google.cloud.dataproc.v1.IListBatchesRequest, + optionsOrCallback?: 
CallOptions|PaginationCallback< + protos.google.cloud.dataproc.v1.IListBatchesRequest, + protos.google.cloud.dataproc.v1.IListBatchesResponse|null|undefined, + protos.google.cloud.dataproc.v1.IBatch>, + callback?: PaginationCallback< + protos.google.cloud.dataproc.v1.IListBatchesRequest, + protos.google.cloud.dataproc.v1.IListBatchesResponse|null|undefined, + protos.google.cloud.dataproc.v1.IBatch>): + Promise<[ + protos.google.cloud.dataproc.v1.IBatch[], + protos.google.cloud.dataproc.v1.IListBatchesRequest|null, + protos.google.cloud.dataproc.v1.IListBatchesResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listBatches(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent, which owns this collection of batches. + * @param {number} [request.pageSize] + * Optional. The maximum number of batches to return in each response. + * The service may return fewer than this value. + * The default page size is 20; the maximum page size is 1000. + * @param {string} [request.pageToken] + * Optional. A page token received from a previous `ListBatches` call. + * Provide this token to retrieve the subsequent page. + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [Batch]{@link google.cloud.dataproc.v1.Batch} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listBatchesAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listBatchesStream( + request?: protos.google.cloud.dataproc.v1.IListBatchesRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listBatches']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listBatches.createStream( + this.innerApiCalls.listBatches as gax.GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listBatches`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The parent, which owns this collection of batches. + * @param {number} [request.pageSize] + * Optional. The maximum number of batches to return in each response. + * The service may return fewer than this value. + * The default page size is 20; the maximum page size is 1000. 
+ * @param {string} [request.pageToken] + * Optional. A page token received from a previous `ListBatches` call. + * Provide this token to retrieve the subsequent page. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [Batch]{@link google.cloud.dataproc.v1.Batch}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v1/batch_controller.list_batches.js + * region_tag:dataproc_v1_generated_BatchController_ListBatches_async + */ + listBatchesAsync( + request?: protos.google.cloud.dataproc.v1.IListBatchesRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listBatches']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listBatches.asyncIterate( + this.innerApiCalls['listBatches'] as GaxCall, + request as unknown as RequestType, + callSettings + ) as AsyncIterable; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified batch resource name string. 
+ * + * @param {string} project + * @param {string} location + * @param {string} batch + * @returns {string} Resource name string. + */ + batchPath(project:string,location:string,batch:string) { + return this.pathTemplates.batchPathTemplate.render({ + project: project, + location: location, + batch: batch, + }); + } + + /** + * Parse the project from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the project. + */ + matchProjectFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).project; + } + + /** + * Parse the location from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the location. + */ + matchLocationFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).location; + } + + /** + * Parse the batch from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the batch. + */ + matchBatchFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).batch; + } + + /** + * Return a fully-qualified location resource name string. + * + * @param {string} project + * @param {string} location + * @returns {string} Resource name string. + */ + locationPath(project:string,location:string) { + return this.pathTemplates.locationPathTemplate.render({ + project: project, + location: location, + }); + } + + /** + * Parse the project from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the project. 
+ */ + matchProjectFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).project; + } + + /** + * Parse the location from Location resource. + * + * @param {string} locationName + * A fully-qualified path representing Location resource. + * @returns {string} A string representing the location. + */ + matchLocationFromLocationName(locationName: string) { + return this.pathTemplates.locationPathTemplate.match(locationName).location; + } + + /** + * Return a fully-qualified project resource name string. + * + * @param {string} project + * @returns {string} Resource name string. + */ + projectPath(project:string) { + return this.pathTemplates.projectPathTemplate.render({ + project: project, + }); + } + + /** + * Parse the project from Project resource. + * + * @param {string} projectName + * A fully-qualified path representing Project resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectName(projectName: string) { + return this.pathTemplates.projectPathTemplate.match(projectName).project; + } + + /** + * Return a fully-qualified projectLocationAutoscalingPolicy resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} autoscaling_policy + * @returns {string} Resource name string. + */ + projectLocationAutoscalingPolicyPath(project:string,location:string,autoscalingPolicy:string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render({ + project: project, + location: location, + autoscaling_policy: autoscalingPolicy, + }); + } + + /** + * Parse the project from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the project. 
+ */ + matchProjectFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).project; + } + + /** + * Parse the location from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the location. + */ + matchLocationFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).location; + } + + /** + * Parse the autoscaling_policy from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the autoscaling_policy. + */ + matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).autoscaling_policy; + } + + /** + * Return a fully-qualified projectLocationWorkflowTemplate resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow_template + * @returns {string} Resource name string. + */ + projectLocationWorkflowTemplatePath(project:string,location:string,workflowTemplate:string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render({ + project: project, + location: location, + workflow_template: workflowTemplate, + }); + } + + /** + * Parse the project from ProjectLocationWorkflowTemplate resource. 
+ * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).project; + } + + /** + * Parse the location from ProjectLocationWorkflowTemplate resource. + * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the location. + */ + matchLocationFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).location; + } + + /** + * Parse the workflow_template from ProjectLocationWorkflowTemplate resource. + * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the workflow_template. + */ + matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).workflow_template; + } + + /** + * Return a fully-qualified projectRegionAutoscalingPolicy resource name string. + * + * @param {string} project + * @param {string} region + * @param {string} autoscaling_policy + * @returns {string} Resource name string. 
+ */ + projectRegionAutoscalingPolicyPath(project:string,region:string,autoscalingPolicy:string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render({ + project: project, + region: region, + autoscaling_policy: autoscalingPolicy, + }); + } + + /** + * Parse the project from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).project; + } + + /** + * Parse the region from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the region. + */ + matchRegionFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).region; + } + + /** + * Parse the autoscaling_policy from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the autoscaling_policy. + */ + matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).autoscaling_policy; + } + + /** + * Return a fully-qualified projectRegionWorkflowTemplate resource name string. 
+ * + * @param {string} project + * @param {string} region + * @param {string} workflow_template + * @returns {string} Resource name string. + */ + projectRegionWorkflowTemplatePath(project:string,region:string,workflowTemplate:string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render({ + project: project, + region: region, + workflow_template: workflowTemplate, + }); + } + + /** + * Parse the project from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).project; + } + + /** + * Parse the region from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. + * @returns {string} A string representing the region. + */ + matchRegionFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).region; + } + + /** + * Parse the workflow_template from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. + * @returns {string} A string representing the workflow_template. + */ + matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).workflow_template; + } + + /** + * Terminate the gRPC channel and close the client. 
+ * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. + */ + close(): Promise { + if (this.batchControllerStub && !this._terminated) { + return this.batchControllerStub.then(stub => { + this._terminated = true; + stub.close(); + this.operationsClient.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v1/src/v1/batch_controller_client_config.json b/owl-bot-staging/v1/src/v1/batch_controller_client_config.json new file mode 100644 index 00000000..a451087c --- /dev/null +++ b/owl-bot-staging/v1/src/v1/batch_controller_client_config.json @@ -0,0 +1,42 @@ +{ + "interfaces": { + "google.cloud.dataproc.v1.BatchController": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateBatch": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "GetBatch": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "ListBatches": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + }, + "DeleteBatch": { + "retry_codes_name": "non_idempotent", + "retry_params_name": "default" + } + } + } + } +} diff --git a/owl-bot-staging/v1/src/v1/batch_controller_proto_list.json b/owl-bot-staging/v1/src/v1/batch_controller_proto_list.json new file mode 100644 index 00000000..b26a9be7 --- /dev/null +++ b/owl-bot-staging/v1/src/v1/batch_controller_proto_list.json @@ -0,0 +1,9 @@ +[ + "../../protos/google/cloud/dataproc/v1/autoscaling_policies.proto", + "../../protos/google/cloud/dataproc/v1/batches.proto", + 
"../../protos/google/cloud/dataproc/v1/clusters.proto", + "../../protos/google/cloud/dataproc/v1/jobs.proto", + "../../protos/google/cloud/dataproc/v1/operations.proto", + "../../protos/google/cloud/dataproc/v1/shared.proto", + "../../protos/google/cloud/dataproc/v1/workflow_templates.proto" +] diff --git a/owl-bot-staging/v1/src/v1/cluster_controller_client.ts b/owl-bot-staging/v1/src/v1/cluster_controller_client.ts new file mode 100644 index 00000000..fce6fc31 --- /dev/null +++ b/owl-bot-staging/v1/src/v1/cluster_controller_client.ts @@ -0,0 +1,1755 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +/* global window */ +import * as gax from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions, LROperation, PaginationCallback, GaxCall} from 'google-gax'; + +import { Transform } from 'stream'; +import { RequestType } from 'google-gax/build/src/apitypes'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v1/cluster_controller_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. 
+ */ +import * as gapicConfig from './cluster_controller_client_config.json'; +import { operationsProtos } from 'google-gax'; +const version = require('../../../package.json').version; + +/** + * The ClusterControllerService provides methods to manage clusters + * of Compute Engine instances. + * @class + * @memberof v1 + */ +export class ClusterControllerClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + operationsClient: gax.OperationsClient; + clusterControllerStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of ClusterControllerClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. 
+ * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP fallback mode. + * In fallback mode, a special browser-compatible transport implementation is used + * instead of gRPC transport. In browser context (if the `window` object is defined) + * the fallback mode is enabled automatically; set `options.fallback` to `false` + * if you need to override this behavior. + */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. + const staticMembers = this.constructor as typeof ClusterControllerClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
+ if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. + const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. 
+ this.pathTemplates = { + batchPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/batches/{batch}' + ), + projectLocationAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}' + ), + projectLocationWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflowTemplates/{workflow_template}' + ), + projectRegionAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}' + ), + projectRegionWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/regions/{region}/workflowTemplates/{workflow_template}' + ), + servicePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/services/{service}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. + this.descriptors.page = { + listClusters: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'clusters') + }; + + const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos); + + // This API contains "long-running operations", which return a + // an Operation object that allows for tracking of the operation, + // rather than holding a request open. + + this.operationsClient = this._gaxModule.lro({ + auth: this.auth, + grpc: 'grpc' in this._gaxGrpc ? 
this._gaxGrpc.grpc : undefined + }).operationsClient(opts); + const createClusterResponse = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.Cluster') as gax.protobuf.Type; + const createClusterMetadata = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.ClusterOperationMetadata') as gax.protobuf.Type; + const updateClusterResponse = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.Cluster') as gax.protobuf.Type; + const updateClusterMetadata = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.ClusterOperationMetadata') as gax.protobuf.Type; + const stopClusterResponse = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.Cluster') as gax.protobuf.Type; + const stopClusterMetadata = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.ClusterOperationMetadata') as gax.protobuf.Type; + const startClusterResponse = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.Cluster') as gax.protobuf.Type; + const startClusterMetadata = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.ClusterOperationMetadata') as gax.protobuf.Type; + const deleteClusterResponse = protoFilesRoot.lookup( + '.google.protobuf.Empty') as gax.protobuf.Type; + const deleteClusterMetadata = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.ClusterOperationMetadata') as gax.protobuf.Type; + const diagnoseClusterResponse = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.DiagnoseClusterResults') as gax.protobuf.Type; + const diagnoseClusterMetadata = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.ClusterOperationMetadata') as gax.protobuf.Type; + + this.descriptors.longrunning = { + createCluster: new this._gaxModule.LongrunningDescriptor( + this.operationsClient, + createClusterResponse.decode.bind(createClusterResponse), + createClusterMetadata.decode.bind(createClusterMetadata)), + updateCluster: new this._gaxModule.LongrunningDescriptor( + this.operationsClient, + updateClusterResponse.decode.bind(updateClusterResponse), + 
updateClusterMetadata.decode.bind(updateClusterMetadata)), + stopCluster: new this._gaxModule.LongrunningDescriptor( + this.operationsClient, + stopClusterResponse.decode.bind(stopClusterResponse), + stopClusterMetadata.decode.bind(stopClusterMetadata)), + startCluster: new this._gaxModule.LongrunningDescriptor( + this.operationsClient, + startClusterResponse.decode.bind(startClusterResponse), + startClusterMetadata.decode.bind(startClusterMetadata)), + deleteCluster: new this._gaxModule.LongrunningDescriptor( + this.operationsClient, + deleteClusterResponse.decode.bind(deleteClusterResponse), + deleteClusterMetadata.decode.bind(deleteClusterMetadata)), + diagnoseCluster: new this._gaxModule.LongrunningDescriptor( + this.operationsClient, + diagnoseClusterResponse.decode.bind(diagnoseClusterResponse), + diagnoseClusterMetadata.decode.bind(diagnoseClusterMetadata)) + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.dataproc.v1.ClusterController', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. 
+ */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.clusterControllerStub) { + return this.clusterControllerStub; + } + + // Put together the "service stub" for + // google.cloud.dataproc.v1.ClusterController. + this.clusterControllerStub = this._gaxGrpc.createStub( + this._opts.fallback ? + (this._protos as protobuf.Root).lookupService('google.cloud.dataproc.v1.ClusterController') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.dataproc.v1.ClusterController, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. + const clusterControllerStubMethods = + ['createCluster', 'updateCluster', 'stopCluster', 'startCluster', 'deleteCluster', 'getCluster', 'listClusters', 'diagnoseCluster']; + for (const methodName of clusterControllerStubMethods) { + const callPromise = this.clusterControllerStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + this.descriptors.longrunning[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.clusterControllerStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'dataproc.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. 
+ */ + static get apiEndpoint() { + return 'dataproc.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. + */ + getProjectId(callback?: Callback): + Promise|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Gets the resource representation for a cluster in a project. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the cluster + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {string} request.clusterName + * Required. The cluster name. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Cluster]{@link google.cloud.dataproc.v1.Cluster}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/cluster_controller.get_cluster.js + * region_tag:dataproc_v1_generated_ClusterController_GetCluster_async + */ + getCluster( + request?: protos.google.cloud.dataproc.v1.IGetClusterRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.ICluster, + protos.google.cloud.dataproc.v1.IGetClusterRequest|undefined, {}|undefined + ]>; + getCluster( + request: protos.google.cloud.dataproc.v1.IGetClusterRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.dataproc.v1.ICluster, + protos.google.cloud.dataproc.v1.IGetClusterRequest|null|undefined, + {}|null|undefined>): void; + getCluster( + request: protos.google.cloud.dataproc.v1.IGetClusterRequest, + callback: Callback< + protos.google.cloud.dataproc.v1.ICluster, + protos.google.cloud.dataproc.v1.IGetClusterRequest|null|undefined, + {}|null|undefined>): void; + getCluster( + request?: protos.google.cloud.dataproc.v1.IGetClusterRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.dataproc.v1.ICluster, + protos.google.cloud.dataproc.v1.IGetClusterRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.dataproc.v1.ICluster, + protos.google.cloud.dataproc.v1.IGetClusterRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.dataproc.v1.ICluster, + protos.google.cloud.dataproc.v1.IGetClusterRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': 
request.region || '', + 'cluster_name': request.clusterName || '', + }); + this.initialize(); + return this.innerApiCalls.getCluster(request, options, callback); + } + +/** + * Creates a cluster in a project. The returned + * {@link google.longrunning.Operation.metadata|Operation.metadata} will be + * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the cluster + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {google.cloud.dataproc.v1.Cluster} request.cluster + * Required. The cluster to create. + * @param {string} [request.requestId] + * Optional. A unique ID used to identify the request. If the server receives two + * [CreateClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateClusterRequest)s + * with the same id, then the second request will be ignored and the + * first {@link google.longrunning.Operation|google.longrunning.Operation} created and stored in the backend + * is returned. + * + * It is recommended to always set this value to a + * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + * + * The ID must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. + * @param {google.cloud.dataproc.v1.FailureAction} [request.actionOnFailedPrimaryWorkers] + * Optional. Failure action when primary worker creation fails. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. 
+ * The first element of the array is an object representing + * a long running operation. Its `promise()` method returns a promise + * you can `await` for. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/cluster_controller.create_cluster.js + * region_tag:dataproc_v1_generated_ClusterController_CreateCluster_async + */ + createCluster( + request?: protos.google.cloud.dataproc.v1.ICreateClusterRequest, + options?: CallOptions): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>; + createCluster( + request: protos.google.cloud.dataproc.v1.ICreateClusterRequest, + options: CallOptions, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + createCluster( + request: protos.google.cloud.dataproc.v1.ICreateClusterRequest, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + createCluster( + request?: protos.google.cloud.dataproc.v1.ICreateClusterRequest, + optionsOrCallback?: CallOptions|Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>, + callback?: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + 
] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + }); + this.initialize(); + return this.innerApiCalls.createCluster(request, options, callback); + } +/** + * Check the status of the long running operation returned by `createCluster()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/cluster_controller.create_cluster.js + * region_tag:dataproc_v1_generated_ClusterController_CreateCluster_async + */ + async checkCreateClusterProgress(name: string): Promise>{ + const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.createCluster, gax.createDefaultBackoffSettings()); + return decodeOperation as LROperation; + } +/** + * Updates a cluster in a project. The returned + * {@link google.longrunning.Operation.metadata|Operation.metadata} will be + * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + * The cluster must be in a {@link google.cloud.dataproc.v1.ClusterStatus.State|`RUNNING`} state or an error + * is returned. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project the + * cluster belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. 
+ * @param {string} request.clusterName + * Required. The cluster name. + * @param {google.cloud.dataproc.v1.Cluster} request.cluster + * Required. The changes to the cluster. + * @param {google.protobuf.Duration} [request.gracefulDecommissionTimeout] + * Optional. Timeout for graceful YARN decomissioning. Graceful + * decommissioning allows removing nodes from the cluster without + * interrupting jobs in progress. Timeout specifies how long to wait for jobs + * in progress to finish before forcefully removing nodes (and potentially + * interrupting jobs). Default timeout is 0 (for forceful decommission), and + * the maximum allowed timeout is 1 day. (see JSON representation of + * [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). + * + * Only supported on Dataproc image versions 1.2 and higher. + * @param {google.protobuf.FieldMask} request.updateMask + * Required. Specifies the path, relative to `Cluster`, of + * the field to update. For example, to change the number of workers + * in a cluster to 5, the `update_mask` parameter would be + * specified as `config.worker_config.num_instances`, + * and the `PATCH` request body would specify the new value, as follows: + * + * { + * "config":{ + * "workerConfig":{ + * "numInstances":"5" + * } + * } + * } + * Similarly, to change the number of preemptible workers in a cluster to 5, + * the `update_mask` parameter would be + * `config.secondary_worker_config.num_instances`, and the `PATCH` request + * body would be set as follows: + * + * { + * "config":{ + * "secondaryWorkerConfig":{ + * "numInstances":"5" + * } + * } + * } + * Note: Currently, only the following fields can be updated: + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
+ *      <table>
+ *      <tbody>
+ *      <tr>
+ *      <td><strong>Mask</strong></td>
+ *      <td><strong>Purpose</strong></td>
+ *      </tr>
+ *      <tr>
+ *      <td>labels</td>
+ *      <td>Update labels</td>
+ *      </tr>
+ *      <tr>
+ *      <td>config.worker_config.num_instances</td>
+ *      <td>Resize primary worker group</td>
+ *      </tr>
+ *      <tr>
+ *      <td>config.secondary_worker_config.num_instances</td>
+ *      <td>Resize secondary worker group</td>
+ *      </tr>
+ *      <tr>
+ *      <td>config.autoscaling_config.policy_uri</td>
+ *      <td>Use, stop using, or
+ *      change autoscaling policies</td>
+ *      </tr>
+ *      </tbody>
+ *      </table>
+ * @param {string} [request.requestId] + * Optional. A unique ID used to identify the request. If the server + * receives two + * [UpdateClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.UpdateClusterRequest)s + * with the same id, then the second request will be ignored and the + * first {@link google.longrunning.Operation|google.longrunning.Operation} created and stored in the + * backend is returned. + * + * It is recommended to always set this value to a + * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + * + * The ID must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing + * a long running operation. Its `promise()` method returns a promise + * you can `await` for. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. 
+ * @example include:samples/generated/v1/cluster_controller.update_cluster.js + * region_tag:dataproc_v1_generated_ClusterController_UpdateCluster_async + */ + updateCluster( + request?: protos.google.cloud.dataproc.v1.IUpdateClusterRequest, + options?: CallOptions): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>; + updateCluster( + request: protos.google.cloud.dataproc.v1.IUpdateClusterRequest, + options: CallOptions, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + updateCluster( + request: protos.google.cloud.dataproc.v1.IUpdateClusterRequest, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + updateCluster( + request?: protos.google.cloud.dataproc.v1.IUpdateClusterRequest, + optionsOrCallback?: CallOptions|Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>, + callback?: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + 'cluster_name': request.clusterName || '', + }); + this.initialize(); + return this.innerApiCalls.updateCluster(request, options, callback); + } +/** + * Check the status of the long running operation returned by 
`updateCluster()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/cluster_controller.update_cluster.js + * region_tag:dataproc_v1_generated_ClusterController_UpdateCluster_async + */ + async checkUpdateClusterProgress(name: string): Promise>{ + const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.updateCluster, gax.createDefaultBackoffSettings()); + return decodeOperation as LROperation; + } +/** + * Stops a cluster in a project. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project the + * cluster belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {string} request.clusterName + * Required. The cluster name. + * @param {string} [request.clusterUuid] + * Optional. Specifying the `cluster_uuid` means the RPC will fail + * (with error NOT_FOUND) if a cluster with the specified UUID does not exist. + * @param {string} [request.requestId] + * Optional. A unique ID used to identify the request. 
If the server + * receives two + * [StopClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StopClusterRequest)s + * with the same id, then the second request will be ignored and the + * first {@link google.longrunning.Operation|google.longrunning.Operation} created and stored in the + * backend is returned. + * + * Recommendation: Set this value to a + * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + * + * The ID must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing + * a long running operation. Its `promise()` method returns a promise + * you can `await` for. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. 
+ * @example include:samples/generated/v1/cluster_controller.stop_cluster.js + * region_tag:dataproc_v1_generated_ClusterController_StopCluster_async + */ + stopCluster( + request?: protos.google.cloud.dataproc.v1.IStopClusterRequest, + options?: CallOptions): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>; + stopCluster( + request: protos.google.cloud.dataproc.v1.IStopClusterRequest, + options: CallOptions, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + stopCluster( + request: protos.google.cloud.dataproc.v1.IStopClusterRequest, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + stopCluster( + request?: protos.google.cloud.dataproc.v1.IStopClusterRequest, + optionsOrCallback?: CallOptions|Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>, + callback?: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + 'cluster_name': request.clusterName || '', + }); + this.initialize(); + return this.innerApiCalls.stopCluster(request, options, callback); + } +/** + * Check the status of the long running operation returned by `stopCluster()`. 
+ * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/cluster_controller.stop_cluster.js + * region_tag:dataproc_v1_generated_ClusterController_StopCluster_async + */ + async checkStopClusterProgress(name: string): Promise>{ + const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.stopCluster, gax.createDefaultBackoffSettings()); + return decodeOperation as LROperation; + } +/** + * Starts a cluster in a project. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project the + * cluster belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {string} request.clusterName + * Required. The cluster name. + * @param {string} [request.clusterUuid] + * Optional. Specifying the `cluster_uuid` means the RPC will fail + * (with error NOT_FOUND) if a cluster with the specified UUID does not exist. + * @param {string} [request.requestId] + * Optional. A unique ID used to identify the request. 
If the server + * receives two + * [StartClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StartClusterRequest)s + * with the same id, then the second request will be ignored and the + * first {@link google.longrunning.Operation|google.longrunning.Operation} created and stored in the + * backend is returned. + * + * Recommendation: Set this value to a + * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + * + * The ID must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing + * a long running operation. Its `promise()` method returns a promise + * you can `await` for. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. 
+ * @example include:samples/generated/v1/cluster_controller.start_cluster.js + * region_tag:dataproc_v1_generated_ClusterController_StartCluster_async + */ + startCluster( + request?: protos.google.cloud.dataproc.v1.IStartClusterRequest, + options?: CallOptions): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>; + startCluster( + request: protos.google.cloud.dataproc.v1.IStartClusterRequest, + options: CallOptions, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + startCluster( + request: protos.google.cloud.dataproc.v1.IStartClusterRequest, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + startCluster( + request?: protos.google.cloud.dataproc.v1.IStartClusterRequest, + optionsOrCallback?: CallOptions|Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>, + callback?: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + 'cluster_name': request.clusterName || '', + }); + this.initialize(); + return this.innerApiCalls.startCluster(request, options, callback); + } +/** + * Check the status of the long running operation returned by 
`startCluster()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/cluster_controller.start_cluster.js + * region_tag:dataproc_v1_generated_ClusterController_StartCluster_async + */ + async checkStartClusterProgress(name: string): Promise>{ + const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.startCluster, gax.createDefaultBackoffSettings()); + return decodeOperation as LROperation; + } +/** + * Deletes a cluster in a project. The returned + * {@link google.longrunning.Operation.metadata|Operation.metadata} will be + * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the cluster + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {string} request.clusterName + * Required. The cluster name. + * @param {string} [request.clusterUuid] + * Optional. Specifying the `cluster_uuid` means the RPC should fail + * (with error NOT_FOUND) if cluster with specified UUID does not exist. + * @param {string} [request.requestId] + * Optional. A unique ID used to identify the request. 
If the server + * receives two + * [DeleteClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.DeleteClusterRequest)s + * with the same id, then the second request will be ignored and the + * first {@link google.longrunning.Operation|google.longrunning.Operation} created and stored in the + * backend is returned. + * + * It is recommended to always set this value to a + * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + * + * The ID must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing + * a long running operation. Its `promise()` method returns a promise + * you can `await` for. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. 
+ * @example include:samples/generated/v1/cluster_controller.delete_cluster.js + * region_tag:dataproc_v1_generated_ClusterController_DeleteCluster_async + */ + deleteCluster( + request?: protos.google.cloud.dataproc.v1.IDeleteClusterRequest, + options?: CallOptions): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>; + deleteCluster( + request: protos.google.cloud.dataproc.v1.IDeleteClusterRequest, + options: CallOptions, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + deleteCluster( + request: protos.google.cloud.dataproc.v1.IDeleteClusterRequest, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + deleteCluster( + request?: protos.google.cloud.dataproc.v1.IDeleteClusterRequest, + optionsOrCallback?: CallOptions|Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>, + callback?: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + 'cluster_name': request.clusterName || '', + }); + this.initialize(); + return this.innerApiCalls.deleteCluster(request, options, callback); + } +/** + * Check the status of the long running operation returned by 
`deleteCluster()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/cluster_controller.delete_cluster.js + * region_tag:dataproc_v1_generated_ClusterController_DeleteCluster_async + */ + async checkDeleteClusterProgress(name: string): Promise>{ + const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.deleteCluster, gax.createDefaultBackoffSettings()); + return decodeOperation as LROperation; + } +/** + * Gets cluster diagnostic information. The returned + * {@link google.longrunning.Operation.metadata|Operation.metadata} will be + * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). + * After the operation completes, + * {@link google.longrunning.Operation.response|Operation.response} + * contains + * [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults). + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the cluster + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {string} request.clusterName + * Required. The cluster name. + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing + * a long running operation. Its `promise()` method returns a promise + * you can `await` for. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/cluster_controller.diagnose_cluster.js + * region_tag:dataproc_v1_generated_ClusterController_DiagnoseCluster_async + */ + diagnoseCluster( + request?: protos.google.cloud.dataproc.v1.IDiagnoseClusterRequest, + options?: CallOptions): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>; + diagnoseCluster( + request: protos.google.cloud.dataproc.v1.IDiagnoseClusterRequest, + options: CallOptions, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + diagnoseCluster( + request: protos.google.cloud.dataproc.v1.IDiagnoseClusterRequest, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + diagnoseCluster( + request?: protos.google.cloud.dataproc.v1.IDiagnoseClusterRequest, + optionsOrCallback?: CallOptions|Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>, + callback?: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as 
CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + 'cluster_name': request.clusterName || '', + }); + this.initialize(); + return this.innerApiCalls.diagnoseCluster(request, options, callback); + } +/** + * Check the status of the long running operation returned by `diagnoseCluster()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/cluster_controller.diagnose_cluster.js + * region_tag:dataproc_v1_generated_ClusterController_DiagnoseCluster_async + */ + async checkDiagnoseClusterProgress(name: string): Promise>{ + const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.diagnoseCluster, gax.createDefaultBackoffSettings()); + return decodeOperation as LROperation; + } + /** + * Lists all regions/{region}/clusters in a project alphabetically. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the cluster + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {string} [request.filter] + * Optional. A filter constraining the clusters to list. 
Filters are + * case-sensitive and have the following syntax: + * + * field = value [AND [field = value]] ... + * + * where **field** is one of `status.state`, `clusterName`, or `labels.[KEY]`, + * and `[KEY]` is a label key. **value** can be `*` to match all values. + * `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, + * `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` + * contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` + * contains the `DELETING` and `ERROR` states. + * `clusterName` is the name of the cluster provided at creation time. + * Only the logical `AND` operator is supported; space-separated items are + * treated as having an implicit `AND` operator. + * + * Example filter: + * + * status.state = ACTIVE AND clusterName = mycluster + * AND labels.env = staging AND labels.starred = * + * @param {number} [request.pageSize] + * Optional. The standard List page size. + * @param {string} [request.pageToken] + * Optional. The standard List page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [Cluster]{@link google.cloud.dataproc.v1.Cluster}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listClustersAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. 
+ */ + listClusters( + request?: protos.google.cloud.dataproc.v1.IListClustersRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.ICluster[], + protos.google.cloud.dataproc.v1.IListClustersRequest|null, + protos.google.cloud.dataproc.v1.IListClustersResponse + ]>; + listClusters( + request: protos.google.cloud.dataproc.v1.IListClustersRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.dataproc.v1.IListClustersRequest, + protos.google.cloud.dataproc.v1.IListClustersResponse|null|undefined, + protos.google.cloud.dataproc.v1.ICluster>): void; + listClusters( + request: protos.google.cloud.dataproc.v1.IListClustersRequest, + callback: PaginationCallback< + protos.google.cloud.dataproc.v1.IListClustersRequest, + protos.google.cloud.dataproc.v1.IListClustersResponse|null|undefined, + protos.google.cloud.dataproc.v1.ICluster>): void; + listClusters( + request?: protos.google.cloud.dataproc.v1.IListClustersRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.dataproc.v1.IListClustersRequest, + protos.google.cloud.dataproc.v1.IListClustersResponse|null|undefined, + protos.google.cloud.dataproc.v1.ICluster>, + callback?: PaginationCallback< + protos.google.cloud.dataproc.v1.IListClustersRequest, + protos.google.cloud.dataproc.v1.IListClustersResponse|null|undefined, + protos.google.cloud.dataproc.v1.ICluster>): + Promise<[ + protos.google.cloud.dataproc.v1.ICluster[], + protos.google.cloud.dataproc.v1.IListClustersRequest|null, + protos.google.cloud.dataproc.v1.IListClustersResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + 
options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + }); + this.initialize(); + return this.innerApiCalls.listClusters(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the cluster + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {string} [request.filter] + * Optional. A filter constraining the clusters to list. Filters are + * case-sensitive and have the following syntax: + * + * field = value [AND [field = value]] ... + * + * where **field** is one of `status.state`, `clusterName`, or `labels.[KEY]`, + * and `[KEY]` is a label key. **value** can be `*` to match all values. + * `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, + * `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` + * contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` + * contains the `DELETING` and `ERROR` states. + * `clusterName` is the name of the cluster provided at creation time. + * Only the logical `AND` operator is supported; space-separated items are + * treated as having an implicit `AND` operator. + * + * Example filter: + * + * status.state = ACTIVE AND clusterName = mycluster + * AND labels.env = staging AND labels.starred = * + * @param {number} [request.pageSize] + * Optional. The standard List page size. + * @param {string} [request.pageToken] + * Optional. The standard List page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
+ * @returns {Stream} + * An object stream which emits an object representing [Cluster]{@link google.cloud.dataproc.v1.Cluster} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listClustersAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listClustersStream( + request?: protos.google.cloud.dataproc.v1.IListClustersRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + }); + const defaultCallSettings = this._defaults['listClusters']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listClusters.createStream( + this.innerApiCalls.listClusters as gax.GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listClusters`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the cluster + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {string} [request.filter] + * Optional. A filter constraining the clusters to list. 
Filters are + * case-sensitive and have the following syntax: + * + * field = value [AND [field = value]] ... + * + * where **field** is one of `status.state`, `clusterName`, or `labels.[KEY]`, + * and `[KEY]` is a label key. **value** can be `*` to match all values. + * `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, + * `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` + * contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` + * contains the `DELETING` and `ERROR` states. + * `clusterName` is the name of the cluster provided at creation time. + * Only the logical `AND` operator is supported; space-separated items are + * treated as having an implicit `AND` operator. + * + * Example filter: + * + * status.state = ACTIVE AND clusterName = mycluster + * AND labels.env = staging AND labels.starred = * + * @param {number} [request.pageSize] + * Optional. The standard List page size. + * @param {string} [request.pageToken] + * Optional. The standard List page token. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [Cluster]{@link google.cloud.dataproc.v1.Cluster}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. 
+ * @example include:samples/generated/v1/cluster_controller.list_clusters.js + * region_tag:dataproc_v1_generated_ClusterController_ListClusters_async + */ + listClustersAsync( + request?: protos.google.cloud.dataproc.v1.IListClustersRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + }); + const defaultCallSettings = this._defaults['listClusters']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listClusters.asyncIterate( + this.innerApiCalls['listClusters'] as GaxCall, + request as unknown as RequestType, + callSettings + ) as AsyncIterable; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified batch resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} batch + * @returns {string} Resource name string. + */ + batchPath(project:string,location:string,batch:string) { + return this.pathTemplates.batchPathTemplate.render({ + project: project, + location: location, + batch: batch, + }); + } + + /** + * Parse the project from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the project. + */ + matchProjectFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).project; + } + + /** + * Parse the location from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the location. 
+ */ + matchLocationFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).location; + } + + /** + * Parse the batch from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the batch. + */ + matchBatchFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).batch; + } + + /** + * Return a fully-qualified projectLocationAutoscalingPolicy resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} autoscaling_policy + * @returns {string} Resource name string. + */ + projectLocationAutoscalingPolicyPath(project:string,location:string,autoscalingPolicy:string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render({ + project: project, + location: location, + autoscaling_policy: autoscalingPolicy, + }); + } + + /** + * Parse the project from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).project; + } + + /** + * Parse the location from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the location. 
+ */ + matchLocationFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).location; + } + + /** + * Parse the autoscaling_policy from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the autoscaling_policy. + */ + matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).autoscaling_policy; + } + + /** + * Return a fully-qualified projectLocationWorkflowTemplate resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow_template + * @returns {string} Resource name string. + */ + projectLocationWorkflowTemplatePath(project:string,location:string,workflowTemplate:string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render({ + project: project, + location: location, + workflow_template: workflowTemplate, + }); + } + + /** + * Parse the project from ProjectLocationWorkflowTemplate resource. + * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).project; + } + + /** + * Parse the location from ProjectLocationWorkflowTemplate resource. 
+ * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the location. + */ + matchLocationFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).location; + } + + /** + * Parse the workflow_template from ProjectLocationWorkflowTemplate resource. + * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the workflow_template. + */ + matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).workflow_template; + } + + /** + * Return a fully-qualified projectRegionAutoscalingPolicy resource name string. + * + * @param {string} project + * @param {string} region + * @param {string} autoscaling_policy + * @returns {string} Resource name string. + */ + projectRegionAutoscalingPolicyPath(project:string,region:string,autoscalingPolicy:string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render({ + project: project, + region: region, + autoscaling_policy: autoscalingPolicy, + }); + } + + /** + * Parse the project from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the project. 
+ */ + matchProjectFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).project; + } + + /** + * Parse the region from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the region. + */ + matchRegionFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).region; + } + + /** + * Parse the autoscaling_policy from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the autoscaling_policy. + */ + matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).autoscaling_policy; + } + + /** + * Return a fully-qualified projectRegionWorkflowTemplate resource name string. + * + * @param {string} project + * @param {string} region + * @param {string} workflow_template + * @returns {string} Resource name string. + */ + projectRegionWorkflowTemplatePath(project:string,region:string,workflowTemplate:string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render({ + project: project, + region: region, + workflow_template: workflowTemplate, + }); + } + + /** + * Parse the project from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. 
+ * @returns {string} A string representing the project. + */ + matchProjectFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).project; + } + + /** + * Parse the region from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. + * @returns {string} A string representing the region. + */ + matchRegionFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).region; + } + + /** + * Parse the workflow_template from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. + * @returns {string} A string representing the workflow_template. + */ + matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).workflow_template; + } + + /** + * Return a fully-qualified service resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} service + * @returns {string} Resource name string. + */ + servicePath(project:string,location:string,service:string) { + return this.pathTemplates.servicePathTemplate.render({ + project: project, + location: location, + service: service, + }); + } + + /** + * Parse the project from Service resource. + * + * @param {string} serviceName + * A fully-qualified path representing Service resource. + * @returns {string} A string representing the project. 
+ */ + matchProjectFromServiceName(serviceName: string) { + return this.pathTemplates.servicePathTemplate.match(serviceName).project; + } + + /** + * Parse the location from Service resource. + * + * @param {string} serviceName + * A fully-qualified path representing Service resource. + * @returns {string} A string representing the location. + */ + matchLocationFromServiceName(serviceName: string) { + return this.pathTemplates.servicePathTemplate.match(serviceName).location; + } + + /** + * Parse the service from Service resource. + * + * @param {string} serviceName + * A fully-qualified path representing Service resource. + * @returns {string} A string representing the service. + */ + matchServiceFromServiceName(serviceName: string) { + return this.pathTemplates.servicePathTemplate.match(serviceName).service; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
+ */
+ close(): Promise<void> {
+ if (this.clusterControllerStub && !this._terminated) {
+ return this.clusterControllerStub.then(stub => {
+ this._terminated = true;
+ stub.close();
+ this.operationsClient.close();
+ });
+ }
+ return Promise.resolve();
+ }
+}
diff --git a/owl-bot-staging/v1/src/v1/cluster_controller_client_config.json b/owl-bot-staging/v1/src/v1/cluster_controller_client_config.json
new file mode 100644
index 00000000..6f5f5f3d
--- /dev/null
+++ b/owl-bot-staging/v1/src/v1/cluster_controller_client_config.json
@@ -0,0 +1,72 @@
+{
+ "interfaces": {
+ "google.cloud.dataproc.v1.ClusterController": {
+ "retry_codes": {
+ "non_idempotent": [],
+ "idempotent": [
+ "DEADLINE_EXCEEDED",
+ "UNAVAILABLE"
+ ],
+ "unavailable": [
+ "UNAVAILABLE"
+ ],
+ "deadline_exceeded_internal_unavailable": [
+ "DEADLINE_EXCEEDED",
+ "INTERNAL",
+ "UNAVAILABLE"
+ ]
+ },
+ "retry_params": {
+ "default": {
+ "initial_retry_delay_millis": 100,
+ "retry_delay_multiplier": 1.3,
+ "max_retry_delay_millis": 60000,
+ "initial_rpc_timeout_millis": 60000,
+ "rpc_timeout_multiplier": 1,
+ "max_rpc_timeout_millis": 60000,
+ "total_timeout_millis": 600000
+ }
+ },
+ "methods": {
+ "CreateCluster": {
+ "timeout_millis": 300000,
+ "retry_codes_name": "unavailable",
+ "retry_params_name": "default"
+ },
+ "UpdateCluster": {
+ "timeout_millis": 300000,
+ "retry_codes_name": "unavailable",
+ "retry_params_name": "default"
+ },
+ "StopCluster": {
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default"
+ },
+ "StartCluster": {
+ "retry_codes_name": "non_idempotent",
+ "retry_params_name": "default"
+ },
+ "DeleteCluster": {
+ "timeout_millis": 300000,
+ "retry_codes_name": "unavailable",
+ "retry_params_name": "default"
+ },
+ "GetCluster": {
+ "timeout_millis": 300000,
+ "retry_codes_name": "deadline_exceeded_internal_unavailable",
+ "retry_params_name": "default"
+ },
+ "ListClusters": {
+ "timeout_millis": 300000,
+ "retry_codes_name": "deadline_exceeded_internal_unavailable",
"retry_params_name": "default" + }, + "DiagnoseCluster": { + "timeout_millis": 300000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + } + } + } + } +} diff --git a/owl-bot-staging/v1/src/v1/cluster_controller_proto_list.json b/owl-bot-staging/v1/src/v1/cluster_controller_proto_list.json new file mode 100644 index 00000000..b26a9be7 --- /dev/null +++ b/owl-bot-staging/v1/src/v1/cluster_controller_proto_list.json @@ -0,0 +1,9 @@ +[ + "../../protos/google/cloud/dataproc/v1/autoscaling_policies.proto", + "../../protos/google/cloud/dataproc/v1/batches.proto", + "../../protos/google/cloud/dataproc/v1/clusters.proto", + "../../protos/google/cloud/dataproc/v1/jobs.proto", + "../../protos/google/cloud/dataproc/v1/operations.proto", + "../../protos/google/cloud/dataproc/v1/shared.proto", + "../../protos/google/cloud/dataproc/v1/workflow_templates.proto" +] diff --git a/owl-bot-staging/v1/src/v1/gapic_metadata.json b/owl-bot-staging/v1/src/v1/gapic_metadata.json new file mode 100644 index 00000000..6f5c9ee3 --- /dev/null +++ b/owl-bot-staging/v1/src/v1/gapic_metadata.json @@ -0,0 +1,409 @@ +{ + "schema": "1.0", + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "typescript", + "protoPackage": "google.cloud.dataproc.v1", + "libraryPackage": "@google-cloud/dataproc", + "services": { + "AutoscalingPolicyService": { + "clients": { + "grpc": { + "libraryClient": "AutoscalingPolicyServiceClient", + "rpcs": { + "CreateAutoscalingPolicy": { + "methods": [ + "createAutoscalingPolicy" + ] + }, + "UpdateAutoscalingPolicy": { + "methods": [ + "updateAutoscalingPolicy" + ] + }, + "GetAutoscalingPolicy": { + "methods": [ + "getAutoscalingPolicy" + ] + }, + "DeleteAutoscalingPolicy": { + "methods": [ + "deleteAutoscalingPolicy" + ] + }, + "ListAutoscalingPolicies": { + "methods": [ + "listAutoscalingPolicies", + "listAutoscalingPoliciesStream", + "listAutoscalingPoliciesAsync" + ] + } + } + }, + 
"grpc-fallback": { + "libraryClient": "AutoscalingPolicyServiceClient", + "rpcs": { + "CreateAutoscalingPolicy": { + "methods": [ + "createAutoscalingPolicy" + ] + }, + "UpdateAutoscalingPolicy": { + "methods": [ + "updateAutoscalingPolicy" + ] + }, + "GetAutoscalingPolicy": { + "methods": [ + "getAutoscalingPolicy" + ] + }, + "DeleteAutoscalingPolicy": { + "methods": [ + "deleteAutoscalingPolicy" + ] + }, + "ListAutoscalingPolicies": { + "methods": [ + "listAutoscalingPolicies", + "listAutoscalingPoliciesStream", + "listAutoscalingPoliciesAsync" + ] + } + } + } + } + }, + "BatchController": { + "clients": { + "grpc": { + "libraryClient": "BatchControllerClient", + "rpcs": { + "GetBatch": { + "methods": [ + "getBatch" + ] + }, + "DeleteBatch": { + "methods": [ + "deleteBatch" + ] + }, + "CreateBatch": { + "methods": [ + "createBatch" + ] + }, + "ListBatches": { + "methods": [ + "listBatches", + "listBatchesStream", + "listBatchesAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "BatchControllerClient", + "rpcs": { + "GetBatch": { + "methods": [ + "getBatch" + ] + }, + "DeleteBatch": { + "methods": [ + "deleteBatch" + ] + }, + "CreateBatch": { + "methods": [ + "createBatch" + ] + }, + "ListBatches": { + "methods": [ + "listBatches", + "listBatchesStream", + "listBatchesAsync" + ] + } + } + } + } + }, + "ClusterController": { + "clients": { + "grpc": { + "libraryClient": "ClusterControllerClient", + "rpcs": { + "GetCluster": { + "methods": [ + "getCluster" + ] + }, + "CreateCluster": { + "methods": [ + "createCluster" + ] + }, + "UpdateCluster": { + "methods": [ + "updateCluster" + ] + }, + "StopCluster": { + "methods": [ + "stopCluster" + ] + }, + "StartCluster": { + "methods": [ + "startCluster" + ] + }, + "DeleteCluster": { + "methods": [ + "deleteCluster" + ] + }, + "DiagnoseCluster": { + "methods": [ + "diagnoseCluster" + ] + }, + "ListClusters": { + "methods": [ + "listClusters", + "listClustersStream", + "listClustersAsync" + ] + } + } + }, + 
"grpc-fallback": { + "libraryClient": "ClusterControllerClient", + "rpcs": { + "GetCluster": { + "methods": [ + "getCluster" + ] + }, + "CreateCluster": { + "methods": [ + "createCluster" + ] + }, + "UpdateCluster": { + "methods": [ + "updateCluster" + ] + }, + "StopCluster": { + "methods": [ + "stopCluster" + ] + }, + "StartCluster": { + "methods": [ + "startCluster" + ] + }, + "DeleteCluster": { + "methods": [ + "deleteCluster" + ] + }, + "DiagnoseCluster": { + "methods": [ + "diagnoseCluster" + ] + }, + "ListClusters": { + "methods": [ + "listClusters", + "listClustersStream", + "listClustersAsync" + ] + } + } + } + } + }, + "JobController": { + "clients": { + "grpc": { + "libraryClient": "JobControllerClient", + "rpcs": { + "SubmitJob": { + "methods": [ + "submitJob" + ] + }, + "GetJob": { + "methods": [ + "getJob" + ] + }, + "UpdateJob": { + "methods": [ + "updateJob" + ] + }, + "CancelJob": { + "methods": [ + "cancelJob" + ] + }, + "DeleteJob": { + "methods": [ + "deleteJob" + ] + }, + "SubmitJobAsOperation": { + "methods": [ + "submitJobAsOperation" + ] + }, + "ListJobs": { + "methods": [ + "listJobs", + "listJobsStream", + "listJobsAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "JobControllerClient", + "rpcs": { + "SubmitJob": { + "methods": [ + "submitJob" + ] + }, + "GetJob": { + "methods": [ + "getJob" + ] + }, + "UpdateJob": { + "methods": [ + "updateJob" + ] + }, + "CancelJob": { + "methods": [ + "cancelJob" + ] + }, + "DeleteJob": { + "methods": [ + "deleteJob" + ] + }, + "SubmitJobAsOperation": { + "methods": [ + "submitJobAsOperation" + ] + }, + "ListJobs": { + "methods": [ + "listJobs", + "listJobsStream", + "listJobsAsync" + ] + } + } + } + } + }, + "WorkflowTemplateService": { + "clients": { + "grpc": { + "libraryClient": "WorkflowTemplateServiceClient", + "rpcs": { + "CreateWorkflowTemplate": { + "methods": [ + "createWorkflowTemplate" + ] + }, + "GetWorkflowTemplate": { + "methods": [ + "getWorkflowTemplate" + ] + }, + 
"UpdateWorkflowTemplate": { + "methods": [ + "updateWorkflowTemplate" + ] + }, + "DeleteWorkflowTemplate": { + "methods": [ + "deleteWorkflowTemplate" + ] + }, + "InstantiateWorkflowTemplate": { + "methods": [ + "instantiateWorkflowTemplate" + ] + }, + "InstantiateInlineWorkflowTemplate": { + "methods": [ + "instantiateInlineWorkflowTemplate" + ] + }, + "ListWorkflowTemplates": { + "methods": [ + "listWorkflowTemplates", + "listWorkflowTemplatesStream", + "listWorkflowTemplatesAsync" + ] + } + } + }, + "grpc-fallback": { + "libraryClient": "WorkflowTemplateServiceClient", + "rpcs": { + "CreateWorkflowTemplate": { + "methods": [ + "createWorkflowTemplate" + ] + }, + "GetWorkflowTemplate": { + "methods": [ + "getWorkflowTemplate" + ] + }, + "UpdateWorkflowTemplate": { + "methods": [ + "updateWorkflowTemplate" + ] + }, + "DeleteWorkflowTemplate": { + "methods": [ + "deleteWorkflowTemplate" + ] + }, + "InstantiateWorkflowTemplate": { + "methods": [ + "instantiateWorkflowTemplate" + ] + }, + "InstantiateInlineWorkflowTemplate": { + "methods": [ + "instantiateInlineWorkflowTemplate" + ] + }, + "ListWorkflowTemplates": { + "methods": [ + "listWorkflowTemplates", + "listWorkflowTemplatesStream", + "listWorkflowTemplatesAsync" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v1/src/v1/index.ts b/owl-bot-staging/v1/src/v1/index.ts new file mode 100644 index 00000000..098c50ae --- /dev/null +++ b/owl-bot-staging/v1/src/v1/index.ts @@ -0,0 +1,23 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +export {AutoscalingPolicyServiceClient} from './autoscaling_policy_service_client'; +export {BatchControllerClient} from './batch_controller_client'; +export {ClusterControllerClient} from './cluster_controller_client'; +export {JobControllerClient} from './job_controller_client'; +export {WorkflowTemplateServiceClient} from './workflow_template_service_client'; diff --git a/owl-bot-staging/v1/src/v1/job_controller_client.ts b/owl-bot-staging/v1/src/v1/job_controller_client.ts new file mode 100644 index 00000000..164b97ba --- /dev/null +++ b/owl-bot-staging/v1/src/v1/job_controller_client.ts @@ -0,0 +1,1382 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +/* global window */ +import * as gax from 'google-gax'; +import {Callback, CallOptions, Descriptors, ClientOptions, LROperation, PaginationCallback, GaxCall} from 'google-gax'; + +import { Transform } from 'stream'; +import { RequestType } from 'google-gax/build/src/apitypes'; +import * as protos from '../../protos/protos'; +import jsonProtos = require('../../protos/protos.json'); +/** + * Client JSON configuration object, loaded from + * `src/v1/job_controller_client_config.json`. + * This file defines retry strategy and timeouts for all API methods in this library. + */ +import * as gapicConfig from './job_controller_client_config.json'; +import { operationsProtos } from 'google-gax'; +const version = require('../../../package.json').version; + +/** + * The JobController provides methods to manage jobs. + * @class + * @memberof v1 + */ +export class JobControllerClient { + private _terminated = false; + private _opts: ClientOptions; + private _providedCustomServicePath: boolean; + private _gaxModule: typeof gax | typeof gax.fallback; + private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; + private _protos: {}; + private _defaults: {[method: string]: gax.CallSettings}; + auth: gax.GoogleAuth; + descriptors: Descriptors = { + page: {}, + stream: {}, + longrunning: {}, + batching: {}, + }; + warn: (code: string, message: string, warnType?: string) => void; + innerApiCalls: {[name: string]: Function}; + pathTemplates: {[name: string]: gax.PathTemplate}; + operationsClient: gax.OperationsClient; + jobControllerStub?: Promise<{[name: string]: Function}>; + + /** + * Construct an instance of JobControllerClient. + * + * @param {object} [options] - The configuration object. + * The options accepted by the constructor are described in detail + * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). + * The common options are: + * @param {object} [options.credentials] - Credentials object. 
+ * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP fallback mode. + * In fallback mode, a special browser-compatible transport implementation is used + * instead of gRPC transport. In browser context (if the `window` object is defined) + * the fallback mode is enabled automatically; set `options.fallback` to `false` + * if you need to override this behavior. + */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. 
+ const staticMembers = this.constructor as typeof JobControllerClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. 
+ const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + batchPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/batches/{batch}' + ), + projectLocationAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}' + ), + projectLocationWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflowTemplates/{workflow_template}' + ), + projectRegionAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}' + ), + projectRegionWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/regions/{region}/workflowTemplates/{workflow_template}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). Denote the keys used for pagination and results. 
+ this.descriptors.page = { + listJobs: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'jobs') + }; + + const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos); + + // This API contains "long-running operations", which return a + // an Operation object that allows for tracking of the operation, + // rather than holding a request open. + + this.operationsClient = this._gaxModule.lro({ + auth: this.auth, + grpc: 'grpc' in this._gaxGrpc ? this._gaxGrpc.grpc : undefined + }).operationsClient(opts); + const submitJobAsOperationResponse = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.Job') as gax.protobuf.Type; + const submitJobAsOperationMetadata = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.JobMetadata') as gax.protobuf.Type; + + this.descriptors.longrunning = { + submitJobAsOperation: new this._gaxModule.LongrunningDescriptor( + this.operationsClient, + submitJobAsOperationResponse.decode.bind(submitJobAsOperationResponse), + submitJobAsOperationMetadata.decode.bind(submitJobAsOperationMetadata)) + }; + + // Put together the default options sent with requests. + this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.dataproc.v1.JobController', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. 
+ * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.jobControllerStub) { + return this.jobControllerStub; + } + + // Put together the "service stub" for + // google.cloud.dataproc.v1.JobController. + this.jobControllerStub = this._gaxGrpc.createStub( + this._opts.fallback ? + (this._protos as protobuf.Root).lookupService('google.cloud.dataproc.v1.JobController') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.dataproc.v1.JobController, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. 
+ const jobControllerStubMethods =
+ ['submitJob', 'submitJobAsOperation', 'getJob', 'listJobs', 'updateJob', 'cancelJob', 'deleteJob'];
+ for (const methodName of jobControllerStubMethods) {
+ const callPromise = this.jobControllerStub.then(
+ stub => (...args: Array<{}>) => {
+ if (this._terminated) {
+ return Promise.reject('The client has already been closed.');
+ }
+ const func = stub[methodName];
+ return func.apply(stub, args);
+ },
+ (err: Error|null|undefined) => () => {
+ throw err;
+ });
+
+ const descriptor =
+ this.descriptors.page[methodName] ||
+ this.descriptors.longrunning[methodName] ||
+ undefined;
+ const apiCall = this._gaxModule.createApiCall(
+ callPromise,
+ this._defaults[methodName],
+ descriptor
+ );
+
+ this.innerApiCalls[methodName] = apiCall;
+ }
+
+ return this.jobControllerStub;
+ }
+
+ /**
+ * The DNS address for this API service.
+ * @returns {string} The DNS address for this service.
+ */
+ static get servicePath() {
+ return 'dataproc.googleapis.com';
+ }
+
+ /**
+ * The DNS address for this API service - same as servicePath(),
+ * exists for compatibility reasons.
+ * @returns {string} The DNS address for this service.
+ */
+ static get apiEndpoint() {
+ return 'dataproc.googleapis.com';
+ }
+
+ /**
+ * The port for this API service.
+ * @returns {number} The default port for this service.
+ */
+ static get port() {
+ return 443;
+ }
+
+ /**
+ * The scopes needed to make gRPC calls for every method defined
+ * in this service.
+ * @returns {string[]} List of default scopes.
+ */
+ static get scopes() {
+ return [
+ 'https://www.googleapis.com/auth/cloud-platform'
+ ];
+ }
+
+ getProjectId(): Promise<string>;
+ getProjectId(callback: Callback<string, undefined, undefined>): void;
+ /**
+ * Return the project ID used by this class.
+ * @returns {Promise} A promise that resolves to string containing the project ID.
+ */
+ getProjectId(callback?: Callback<string, undefined, undefined>):
+ Promise<string>|void {
+ if (callback) {
+ this.auth.getProjectId(callback);
+ return;
+ }
+ return this.auth.getProjectId();
+ }
+
+ // -------------------
+ // -- Service calls --
+ // -------------------
+/**
+ * Submits a job to a cluster.
+ *
+ * @param {Object} request
+ * The request object that will be sent.
+ * @param {string} request.projectId
+ * Required. The ID of the Google Cloud Platform project that the job
+ * belongs to.
+ * @param {string} request.region
+ * Required. The Dataproc region in which to handle the request.
+ * @param {google.cloud.dataproc.v1.Job} request.job
+ * Required. The job resource.
+ * @param {string} [request.requestId]
+ * Optional. A unique id used to identify the request. If the server
+ * receives two
+ * [SubmitJobRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.SubmitJobRequest)s
+ * with the same id, then the second request will be ignored and the
+ * first {@link google.cloud.dataproc.v1.Job|Job} created and stored in the backend
+ * is returned.
+ *
+ * It is recommended to always set this value to a
+ * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier).
+ *
+ * The id must contain only letters (a-z, A-Z), numbers (0-9),
+ * underscores (_), and hyphens (-). The maximum length is 40 characters.
+ * @param {object} [options]
+ * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details.
+ * @returns {Promise} - The promise which resolves to an array.
+ * The first element of the array is an object representing [Job]{@link google.cloud.dataproc.v1.Job}.
+ * Please see the
+ * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods)
+ * for more details and examples.
+ * @example include:samples/generated/v1/job_controller.submit_job.js + * region_tag:dataproc_v1_generated_JobController_SubmitJob_async + */ + submitJob( + request?: protos.google.cloud.dataproc.v1.ISubmitJobRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.ISubmitJobRequest|undefined, {}|undefined + ]>; + submitJob( + request: protos.google.cloud.dataproc.v1.ISubmitJobRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.ISubmitJobRequest|null|undefined, + {}|null|undefined>): void; + submitJob( + request: protos.google.cloud.dataproc.v1.ISubmitJobRequest, + callback: Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.ISubmitJobRequest|null|undefined, + {}|null|undefined>): void; + submitJob( + request?: protos.google.cloud.dataproc.v1.ISubmitJobRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.ISubmitJobRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.ISubmitJobRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.ISubmitJobRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + }); + this.initialize(); + return 
this.innerApiCalls.submitJob(request, options, callback); + } +/** + * Gets the resource representation for a job in a project. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {string} request.jobId + * Required. The job ID. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Job]{@link google.cloud.dataproc.v1.Job}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v1/job_controller.get_job.js + * region_tag:dataproc_v1_generated_JobController_GetJob_async + */ + getJob( + request?: protos.google.cloud.dataproc.v1.IGetJobRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.IGetJobRequest|undefined, {}|undefined + ]>; + getJob( + request: protos.google.cloud.dataproc.v1.IGetJobRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.IGetJobRequest|null|undefined, + {}|null|undefined>): void; + getJob( + request: protos.google.cloud.dataproc.v1.IGetJobRequest, + callback: Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.IGetJobRequest|null|undefined, + {}|null|undefined>): void; + getJob( + request?: protos.google.cloud.dataproc.v1.IGetJobRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.dataproc.v1.IJob, + 
protos.google.cloud.dataproc.v1.IGetJobRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.IGetJobRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.IGetJobRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + 'job_id': request.jobId || '', + }); + this.initialize(); + return this.innerApiCalls.getJob(request, options, callback); + } +/** + * Updates a job in a project. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {string} request.jobId + * Required. The job ID. + * @param {google.cloud.dataproc.v1.Job} request.job + * Required. The changes to the job. + * @param {google.protobuf.FieldMask} request.updateMask + * Required. Specifies the path, relative to Job, of + * the field to update. For example, to update the labels of a Job the + * update_mask parameter would be specified as + * labels, and the `PATCH` request body would specify the new + * value. Note: Currently, labels is the only + * field that can be updated. + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Job]{@link google.cloud.dataproc.v1.Job}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. + * @example include:samples/generated/v1/job_controller.update_job.js + * region_tag:dataproc_v1_generated_JobController_UpdateJob_async + */ + updateJob( + request?: protos.google.cloud.dataproc.v1.IUpdateJobRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.IUpdateJobRequest|undefined, {}|undefined + ]>; + updateJob( + request: protos.google.cloud.dataproc.v1.IUpdateJobRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.IUpdateJobRequest|null|undefined, + {}|null|undefined>): void; + updateJob( + request: protos.google.cloud.dataproc.v1.IUpdateJobRequest, + callback: Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.IUpdateJobRequest|null|undefined, + {}|null|undefined>): void; + updateJob( + request?: protos.google.cloud.dataproc.v1.IUpdateJobRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.IUpdateJobRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.IUpdateJobRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.IUpdateJobRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + 
callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + 'job_id': request.jobId || '', + }); + this.initialize(); + return this.innerApiCalls.updateJob(request, options, callback); + } +/** + * Starts a job cancellation request. To access the job resource + * after cancellation, call + * [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) + * or + * [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {string} request.jobId + * Required. The job ID. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Job]{@link google.cloud.dataproc.v1.Job}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/job_controller.cancel_job.js + * region_tag:dataproc_v1_generated_JobController_CancelJob_async + */ + cancelJob( + request?: protos.google.cloud.dataproc.v1.ICancelJobRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.ICancelJobRequest|undefined, {}|undefined + ]>; + cancelJob( + request: protos.google.cloud.dataproc.v1.ICancelJobRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.ICancelJobRequest|null|undefined, + {}|null|undefined>): void; + cancelJob( + request: protos.google.cloud.dataproc.v1.ICancelJobRequest, + callback: Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.ICancelJobRequest|null|undefined, + {}|null|undefined>): void; + cancelJob( + request?: protos.google.cloud.dataproc.v1.ICancelJobRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.ICancelJobRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.ICancelJobRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.dataproc.v1.IJob, + protos.google.cloud.dataproc.v1.ICancelJobRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + 'job_id': request.jobId || '', + }); 
+ this.initialize(); + return this.innerApiCalls.cancelJob(request, options, callback); + } +/** + * Deletes the job from the project. If the job is active, the delete fails, + * and the response returns `FAILED_PRECONDITION`. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {string} request.jobId + * Required. The job ID. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/job_controller.delete_job.js + * region_tag:dataproc_v1_generated_JobController_DeleteJob_async + */ + deleteJob( + request?: protos.google.cloud.dataproc.v1.IDeleteJobRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteJobRequest|undefined, {}|undefined + ]>; + deleteJob( + request: protos.google.cloud.dataproc.v1.IDeleteJobRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteJobRequest|null|undefined, + {}|null|undefined>): void; + deleteJob( + request: protos.google.cloud.dataproc.v1.IDeleteJobRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteJobRequest|null|undefined, + {}|null|undefined>): void; + deleteJob( + request?: protos.google.cloud.dataproc.v1.IDeleteJobRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteJobRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteJobRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteJobRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + 'job_id': request.jobId || '', + }); + this.initialize(); + return 
this.innerApiCalls.deleteJob(request, options, callback); + } + +/** + * Submits job to a cluster. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {google.cloud.dataproc.v1.Job} request.job + * Required. The job resource. + * @param {string} [request.requestId] + * Optional. A unique id used to identify the request. If the server + * receives two + * [SubmitJobRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.SubmitJobRequest)s + * with the same id, then the second request will be ignored and the + * first {@link google.cloud.dataproc.v1.Job|Job} created and stored in the backend + * is returned. + * + * It is recommended to always set this value to a + * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + * + * The id must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing + * a long running operation. Its `promise()` method returns a promise + * you can `await` for. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. 
+ * @example include:samples/generated/v1/job_controller.submit_job_as_operation.js + * region_tag:dataproc_v1_generated_JobController_SubmitJobAsOperation_async + */ + submitJobAsOperation( + request?: protos.google.cloud.dataproc.v1.ISubmitJobRequest, + options?: CallOptions): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>; + submitJobAsOperation( + request: protos.google.cloud.dataproc.v1.ISubmitJobRequest, + options: CallOptions, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + submitJobAsOperation( + request: protos.google.cloud.dataproc.v1.ISubmitJobRequest, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + submitJobAsOperation( + request?: protos.google.cloud.dataproc.v1.ISubmitJobRequest, + optionsOrCallback?: CallOptions|Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>, + callback?: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + }); + this.initialize(); + return this.innerApiCalls.submitJobAsOperation(request, options, callback); + } +/** + * Check the status of the long running operation returned by 
`submitJobAsOperation()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/job_controller.submit_job_as_operation.js + * region_tag:dataproc_v1_generated_JobController_SubmitJobAsOperation_async + */ + async checkSubmitJobAsOperationProgress(name: string): Promise>{ + const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.submitJobAsOperation, gax.createDefaultBackoffSettings()); + return decodeOperation as LROperation; + } + /** + * Lists regions/{region}/jobs in a project. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {number} [request.pageSize] + * Optional. The number of results to return in each response. + * @param {string} [request.pageToken] + * Optional. The page token, returned by a previous call, to request the + * next page of results. + * @param {string} [request.clusterName] + * Optional. If set, the returned jobs list includes only jobs that were + * submitted to the named cluster. + * @param {google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcher} [request.jobStateMatcher] + * Optional. Specifies enumerated categories of jobs to list. + * (default = match ALL jobs). 
+ * + * If `filter` is provided, `jobStateMatcher` will be ignored. + * @param {string} [request.filter] + * Optional. A filter constraining the jobs to list. Filters are + * case-sensitive and have the following syntax: + * + * [field = value] AND [field [= value]] ... + * + * where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label + * key. **value** can be `*` to match all values. + * `status.state` can be either `ACTIVE` or `NON_ACTIVE`. + * Only the logical `AND` operator is supported; space-separated items are + * treated as having an implicit `AND` operator. + * + * Example filter: + * + * status.state = ACTIVE AND labels.env = staging AND labels.starred = * + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [Job]{@link google.cloud.dataproc.v1.Job}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listJobsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. 
+ */ + listJobs( + request?: protos.google.cloud.dataproc.v1.IListJobsRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IJob[], + protos.google.cloud.dataproc.v1.IListJobsRequest|null, + protos.google.cloud.dataproc.v1.IListJobsResponse + ]>; + listJobs( + request: protos.google.cloud.dataproc.v1.IListJobsRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.dataproc.v1.IListJobsRequest, + protos.google.cloud.dataproc.v1.IListJobsResponse|null|undefined, + protos.google.cloud.dataproc.v1.IJob>): void; + listJobs( + request: protos.google.cloud.dataproc.v1.IListJobsRequest, + callback: PaginationCallback< + protos.google.cloud.dataproc.v1.IListJobsRequest, + protos.google.cloud.dataproc.v1.IListJobsResponse|null|undefined, + protos.google.cloud.dataproc.v1.IJob>): void; + listJobs( + request?: protos.google.cloud.dataproc.v1.IListJobsRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.dataproc.v1.IListJobsRequest, + protos.google.cloud.dataproc.v1.IListJobsResponse|null|undefined, + protos.google.cloud.dataproc.v1.IJob>, + callback?: PaginationCallback< + protos.google.cloud.dataproc.v1.IListJobsRequest, + protos.google.cloud.dataproc.v1.IListJobsResponse|null|undefined, + protos.google.cloud.dataproc.v1.IJob>): + Promise<[ + protos.google.cloud.dataproc.v1.IJob[], + protos.google.cloud.dataproc.v1.IListJobsRequest|null, + protos.google.cloud.dataproc.v1.IListJobsResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': 
request.projectId || '', + 'region': request.region || '', + }); + this.initialize(); + return this.innerApiCalls.listJobs(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {number} [request.pageSize] + * Optional. The number of results to return in each response. + * @param {string} [request.pageToken] + * Optional. The page token, returned by a previous call, to request the + * next page of results. + * @param {string} [request.clusterName] + * Optional. If set, the returned jobs list includes only jobs that were + * submitted to the named cluster. + * @param {google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcher} [request.jobStateMatcher] + * Optional. Specifies enumerated categories of jobs to list. + * (default = match ALL jobs). + * + * If `filter` is provided, `jobStateMatcher` will be ignored. + * @param {string} [request.filter] + * Optional. A filter constraining the jobs to list. Filters are + * case-sensitive and have the following syntax: + * + * [field = value] AND [field [= value]] ... + * + * where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label + * key. **value** can be `*` to match all values. + * `status.state` can be either `ACTIVE` or `NON_ACTIVE`. + * Only the logical `AND` operator is supported; space-separated items are + * treated as having an implicit `AND` operator. + * + * Example filter: + * + * status.state = ACTIVE AND labels.env = staging AND labels.starred = * + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
+ * @returns {Stream} + * An object stream which emits an object representing [Job]{@link google.cloud.dataproc.v1.Job} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listJobsAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listJobsStream( + request?: protos.google.cloud.dataproc.v1.IListJobsRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + }); + const defaultCallSettings = this._defaults['listJobs']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listJobs.createStream( + this.innerApiCalls.listJobs as gax.GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listJobs`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.projectId + * Required. The ID of the Google Cloud Platform project that the job + * belongs to. + * @param {string} request.region + * Required. The Dataproc region in which to handle the request. + * @param {number} [request.pageSize] + * Optional. The number of results to return in each response. + * @param {string} [request.pageToken] + * Optional. 
The page token, returned by a previous call, to request the + * next page of results. + * @param {string} [request.clusterName] + * Optional. If set, the returned jobs list includes only jobs that were + * submitted to the named cluster. + * @param {google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcher} [request.jobStateMatcher] + * Optional. Specifies enumerated categories of jobs to list. + * (default = match ALL jobs). + * + * If `filter` is provided, `jobStateMatcher` will be ignored. + * @param {string} [request.filter] + * Optional. A filter constraining the jobs to list. Filters are + * case-sensitive and have the following syntax: + * + * [field = value] AND [field [= value]] ... + * + * where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label + * key. **value** can be `*` to match all values. + * `status.state` can be either `ACTIVE` or `NON_ACTIVE`. + * Only the logical `AND` operator is supported; space-separated items are + * treated as having an implicit `AND` operator. + * + * Example filter: + * + * status.state = ACTIVE AND labels.env = staging AND labels.starred = * + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [Job]{@link google.cloud.dataproc.v1.Job}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. 
+ * @example include:samples/generated/v1/job_controller.list_jobs.js + * region_tag:dataproc_v1_generated_JobController_ListJobs_async + */ + listJobsAsync( + request?: protos.google.cloud.dataproc.v1.IListJobsRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'project_id': request.projectId || '', + 'region': request.region || '', + }); + const defaultCallSettings = this._defaults['listJobs']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listJobs.asyncIterate( + this.innerApiCalls['listJobs'] as GaxCall, + request as unknown as RequestType, + callSettings + ) as AsyncIterable; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified batch resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} batch + * @returns {string} Resource name string. + */ + batchPath(project:string,location:string,batch:string) { + return this.pathTemplates.batchPathTemplate.render({ + project: project, + location: location, + batch: batch, + }); + } + + /** + * Parse the project from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the project. + */ + matchProjectFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).project; + } + + /** + * Parse the location from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the location. 
+ */ + matchLocationFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).location; + } + + /** + * Parse the batch from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the batch. + */ + matchBatchFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).batch; + } + + /** + * Return a fully-qualified projectLocationAutoscalingPolicy resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} autoscaling_policy + * @returns {string} Resource name string. + */ + projectLocationAutoscalingPolicyPath(project:string,location:string,autoscalingPolicy:string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render({ + project: project, + location: location, + autoscaling_policy: autoscalingPolicy, + }); + } + + /** + * Parse the project from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).project; + } + + /** + * Parse the location from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the location. 
+ */ + matchLocationFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).location; + } + + /** + * Parse the autoscaling_policy from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the autoscaling_policy. + */ + matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).autoscaling_policy; + } + + /** + * Return a fully-qualified projectLocationWorkflowTemplate resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow_template + * @returns {string} Resource name string. + */ + projectLocationWorkflowTemplatePath(project:string,location:string,workflowTemplate:string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render({ + project: project, + location: location, + workflow_template: workflowTemplate, + }); + } + + /** + * Parse the project from ProjectLocationWorkflowTemplate resource. + * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).project; + } + + /** + * Parse the location from ProjectLocationWorkflowTemplate resource. 
+ * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the location. + */ + matchLocationFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).location; + } + + /** + * Parse the workflow_template from ProjectLocationWorkflowTemplate resource. + * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the workflow_template. + */ + matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).workflow_template; + } + + /** + * Return a fully-qualified projectRegionAutoscalingPolicy resource name string. + * + * @param {string} project + * @param {string} region + * @param {string} autoscaling_policy + * @returns {string} Resource name string. + */ + projectRegionAutoscalingPolicyPath(project:string,region:string,autoscalingPolicy:string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render({ + project: project, + region: region, + autoscaling_policy: autoscalingPolicy, + }); + } + + /** + * Parse the project from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the project. 
+ */ + matchProjectFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).project; + } + + /** + * Parse the region from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the region. + */ + matchRegionFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).region; + } + + /** + * Parse the autoscaling_policy from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the autoscaling_policy. + */ + matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).autoscaling_policy; + } + + /** + * Return a fully-qualified projectRegionWorkflowTemplate resource name string. + * + * @param {string} project + * @param {string} region + * @param {string} workflow_template + * @returns {string} Resource name string. + */ + projectRegionWorkflowTemplatePath(project:string,region:string,workflowTemplate:string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render({ + project: project, + region: region, + workflow_template: workflowTemplate, + }); + } + + /** + * Parse the project from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. 
+ * @returns {string} A string representing the project. + */ + matchProjectFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).project; + } + + /** + * Parse the region from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. + * @returns {string} A string representing the region. + */ + matchRegionFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).region; + } + + /** + * Parse the workflow_template from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. + * @returns {string} A string representing the workflow_template. + */ + matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).workflow_template; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. + * @returns {Promise} A promise that resolves when the client is closed. 
+ */ + close(): Promise { + if (this.jobControllerStub && !this._terminated) { + return this.jobControllerStub.then(stub => { + this._terminated = true; + stub.close(); + this.operationsClient.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v1/src/v1/job_controller_client_config.json b/owl-bot-staging/v1/src/v1/job_controller_client_config.json new file mode 100644 index 00000000..5d757110 --- /dev/null +++ b/owl-bot-staging/v1/src/v1/job_controller_client_config.json @@ -0,0 +1,69 @@ +{ + "interfaces": { + "google.cloud.dataproc.v1.JobController": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ], + "deadline_exceeded_internal_unavailable": [ + "DEADLINE_EXCEEDED", + "INTERNAL", + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "SubmitJob": { + "timeout_millis": 900000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "SubmitJobAsOperation": { + "timeout_millis": 900000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "GetJob": { + "timeout_millis": 900000, + "retry_codes_name": "deadline_exceeded_internal_unavailable", + "retry_params_name": "default" + }, + "ListJobs": { + "timeout_millis": 900000, + "retry_codes_name": "deadline_exceeded_internal_unavailable", + "retry_params_name": "default" + }, + "UpdateJob": { + "timeout_millis": 900000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "CancelJob": { + "timeout_millis": 900000, + "retry_codes_name": "deadline_exceeded_internal_unavailable", + "retry_params_name": "default" + }, + "DeleteJob": { + "timeout_millis": 900000, 
+ "retry_codes_name": "unavailable", + "retry_params_name": "default" + } + } + } + } +} diff --git a/owl-bot-staging/v1/src/v1/job_controller_proto_list.json b/owl-bot-staging/v1/src/v1/job_controller_proto_list.json new file mode 100644 index 00000000..b26a9be7 --- /dev/null +++ b/owl-bot-staging/v1/src/v1/job_controller_proto_list.json @@ -0,0 +1,9 @@ +[ + "../../protos/google/cloud/dataproc/v1/autoscaling_policies.proto", + "../../protos/google/cloud/dataproc/v1/batches.proto", + "../../protos/google/cloud/dataproc/v1/clusters.proto", + "../../protos/google/cloud/dataproc/v1/jobs.proto", + "../../protos/google/cloud/dataproc/v1/operations.proto", + "../../protos/google/cloud/dataproc/v1/shared.proto", + "../../protos/google/cloud/dataproc/v1/workflow_templates.proto" +] diff --git a/owl-bot-staging/v1/src/v1/workflow_template_service_client.ts b/owl-bot-staging/v1/src/v1/workflow_template_service_client.ts new file mode 100644 index 00000000..89902bc6 --- /dev/null +++ b/owl-bot-staging/v1/src/v1/workflow_template_service_client.ts @@ -0,0 +1,1478 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
 **
+
+/* global window */
+import * as gax from 'google-gax';
+import {Callback, CallOptions, Descriptors, ClientOptions, LROperation, PaginationCallback, GaxCall} from 'google-gax';
+
+import { Transform } from 'stream';
+import { RequestType } from 'google-gax/build/src/apitypes';
+import * as protos from '../../protos/protos';
+import jsonProtos = require('../../protos/protos.json');
+/**
+ * Client JSON configuration object, loaded from
+ * `src/v1/workflow_template_service_client_config.json`.
+ * This file defines retry strategy and timeouts for all API methods in this library.
+ */
+import * as gapicConfig from './workflow_template_service_client_config.json';
+import { operationsProtos } from 'google-gax';
+const version = require('../../../package.json').version;
+
+/**
+ * The API interface for managing Workflow Templates in the
+ * Dataproc API.
+ * @class
+ * @memberof v1
+ */
+export class WorkflowTemplateServiceClient {
+  // Set to true by close(); subsequent RPCs are rejected.
+  private _terminated = false;
+  // Client options captured at construction, consumed lazily by initialize().
+  private _opts: ClientOptions;
+  // True when the caller supplied a custom servicePath/apiEndpoint.
+  private _providedCustomServicePath: boolean;
+  // gRPC or browser-fallback flavor of google-gax, chosen in the constructor.
+  private _gaxModule: typeof gax | typeof gax.fallback;
+  private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient;
+  // Protobuf definitions loaded from the bundled protos.json.
+  private _protos: {};
+  // Per-method default CallSettings built from the client config JSON.
+  private _defaults: {[method: string]: gax.CallSettings};
+  auth: gax.GoogleAuth;
+  // Descriptors for paged / streaming / long-running / batching methods,
+  // populated in the constructor and consumed when API calls are wired up.
+  descriptors: Descriptors = {
+    page: {},
+    stream: {},
+    longrunning: {},
+    batching: {},
+  };
+  warn: (code: string, message: string, warnType?: string) => void;
+  innerApiCalls: {[name: string]: Function};
+  pathTemplates: {[name: string]: gax.PathTemplate};
+  operationsClient: gax.OperationsClient;
+  // Lazily-created service stub; undefined until initialize() is first called.
+  workflowTemplateServiceStub?: Promise<{[name: string]: Function}>;
+
+  /**
+   * Construct an instance of WorkflowTemplateServiceClient.
+   *
+   * @param {object} [options] - The configuration object.
+   * The options accepted by the constructor are described in detail
+   * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance).
+ * The common options are: + * @param {object} [options.credentials] - Credentials object. + * @param {string} [options.credentials.client_email] + * @param {string} [options.credentials.private_key] + * @param {string} [options.email] - Account email address. Required when + * using a .pem or .p12 keyFilename. + * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or + * .p12 key downloaded from the Google Developers Console. If you provide + * a path to a JSON file, the projectId option below is not necessary. + * NOTE: .pem and .p12 require you to specify options.email as well. + * @param {number} [options.port] - The port on which to connect to + * the remote host. + * @param {string} [options.projectId] - The project ID from the Google + * Developer's Console, e.g. 'grape-spaceship-123'. We will also check + * the environment variable GCLOUD_PROJECT for your project ID. If your + * app is running in an environment which supports + * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, + * your project ID will be detected automatically. + * @param {string} [options.apiEndpoint] - The domain name of the + * API remote host. + * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. + * Follows the structure of {@link gapicConfig}. + * @param {boolean} [options.fallback] - Use HTTP fallback mode. + * In fallback mode, a special browser-compatible transport implementation is used + * instead of gRPC transport. In browser context (if the `window` object is defined) + * the fallback mode is enabled automatically; set `options.fallback` to `false` + * if you need to override this behavior. + */ + constructor(opts?: ClientOptions) { + // Ensure that options include all the required fields. 
+ const staticMembers = this.constructor as typeof WorkflowTemplateServiceClient; + const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; + this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); + const port = opts?.port || staticMembers.port; + const clientConfig = opts?.clientConfig ?? {}; + const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); + opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); + + // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. + if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { + opts['scopes'] = staticMembers.scopes; + } + + // Choose either gRPC or proto-over-HTTP implementation of google-gax. + this._gaxModule = opts.fallback ? gax.fallback : gax; + + // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. + this._gaxGrpc = new this._gaxModule.GrpcClient(opts); + + // Save options to use in initialize() method. + this._opts = opts; + + // Save the auth object to the client, for use by other methods. + this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); + + // Set useJWTAccessWithScope on the auth object. + this.auth.useJWTAccessWithScope = true; + + // Set defaultServicePath on the auth object. + this.auth.defaultServicePath = staticMembers.servicePath; + + // Set the default scopes in auth client if needed. + if (servicePath === staticMembers.servicePath) { + this.auth.defaultScopes = staticMembers.scopes; + } + + // Determine the client header string. 
+ const clientHeader = [ + `gax/${this._gaxModule.version}`, + `gapic/${version}`, + ]; + if (typeof process !== 'undefined' && 'versions' in process) { + clientHeader.push(`gl-node/${process.versions.node}`); + } else { + clientHeader.push(`gl-web/${this._gaxModule.version}`); + } + if (!opts.fallback) { + clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); + } else if (opts.fallback === 'rest' ) { + clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); + } + if (opts.libName && opts.libVersion) { + clientHeader.push(`${opts.libName}/${opts.libVersion}`); + } + // Load the applicable protos. + this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); + + // This API contains "path templates"; forward-slash-separated + // identifiers to uniquely identify resources within the API. + // Create useful helper objects for these. + this.pathTemplates = { + batchPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/batches/{batch}' + ), + projectPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}' + ), + projectLocationAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}' + ), + projectLocationWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/locations/{location}/workflowTemplates/{workflow_template}' + ), + projectRegionAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}' + ), + projectRegionWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/regions/{region}/workflowTemplates/{workflow_template}' + ), + regionPathTemplate: new this._gaxModule.PathTemplate( + 'projects/{project}/regions/{region}' + ), + }; + + // Some of the methods on this service return "paged" results, + // (e.g. 50 results at a time, with tokens to get subsequent + // pages). 
Denote the keys used for pagination and results. + this.descriptors.page = { + listWorkflowTemplates: + new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'templates') + }; + + const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos); + + // This API contains "long-running operations", which return a + // an Operation object that allows for tracking of the operation, + // rather than holding a request open. + + this.operationsClient = this._gaxModule.lro({ + auth: this.auth, + grpc: 'grpc' in this._gaxGrpc ? this._gaxGrpc.grpc : undefined + }).operationsClient(opts); + const instantiateWorkflowTemplateResponse = protoFilesRoot.lookup( + '.google.protobuf.Empty') as gax.protobuf.Type; + const instantiateWorkflowTemplateMetadata = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.WorkflowMetadata') as gax.protobuf.Type; + const instantiateInlineWorkflowTemplateResponse = protoFilesRoot.lookup( + '.google.protobuf.Empty') as gax.protobuf.Type; + const instantiateInlineWorkflowTemplateMetadata = protoFilesRoot.lookup( + '.google.cloud.dataproc.v1.WorkflowMetadata') as gax.protobuf.Type; + + this.descriptors.longrunning = { + instantiateWorkflowTemplate: new this._gaxModule.LongrunningDescriptor( + this.operationsClient, + instantiateWorkflowTemplateResponse.decode.bind(instantiateWorkflowTemplateResponse), + instantiateWorkflowTemplateMetadata.decode.bind(instantiateWorkflowTemplateMetadata)), + instantiateInlineWorkflowTemplate: new this._gaxModule.LongrunningDescriptor( + this.operationsClient, + instantiateInlineWorkflowTemplateResponse.decode.bind(instantiateInlineWorkflowTemplateResponse), + instantiateInlineWorkflowTemplateMetadata.decode.bind(instantiateInlineWorkflowTemplateMetadata)) + }; + + // Put together the default options sent with requests. 
+ this._defaults = this._gaxGrpc.constructSettings( + 'google.cloud.dataproc.v1.WorkflowTemplateService', gapicConfig as gax.ClientConfig, + opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); + + // Set up a dictionary of "inner API calls"; the core implementation + // of calling the API is handled in `google-gax`, with this code + // merely providing the destination and request information. + this.innerApiCalls = {}; + + // Add a warn function to the client constructor so it can be easily tested. + this.warn = gax.warn; + } + + /** + * Initialize the client. + * Performs asynchronous operations (such as authentication) and prepares the client. + * This function will be called automatically when any class method is called for the + * first time, but if you need to initialize it before calling an actual method, + * feel free to call initialize() directly. + * + * You can await on this method if you want to make sure the client is initialized. + * + * @returns {Promise} A promise that resolves to an authenticated service stub. + */ + initialize() { + // If the client stub promise is already initialized, return immediately. + if (this.workflowTemplateServiceStub) { + return this.workflowTemplateServiceStub; + } + + // Put together the "service stub" for + // google.cloud.dataproc.v1.WorkflowTemplateService. + this.workflowTemplateServiceStub = this._gaxGrpc.createStub( + this._opts.fallback ? + (this._protos as protobuf.Root).lookupService('google.cloud.dataproc.v1.WorkflowTemplateService') : + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (this._protos as any).google.cloud.dataproc.v1.WorkflowTemplateService, + this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; + + // Iterate over each of the methods that the service provides + // and create an API call method for each. 
+ const workflowTemplateServiceStubMethods = + ['createWorkflowTemplate', 'getWorkflowTemplate', 'instantiateWorkflowTemplate', 'instantiateInlineWorkflowTemplate', 'updateWorkflowTemplate', 'listWorkflowTemplates', 'deleteWorkflowTemplate']; + for (const methodName of workflowTemplateServiceStubMethods) { + const callPromise = this.workflowTemplateServiceStub.then( + stub => (...args: Array<{}>) => { + if (this._terminated) { + return Promise.reject('The client has already been closed.'); + } + const func = stub[methodName]; + return func.apply(stub, args); + }, + (err: Error|null|undefined) => () => { + throw err; + }); + + const descriptor = + this.descriptors.page[methodName] || + this.descriptors.longrunning[methodName] || + undefined; + const apiCall = this._gaxModule.createApiCall( + callPromise, + this._defaults[methodName], + descriptor + ); + + this.innerApiCalls[methodName] = apiCall; + } + + return this.workflowTemplateServiceStub; + } + + /** + * The DNS address for this API service. + * @returns {string} The DNS address for this service. + */ + static get servicePath() { + return 'dataproc.googleapis.com'; + } + + /** + * The DNS address for this API service - same as servicePath(), + * exists for compatibility reasons. + * @returns {string} The DNS address for this service. + */ + static get apiEndpoint() { + return 'dataproc.googleapis.com'; + } + + /** + * The port for this API service. + * @returns {number} The default port for this service. + */ + static get port() { + return 443; + } + + /** + * The scopes needed to make gRPC calls for every method defined + * in this service. + * @returns {string[]} List of default scopes. + */ + static get scopes() { + return [ + 'https://www.googleapis.com/auth/cloud-platform' + ]; + } + + getProjectId(): Promise; + getProjectId(callback: Callback): void; + /** + * Return the project ID used by this class. + * @returns {Promise} A promise that resolves to string containing the project ID. 
+ */ + getProjectId(callback?: Callback): + Promise|void { + if (callback) { + this.auth.getProjectId(callback); + return; + } + return this.auth.getProjectId(); + } + + // ------------------- + // -- Service calls -- + // ------------------- +/** + * Creates new workflow template. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The resource name of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. + * + * * For `projects.regions.workflowTemplates.create`, the resource name of the + * region has the following format: + * `projects/{project_id}/regions/{region}` + * + * * For `projects.locations.workflowTemplates.create`, the resource name of + * the location has the following format: + * `projects/{project_id}/locations/{location}` + * @param {google.cloud.dataproc.v1.WorkflowTemplate} request.template + * Required. The Dataproc workflow template to create. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [WorkflowTemplate]{@link google.cloud.dataproc.v1.WorkflowTemplate}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/workflow_template_service.create_workflow_template.js + * region_tag:dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_async + */ + createWorkflowTemplate( + request?: protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest|undefined, {}|undefined + ]>; + createWorkflowTemplate( + request: protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest|null|undefined, + {}|null|undefined>): void; + createWorkflowTemplate( + request: protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest, + callback: Callback< + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest|null|undefined, + {}|null|undefined>): void; + createWorkflowTemplate( + request?: protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + 
options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.createWorkflowTemplate(request, options, callback); + } +/** + * Retrieves the latest workflow template. + * + * Can retrieve previously instantiated template by specifying optional + * version parameter. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The resource name of the workflow template, as described + * in https://cloud.google.com/apis/design/resource_names. + * + * * For `projects.regions.workflowTemplates.get`, the resource name of the + * template has the following format: + * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + * + * * For `projects.locations.workflowTemplates.get`, the resource name of the + * template has the following format: + * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + * @param {number} [request.version] + * Optional. The version of workflow template to retrieve. Only previously + * instantiated versions can be retrieved. + * + * If unspecified, retrieves the current version. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [WorkflowTemplate]{@link google.cloud.dataproc.v1.WorkflowTemplate}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/workflow_template_service.get_workflow_template.js + * region_tag:dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_async + */ + getWorkflowTemplate( + request?: protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest|undefined, {}|undefined + ]>; + getWorkflowTemplate( + request: protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest|null|undefined, + {}|null|undefined>): void; + getWorkflowTemplate( + request: protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest, + callback: Callback< + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest|null|undefined, + {}|null|undefined>): void; + getWorkflowTemplate( + request?: protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + 
options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.getWorkflowTemplate(request, options, callback); + } +/** + * Updates (replaces) workflow template. The updated template + * must contain version that matches the current server version. + * + * @param {Object} request + * The request object that will be sent. + * @param {google.cloud.dataproc.v1.WorkflowTemplate} request.template + * Required. The updated workflow template. + * + * The `template.version` field must match the current version. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [WorkflowTemplate]{@link google.cloud.dataproc.v1.WorkflowTemplate}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/workflow_template_service.update_workflow_template.js + * region_tag:dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_async + */ + updateWorkflowTemplate( + request?: protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest|undefined, {}|undefined + ]>; + updateWorkflowTemplate( + request: protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest, + options: CallOptions, + callback: Callback< + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest|null|undefined, + {}|null|undefined>): void; + updateWorkflowTemplate( + request: protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest, + callback: Callback< + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest|null|undefined, + {}|null|undefined>): void; + updateWorkflowTemplate( + request?: protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.cloud.dataproc.v1.IWorkflowTemplate, + protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + 
options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'template.name': request.template!.name || '', + }); + this.initialize(); + return this.innerApiCalls.updateWorkflowTemplate(request, options, callback); + } +/** + * Deletes a workflow template. It does not cancel in-progress workflows. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The resource name of the workflow template, as described + * in https://cloud.google.com/apis/design/resource_names. + * + * * For `projects.regions.workflowTemplates.delete`, the resource name + * of the template has the following format: + * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + * + * * For `projects.locations.workflowTemplates.instantiate`, the resource name + * of the template has the following format: + * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + * @param {number} [request.version] + * Optional. The version of workflow template to delete. If specified, + * will only delete the template if the current server version matches + * specified version. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) + * for more details and examples. 
+ * @example include:samples/generated/v1/workflow_template_service.delete_workflow_template.js + * region_tag:dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async + */ + deleteWorkflowTemplate( + request?: protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest, + options?: CallOptions): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest|undefined, {}|undefined + ]>; + deleteWorkflowTemplate( + request: protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest, + options: CallOptions, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest|null|undefined, + {}|null|undefined>): void; + deleteWorkflowTemplate( + request: protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest, + callback: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest|null|undefined, + {}|null|undefined>): void; + deleteWorkflowTemplate( + request?: protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest, + optionsOrCallback?: CallOptions|Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest|null|undefined, + {}|null|undefined>, + callback?: Callback< + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest|null|undefined, + {}|null|undefined>): + Promise<[ + protos.google.protobuf.IEmpty, + protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + 
options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.deleteWorkflowTemplate(request, options, callback); + } + +/** + * Instantiates a template and begins execution. + * + * The returned Operation can be used to track execution of + * workflow by polling + * {@link google.longrunning.Operations.GetOperation|operations.get}. + * The Operation will complete when entire workflow is finished. + * + * The running workflow can be aborted via + * {@link google.longrunning.Operations.CancelOperation|operations.cancel}. + * This will cause any inflight jobs to be cancelled and workflow-owned + * clusters to be deleted. + * + * The {@link google.longrunning.Operation.metadata|Operation.metadata} will be + * [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + * Also see [Using + * WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). + * + * On successful completion, + * {@link google.longrunning.Operation.response|Operation.response} will be + * {@link google.protobuf.Empty|Empty}. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.name + * Required. The resource name of the workflow template, as described + * in https://cloud.google.com/apis/design/resource_names. + * + * * For `projects.regions.workflowTemplates.instantiate`, the resource name + * of the template has the following format: + * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` + * + * * For `projects.locations.workflowTemplates.instantiate`, the resource name + * of the template has the following format: + * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` + * @param {number} [request.version] + * Optional. The version of workflow template to instantiate. 
If specified, + * the workflow will be instantiated only if the current version of + * the workflow template has the supplied version. + * + * This option cannot be used to instantiate a previous version of + * workflow template. + * @param {string} [request.requestId] + * Optional. A tag that prevents multiple concurrent workflow + * instances with the same tag from running. This mitigates risk of + * concurrent instances started due to retries. + * + * It is recommended to always set this value to a + * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + * + * The tag must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. + * @param {number[]} [request.parameters] + * Optional. Map from parameter names to values that should be used for those + * parameters. Values may not exceed 1000 characters. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing + * a long running operation. Its `promise()` method returns a promise + * you can `await` for. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. 
+ * @example include:samples/generated/v1/workflow_template_service.instantiate_workflow_template.js + * region_tag:dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async + */ + instantiateWorkflowTemplate( + request?: protos.google.cloud.dataproc.v1.IInstantiateWorkflowTemplateRequest, + options?: CallOptions): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>; + instantiateWorkflowTemplate( + request: protos.google.cloud.dataproc.v1.IInstantiateWorkflowTemplateRequest, + options: CallOptions, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + instantiateWorkflowTemplate( + request: protos.google.cloud.dataproc.v1.IInstantiateWorkflowTemplateRequest, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + instantiateWorkflowTemplate( + request?: protos.google.cloud.dataproc.v1.IInstantiateWorkflowTemplateRequest, + optionsOrCallback?: CallOptions|Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>, + callback?: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'name': request.name || '', + }); + this.initialize(); + return this.innerApiCalls.instantiateWorkflowTemplate(request, options, 
callback); + } +/** + * Check the status of the long running operation returned by `instantiateWorkflowTemplate()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/workflow_template_service.instantiate_workflow_template.js + * region_tag:dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async + */ + async checkInstantiateWorkflowTemplateProgress(name: string): Promise>{ + const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.instantiateWorkflowTemplate, gax.createDefaultBackoffSettings()); + return decodeOperation as LROperation; + } +/** + * Instantiates a template and begins execution. + * + * This method is equivalent to executing the sequence + * {@link google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate|CreateWorkflowTemplate}, {@link google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate|InstantiateWorkflowTemplate}, + * {@link google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate|DeleteWorkflowTemplate}. + * + * The returned Operation can be used to track execution of + * workflow by polling + * {@link google.longrunning.Operations.GetOperation|operations.get}. + * The Operation will complete when entire workflow is finished. + * + * The running workflow can be aborted via + * {@link google.longrunning.Operations.CancelOperation|operations.cancel}. 
+ * This will cause any inflight jobs to be cancelled and workflow-owned + * clusters to be deleted. + * + * The {@link google.longrunning.Operation.metadata|Operation.metadata} will be + * [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). + * Also see [Using + * WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). + * + * On successful completion, + * {@link google.longrunning.Operation.response|Operation.response} will be + * {@link google.protobuf.Empty|Empty}. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The resource name of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. + * + * * For `projects.regions.workflowTemplates,instantiateinline`, the resource + * name of the region has the following format: + * `projects/{project_id}/regions/{region}` + * + * * For `projects.locations.workflowTemplates.instantiateinline`, the + * resource name of the location has the following format: + * `projects/{project_id}/locations/{location}` + * @param {google.cloud.dataproc.v1.WorkflowTemplate} request.template + * Required. The workflow template to instantiate. + * @param {string} [request.requestId] + * Optional. A tag that prevents multiple concurrent workflow + * instances with the same tag from running. This mitigates risk of + * concurrent instances started due to retries. + * + * It is recommended to always set this value to a + * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). + * + * The tag must contain only letters (a-z, A-Z), numbers (0-9), + * underscores (_), and hyphens (-). The maximum length is 40 characters. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
+ * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is an object representing + * a long running operation. Its `promise()` method returns a promise + * you can `await` for. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/workflow_template_service.instantiate_inline_workflow_template.js + * region_tag:dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async + */ + instantiateInlineWorkflowTemplate( + request?: protos.google.cloud.dataproc.v1.IInstantiateInlineWorkflowTemplateRequest, + options?: CallOptions): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>; + instantiateInlineWorkflowTemplate( + request: protos.google.cloud.dataproc.v1.IInstantiateInlineWorkflowTemplateRequest, + options: CallOptions, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + instantiateInlineWorkflowTemplate( + request: protos.google.cloud.dataproc.v1.IInstantiateInlineWorkflowTemplateRequest, + callback: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): void; + instantiateInlineWorkflowTemplate( + request?: protos.google.cloud.dataproc.v1.IInstantiateInlineWorkflowTemplateRequest, + optionsOrCallback?: CallOptions|Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>, + callback?: Callback< + LROperation, + protos.google.longrunning.IOperation|null|undefined, + {}|null|undefined>): + Promise<[ + LROperation, + protos.google.longrunning.IOperation|undefined, {}|undefined + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = 
optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.instantiateInlineWorkflowTemplate(request, options, callback); + } +/** + * Check the status of the long running operation returned by `instantiateInlineWorkflowTemplate()`. + * @param {String} name + * The operation name that will be passed. + * @returns {Promise} - The promise which resolves to an object. + * The decoded operation object has result and metadata field to get information from. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) + * for more details and examples. + * @example include:samples/generated/v1/workflow_template_service.instantiate_inline_workflow_template.js + * region_tag:dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async + */ + async checkInstantiateInlineWorkflowTemplateProgress(name: string): Promise>{ + const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); + const [operation] = await this.operationsClient.getOperation(request); + const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.instantiateInlineWorkflowTemplate, gax.createDefaultBackoffSettings()); + return decodeOperation as LROperation; + } + /** + * Lists workflows that match the specified filter in the request. + * + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The resource name of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. 
+ * + * * For `projects.regions.workflowTemplates,list`, the resource + * name of the region has the following format: + * `projects/{project_id}/regions/{region}` + * + * * For `projects.locations.workflowTemplates.list`, the + * resource name of the location has the following format: + * `projects/{project_id}/locations/{location}` + * @param {number} [request.pageSize] + * Optional. The maximum number of results to return in each response. + * @param {string} [request.pageToken] + * Optional. The page token, returned by a previous call, to request the + * next page of results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Promise} - The promise which resolves to an array. + * The first element of the array is Array of [WorkflowTemplate]{@link google.cloud.dataproc.v1.WorkflowTemplate}. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed and will merge results from all the pages into this array. + * Note that it can affect your quota. + * We recommend using `listWorkflowTemplatesAsync()` + * method described below for async iteration which you can stop as needed. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. 
+ */ + listWorkflowTemplates( + request?: protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, + options?: CallOptions): + Promise<[ + protos.google.cloud.dataproc.v1.IWorkflowTemplate[], + protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest|null, + protos.google.cloud.dataproc.v1.IListWorkflowTemplatesResponse + ]>; + listWorkflowTemplates( + request: protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, + options: CallOptions, + callback: PaginationCallback< + protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, + protos.google.cloud.dataproc.v1.IListWorkflowTemplatesResponse|null|undefined, + protos.google.cloud.dataproc.v1.IWorkflowTemplate>): void; + listWorkflowTemplates( + request: protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, + callback: PaginationCallback< + protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, + protos.google.cloud.dataproc.v1.IListWorkflowTemplatesResponse|null|undefined, + protos.google.cloud.dataproc.v1.IWorkflowTemplate>): void; + listWorkflowTemplates( + request?: protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, + optionsOrCallback?: CallOptions|PaginationCallback< + protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, + protos.google.cloud.dataproc.v1.IListWorkflowTemplatesResponse|null|undefined, + protos.google.cloud.dataproc.v1.IWorkflowTemplate>, + callback?: PaginationCallback< + protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, + protos.google.cloud.dataproc.v1.IListWorkflowTemplatesResponse|null|undefined, + protos.google.cloud.dataproc.v1.IWorkflowTemplate>): + Promise<[ + protos.google.cloud.dataproc.v1.IWorkflowTemplate[], + protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest|null, + protos.google.cloud.dataproc.v1.IListWorkflowTemplatesResponse + ]>|void { + request = request || {}; + let options: CallOptions; + if (typeof optionsOrCallback === 'function' && callback === undefined) { + callback = 
optionsOrCallback; + options = {}; + } + else { + options = optionsOrCallback as CallOptions; + } + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + this.initialize(); + return this.innerApiCalls.listWorkflowTemplates(request, options, callback); + } + +/** + * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The resource name of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. + * + * * For `projects.regions.workflowTemplates,list`, the resource + * name of the region has the following format: + * `projects/{project_id}/regions/{region}` + * + * * For `projects.locations.workflowTemplates.list`, the + * resource name of the location has the following format: + * `projects/{project_id}/locations/{location}` + * @param {number} [request.pageSize] + * Optional. The maximum number of results to return in each response. + * @param {string} [request.pageToken] + * Optional. The page token, returned by a previous call, to request the + * next page of results. + * @param {object} [options] + * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Stream} + * An object stream which emits an object representing [WorkflowTemplate]{@link google.cloud.dataproc.v1.WorkflowTemplate} on 'data' event. + * The client library will perform auto-pagination by default: it will call the API as many + * times as needed. Note that it can affect your quota. + * We recommend using `listWorkflowTemplatesAsync()` + * method described below for async iteration which you can stop as needed. 
+ * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + */ + listWorkflowTemplatesStream( + request?: protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, + options?: CallOptions): + Transform{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listWorkflowTemplates']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listWorkflowTemplates.createStream( + this.innerApiCalls.listWorkflowTemplates as gax.GaxCall, + request, + callSettings + ); + } + +/** + * Equivalent to `listWorkflowTemplates`, but returns an iterable object. + * + * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. + * @param {Object} request + * The request object that will be sent. + * @param {string} request.parent + * Required. The resource name of the region or location, as described + * in https://cloud.google.com/apis/design/resource_names. + * + * * For `projects.regions.workflowTemplates,list`, the resource + * name of the region has the following format: + * `projects/{project_id}/regions/{region}` + * + * * For `projects.locations.workflowTemplates.list`, the + * resource name of the location has the following format: + * `projects/{project_id}/locations/{location}` + * @param {number} [request.pageSize] + * Optional. The maximum number of results to return in each response. + * @param {string} [request.pageToken] + * Optional. The page token, returned by a previous call, to request the + * next page of results. + * @param {object} [options] + * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. + * @returns {Object} + * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). + * When you iterate the returned iterable, each element will be an object representing + * [WorkflowTemplate]{@link google.cloud.dataproc.v1.WorkflowTemplate}. The API will be called under the hood as needed, once per the page, + * so you can stop the iteration when you don't need more results. + * Please see the + * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) + * for more details and examples. + * @example include:samples/generated/v1/workflow_template_service.list_workflow_templates.js + * region_tag:dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_async + */ + listWorkflowTemplatesAsync( + request?: protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, + options?: CallOptions): + AsyncIterable{ + request = request || {}; + options = options || {}; + options.otherArgs = options.otherArgs || {}; + options.otherArgs.headers = options.otherArgs.headers || {}; + options.otherArgs.headers[ + 'x-goog-request-params' + ] = gax.routingHeader.fromParams({ + 'parent': request.parent || '', + }); + const defaultCallSettings = this._defaults['listWorkflowTemplates']; + const callSettings = defaultCallSettings.merge(options); + this.initialize(); + return this.descriptors.page.listWorkflowTemplates.asyncIterate( + this.innerApiCalls['listWorkflowTemplates'] as GaxCall, + request as unknown as RequestType, + callSettings + ) as AsyncIterable; + } + // -------------------- + // -- Path templates -- + // -------------------- + + /** + * Return a fully-qualified batch resource name string. 
+ * + * @param {string} project + * @param {string} location + * @param {string} batch + * @returns {string} Resource name string. + */ + batchPath(project:string,location:string,batch:string) { + return this.pathTemplates.batchPathTemplate.render({ + project: project, + location: location, + batch: batch, + }); + } + + /** + * Parse the project from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the project. + */ + matchProjectFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).project; + } + + /** + * Parse the location from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the location. + */ + matchLocationFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).location; + } + + /** + * Parse the batch from Batch resource. + * + * @param {string} batchName + * A fully-qualified path representing Batch resource. + * @returns {string} A string representing the batch. + */ + matchBatchFromBatchName(batchName: string) { + return this.pathTemplates.batchPathTemplate.match(batchName).batch; + } + + /** + * Return a fully-qualified project resource name string. + * + * @param {string} project + * @returns {string} Resource name string. + */ + projectPath(project:string) { + return this.pathTemplates.projectPathTemplate.render({ + project: project, + }); + } + + /** + * Parse the project from Project resource. + * + * @param {string} projectName + * A fully-qualified path representing Project resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectName(projectName: string) { + return this.pathTemplates.projectPathTemplate.match(projectName).project; + } + + /** + * Return a fully-qualified projectLocationAutoscalingPolicy resource name string. 
+ * + * @param {string} project + * @param {string} location + * @param {string} autoscaling_policy + * @returns {string} Resource name string. + */ + projectLocationAutoscalingPolicyPath(project:string,location:string,autoscalingPolicy:string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render({ + project: project, + location: location, + autoscaling_policy: autoscalingPolicy, + }); + } + + /** + * Parse the project from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).project; + } + + /** + * Parse the location from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the location. + */ + matchLocationFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).location; + } + + /** + * Parse the autoscaling_policy from ProjectLocationAutoscalingPolicy resource. + * + * @param {string} projectLocationAutoscalingPolicyName + * A fully-qualified path representing project_location_autoscaling_policy resource. + * @returns {string} A string representing the autoscaling_policy. 
+ */ + matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { + return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).autoscaling_policy; + } + + /** + * Return a fully-qualified projectLocationWorkflowTemplate resource name string. + * + * @param {string} project + * @param {string} location + * @param {string} workflow_template + * @returns {string} Resource name string. + */ + projectLocationWorkflowTemplatePath(project:string,location:string,workflowTemplate:string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render({ + project: project, + location: location, + workflow_template: workflowTemplate, + }); + } + + /** + * Parse the project from ProjectLocationWorkflowTemplate resource. + * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).project; + } + + /** + * Parse the location from ProjectLocationWorkflowTemplate resource. + * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the location. + */ + matchLocationFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).location; + } + + /** + * Parse the workflow_template from ProjectLocationWorkflowTemplate resource. 
+ * + * @param {string} projectLocationWorkflowTemplateName + * A fully-qualified path representing project_location_workflow_template resource. + * @returns {string} A string representing the workflow_template. + */ + matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { + return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).workflow_template; + } + + /** + * Return a fully-qualified projectRegionAutoscalingPolicy resource name string. + * + * @param {string} project + * @param {string} region + * @param {string} autoscaling_policy + * @returns {string} Resource name string. + */ + projectRegionAutoscalingPolicyPath(project:string,region:string,autoscalingPolicy:string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render({ + project: project, + region: region, + autoscaling_policy: autoscalingPolicy, + }); + } + + /** + * Parse the project from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).project; + } + + /** + * Parse the region from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the region. 
+ */ + matchRegionFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).region; + } + + /** + * Parse the autoscaling_policy from ProjectRegionAutoscalingPolicy resource. + * + * @param {string} projectRegionAutoscalingPolicyName + * A fully-qualified path representing project_region_autoscaling_policy resource. + * @returns {string} A string representing the autoscaling_policy. + */ + matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { + return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).autoscaling_policy; + } + + /** + * Return a fully-qualified projectRegionWorkflowTemplate resource name string. + * + * @param {string} project + * @param {string} region + * @param {string} workflow_template + * @returns {string} Resource name string. + */ + projectRegionWorkflowTemplatePath(project:string,region:string,workflowTemplate:string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render({ + project: project, + region: region, + workflow_template: workflowTemplate, + }); + } + + /** + * Parse the project from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. + * @returns {string} A string representing the project. + */ + matchProjectFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).project; + } + + /** + * Parse the region from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. 
+ * @returns {string} A string representing the region. + */ + matchRegionFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).region; + } + + /** + * Parse the workflow_template from ProjectRegionWorkflowTemplate resource. + * + * @param {string} projectRegionWorkflowTemplateName + * A fully-qualified path representing project_region_workflow_template resource. + * @returns {string} A string representing the workflow_template. + */ + matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { + return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).workflow_template; + } + + /** + * Return a fully-qualified region resource name string. + * + * @param {string} project + * @param {string} region + * @returns {string} Resource name string. + */ + regionPath(project:string,region:string) { + return this.pathTemplates.regionPathTemplate.render({ + project: project, + region: region, + }); + } + + /** + * Parse the project from Region resource. + * + * @param {string} regionName + * A fully-qualified path representing Region resource. + * @returns {string} A string representing the project. + */ + matchProjectFromRegionName(regionName: string) { + return this.pathTemplates.regionPathTemplate.match(regionName).project; + } + + /** + * Parse the region from Region resource. + * + * @param {string} regionName + * A fully-qualified path representing Region resource. + * @returns {string} A string representing the region. + */ + matchRegionFromRegionName(regionName: string) { + return this.pathTemplates.regionPathTemplate.match(regionName).region; + } + + /** + * Terminate the gRPC channel and close the client. + * + * The client will no longer be usable and all future behavior is undefined. 
+ * @returns {Promise} A promise that resolves when the client is closed. + */ + close(): Promise { + if (this.workflowTemplateServiceStub && !this._terminated) { + return this.workflowTemplateServiceStub.then(stub => { + this._terminated = true; + stub.close(); + this.operationsClient.close(); + }); + } + return Promise.resolve(); + } +} diff --git a/owl-bot-staging/v1/src/v1/workflow_template_service_client_config.json b/owl-bot-staging/v1/src/v1/workflow_template_service_client_config.json new file mode 100644 index 00000000..62d3aa9b --- /dev/null +++ b/owl-bot-staging/v1/src/v1/workflow_template_service_client_config.json @@ -0,0 +1,69 @@ +{ + "interfaces": { + "google.cloud.dataproc.v1.WorkflowTemplateService": { + "retry_codes": { + "non_idempotent": [], + "idempotent": [ + "DEADLINE_EXCEEDED", + "UNAVAILABLE" + ], + "unavailable": [ + "UNAVAILABLE" + ], + "deadline_exceeded_internal_unavailable": [ + "DEADLINE_EXCEEDED", + "INTERNAL", + "UNAVAILABLE" + ] + }, + "retry_params": { + "default": { + "initial_retry_delay_millis": 100, + "retry_delay_multiplier": 1.3, + "max_retry_delay_millis": 60000, + "initial_rpc_timeout_millis": 60000, + "rpc_timeout_multiplier": 1, + "max_rpc_timeout_millis": 60000, + "total_timeout_millis": 600000 + } + }, + "methods": { + "CreateWorkflowTemplate": { + "timeout_millis": 600000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "GetWorkflowTemplate": { + "timeout_millis": 600000, + "retry_codes_name": "deadline_exceeded_internal_unavailable", + "retry_params_name": "default" + }, + "InstantiateWorkflowTemplate": { + "timeout_millis": 600000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "InstantiateInlineWorkflowTemplate": { + "timeout_millis": 600000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + "UpdateWorkflowTemplate": { + "timeout_millis": 600000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + }, + 
"ListWorkflowTemplates": { + "timeout_millis": 600000, + "retry_codes_name": "deadline_exceeded_internal_unavailable", + "retry_params_name": "default" + }, + "DeleteWorkflowTemplate": { + "timeout_millis": 600000, + "retry_codes_name": "unavailable", + "retry_params_name": "default" + } + } + } + } +} diff --git a/owl-bot-staging/v1/src/v1/workflow_template_service_proto_list.json b/owl-bot-staging/v1/src/v1/workflow_template_service_proto_list.json new file mode 100644 index 00000000..b26a9be7 --- /dev/null +++ b/owl-bot-staging/v1/src/v1/workflow_template_service_proto_list.json @@ -0,0 +1,9 @@ +[ + "../../protos/google/cloud/dataproc/v1/autoscaling_policies.proto", + "../../protos/google/cloud/dataproc/v1/batches.proto", + "../../protos/google/cloud/dataproc/v1/clusters.proto", + "../../protos/google/cloud/dataproc/v1/jobs.proto", + "../../protos/google/cloud/dataproc/v1/operations.proto", + "../../protos/google/cloud/dataproc/v1/shared.proto", + "../../protos/google/cloud/dataproc/v1/workflow_templates.proto" +] diff --git a/owl-bot-staging/v1/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v1/system-test/fixtures/sample/src/index.js new file mode 100644 index 00000000..8835b621 --- /dev/null +++ b/owl-bot-staging/v1/system-test/fixtures/sample/src/index.js @@ -0,0 +1,31 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. 
** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + + +/* eslint-disable node/no-missing-require, no-unused-vars */ +const dataproc = require('@google-cloud/dataproc'); + +function main() { + const autoscalingPolicyServiceClient = new dataproc.AutoscalingPolicyServiceClient(); + const batchControllerClient = new dataproc.BatchControllerClient(); + const clusterControllerClient = new dataproc.ClusterControllerClient(); + const jobControllerClient = new dataproc.JobControllerClient(); + const workflowTemplateServiceClient = new dataproc.WorkflowTemplateServiceClient(); +} + +main(); diff --git a/owl-bot-staging/v1/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v1/system-test/fixtures/sample/src/index.ts new file mode 100644 index 00000000..83479d35 --- /dev/null +++ b/owl-bot-staging/v1/system-test/fixtures/sample/src/index.ts @@ -0,0 +1,56 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import {AutoscalingPolicyServiceClient, BatchControllerClient, ClusterControllerClient, JobControllerClient, WorkflowTemplateServiceClient} from '@google-cloud/dataproc'; + +// check that the client class type name can be used +function doStuffWithAutoscalingPolicyServiceClient(client: AutoscalingPolicyServiceClient) { + client.close(); +} +function doStuffWithBatchControllerClient(client: BatchControllerClient) { + client.close(); +} +function doStuffWithClusterControllerClient(client: ClusterControllerClient) { + client.close(); +} +function doStuffWithJobControllerClient(client: JobControllerClient) { + client.close(); +} +function doStuffWithWorkflowTemplateServiceClient(client: WorkflowTemplateServiceClient) { + client.close(); +} + +function main() { + // check that the client instance can be created + const autoscalingPolicyServiceClient = new AutoscalingPolicyServiceClient(); + doStuffWithAutoscalingPolicyServiceClient(autoscalingPolicyServiceClient); + // check that the client instance can be created + const batchControllerClient = new BatchControllerClient(); + doStuffWithBatchControllerClient(batchControllerClient); + // check that the client instance can be created + const clusterControllerClient = new ClusterControllerClient(); + doStuffWithClusterControllerClient(clusterControllerClient); + // check that the client instance can be created + const jobControllerClient = new JobControllerClient(); + doStuffWithJobControllerClient(jobControllerClient); + // check that the client instance can be created + const workflowTemplateServiceClient = new WorkflowTemplateServiceClient(); + doStuffWithWorkflowTemplateServiceClient(workflowTemplateServiceClient); +} + +main(); diff --git a/owl-bot-staging/v1/system-test/install.ts b/owl-bot-staging/v1/system-test/install.ts new file mode 100644 index 00000000..8ec45222 --- /dev/null +++ b/owl-bot-staging/v1/system-test/install.ts @@ -0,0 +1,49 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache 
License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import { packNTest } from 'pack-n-play'; +import { readFileSync } from 'fs'; +import { describe, it } from 'mocha'; + +describe('📦 pack-n-play test', () => { + + it('TypeScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'TypeScript user can use the type definitions', + ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() + } + }; + await packNTest(options); + }); + + it('JavaScript code', async function() { + this.timeout(300000); + const options = { + packageDir: process.cwd(), + sample: { + description: 'JavaScript user can use the library', + ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() + } + }; + await packNTest(options); + }); + +}); diff --git a/owl-bot-staging/v1/test/gapic_autoscaling_policy_service_v1.ts b/owl-bot-staging/v1/test/gapic_autoscaling_policy_service_v1.ts new file mode 100644 index 00000000..13b33f6b --- /dev/null +++ b/owl-bot-staging/v1/test/gapic_autoscaling_policy_service_v1.ts @@ -0,0 +1,1098 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import { describe, it } from 'mocha'; +import * as autoscalingpolicyserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { + return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v1.AutoscalingPolicyServiceClient', () => { + it('has servicePath', () => { + const servicePath = autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new 
autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.autoscalingPolicyServiceStub, undefined); + await client.initialize(); + assert(client.autoscalingPolicyServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.autoscalingPolicyServiceStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.autoscalingPolicyServiceStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: 
Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + + describe('createAutoscalingPolicy', () => { + it('invokes createAutoscalingPolicy without error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()); + client.innerApiCalls.createAutoscalingPolicy = stubSimpleCall(expectedResponse); + const [response] = await client.createAutoscalingPolicy(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createAutoscalingPolicy as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createAutoscalingPolicy without error using callback', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()); + 
client.innerApiCalls.createAutoscalingPolicy = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createAutoscalingPolicy( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IAutoscalingPolicy|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createAutoscalingPolicy as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createAutoscalingPolicy with error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createAutoscalingPolicy = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.createAutoscalingPolicy(request), expectedError); + assert((client.innerApiCalls.createAutoscalingPolicy as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createAutoscalingPolicy with closed client', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest()); + request.parent = ''; + const expectedError = new Error('The client has 
already been closed.'); + client.close(); + await assert.rejects(client.createAutoscalingPolicy(request), expectedError); + }); + }); + + describe('updateAutoscalingPolicy', () => { + it('invokes updateAutoscalingPolicy without error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest()); + request.policy = {}; + request.policy.name = ''; + const expectedHeaderRequestParams = "policy.name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()); + client.innerApiCalls.updateAutoscalingPolicy = stubSimpleCall(expectedResponse); + const [response] = await client.updateAutoscalingPolicy(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.updateAutoscalingPolicy as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes updateAutoscalingPolicy without error using callback', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest()); + request.policy = {}; + request.policy.name = ''; + const expectedHeaderRequestParams = "policy.name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()); + 
client.innerApiCalls.updateAutoscalingPolicy = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.updateAutoscalingPolicy( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IAutoscalingPolicy|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.updateAutoscalingPolicy as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes updateAutoscalingPolicy with error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest()); + request.policy = {}; + request.policy.name = ''; + const expectedHeaderRequestParams = "policy.name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.updateAutoscalingPolicy = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.updateAutoscalingPolicy(request), expectedError); + assert((client.innerApiCalls.updateAutoscalingPolicy as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes updateAutoscalingPolicy with closed client', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest()); + request.policy = {}; + 
request.policy.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.updateAutoscalingPolicy(request), expectedError); + }); + }); + + describe('getAutoscalingPolicy', () => { + it('invokes getAutoscalingPolicy without error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetAutoscalingPolicyRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()); + client.innerApiCalls.getAutoscalingPolicy = stubSimpleCall(expectedResponse); + const [response] = await client.getAutoscalingPolicy(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getAutoscalingPolicy as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getAutoscalingPolicy without error using callback', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetAutoscalingPolicyRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()); + 
client.innerApiCalls.getAutoscalingPolicy = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getAutoscalingPolicy( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IAutoscalingPolicy|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getAutoscalingPolicy as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getAutoscalingPolicy with error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetAutoscalingPolicyRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getAutoscalingPolicy = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getAutoscalingPolicy(request), expectedError); + assert((client.innerApiCalls.getAutoscalingPolicy as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getAutoscalingPolicy with closed client', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetAutoscalingPolicyRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + 
client.close(); + await assert.rejects(client.getAutoscalingPolicy(request), expectedError); + }); + }); + + describe('deleteAutoscalingPolicy', () => { + it('invokes deleteAutoscalingPolicy without error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteAutoscalingPolicyRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteAutoscalingPolicy = stubSimpleCall(expectedResponse); + const [response] = await client.deleteAutoscalingPolicy(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteAutoscalingPolicy as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteAutoscalingPolicy without error using callback', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteAutoscalingPolicyRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteAutoscalingPolicy = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + 
client.deleteAutoscalingPolicy( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteAutoscalingPolicy as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteAutoscalingPolicy with error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteAutoscalingPolicyRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteAutoscalingPolicy = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteAutoscalingPolicy(request), expectedError); + assert((client.innerApiCalls.deleteAutoscalingPolicy as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteAutoscalingPolicy with closed client', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteAutoscalingPolicyRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteAutoscalingPolicy(request), expectedError); + }); + }); + + describe('listAutoscalingPolicies', () 
=> { + it('invokes listAutoscalingPolicies without error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), + ]; + client.innerApiCalls.listAutoscalingPolicies = stubSimpleCall(expectedResponse); + const [response] = await client.listAutoscalingPolicies(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listAutoscalingPolicies as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listAutoscalingPolicies without error using callback', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), + 
generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), + ]; + client.innerApiCalls.listAutoscalingPolicies = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listAutoscalingPolicies( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IAutoscalingPolicy[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listAutoscalingPolicies as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listAutoscalingPolicies with error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listAutoscalingPolicies = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listAutoscalingPolicies(request), expectedError); + assert((client.innerApiCalls.listAutoscalingPolicies as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listAutoscalingPoliciesStream without error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new 
protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), + ]; + client.descriptors.page.listAutoscalingPolicies.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listAutoscalingPoliciesStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.dataproc.v1.AutoscalingPolicy[] = []; + stream.on('data', (response: protos.google.cloud.dataproc.v1.AutoscalingPolicy) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listAutoscalingPolicies.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listAutoscalingPolicies, request)); + assert.strictEqual( + (client.descriptors.page.listAutoscalingPolicies.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listAutoscalingPoliciesStream with error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listAutoscalingPolicies.createStream = 
stubPageStreamingCall(undefined, expectedError); + const stream = client.listAutoscalingPoliciesStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.dataproc.v1.AutoscalingPolicy[] = []; + stream.on('data', (response: protos.google.cloud.dataproc.v1.AutoscalingPolicy) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listAutoscalingPolicies.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listAutoscalingPolicies, request)); + assert.strictEqual( + (client.descriptors.page.listAutoscalingPolicies.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listAutoscalingPolicies without error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), + ]; + client.descriptors.page.listAutoscalingPolicies.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.dataproc.v1.IAutoscalingPolicy[] = []; + const iterable = client.listAutoscalingPoliciesAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + 
assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listAutoscalingPolicies.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listAutoscalingPolicies.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listAutoscalingPolicies with error', async () => { + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listAutoscalingPolicies.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listAutoscalingPoliciesAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.dataproc.v1.IAutoscalingPolicy[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listAutoscalingPolicies.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listAutoscalingPolicies.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('batch', () => { + const fakePath = "/rendered/path/batch"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + batch: "batchValue", + }; + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', 
private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.batchPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.batchPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('batchPath', () => { + const result = client.batchPath("projectValue", "locationValue", "batchValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.batchPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromBatchName', () => { + const result = client.matchProjectFromBatchName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromBatchName', () => { + const result = client.matchLocationFromBatchName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchBatchFromBatchName', () => { + const result = client.matchBatchFromBatchName(fakePath); + assert.strictEqual(result, "batchValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + 
assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('project', () => { + const fakePath = "/rendered/path/project"; + const expectedParameters = { + project: "projectValue", + }; + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectPath', () => { + const result = client.projectPath("projectValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectName', () => { + const result = client.matchProjectFromProjectName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectLocationAutoscalingPolicy', () => { + const fakePath = "/rendered/path/projectLocationAutoscalingPolicy"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + 
autoscaling_policy: "autoscalingPolicyValue", + }; + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectLocationAutoscalingPolicyPath', () => { + const result = client.projectLocationAutoscalingPolicyPath("projectValue", "locationValue", "autoscalingPolicyValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchProjectFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchLocationFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "autoscalingPolicyValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectLocationWorkflowTemplate', () => { + const fakePath = 
"/rendered/path/projectLocationWorkflowTemplate"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow_template: "workflowTemplateValue", + }; + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectLocationWorkflowTemplatePath', () => { + const result = client.projectLocationWorkflowTemplatePath("projectValue", "locationValue", "workflowTemplateValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchProjectFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchLocationFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowTemplateFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "workflowTemplateValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + 
.getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectRegionAutoscalingPolicy', () => { + const fakePath = "/rendered/path/projectRegionAutoscalingPolicy"; + const expectedParameters = { + project: "projectValue", + region: "regionValue", + autoscaling_policy: "autoscalingPolicyValue", + }; + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectRegionAutoscalingPolicyPath', () => { + const result = client.projectRegionAutoscalingPolicyPath("projectValue", "regionValue", "autoscalingPolicyValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchProjectFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchRegionFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchRegionFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "regionValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "autoscalingPolicyValue"); 
+ assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectRegionWorkflowTemplate', () => { + const fakePath = "/rendered/path/projectRegionWorkflowTemplate"; + const expectedParameters = { + project: "projectValue", + region: "regionValue", + workflow_template: "workflowTemplateValue", + }; + const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectRegionWorkflowTemplatePath', () => { + const result = client.projectRegionWorkflowTemplatePath("projectValue", "regionValue", "workflowTemplateValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchProjectFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchRegionFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchRegionFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "regionValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowTemplateFromProjectRegionWorkflowTemplateName', () => { + const result = 
client.matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "workflowTemplateValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v1/test/gapic_batch_controller_v1.ts b/owl-bot-staging/v1/test/gapic_batch_controller_v1.ts new file mode 100644 index 00000000..7cf9c2bd --- /dev/null +++ b/owl-bot-staging/v1/test/gapic_batch_controller_v1.ts @@ -0,0 +1,1060 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. 
** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import { describe, it } from 'mocha'; +import * as batchcontrollerModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf, LROperation, operationsProtos} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { + return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubLongRunningCall(response?: ResponseType, callError?: Error, lroError?: Error) { + const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); + const mockOperation = { + promise: innerStub, + }; + return callError ? sinon.stub().rejects(callError) : sinon.stub().resolves([mockOperation]); +} + +function stubLongRunningCallWithCallback(response?: ResponseType, callError?: Error, lroError?: Error) { + const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); + const mockOperation = { + promise: innerStub, + }; + return callError ? sinon.stub().callsArgWith(2, callError) : sinon.stub().callsArgWith(2, null, mockOperation); +} + +function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v1.BatchControllerClient', () => { + it('has servicePath', () => { + const servicePath = batchcontrollerModule.v1.BatchControllerClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = batchcontrollerModule.v1.BatchControllerClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = batchcontrollerModule.v1.BatchControllerClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new batchcontrollerModule.v1.BatchControllerClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 
'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.batchControllerStub, undefined); + await client.initialize(); + assert(client.batchControllerStub); + }); + + it('has close method for the initialized client', done => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.batchControllerStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.batchControllerStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + + describe('getBatch', () => { + 
it('invokes getBatch without error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetBatchRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()); + client.innerApiCalls.getBatch = stubSimpleCall(expectedResponse); + const [response] = await client.getBatch(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getBatch as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getBatch without error using callback', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetBatchRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()); + client.innerApiCalls.getBatch = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getBatch( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IBatch|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + 
assert((client.innerApiCalls.getBatch as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getBatch with error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetBatchRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getBatch = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getBatch(request), expectedError); + assert((client.innerApiCalls.getBatch as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getBatch with closed client', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetBatchRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getBatch(request), expectedError); + }); + }); + + describe('deleteBatch', () => { + it('invokes deleteBatch without error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteBatchRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + 
headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteBatch = stubSimpleCall(expectedResponse); + const [response] = await client.deleteBatch(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteBatch as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteBatch without error using callback', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteBatchRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteBatch = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.deleteBatch( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteBatch as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteBatch with error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteBatchRequest()); + 
request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteBatch = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteBatch(request), expectedError); + assert((client.innerApiCalls.deleteBatch as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteBatch with closed client', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteBatchRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteBatch(request), expectedError); + }); + }); + + describe('createBatch', () => { + it('invokes createBatch without error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateBatchRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.createBatch = stubLongRunningCall(expectedResponse); + const [operation] = await client.createBatch(request); + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createBatch as 
SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createBatch without error using callback', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateBatchRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.createBatch = stubLongRunningCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createBatch( + request, + (err?: Error|null, + result?: LROperation|null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const operation = await promise as LROperation; + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createBatch as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createBatch with call error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateBatchRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createBatch = stubLongRunningCall(undefined, 
expectedError); + await assert.rejects(client.createBatch(request), expectedError); + assert((client.innerApiCalls.createBatch as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createBatch with LRO error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateBatchRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createBatch = stubLongRunningCall(undefined, undefined, expectedError); + const [operation] = await client.createBatch(request); + await assert.rejects(operation.promise(), expectedError); + assert((client.innerApiCalls.createBatch as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes checkCreateBatchProgress without error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); + expectedResponse.name = 'test'; + expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; + expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} + + client.operationsClient.getOperation = stubSimpleCall(expectedResponse); + const decodedOperation = await client.checkCreateBatchProgress(expectedResponse.name); + assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); + assert(decodedOperation.metadata); + assert((client.operationsClient.getOperation as 
SinonStub).getCall(0)); + }); + + it('invokes checkCreateBatchProgress with error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedError = new Error('expected'); + + client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.checkCreateBatchProgress(''), expectedError); + assert((client.operationsClient.getOperation as SinonStub) + .getCall(0)); + }); + }); + + describe('listBatches', () => { + it('invokes listBatches without error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), + ]; + client.innerApiCalls.listBatches = stubSimpleCall(expectedResponse); + const [response] = await client.listBatches(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listBatches as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listBatches without error using callback', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = 
generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), + ]; + client.innerApiCalls.listBatches = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listBatches( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IBatch[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listBatches as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listBatches with error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listBatches = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listBatches(request), expectedError); + assert((client.innerApiCalls.listBatches as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listBatchesStream without error', async () 
=> { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), + ]; + client.descriptors.page.listBatches.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listBatchesStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.dataproc.v1.Batch[] = []; + stream.on('data', (response: protos.google.cloud.dataproc.v1.Batch) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listBatches.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listBatches, request)); + assert.strictEqual( + (client.descriptors.page.listBatches.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listBatchesStream with error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + 
client.descriptors.page.listBatches.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listBatchesStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.dataproc.v1.Batch[] = []; + stream.on('data', (response: protos.google.cloud.dataproc.v1.Batch) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listBatches.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listBatches, request)); + assert.strictEqual( + (client.descriptors.page.listBatches.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listBatches without error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), + ]; + client.descriptors.page.listBatches.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.dataproc.v1.IBatch[] = []; + const iterable = client.listBatchesAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listBatches.asyncIterate as SinonStub) + 
.getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listBatches.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listBatches with error', async () => { + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listBatches.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listBatchesAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.dataproc.v1.IBatch[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listBatches.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listBatches.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('batch', () => { + const fakePath = "/rendered/path/batch"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + batch: "batchValue", + }; + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.batchPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.batchPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('batchPath', () => { + const result = 
client.batchPath("projectValue", "locationValue", "batchValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.batchPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromBatchName', () => { + const result = client.matchProjectFromBatchName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromBatchName', () => { + const result = client.matchLocationFromBatchName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchBatchFromBatchName', () => { + const result = client.matchBatchFromBatchName(fakePath); + assert.strictEqual(result, "batchValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('location', () => { + const fakePath = "/rendered/path/location"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + }; + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.locationPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.locationPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('locationPath', () => { + const result = client.locationPath("projectValue", "locationValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.locationPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromLocationName', () => { + const result = client.matchProjectFromLocationName(fakePath); + assert.strictEqual(result, "projectValue"); + 
assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromLocationName', () => { + const result = client.matchLocationFromLocationName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.locationPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('project', () => { + const fakePath = "/rendered/path/project"; + const expectedParameters = { + project: "projectValue", + }; + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectPath', () => { + const result = client.projectPath("projectValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectName', () => { + const result = client.matchProjectFromProjectName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectLocationAutoscalingPolicy', () => { + const fakePath = "/rendered/path/projectLocationAutoscalingPolicy"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + autoscaling_policy: "autoscalingPolicyValue", + }; + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render = + sinon.stub().returns(fakePath); + 
client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectLocationAutoscalingPolicyPath', () => { + const result = client.projectLocationAutoscalingPolicyPath("projectValue", "locationValue", "autoscalingPolicyValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchProjectFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchLocationFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "autoscalingPolicyValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectLocationWorkflowTemplate', () => { + const fakePath = "/rendered/path/projectLocationWorkflowTemplate"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow_template: "workflowTemplateValue", + }; + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + 
client.initialize(); + client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectLocationWorkflowTemplatePath', () => { + const result = client.projectLocationWorkflowTemplatePath("projectValue", "locationValue", "workflowTemplateValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchProjectFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchLocationFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowTemplateFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "workflowTemplateValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectRegionAutoscalingPolicy', () => { + const fakePath = "/rendered/path/projectRegionAutoscalingPolicy"; + const expectedParameters = { + project: "projectValue", + region: "regionValue", + autoscaling_policy: "autoscalingPolicyValue", + }; + const client = new 
batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectRegionAutoscalingPolicyPath', () => { + const result = client.projectRegionAutoscalingPolicyPath("projectValue", "regionValue", "autoscalingPolicyValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchProjectFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchRegionFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchRegionFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "regionValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "autoscalingPolicyValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectRegionWorkflowTemplate', () => { + const fakePath = "/rendered/path/projectRegionWorkflowTemplate"; + const expectedParameters = { + project: "projectValue", + region: 
"regionValue", + workflow_template: "workflowTemplateValue", + }; + const client = new batchcontrollerModule.v1.BatchControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectRegionWorkflowTemplatePath', () => { + const result = client.projectRegionWorkflowTemplatePath("projectValue", "regionValue", "workflowTemplateValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchProjectFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchRegionFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchRegionFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "regionValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowTemplateFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "workflowTemplateValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v1/test/gapic_cluster_controller_v1.ts 
b/owl-bot-staging/v1/test/gapic_cluster_controller_v1.ts new file mode 100644 index 00000000..cc84ac17 --- /dev/null +++ b/owl-bot-staging/v1/test/gapic_cluster_controller_v1.ts @@ -0,0 +1,1720 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import { describe, it } from 'mocha'; +import * as clustercontrollerModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf, LROperation, operationsProtos} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { + return error ? 
sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubLongRunningCall(response?: ResponseType, callError?: Error, lroError?: Error) { + const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); + const mockOperation = { + promise: innerStub, + }; + return callError ? sinon.stub().rejects(callError) : sinon.stub().resolves([mockOperation]); +} + +function stubLongRunningCallWithCallback(response?: ResponseType, callError?: Error, lroError?: Error) { + const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); + const mockOperation = { + promise: innerStub, + }; + return callError ? sinon.stub().callsArgWith(2, callError) : sinon.stub().callsArgWith(2, null, mockOperation); +} + +function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v1.ClusterControllerClient', () => { + it('has servicePath', () => { + const servicePath = clustercontrollerModule.v1.ClusterControllerClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = clustercontrollerModule.v1.ClusterControllerClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = clustercontrollerModule.v1.ClusterControllerClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.clusterControllerStub, undefined); + await client.initialize(); + assert(client.clusterControllerStub); + }); + + it('has close method for the initialized client', done => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.clusterControllerStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.clusterControllerStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + 
assert.strictEqual(result, fakeProjectId); + }); + + describe('getCluster', () => { + it('invokes getCluster without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()); + client.innerApiCalls.getCluster = stubSimpleCall(expectedResponse); + const [response] = await client.getCluster(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getCluster without error using callback', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()); + client.innerApiCalls.getCluster = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getCluster( + request, 
+ (err?: Error|null, result?: protos.google.cloud.dataproc.v1.ICluster|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getCluster with error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getCluster = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getCluster(request), expectedError); + assert((client.innerApiCalls.getCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getCluster with closed client', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getCluster(request), expectedError); + }); + }); + + describe('createCluster', () => { + it('invokes createCluster without error', 
async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateClusterRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.createCluster = stubLongRunningCall(expectedResponse); + const [operation] = await client.createCluster(request); + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createCluster without error using callback', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateClusterRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.createCluster = stubLongRunningCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createCluster( + request, + (err?: Error|null, + result?: LROperation|null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + 
const operation = await promise as LROperation; + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createCluster with call error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateClusterRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createCluster = stubLongRunningCall(undefined, expectedError); + await assert.rejects(client.createCluster(request), expectedError); + assert((client.innerApiCalls.createCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createCluster with LRO error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateClusterRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createCluster = stubLongRunningCall(undefined, undefined, expectedError); + const [operation] = await client.createCluster(request); 
+ await assert.rejects(operation.promise(), expectedError); + assert((client.innerApiCalls.createCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes checkCreateClusterProgress without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); + expectedResponse.name = 'test'; + expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; + expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} + + client.operationsClient.getOperation = stubSimpleCall(expectedResponse); + const decodedOperation = await client.checkCreateClusterProgress(expectedResponse.name); + assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); + assert(decodedOperation.metadata); + assert((client.operationsClient.getOperation as SinonStub).getCall(0)); + }); + + it('invokes checkCreateClusterProgress with error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedError = new Error('expected'); + + client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.checkCreateClusterProgress(''), expectedError); + assert((client.operationsClient.getOperation as SinonStub) + .getCall(0)); + }); + }); + + describe('updateCluster', () => { + it('invokes updateCluster without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new 
protos.google.cloud.dataproc.v1.UpdateClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.updateCluster = stubLongRunningCall(expectedResponse); + const [operation] = await client.updateCluster(request); + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.updateCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes updateCluster without error using callback', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.updateCluster = stubLongRunningCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.updateCluster( + request, + (err?: Error|null, + result?: LROperation|null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const operation = await promise as LROperation; + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + 
assert((client.innerApiCalls.updateCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes updateCluster with call error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.updateCluster = stubLongRunningCall(undefined, expectedError); + await assert.rejects(client.updateCluster(request), expectedError); + assert((client.innerApiCalls.updateCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes updateCluster with LRO error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.updateCluster = stubLongRunningCall(undefined, undefined, expectedError); + const [operation] = await client.updateCluster(request); + await assert.rejects(operation.promise(), expectedError); + 
assert((client.innerApiCalls.updateCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes checkUpdateClusterProgress without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); + expectedResponse.name = 'test'; + expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; + expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} + + client.operationsClient.getOperation = stubSimpleCall(expectedResponse); + const decodedOperation = await client.checkUpdateClusterProgress(expectedResponse.name); + assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); + assert(decodedOperation.metadata); + assert((client.operationsClient.getOperation as SinonStub).getCall(0)); + }); + + it('invokes checkUpdateClusterProgress with error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedError = new Error('expected'); + + client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.checkUpdateClusterProgress(''), expectedError); + assert((client.operationsClient.getOperation as SinonStub) + .getCall(0)); + }); + }); + + describe('stopCluster', () => { + it('invokes stopCluster without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StopClusterRequest()); + request.projectId = ''; + 
request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.stopCluster = stubLongRunningCall(expectedResponse); + const [operation] = await client.stopCluster(request); + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.stopCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes stopCluster without error using callback', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StopClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.stopCluster = stubLongRunningCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.stopCluster( + request, + (err?: Error|null, + result?: LROperation|null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const operation = await promise as LROperation; + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.stopCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, 
callback defined above */)); + }); + + it('invokes stopCluster with call error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StopClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.stopCluster = stubLongRunningCall(undefined, expectedError); + await assert.rejects(client.stopCluster(request), expectedError); + assert((client.innerApiCalls.stopCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes stopCluster with LRO error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StopClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.stopCluster = stubLongRunningCall(undefined, undefined, expectedError); + const [operation] = await client.stopCluster(request); + await assert.rejects(operation.promise(), expectedError); + assert((client.innerApiCalls.stopCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + 
it('invokes checkStopClusterProgress without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); + expectedResponse.name = 'test'; + expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; + expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} + + client.operationsClient.getOperation = stubSimpleCall(expectedResponse); + const decodedOperation = await client.checkStopClusterProgress(expectedResponse.name); + assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); + assert(decodedOperation.metadata); + assert((client.operationsClient.getOperation as SinonStub).getCall(0)); + }); + + it('invokes checkStopClusterProgress with error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedError = new Error('expected'); + + client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.checkStopClusterProgress(''), expectedError); + assert((client.operationsClient.getOperation as SinonStub) + .getCall(0)); + }); + }); + + describe('startCluster', () => { + it('invokes startCluster without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StartClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = 
{ + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.startCluster = stubLongRunningCall(expectedResponse); + const [operation] = await client.startCluster(request); + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startCluster without error using callback', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StartClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.startCluster = stubLongRunningCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.startCluster( + request, + (err?: Error|null, + result?: LROperation|null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const operation = await promise as LROperation; + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.startCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes startCluster with call error', async () => { + const client = new 
clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StartClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startCluster = stubLongRunningCall(undefined, expectedError); + await assert.rejects(client.startCluster(request), expectedError); + assert((client.innerApiCalls.startCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes startCluster with LRO error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StartClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.startCluster = stubLongRunningCall(undefined, undefined, expectedError); + const [operation] = await client.startCluster(request); + await assert.rejects(operation.promise(), expectedError); + assert((client.innerApiCalls.startCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes checkStartClusterProgress without error', async () => { + const client = new 
clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); + expectedResponse.name = 'test'; + expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; + expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} + + client.operationsClient.getOperation = stubSimpleCall(expectedResponse); + const decodedOperation = await client.checkStartClusterProgress(expectedResponse.name); + assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); + assert(decodedOperation.metadata); + assert((client.operationsClient.getOperation as SinonStub).getCall(0)); + }); + + it('invokes checkStartClusterProgress with error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedError = new Error('expected'); + + client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.checkStartClusterProgress(''), expectedError); + assert((client.operationsClient.getOperation as SinonStub) + .getCall(0)); + }); + }); + + describe('deleteCluster', () => { + it('invokes deleteCluster without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': 
expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.deleteCluster = stubLongRunningCall(expectedResponse); + const [operation] = await client.deleteCluster(request); + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteCluster without error using callback', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.deleteCluster = stubLongRunningCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.deleteCluster( + request, + (err?: Error|null, + result?: LROperation|null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const operation = await promise as LROperation; + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteCluster with call error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteCluster = stubLongRunningCall(undefined, expectedError); + await assert.rejects(client.deleteCluster(request), expectedError); + assert((client.innerApiCalls.deleteCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteCluster with LRO error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteCluster = stubLongRunningCall(undefined, undefined, expectedError); + const [operation] = await client.deleteCluster(request); + await assert.rejects(operation.promise(), expectedError); + assert((client.innerApiCalls.deleteCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes checkDeleteClusterProgress without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); + expectedResponse.name = 'test'; + expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; + expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} + + client.operationsClient.getOperation = stubSimpleCall(expectedResponse); + const decodedOperation = await client.checkDeleteClusterProgress(expectedResponse.name); + assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); + assert(decodedOperation.metadata); + assert((client.operationsClient.getOperation as SinonStub).getCall(0)); + }); + + it('invokes checkDeleteClusterProgress with error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedError = new Error('expected'); + + client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.checkDeleteClusterProgress(''), expectedError); + assert((client.operationsClient.getOperation as SinonStub) + .getCall(0)); + }); + }); + + describe('diagnoseCluster', () => { + it('invokes diagnoseCluster without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DiagnoseClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = 
generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.diagnoseCluster = stubLongRunningCall(expectedResponse); + const [operation] = await client.diagnoseCluster(request); + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.diagnoseCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes diagnoseCluster without error using callback', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DiagnoseClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.diagnoseCluster = stubLongRunningCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.diagnoseCluster( + request, + (err?: Error|null, + result?: LROperation|null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const operation = await promise as LROperation; + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.diagnoseCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes diagnoseCluster with call error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + 
projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DiagnoseClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.diagnoseCluster = stubLongRunningCall(undefined, expectedError); + await assert.rejects(client.diagnoseCluster(request), expectedError); + assert((client.innerApiCalls.diagnoseCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes diagnoseCluster with LRO error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DiagnoseClusterRequest()); + request.projectId = ''; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.diagnoseCluster = stubLongRunningCall(undefined, undefined, expectedError); + const [operation] = await client.diagnoseCluster(request); + await assert.rejects(operation.promise(), expectedError); + assert((client.innerApiCalls.diagnoseCluster as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes checkDiagnoseClusterProgress without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 
'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); + expectedResponse.name = 'test'; + expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; + expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} + + client.operationsClient.getOperation = stubSimpleCall(expectedResponse); + const decodedOperation = await client.checkDiagnoseClusterProgress(expectedResponse.name); + assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); + assert(decodedOperation.metadata); + assert((client.operationsClient.getOperation as SinonStub).getCall(0)); + }); + + it('invokes checkDiagnoseClusterProgress with error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedError = new Error('expected'); + + client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.checkDiagnoseClusterProgress(''), expectedError); + assert((client.operationsClient.getOperation as SinonStub) + .getCall(0)); + }); + }); + + describe('listClusters', () => { + it('invokes listClusters without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), + 
generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), + ]; + client.innerApiCalls.listClusters = stubSimpleCall(expectedResponse); + const [response] = await client.listClusters(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listClusters as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listClusters without error using callback', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), + ]; + client.innerApiCalls.listClusters = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listClusters( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.ICluster[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listClusters as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listClusters with error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + 
credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listClusters = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listClusters(request), expectedError); + assert((client.innerApiCalls.listClusters as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listClustersStream without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), + ]; + client.descriptors.page.listClusters.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listClustersStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.dataproc.v1.Cluster[] = []; + stream.on('data', (response: protos.google.cloud.dataproc.v1.Cluster) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = 
await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listClusters.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listClusters, request)); + assert.strictEqual( + (client.descriptors.page.listClusters.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listClustersStream with error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedError = new Error('expected'); + client.descriptors.page.listClusters.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listClustersStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.dataproc.v1.Cluster[] = []; + stream.on('data', (response: protos.google.cloud.dataproc.v1.Cluster) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listClusters.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listClusters, request)); + assert.strictEqual( + (client.descriptors.page.listClusters.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listClusters without error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 
'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), + ]; + client.descriptors.page.listClusters.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.dataproc.v1.ICluster[] = []; + const iterable = client.listClustersAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listClusters.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listClusters.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listClusters with error', async () => { + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion=";const expectedError = new Error('expected'); + client.descriptors.page.listClusters.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listClustersAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.dataproc.v1.ICluster[] = []; + for await 
(const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listClusters.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listClusters.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('batch', () => { + const fakePath = "/rendered/path/batch"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + batch: "batchValue", + }; + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.batchPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.batchPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('batchPath', () => { + const result = client.batchPath("projectValue", "locationValue", "batchValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.batchPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromBatchName', () => { + const result = client.matchProjectFromBatchName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromBatchName', () => { + const result = client.matchLocationFromBatchName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchBatchFromBatchName', () => { + const result = client.matchBatchFromBatchName(fakePath); + assert.strictEqual(result, "batchValue"); + assert((client.pathTemplates.batchPathTemplate.match as 
SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectLocationAutoscalingPolicy', () => { + const fakePath = "/rendered/path/projectLocationAutoscalingPolicy"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + autoscaling_policy: "autoscalingPolicyValue", + }; + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectLocationAutoscalingPolicyPath', () => { + const result = client.projectLocationAutoscalingPolicyPath("projectValue", "locationValue", "autoscalingPolicyValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchProjectFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchLocationFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(fakePath); + 
assert.strictEqual(result, "autoscalingPolicyValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectLocationWorkflowTemplate', () => { + const fakePath = "/rendered/path/projectLocationWorkflowTemplate"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow_template: "workflowTemplateValue", + }; + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectLocationWorkflowTemplatePath', () => { + const result = client.projectLocationWorkflowTemplatePath("projectValue", "locationValue", "workflowTemplateValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchProjectFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchLocationFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowTemplateFromProjectLocationWorkflowTemplateName', 
() => { + const result = client.matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "workflowTemplateValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectRegionAutoscalingPolicy', () => { + const fakePath = "/rendered/path/projectRegionAutoscalingPolicy"; + const expectedParameters = { + project: "projectValue", + region: "regionValue", + autoscaling_policy: "autoscalingPolicyValue", + }; + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectRegionAutoscalingPolicyPath', () => { + const result = client.projectRegionAutoscalingPolicyPath("projectValue", "regionValue", "autoscalingPolicyValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchProjectFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchRegionFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchRegionFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "regionValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + 
.getCall(-1).calledWith(fakePath)); + }); + + it('matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "autoscalingPolicyValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectRegionWorkflowTemplate', () => { + const fakePath = "/rendered/path/projectRegionWorkflowTemplate"; + const expectedParameters = { + project: "projectValue", + region: "regionValue", + workflow_template: "workflowTemplateValue", + }; + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectRegionWorkflowTemplatePath', () => { + const result = client.projectRegionWorkflowTemplatePath("projectValue", "regionValue", "workflowTemplateValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchProjectFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchRegionFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchRegionFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "regionValue"); + 
assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowTemplateFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "workflowTemplateValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('service', () => { + const fakePath = "/rendered/path/service"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + service: "serviceValue", + }; + const client = new clustercontrollerModule.v1.ClusterControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.servicePathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.servicePathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('servicePath', () => { + const result = client.servicePath("projectValue", "locationValue", "serviceValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.servicePathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromServiceName', () => { + const result = client.matchProjectFromServiceName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.servicePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromServiceName', () => { + const result = client.matchLocationFromServiceName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.servicePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchServiceFromServiceName', () => { + const result = 
client.matchServiceFromServiceName(fakePath); + assert.strictEqual(result, "serviceValue"); + assert((client.pathTemplates.servicePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v1/test/gapic_job_controller_v1.ts b/owl-bot-staging/v1/test/gapic_job_controller_v1.ts new file mode 100644 index 00000000..0241f3ac --- /dev/null +++ b/owl-bot-staging/v1/test/gapic_job_controller_v1.ts @@ -0,0 +1,1330 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import { describe, it } from 'mocha'; +import * as jobcontrollerModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf, LROperation, operationsProtos} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error ? 
sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { + return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubLongRunningCall(response?: ResponseType, callError?: Error, lroError?: Error) { + const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); + const mockOperation = { + promise: innerStub, + }; + return callError ? sinon.stub().rejects(callError) : sinon.stub().resolves([mockOperation]); +} + +function stubLongRunningCallWithCallback(response?: ResponseType, callError?: Error, lroError?: Error) { + const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); + const mockOperation = { + promise: innerStub, + }; + return callError ? sinon.stub().callsArgWith(2, callError) : sinon.stub().callsArgWith(2, null, mockOperation); +} + +function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v1.JobControllerClient', () => { + it('has servicePath', () => { + const servicePath = jobcontrollerModule.v1.JobControllerClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = jobcontrollerModule.v1.JobControllerClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = jobcontrollerModule.v1.JobControllerClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new jobcontrollerModule.v1.JobControllerClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 
'bogus', + }); + assert.strictEqual(client.jobControllerStub, undefined); + await client.initialize(); + assert(client.jobControllerStub); + }); + + it('has close method for the initialized client', done => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.jobControllerStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.jobControllerStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + + describe('submitJob', () => { + it('invokes submitJob without error', async 
() => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); + client.innerApiCalls.submitJob = stubSimpleCall(expectedResponse); + const [response] = await client.submitJob(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.submitJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes submitJob without error using callback', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); + client.innerApiCalls.submitJob = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.submitJob( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IJob|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + 
assert((client.innerApiCalls.submitJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes submitJob with error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.submitJob = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.submitJob(request), expectedError); + assert((client.innerApiCalls.submitJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes submitJob with closed client', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); + request.projectId = ''; + request.region = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.submitJob(request), expectedError); + }); + }); + + describe('getJob', () => { + it('invokes getJob without error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = 
''; + const expectedHeaderRequestParams = "project_id=®ion=&job_id="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); + client.innerApiCalls.getJob = stubSimpleCall(expectedResponse); + const [response] = await client.getJob(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getJob without error using callback', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = "project_id=®ion=&job_id="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); + client.innerApiCalls.getJob = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getJob( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IJob|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getJob with error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, 
+ projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = "project_id=®ion=&job_id="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getJob = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getJob(request), expectedError); + assert((client.innerApiCalls.getJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getJob with closed client', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getJob(request), expectedError); + }); + }); + + describe('updateJob', () => { + it('invokes updateJob without error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = "project_id=®ion=&job_id="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new 
protos.google.cloud.dataproc.v1.Job()); + client.innerApiCalls.updateJob = stubSimpleCall(expectedResponse); + const [response] = await client.updateJob(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.updateJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes updateJob without error using callback', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = "project_id=®ion=&job_id="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); + client.innerApiCalls.updateJob = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.updateJob( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IJob|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.updateJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes updateJob with error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + 
const expectedHeaderRequestParams = "project_id=®ion=&job_id="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.updateJob = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.updateJob(request), expectedError); + assert((client.innerApiCalls.updateJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes updateJob with closed client', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.updateJob(request), expectedError); + }); + }); + + describe('cancelJob', () => { + it('invokes cancelJob without error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CancelJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = "project_id=®ion=&job_id="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); + client.innerApiCalls.cancelJob = stubSimpleCall(expectedResponse); + const [response] = await client.cancelJob(request); + assert.deepStrictEqual(response, expectedResponse); + 
assert((client.innerApiCalls.cancelJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes cancelJob without error using callback', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CancelJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = "project_id=®ion=&job_id="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); + client.innerApiCalls.cancelJob = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.cancelJob( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IJob|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.cancelJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes cancelJob with error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CancelJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = "project_id=®ion=&job_id="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new 
Error('expected'); + client.innerApiCalls.cancelJob = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.cancelJob(request), expectedError); + assert((client.innerApiCalls.cancelJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes cancelJob with closed client', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CancelJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.cancelJob(request), expectedError); + }); + }); + + describe('deleteJob', () => { + it('invokes deleteJob without error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = "project_id=®ion=&job_id="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteJob = stubSimpleCall(expectedResponse); + const [response] = await client.deleteJob(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteJob without error using callback', async () => { + const client = new 
jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = "project_id=®ion=&job_id="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteJob = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.deleteJob( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteJob as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteJob with error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = "project_id=®ion=&job_id="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteJob = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteJob(request), expectedError); + assert((client.innerApiCalls.deleteJob as SinonStub) + 
.getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteJob with closed client', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteJobRequest()); + request.projectId = ''; + request.region = ''; + request.jobId = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteJob(request), expectedError); + }); + }); + + describe('submitJobAsOperation', () => { + it('invokes submitJobAsOperation without error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.submitJobAsOperation = stubLongRunningCall(expectedResponse); + const [operation] = await client.submitJobAsOperation(request); + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.submitJobAsOperation as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes submitJobAsOperation without error using callback', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + 
client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.submitJobAsOperation = stubLongRunningCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.submitJobAsOperation( + request, + (err?: Error|null, + result?: LROperation|null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const operation = await promise as LROperation; + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.submitJobAsOperation as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes submitJobAsOperation with call error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.submitJobAsOperation = stubLongRunningCall(undefined, expectedError); + await assert.rejects(client.submitJobAsOperation(request), expectedError); + assert((client.innerApiCalls.submitJobAsOperation as SinonStub) + .getCall(0).calledWith(request, expectedOptions, 
undefined)); + }); + + it('invokes submitJobAsOperation with LRO error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.submitJobAsOperation = stubLongRunningCall(undefined, undefined, expectedError); + const [operation] = await client.submitJobAsOperation(request); + await assert.rejects(operation.promise(), expectedError); + assert((client.innerApiCalls.submitJobAsOperation as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes checkSubmitJobAsOperationProgress without error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); + expectedResponse.name = 'test'; + expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; + expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} + + client.operationsClient.getOperation = stubSimpleCall(expectedResponse); + const decodedOperation = await client.checkSubmitJobAsOperationProgress(expectedResponse.name); + assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); + assert(decodedOperation.metadata); + assert((client.operationsClient.getOperation as SinonStub).getCall(0)); + }); + + it('invokes checkSubmitJobAsOperationProgress with error', async () => { + const 
client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedError = new Error('expected'); + + client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.checkSubmitJobAsOperationProgress(''), expectedError); + assert((client.operationsClient.getOperation as SinonStub) + .getCall(0)); + }); + }); + + describe('listJobs', () => { + it('invokes listJobs without error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), + ]; + client.innerApiCalls.listJobs = stubSimpleCall(expectedResponse); + const [response] = await client.listJobs(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listJobs as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listJobs without error using callback', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); + request.projectId = ''; + request.region = 
''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), + ]; + client.innerApiCalls.listJobs = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listJobs( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IJob[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listJobs as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listJobs with error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listJobs = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listJobs(request), expectedError); + assert((client.innerApiCalls.listJobs as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listJobsStream without error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), + ]; + client.descriptors.page.listJobs.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listJobsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.dataproc.v1.Job[] = []; + stream.on('data', (response: protos.google.cloud.dataproc.v1.Job) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listJobs.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listJobs, request)); + assert.strictEqual( + (client.descriptors.page.listJobs.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listJobsStream with error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedError = new Error('expected'); + client.descriptors.page.listJobs.createStream = 
stubPageStreamingCall(undefined, expectedError); + const stream = client.listJobsStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.dataproc.v1.Job[] = []; + stream.on('data', (response: protos.google.cloud.dataproc.v1.Job) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listJobs.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listJobs, request)); + assert.strictEqual( + (client.descriptors.page.listJobs.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listJobs without error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), + ]; + client.descriptors.page.listJobs.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.dataproc.v1.IJob[] = []; + const iterable = client.listJobsAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listJobs.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + 
(client.descriptors.page.listJobs.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listJobs with error', async () => { + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); + request.projectId = ''; + request.region = ''; + const expectedHeaderRequestParams = "project_id=®ion=";const expectedError = new Error('expected'); + client.descriptors.page.listJobs.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listJobsAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.dataproc.v1.IJob[] = []; + for await (const resource of iterable) { + responses.push(resource!); + } + }); + assert.deepStrictEqual( + (client.descriptors.page.listJobs.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listJobs.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('batch', () => { + const fakePath = "/rendered/path/batch"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + batch: "batchValue", + }; + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.batchPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.batchPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('batchPath', () => { + const result = client.batchPath("projectValue", "locationValue", 
"batchValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.batchPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromBatchName', () => { + const result = client.matchProjectFromBatchName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromBatchName', () => { + const result = client.matchLocationFromBatchName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchBatchFromBatchName', () => { + const result = client.matchBatchFromBatchName(fakePath); + assert.strictEqual(result, "batchValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectLocationAutoscalingPolicy', () => { + const fakePath = "/rendered/path/projectLocationAutoscalingPolicy"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + autoscaling_policy: "autoscalingPolicyValue", + }; + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectLocationAutoscalingPolicyPath', () => { + const result = client.projectLocationAutoscalingPolicyPath("projectValue", "locationValue", "autoscalingPolicyValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render as SinonStub) + 
.getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchProjectFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchLocationFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "autoscalingPolicyValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectLocationWorkflowTemplate', () => { + const fakePath = "/rendered/path/projectLocationWorkflowTemplate"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow_template: "workflowTemplateValue", + }; + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectLocationWorkflowTemplatePath', () => { + const result = client.projectLocationWorkflowTemplatePath("projectValue", "locationValue", "workflowTemplateValue"); + assert.strictEqual(result, 
fakePath); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchProjectFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchLocationFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowTemplateFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "workflowTemplateValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectRegionAutoscalingPolicy', () => { + const fakePath = "/rendered/path/projectRegionAutoscalingPolicy"; + const expectedParameters = { + project: "projectValue", + region: "regionValue", + autoscaling_policy: "autoscalingPolicyValue", + }; + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectRegionAutoscalingPolicyPath', () => { + const result = 
client.projectRegionAutoscalingPolicyPath("projectValue", "regionValue", "autoscalingPolicyValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchProjectFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchRegionFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchRegionFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "regionValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "autoscalingPolicyValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectRegionWorkflowTemplate', () => { + const fakePath = "/rendered/path/projectRegionWorkflowTemplate"; + const expectedParameters = { + project: "projectValue", + region: "regionValue", + workflow_template: "workflowTemplateValue", + }; + const client = new jobcontrollerModule.v1.JobControllerClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match = + sinon.stub().returns(expectedParameters); + 
+ it('projectRegionWorkflowTemplatePath', () => { + const result = client.projectRegionWorkflowTemplatePath("projectValue", "regionValue", "workflowTemplateValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchProjectFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchRegionFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchRegionFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "regionValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowTemplateFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "workflowTemplateValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v1/test/gapic_workflow_template_service_v1.ts b/owl-bot-staging/v1/test/gapic_workflow_template_service_v1.ts new file mode 100644 index 00000000..b7e7828b --- /dev/null +++ b/owl-bot-staging/v1/test/gapic_workflow_template_service_v1.ts @@ -0,0 +1,1402 @@ +// Copyright 2022 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// ** This file is automatically generated by gapic-generator-typescript. ** +// ** https://github.com/googleapis/gapic-generator-typescript ** +// ** All changes to this file may be overwritten. ** + +import * as protos from '../protos/protos'; +import * as assert from 'assert'; +import * as sinon from 'sinon'; +import {SinonStub} from 'sinon'; +import { describe, it } from 'mocha'; +import * as workflowtemplateserviceModule from '../src'; + +import {PassThrough} from 'stream'; + +import {protobuf, LROperation, operationsProtos} from 'google-gax'; + +function generateSampleMessage(instance: T) { + const filledObject = (instance.constructor as typeof protobuf.Message) + .toObject(instance as protobuf.Message, {defaults: true}); + return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; +} + +function stubSimpleCall(response?: ResponseType, error?: Error) { + return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); +} + +function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { + return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); +} + +function stubLongRunningCall(response?: ResponseType, callError?: Error, lroError?: Error) { + const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); + const mockOperation = { + promise: innerStub, + }; + return callError ? 
sinon.stub().rejects(callError) : sinon.stub().resolves([mockOperation]); +} + +function stubLongRunningCallWithCallback(response?: ResponseType, callError?: Error, lroError?: Error) { + const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); + const mockOperation = { + promise: innerStub, + }; + return callError ? sinon.stub().callsArgWith(2, callError) : sinon.stub().callsArgWith(2, null, mockOperation); +} + +function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { + const pagingStub = sinon.stub(); + if (responses) { + for (let i = 0; i < responses.length; ++i) { + pagingStub.onCall(i).callsArgWith(2, null, responses[i]); + } + } + const transformStub = error ? sinon.stub().callsArgWith(2, error) : pagingStub; + const mockStream = new PassThrough({ + objectMode: true, + transform: transformStub, + }); + // trigger as many responses as needed + if (responses) { + for (let i = 0; i < responses.length; ++i) { + setImmediate(() => { mockStream.write({}); }); + } + setImmediate(() => { mockStream.end(); }); + } else { + setImmediate(() => { mockStream.write({}); }); + setImmediate(() => { mockStream.end(); }); + } + return sinon.stub().returns(mockStream); +} + +function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { + let counter = 0; + const asyncIterable = { + [Symbol.asyncIterator]() { + return { + async next() { + if (error) { + return Promise.reject(error); + } + if (counter >= responses!.length) { + return Promise.resolve({done: true, value: undefined}); + } + return Promise.resolve({done: false, value: responses![counter++]}); + } + }; + } + }; + return sinon.stub().returns(asyncIterable); +} + +describe('v1.WorkflowTemplateServiceClient', () => { + it('has servicePath', () => { + const servicePath = workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient.servicePath; + assert(servicePath); + }); + + it('has apiEndpoint', () => { + const apiEndpoint = 
workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient.apiEndpoint; + assert(apiEndpoint); + }); + + it('has port', () => { + const port = workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient.port; + assert(port); + assert(typeof port === 'number'); + }); + + it('should create a client with no option', () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient(); + assert(client); + }); + + it('should create a client with gRPC fallback', () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + fallback: true, + }); + assert(client); + }); + + it('has initialize method and supports deferred initialization', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.workflowTemplateServiceStub, undefined); + await client.initialize(); + assert(client.workflowTemplateServiceStub); + }); + + it('has close method for the initialized client', done => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + assert(client.workflowTemplateServiceStub); + client.close().then(() => { + done(); + }); + }); + + it('has close method for the non-initialized client', done => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + assert.strictEqual(client.workflowTemplateServiceStub, undefined); + client.close().then(() => { + done(); + }); + }); + + it('has getProjectId method', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 
'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); + const result = await client.getProjectId(); + assert.strictEqual(result, fakeProjectId); + assert((client.auth.getProjectId as SinonStub).calledWithExactly()); + }); + + it('has getProjectId method with callback', async () => { + const fakeProjectId = 'fake-project-id'; + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); + const promise = new Promise((resolve, reject) => { + client.getProjectId((err?: Error|null, projectId?: string|null) => { + if (err) { + reject(err); + } else { + resolve(projectId); + } + }); + }); + const result = await promise; + assert.strictEqual(result, fakeProjectId); + }); + + describe('createWorkflowTemplate', () => { + it('invokes createWorkflowTemplate without error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()); + client.innerApiCalls.createWorkflowTemplate = stubSimpleCall(expectedResponse); + const [response] = await client.createWorkflowTemplate(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + 
it('invokes createWorkflowTemplate without error using callback', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()); + client.innerApiCalls.createWorkflowTemplate = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.createWorkflowTemplate( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IWorkflowTemplate|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.createWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes createWorkflowTemplate with error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.createWorkflowTemplate = stubSimpleCall(undefined, 
expectedError); + await assert.rejects(client.createWorkflowTemplate(request), expectedError); + assert((client.innerApiCalls.createWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes createWorkflowTemplate with closed client', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest()); + request.parent = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.createWorkflowTemplate(request), expectedError); + }); + }); + + describe('getWorkflowTemplate', () => { + it('invokes getWorkflowTemplate without error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetWorkflowTemplateRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()); + client.innerApiCalls.getWorkflowTemplate = stubSimpleCall(expectedResponse); + const [response] = await client.getWorkflowTemplate(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getWorkflowTemplate without error using callback', async () => { + const client = new 
workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetWorkflowTemplateRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()); + client.innerApiCalls.getWorkflowTemplate = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.getWorkflowTemplate( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IWorkflowTemplate|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.getWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes getWorkflowTemplate with error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetWorkflowTemplateRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.getWorkflowTemplate = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.getWorkflowTemplate(request), expectedError); + 
assert((client.innerApiCalls.getWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes getWorkflowTemplate with closed client', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetWorkflowTemplateRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.getWorkflowTemplate(request), expectedError); + }); + }); + + describe('updateWorkflowTemplate', () => { + it('invokes updateWorkflowTemplate without error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest()); + request.template = {}; + request.template.name = ''; + const expectedHeaderRequestParams = "template.name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()); + client.innerApiCalls.updateWorkflowTemplate = stubSimpleCall(expectedResponse); + const [response] = await client.updateWorkflowTemplate(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.updateWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes updateWorkflowTemplate without error using callback', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + 
credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest()); + request.template = {}; + request.template.name = ''; + const expectedHeaderRequestParams = "template.name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()); + client.innerApiCalls.updateWorkflowTemplate = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.updateWorkflowTemplate( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IWorkflowTemplate|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.updateWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes updateWorkflowTemplate with error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest()); + request.template = {}; + request.template.name = ''; + const expectedHeaderRequestParams = "template.name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.updateWorkflowTemplate = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.updateWorkflowTemplate(request), expectedError); + 
assert((client.innerApiCalls.updateWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes updateWorkflowTemplate with closed client', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest()); + request.template = {}; + request.template.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.updateWorkflowTemplate(request), expectedError); + }); + }); + + describe('deleteWorkflowTemplate', () => { + it('invokes deleteWorkflowTemplate without error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteWorkflowTemplate = stubSimpleCall(expectedResponse); + const [response] = await client.deleteWorkflowTemplate(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes deleteWorkflowTemplate without error using callback', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: 
{client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); + client.innerApiCalls.deleteWorkflowTemplate = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.deleteWorkflowTemplate( + request, + (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.deleteWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes deleteWorkflowTemplate with error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.deleteWorkflowTemplate = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.deleteWorkflowTemplate(request), expectedError); + assert((client.innerApiCalls.deleteWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + 
it('invokes deleteWorkflowTemplate with closed client', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest()); + request.name = ''; + const expectedError = new Error('The client has already been closed.'); + client.close(); + await assert.rejects(client.deleteWorkflowTemplate(request), expectedError); + }); + }); + + describe('instantiateWorkflowTemplate', () => { + it('invokes instantiateWorkflowTemplate without error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.instantiateWorkflowTemplate = stubLongRunningCall(expectedResponse); + const [operation] = await client.instantiateWorkflowTemplate(request); + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.instantiateWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes instantiateWorkflowTemplate without error using callback', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + 
client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.instantiateWorkflowTemplate = stubLongRunningCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.instantiateWorkflowTemplate( + request, + (err?: Error|null, + result?: LROperation|null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const operation = await promise as LROperation; + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.instantiateWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes instantiateWorkflowTemplate with call error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.instantiateWorkflowTemplate = stubLongRunningCall(undefined, expectedError); + await assert.rejects(client.instantiateWorkflowTemplate(request), expectedError); + assert((client.innerApiCalls.instantiateWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, 
expectedOptions, undefined)); + }); + + it('invokes instantiateWorkflowTemplate with LRO error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest()); + request.name = ''; + const expectedHeaderRequestParams = "name="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.instantiateWorkflowTemplate = stubLongRunningCall(undefined, undefined, expectedError); + const [operation] = await client.instantiateWorkflowTemplate(request); + await assert.rejects(operation.promise(), expectedError); + assert((client.innerApiCalls.instantiateWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes checkInstantiateWorkflowTemplateProgress without error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); + expectedResponse.name = 'test'; + expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; + expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} + + client.operationsClient.getOperation = stubSimpleCall(expectedResponse); + const decodedOperation = await client.checkInstantiateWorkflowTemplateProgress(expectedResponse.name); + assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); + assert(decodedOperation.metadata); + assert((client.operationsClient.getOperation as SinonStub).getCall(0)); + }); + + 
it('invokes checkInstantiateWorkflowTemplateProgress with error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedError = new Error('expected'); + + client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.checkInstantiateWorkflowTemplateProgress(''), expectedError); + assert((client.operationsClient.getOperation as SinonStub) + .getCall(0)); + }); + }); + + describe('instantiateInlineWorkflowTemplate', () => { + it('invokes instantiateInlineWorkflowTemplate without error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.instantiateInlineWorkflowTemplate = stubLongRunningCall(expectedResponse); + const [operation] = await client.instantiateInlineWorkflowTemplate(request); + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.instantiateInlineWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes instantiateInlineWorkflowTemplate without error using callback', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', 
private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); + client.innerApiCalls.instantiateInlineWorkflowTemplate = stubLongRunningCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.instantiateInlineWorkflowTemplate( + request, + (err?: Error|null, + result?: LROperation|null + ) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const operation = await promise as LROperation; + const [response] = await operation.promise(); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.instantiateInlineWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes instantiateInlineWorkflowTemplate with call error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.instantiateInlineWorkflowTemplate = stubLongRunningCall(undefined, expectedError); + await assert.rejects(client.instantiateInlineWorkflowTemplate(request), 
expectedError); + assert((client.innerApiCalls.instantiateInlineWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes instantiateInlineWorkflowTemplate with LRO error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.instantiateInlineWorkflowTemplate = stubLongRunningCall(undefined, undefined, expectedError); + const [operation] = await client.instantiateInlineWorkflowTemplate(request); + await assert.rejects(operation.promise(), expectedError); + assert((client.innerApiCalls.instantiateInlineWorkflowTemplate as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes checkInstantiateInlineWorkflowTemplateProgress without error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); + expectedResponse.name = 'test'; + expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; + expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} + + client.operationsClient.getOperation = stubSimpleCall(expectedResponse); + const decodedOperation = await client.checkInstantiateInlineWorkflowTemplateProgress(expectedResponse.name); + 
assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); + assert(decodedOperation.metadata); + assert((client.operationsClient.getOperation as SinonStub).getCall(0)); + }); + + it('invokes checkInstantiateInlineWorkflowTemplateProgress with error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const expectedError = new Error('expected'); + + client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.checkInstantiateInlineWorkflowTemplateProgress(''), expectedError); + assert((client.operationsClient.getOperation as SinonStub) + .getCall(0)); + }); + }); + + describe('listWorkflowTemplates', () => { + it('invokes listWorkflowTemplates without error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), + ]; + client.innerApiCalls.listWorkflowTemplates = stubSimpleCall(expectedResponse); + const [response] = await client.listWorkflowTemplates(request); + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listWorkflowTemplates as SinonStub) + .getCall(0).calledWith(request, 
expectedOptions, undefined)); + }); + + it('invokes listWorkflowTemplates without error using callback', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), + ]; + client.innerApiCalls.listWorkflowTemplates = stubSimpleCallWithCallback(expectedResponse); + const promise = new Promise((resolve, reject) => { + client.listWorkflowTemplates( + request, + (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IWorkflowTemplate[]|null) => { + if (err) { + reject(err); + } else { + resolve(result); + } + }); + }); + const response = await promise; + assert.deepStrictEqual(response, expectedResponse); + assert((client.innerApiCalls.listWorkflowTemplates as SinonStub) + .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); + }); + + it('invokes listWorkflowTemplates with error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedOptions = { + otherArgs: { + 
headers: { + 'x-goog-request-params': expectedHeaderRequestParams, + }, + }, + }; + const expectedError = new Error('expected'); + client.innerApiCalls.listWorkflowTemplates = stubSimpleCall(undefined, expectedError); + await assert.rejects(client.listWorkflowTemplates(request), expectedError); + assert((client.innerApiCalls.listWorkflowTemplates as SinonStub) + .getCall(0).calledWith(request, expectedOptions, undefined)); + }); + + it('invokes listWorkflowTemplatesStream without error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), + ]; + client.descriptors.page.listWorkflowTemplates.createStream = stubPageStreamingCall(expectedResponse); + const stream = client.listWorkflowTemplatesStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.dataproc.v1.WorkflowTemplate[] = []; + stream.on('data', (response: protos.google.cloud.dataproc.v1.WorkflowTemplate) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + const responses = await promise; + assert.deepStrictEqual(responses, expectedResponse); + assert((client.descriptors.page.listWorkflowTemplates.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listWorkflowTemplates, request)); + assert.strictEqual( + 
(client.descriptors.page.listWorkflowTemplates.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('invokes listWorkflowTemplatesStream with error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedError = new Error('expected'); + client.descriptors.page.listWorkflowTemplates.createStream = stubPageStreamingCall(undefined, expectedError); + const stream = client.listWorkflowTemplatesStream(request); + const promise = new Promise((resolve, reject) => { + const responses: protos.google.cloud.dataproc.v1.WorkflowTemplate[] = []; + stream.on('data', (response: protos.google.cloud.dataproc.v1.WorkflowTemplate) => { + responses.push(response); + }); + stream.on('end', () => { + resolve(responses); + }); + stream.on('error', (err: Error) => { + reject(err); + }); + }); + await assert.rejects(promise, expectedError); + assert((client.descriptors.page.listWorkflowTemplates.createStream as SinonStub) + .getCall(0).calledWith(client.innerApiCalls.listWorkflowTemplates, request)); + assert.strictEqual( + (client.descriptors.page.listWorkflowTemplates.createStream as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listWorkflowTemplates without error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new 
protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent="; + const expectedResponse = [ + generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), + generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), + ]; + client.descriptors.page.listWorkflowTemplates.asyncIterate = stubAsyncIterationCall(expectedResponse); + const responses: protos.google.cloud.dataproc.v1.IWorkflowTemplate[] = []; + const iterable = client.listWorkflowTemplatesAsync(request); + for await (const resource of iterable) { + responses.push(resource!); + } + assert.deepStrictEqual(responses, expectedResponse); + assert.deepStrictEqual( + (client.descriptors.page.listWorkflowTemplates.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listWorkflowTemplates.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + + it('uses async iteration with listWorkflowTemplates with error', async () => { + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); + request.parent = ''; + const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); + client.descriptors.page.listWorkflowTemplates.asyncIterate = stubAsyncIterationCall(undefined, expectedError); + const iterable = client.listWorkflowTemplatesAsync(request); + await assert.rejects(async () => { + const responses: protos.google.cloud.dataproc.v1.IWorkflowTemplate[] = []; + for await (const resource of iterable) { + responses.push(resource!); 
+ } + }); + assert.deepStrictEqual( + (client.descriptors.page.listWorkflowTemplates.asyncIterate as SinonStub) + .getCall(0).args[1], request); + assert.strictEqual( + (client.descriptors.page.listWorkflowTemplates.asyncIterate as SinonStub) + .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], + expectedHeaderRequestParams + ); + }); + }); + + describe('Path templates', () => { + + describe('batch', () => { + const fakePath = "/rendered/path/batch"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + batch: "batchValue", + }; + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.batchPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.batchPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('batchPath', () => { + const result = client.batchPath("projectValue", "locationValue", "batchValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.batchPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromBatchName', () => { + const result = client.matchProjectFromBatchName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromBatchName', () => { + const result = client.matchLocationFromBatchName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchBatchFromBatchName', () => { + const result = client.matchBatchFromBatchName(fakePath); + assert.strictEqual(result, "batchValue"); + assert((client.pathTemplates.batchPathTemplate.match as SinonStub) + 
.getCall(-1).calledWith(fakePath)); + }); + }); + + describe('project', () => { + const fakePath = "/rendered/path/project"; + const expectedParameters = { + project: "projectValue", + }; + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectPath', () => { + const result = client.projectPath("projectValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectName', () => { + const result = client.matchProjectFromProjectName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectLocationAutoscalingPolicy', () => { + const fakePath = "/rendered/path/projectLocationAutoscalingPolicy"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + autoscaling_policy: "autoscalingPolicyValue", + }; + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectLocationAutoscalingPolicyPath', () => { + const result = client.projectLocationAutoscalingPolicyPath("projectValue", "locationValue", "autoscalingPolicyValue"); + assert.strictEqual(result, 
fakePath); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchProjectFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchLocationFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName', () => { + const result = client.matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "autoscalingPolicyValue"); + assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectLocationWorkflowTemplate', () => { + const fakePath = "/rendered/path/projectLocationWorkflowTemplate"; + const expectedParameters = { + project: "projectValue", + location: "locationValue", + workflow_template: "workflowTemplateValue", + }; + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectLocationWorkflowTemplatePath', () => { + const result = 
client.projectLocationWorkflowTemplatePath("projectValue", "locationValue", "workflowTemplateValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchProjectFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchLocationFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchLocationFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "locationValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowTemplateFromProjectLocationWorkflowTemplateName', () => { + const result = client.matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(fakePath); + assert.strictEqual(result, "workflowTemplateValue"); + assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectRegionAutoscalingPolicy', () => { + const fakePath = "/rendered/path/projectRegionAutoscalingPolicy"; + const expectedParameters = { + project: "projectValue", + region: "regionValue", + autoscaling_policy: "autoscalingPolicyValue", + }; + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render = + sinon.stub().returns(fakePath); + client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match = + 
sinon.stub().returns(expectedParameters); + + it('projectRegionAutoscalingPolicyPath', () => { + const result = client.projectRegionAutoscalingPolicyPath("projectValue", "regionValue", "autoscalingPolicyValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchProjectFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchRegionFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchRegionFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "regionValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName', () => { + const result = client.matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(fakePath); + assert.strictEqual(result, "autoscalingPolicyValue"); + assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('projectRegionWorkflowTemplate', () => { + const fakePath = "/rendered/path/projectRegionWorkflowTemplate"; + const expectedParameters = { + project: "projectValue", + region: "regionValue", + workflow_template: "workflowTemplateValue", + }; + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render = + 
sinon.stub().returns(fakePath); + client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('projectRegionWorkflowTemplatePath', () => { + const result = client.projectRegionWorkflowTemplatePath("projectValue", "regionValue", "workflowTemplateValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchProjectFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchRegionFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchRegionFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "regionValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchWorkflowTemplateFromProjectRegionWorkflowTemplateName', () => { + const result = client.matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(fakePath); + assert.strictEqual(result, "workflowTemplateValue"); + assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + + describe('region', () => { + const fakePath = "/rendered/path/region"; + const expectedParameters = { + project: "projectValue", + region: "regionValue", + }; + const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ + credentials: {client_email: 'bogus', private_key: 'bogus'}, + projectId: 'bogus', + }); + client.initialize(); + client.pathTemplates.regionPathTemplate.render = + sinon.stub().returns(fakePath); + 
client.pathTemplates.regionPathTemplate.match = + sinon.stub().returns(expectedParameters); + + it('regionPath', () => { + const result = client.regionPath("projectValue", "regionValue"); + assert.strictEqual(result, fakePath); + assert((client.pathTemplates.regionPathTemplate.render as SinonStub) + .getCall(-1).calledWith(expectedParameters)); + }); + + it('matchProjectFromRegionName', () => { + const result = client.matchProjectFromRegionName(fakePath); + assert.strictEqual(result, "projectValue"); + assert((client.pathTemplates.regionPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + + it('matchRegionFromRegionName', () => { + const result = client.matchRegionFromRegionName(fakePath); + assert.strictEqual(result, "regionValue"); + assert((client.pathTemplates.regionPathTemplate.match as SinonStub) + .getCall(-1).calledWith(fakePath)); + }); + }); + }); +}); diff --git a/owl-bot-staging/v1/tsconfig.json b/owl-bot-staging/v1/tsconfig.json new file mode 100644 index 00000000..c78f1c88 --- /dev/null +++ b/owl-bot-staging/v1/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "./node_modules/gts/tsconfig-google.json", + "compilerOptions": { + "rootDir": ".", + "outDir": "build", + "resolveJsonModule": true, + "lib": [ + "es2018", + "dom" + ] + }, + "include": [ + "src/*.ts", + "src/**/*.ts", + "test/*.ts", + "test/**/*.ts", + "system-test/*.ts" + ] +} diff --git a/owl-bot-staging/v1/webpack.config.js b/owl-bot-staging/v1/webpack.config.js new file mode 100644 index 00000000..050b272c --- /dev/null +++ b/owl-bot-staging/v1/webpack.config.js @@ -0,0 +1,64 @@ +// Copyright 2021 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// https://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const path = require('path'); + +module.exports = { + entry: './src/index.ts', + output: { + library: 'dataproc', + filename: './dataproc.js', + }, + node: { + child_process: 'empty', + fs: 'empty', + crypto: 'empty', + }, + resolve: { + alias: { + '../../../package.json': path.resolve(__dirname, 'package.json'), + }, + extensions: ['.js', '.json', '.ts'], + }, + module: { + rules: [ + { + test: /\.tsx?$/, + use: 'ts-loader', + exclude: /node_modules/ + }, + { + test: /node_modules[\\/]@grpc[\\/]grpc-js/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]grpc/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]retry-request/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]https?-proxy-agent/, + use: 'null-loader' + }, + { + test: /node_modules[\\/]gtoken/, + use: 'null-loader' + }, + ], + }, + mode: 'production', +}; From debf12b0a29d1483085e886c442e8ef7888cd756 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 24 May 2022 09:59:01 +0000 Subject: [PATCH 2/2] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- owl-bot-staging/v1/.eslintignore | 7 - owl-bot-staging/v1/.eslintrc.json | 3 - owl-bot-staging/v1/.gitignore | 14 - owl-bot-staging/v1/.jsdoc.js | 55 - owl-bot-staging/v1/.mocharc.js | 33 - owl-bot-staging/v1/.prettierrc.js | 22 - owl-bot-staging/v1/README.md | 1 - owl-bot-staging/v1/linkinator.config.json | 
16 - owl-bot-staging/v1/package.json | 68 - .../dataproc/v1/autoscaling_policies.proto | 366 ---- .../google/cloud/dataproc/v1/batches.proto | 372 ---- .../google/cloud/dataproc/v1/clusters.proto | 1280 ------------ .../google/cloud/dataproc/v1/jobs.proto | 924 --------- .../google/cloud/dataproc/v1/operations.proto | 118 -- .../google/cloud/dataproc/v1/shared.proto | 341 ---- .../dataproc/v1/workflow_templates.proto | 807 -------- ...olicy_service.create_autoscaling_policy.js | 70 - ...olicy_service.delete_autoscaling_policy.js | 65 - ...g_policy_service.get_autoscaling_policy.js | 65 - ...olicy_service.list_autoscaling_policies.js | 77 - ...olicy_service.update_autoscaling_policy.js | 58 - .../v1/batch_controller.create_batch.js | 83 - .../v1/batch_controller.delete_batch.js | 58 - .../v1/batch_controller.get_batch.js | 58 - .../v1/batch_controller.list_batches.js | 71 - .../v1/cluster_controller.create_cluster.js | 86 - .../v1/cluster_controller.delete_cluster.js | 88 - .../v1/cluster_controller.diagnose_cluster.js | 70 - .../v1/cluster_controller.get_cluster.js | 69 - .../v1/cluster_controller.list_clusters.js | 92 - .../v1/cluster_controller.start_cluster.js | 88 - .../v1/cluster_controller.stop_cluster.js | 88 - .../v1/cluster_controller.update_cluster.js | 151 -- .../generated/v1/job_controller.cancel_job.js | 69 - .../generated/v1/job_controller.delete_job.js | 69 - .../generated/v1/job_controller.get_job.js | 69 - .../generated/v1/job_controller.list_jobs.js | 99 - .../generated/v1/job_controller.submit_job.js | 82 - .../job_controller.submit_job_as_operation.js | 83 - .../generated/v1/job_controller.update_job.js | 84 - ...pet_metadata.google.cloud.dataproc.v1.json | 1535 -------------- ...mplate_service.create_workflow_template.js | 70 - ...mplate_service.delete_workflow_template.js | 71 - ..._template_service.get_workflow_template.js | 71 - ...ce.instantiate_inline_workflow_template.js | 81 - ...e_service.instantiate_workflow_template.js | 89 - 
...emplate_service.list_workflow_templates.js | 76 - ...mplate_service.update_workflow_template.js | 59 - owl-bot-staging/v1/src/index.ts | 33 - .../v1/autoscaling_policy_service_client.ts | 1163 ----------- ...oscaling_policy_service_client_config.json | 51 - ...autoscaling_policy_service_proto_list.json | 9 - .../v1/src/v1/batch_controller_client.ts | 1100 ----------- .../v1/batch_controller_client_config.json | 42 - .../src/v1/batch_controller_proto_list.json | 9 - .../v1/src/v1/cluster_controller_client.ts | 1755 ----------------- .../v1/cluster_controller_client_config.json | 72 - .../src/v1/cluster_controller_proto_list.json | 9 - owl-bot-staging/v1/src/v1/gapic_metadata.json | 409 ---- owl-bot-staging/v1/src/v1/index.ts | 23 - .../v1/src/v1/job_controller_client.ts | 1382 ------------- .../src/v1/job_controller_client_config.json | 69 - .../v1/src/v1/job_controller_proto_list.json | 9 - .../v1/workflow_template_service_client.ts | 1478 -------------- ...rkflow_template_service_client_config.json | 69 - .../workflow_template_service_proto_list.json | 9 - .../system-test/fixtures/sample/src/index.js | 31 - .../system-test/fixtures/sample/src/index.ts | 56 - owl-bot-staging/v1/system-test/install.ts | 49 - .../gapic_autoscaling_policy_service_v1.ts | 1098 ----------- .../v1/test/gapic_batch_controller_v1.ts | 1060 ---------- .../v1/test/gapic_cluster_controller_v1.ts | 1720 ---------------- .../v1/test/gapic_job_controller_v1.ts | 1330 ------------- .../gapic_workflow_template_service_v1.ts | 1402 ------------- owl-bot-staging/v1/tsconfig.json | 19 - owl-bot-staging/v1/webpack.config.js | 64 - src/v1/cluster_controller_client.ts | 16 + src/v1/job_controller_client.ts | 13 + test/gapic_cluster_controller_v1.ts | 127 +- test/gapic_job_controller_v1.ts | 99 +- 80 files changed, 195 insertions(+), 22951 deletions(-) delete mode 100644 owl-bot-staging/v1/.eslintignore delete mode 100644 owl-bot-staging/v1/.eslintrc.json delete mode 100644 
owl-bot-staging/v1/.gitignore delete mode 100644 owl-bot-staging/v1/.jsdoc.js delete mode 100644 owl-bot-staging/v1/.mocharc.js delete mode 100644 owl-bot-staging/v1/.prettierrc.js delete mode 100644 owl-bot-staging/v1/README.md delete mode 100644 owl-bot-staging/v1/linkinator.config.json delete mode 100644 owl-bot-staging/v1/package.json delete mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/autoscaling_policies.proto delete mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/batches.proto delete mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/clusters.proto delete mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/jobs.proto delete mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/operations.proto delete mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/shared.proto delete mode 100644 owl-bot-staging/v1/protos/google/cloud/dataproc/v1/workflow_templates.proto delete mode 100644 owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.create_autoscaling_policy.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.delete_autoscaling_policy.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.get_autoscaling_policy.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.list_autoscaling_policies.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.update_autoscaling_policy.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/batch_controller.create_batch.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/batch_controller.delete_batch.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/batch_controller.get_batch.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/batch_controller.list_batches.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.create_cluster.js delete mode 
100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.delete_cluster.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.diagnose_cluster.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.get_cluster.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.list_clusters.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.start_cluster.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.stop_cluster.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/cluster_controller.update_cluster.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/job_controller.cancel_job.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/job_controller.delete_job.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/job_controller.get_job.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/job_controller.list_jobs.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job_as_operation.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/job_controller.update_job.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/snippet_metadata.google.cloud.dataproc.v1.json delete mode 100644 owl-bot-staging/v1/samples/generated/v1/workflow_template_service.create_workflow_template.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/workflow_template_service.delete_workflow_template.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/workflow_template_service.get_workflow_template.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_inline_workflow_template.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_workflow_template.js delete mode 100644 
owl-bot-staging/v1/samples/generated/v1/workflow_template_service.list_workflow_templates.js delete mode 100644 owl-bot-staging/v1/samples/generated/v1/workflow_template_service.update_workflow_template.js delete mode 100644 owl-bot-staging/v1/src/index.ts delete mode 100644 owl-bot-staging/v1/src/v1/autoscaling_policy_service_client.ts delete mode 100644 owl-bot-staging/v1/src/v1/autoscaling_policy_service_client_config.json delete mode 100644 owl-bot-staging/v1/src/v1/autoscaling_policy_service_proto_list.json delete mode 100644 owl-bot-staging/v1/src/v1/batch_controller_client.ts delete mode 100644 owl-bot-staging/v1/src/v1/batch_controller_client_config.json delete mode 100644 owl-bot-staging/v1/src/v1/batch_controller_proto_list.json delete mode 100644 owl-bot-staging/v1/src/v1/cluster_controller_client.ts delete mode 100644 owl-bot-staging/v1/src/v1/cluster_controller_client_config.json delete mode 100644 owl-bot-staging/v1/src/v1/cluster_controller_proto_list.json delete mode 100644 owl-bot-staging/v1/src/v1/gapic_metadata.json delete mode 100644 owl-bot-staging/v1/src/v1/index.ts delete mode 100644 owl-bot-staging/v1/src/v1/job_controller_client.ts delete mode 100644 owl-bot-staging/v1/src/v1/job_controller_client_config.json delete mode 100644 owl-bot-staging/v1/src/v1/job_controller_proto_list.json delete mode 100644 owl-bot-staging/v1/src/v1/workflow_template_service_client.ts delete mode 100644 owl-bot-staging/v1/src/v1/workflow_template_service_client_config.json delete mode 100644 owl-bot-staging/v1/src/v1/workflow_template_service_proto_list.json delete mode 100644 owl-bot-staging/v1/system-test/fixtures/sample/src/index.js delete mode 100644 owl-bot-staging/v1/system-test/fixtures/sample/src/index.ts delete mode 100644 owl-bot-staging/v1/system-test/install.ts delete mode 100644 owl-bot-staging/v1/test/gapic_autoscaling_policy_service_v1.ts delete mode 100644 owl-bot-staging/v1/test/gapic_batch_controller_v1.ts delete mode 100644 
owl-bot-staging/v1/test/gapic_cluster_controller_v1.ts delete mode 100644 owl-bot-staging/v1/test/gapic_job_controller_v1.ts delete mode 100644 owl-bot-staging/v1/test/gapic_workflow_template_service_v1.ts delete mode 100644 owl-bot-staging/v1/tsconfig.json delete mode 100644 owl-bot-staging/v1/webpack.config.js diff --git a/owl-bot-staging/v1/.eslintignore b/owl-bot-staging/v1/.eslintignore deleted file mode 100644 index cfc348ec..00000000 --- a/owl-bot-staging/v1/.eslintignore +++ /dev/null @@ -1,7 +0,0 @@ -**/node_modules -**/.coverage -build/ -docs/ -protos/ -system-test/ -samples/generated/ diff --git a/owl-bot-staging/v1/.eslintrc.json b/owl-bot-staging/v1/.eslintrc.json deleted file mode 100644 index 78215349..00000000 --- a/owl-bot-staging/v1/.eslintrc.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "extends": "./node_modules/gts" -} diff --git a/owl-bot-staging/v1/.gitignore b/owl-bot-staging/v1/.gitignore deleted file mode 100644 index 5d32b237..00000000 --- a/owl-bot-staging/v1/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -**/*.log -**/node_modules -.coverage -coverage -.nyc_output -docs/ -out/ -build/ -system-test/secrets.js -system-test/*key.json -*.lock -.DS_Store -package-lock.json -__pycache__ diff --git a/owl-bot-staging/v1/.jsdoc.js b/owl-bot-staging/v1/.jsdoc.js deleted file mode 100644 index 2fa0c393..00000000 --- a/owl-bot-staging/v1/.jsdoc.js +++ /dev/null @@ -1,55 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -'use strict'; - -module.exports = { - opts: { - readme: './README.md', - package: './package.json', - template: './node_modules/jsdoc-fresh', - recurse: true, - verbose: true, - destination: './docs/' - }, - plugins: [ - 'plugins/markdown', - 'jsdoc-region-tag' - ], - source: { - excludePattern: '(^|\\/|\\\\)[._]', - include: [ - 'build/src', - 'protos' - ], - includePattern: '\\.js$' - }, - templates: { - copyright: 'Copyright 2022 Google LLC', - includeDate: false, - sourceFiles: false, - systemName: '@google-cloud/dataproc', - theme: 'lumen', - default: { - outputSourceFiles: false - } - }, - markdown: { - idInHeadings: true - } -}; diff --git a/owl-bot-staging/v1/.mocharc.js b/owl-bot-staging/v1/.mocharc.js deleted file mode 100644 index 481c522b..00000000 --- a/owl-bot-staging/v1/.mocharc.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -const config = { - "enable-source-maps": true, - "throw-deprecation": true, - "timeout": 10000 -} -if (process.env.MOCHA_THROW_DEPRECATION === 'false') { - delete config['throw-deprecation']; -} -if (process.env.MOCHA_REPORTER) { - config.reporter = process.env.MOCHA_REPORTER; -} -if (process.env.MOCHA_REPORTER_OUTPUT) { - config['reporter-option'] = `output=${process.env.MOCHA_REPORTER_OUTPUT}`; -} -module.exports = config diff --git a/owl-bot-staging/v1/.prettierrc.js b/owl-bot-staging/v1/.prettierrc.js deleted file mode 100644 index 494e1478..00000000 --- a/owl-bot-staging/v1/.prettierrc.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - -module.exports = { - ...require('gts/.prettierrc.json') -} diff --git a/owl-bot-staging/v1/README.md b/owl-bot-staging/v1/README.md deleted file mode 100644 index 3e8aa9df..00000000 --- a/owl-bot-staging/v1/README.md +++ /dev/null @@ -1 +0,0 @@ -Dataproc: Nodejs Client diff --git a/owl-bot-staging/v1/linkinator.config.json b/owl-bot-staging/v1/linkinator.config.json deleted file mode 100644 index befd23c8..00000000 --- a/owl-bot-staging/v1/linkinator.config.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "recurse": true, - "skip": [ - "https://codecov.io/gh/googleapis/", - "www.googleapis.com", - "img.shields.io", - "https://console.cloud.google.com/cloudshell", - "https://support.google.com" - ], - "silent": true, - "concurrency": 5, - "retry": true, - "retryErrors": true, - "retryErrorsCount": 5, - "retryErrorsJitter": 3000 -} diff --git a/owl-bot-staging/v1/package.json b/owl-bot-staging/v1/package.json deleted file mode 100644 index 5ab1b932..00000000 --- a/owl-bot-staging/v1/package.json +++ /dev/null @@ -1,68 +0,0 @@ -{ - "name": "@google-cloud/dataproc", - "version": "0.1.0", - "description": "Dataproc client for Node.js", - "repository": "googleapis/nodejs-dataproc", - "license": "Apache-2.0", - "author": "Google LLC", - "main": "build/src/index.js", - "files": [ - "build/src", - "build/protos" - ], - "keywords": [ - "google apis client", - "google api client", - "google apis", - "google api", - "google", - "google cloud platform", - "google cloud", - "cloud", - "google dataproc", - "dataproc", - "autoscaling policy service", - "batch controller", - "cluster controller", - "job controller", - "workflow template service" - ], - "scripts": { - "clean": "gts clean", - "compile": "tsc -p . 
&& cp -r protos build/", - "compile-protos": "compileProtos src", - "docs": "jsdoc -c .jsdoc.js", - "predocs-test": "npm run docs", - "docs-test": "linkinator docs", - "fix": "gts fix", - "lint": "gts check", - "prepare": "npm run compile-protos && npm run compile", - "system-test": "c8 mocha build/system-test", - "test": "c8 mocha build/test" - }, - "dependencies": { - "google-gax": "^2.29.4" - }, - "devDependencies": { - "@types/mocha": "^9.1.0", - "@types/node": "^16.0.0", - "@types/sinon": "^10.0.8", - "c8": "^7.11.0", - "gts": "^3.1.0", - "jsdoc": "^3.6.7", - "jsdoc-fresh": "^1.1.1", - "jsdoc-region-tag": "^1.3.1", - "linkinator": "^3.0.0", - "mocha": "^9.1.4", - "null-loader": "^4.0.1", - "pack-n-play": "^1.0.0-2", - "sinon": "^13.0.0", - "ts-loader": "^9.2.6", - "typescript": "^4.5.5", - "webpack": "^5.67.0", - "webpack-cli": "^4.9.1" - }, - "engines": { - "node": ">=v10.24.0" - } -} diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/autoscaling_policies.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/autoscaling_policies.proto deleted file mode 100644 index 18b2f7df..00000000 --- a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/autoscaling_policies.proto +++ /dev/null @@ -1,366 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.dataproc.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "AutoscalingPoliciesProto"; -option java_package = "com.google.cloud.dataproc.v1"; -option (google.api.resource_definition) = { - type: "dataproc.googleapis.com/Region" - pattern: "projects/{project}/regions/{region}" -}; - -// The API interface for managing autoscaling policies in the -// Dataproc API. -service AutoscalingPolicyService { - option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates new autoscaling policy. - rpc CreateAutoscalingPolicy(CreateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { - option (google.api.http) = { - post: "/v1/{parent=projects/*/locations/*}/autoscalingPolicies" - body: "policy" - additional_bindings { - post: "/v1/{parent=projects/*/regions/*}/autoscalingPolicies" - body: "policy" - } - }; - option (google.api.method_signature) = "parent,policy"; - } - - // Updates (replaces) autoscaling policy. - // - // Disabled check for update_mask, because all updates will be full - // replacements. - rpc UpdateAutoscalingPolicy(UpdateAutoscalingPolicyRequest) returns (AutoscalingPolicy) { - option (google.api.http) = { - put: "/v1/{policy.name=projects/*/locations/*/autoscalingPolicies/*}" - body: "policy" - additional_bindings { - put: "/v1/{policy.name=projects/*/regions/*/autoscalingPolicies/*}" - body: "policy" - } - }; - option (google.api.method_signature) = "policy"; - } - - // Retrieves autoscaling policy. 
- rpc GetAutoscalingPolicy(GetAutoscalingPolicyRequest) returns (AutoscalingPolicy) { - option (google.api.http) = { - get: "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" - additional_bindings { - get: "/v1/{name=projects/*/regions/*/autoscalingPolicies/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Lists autoscaling policies in the project. - rpc ListAutoscalingPolicies(ListAutoscalingPoliciesRequest) returns (ListAutoscalingPoliciesResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*/locations/*}/autoscalingPolicies" - additional_bindings { - get: "/v1/{parent=projects/*/regions/*}/autoscalingPolicies" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes an autoscaling policy. It is an error to delete an autoscaling - // policy that is in use by one or more clusters. - rpc DeleteAutoscalingPolicy(DeleteAutoscalingPolicyRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/{name=projects/*/locations/*/autoscalingPolicies/*}" - additional_bindings { - delete: "/v1/{name=projects/*/regions/*/autoscalingPolicies/*}" - } - }; - option (google.api.method_signature) = "name"; - } -} - -// Describes an autoscaling policy for Dataproc cluster autoscaler. -message AutoscalingPolicy { - option (google.api.resource) = { - type: "dataproc.googleapis.com/AutoscalingPolicy" - pattern: "projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}" - pattern: "projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}" - }; - - // Required. The policy id. - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). Cannot begin or end with underscore - // or hyphen. Must consist of between 3 and 50 characters. - // - string id = 1; - - // Output only. The "resource name" of the autoscaling policy, as described - // in https://cloud.google.com/apis/design/resource_names. 
- // - // * For `projects.regions.autoscalingPolicies`, the resource name of the - // policy has the following format: - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - // - // * For `projects.locations.autoscalingPolicies`, the resource name of the - // policy has the following format: - // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` - string name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Autoscaling algorithm for policy. - oneof algorithm { - BasicAutoscalingAlgorithm basic_algorithm = 3 [(google.api.field_behavior) = REQUIRED]; - } - - // Required. Describes how the autoscaler will operate for primary workers. - InstanceGroupAutoscalingPolicyConfig worker_config = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Describes how the autoscaler will operate for secondary workers. - InstanceGroupAutoscalingPolicyConfig secondary_worker_config = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The labels to associate with this autoscaling policy. - // Label **keys** must contain 1 to 63 characters, and must conform to - // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). - // Label **values** may be empty, but, if present, must contain 1 to 63 - // characters, and must conform to [RFC - // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be - // associated with an autoscaling policy. - map labels = 6 [(google.api.field_behavior) = OPTIONAL]; -} - -// Basic algorithm for autoscaling. -message BasicAutoscalingAlgorithm { - oneof config { - // Required. YARN autoscaling configuration. - BasicYarnAutoscalingConfig yarn_config = 1 [(google.api.field_behavior) = REQUIRED]; - } - - // Optional. Duration between scaling events. A scaling period starts after - // the update operation from the previous event has completed. - // - // Bounds: [2m, 1d]. Default: 2m. 
- google.protobuf.Duration cooldown_period = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// Basic autoscaling configurations for YARN. -message BasicYarnAutoscalingConfig { - // Required. Timeout for YARN graceful decommissioning of Node Managers. - // Specifies the duration to wait for jobs to complete before forcefully - // removing workers (and potentially interrupting jobs). Only applicable to - // downscaling operations. - // - // Bounds: [0s, 1d]. - google.protobuf.Duration graceful_decommission_timeout = 5 [(google.api.field_behavior) = REQUIRED]; - - // Required. Fraction of average YARN pending memory in the last cooldown period - // for which to add workers. A scale-up factor of 1.0 will result in scaling - // up so that there is no pending memory remaining after the update (more - // aggressive scaling). A scale-up factor closer to 0 will result in a smaller - // magnitude of scaling up (less aggressive scaling). - // See [How autoscaling - // works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works) - // for more information. - // - // Bounds: [0.0, 1.0]. - double scale_up_factor = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. Fraction of average YARN pending memory in the last cooldown period - // for which to remove workers. A scale-down factor of 1 will result in - // scaling down so that there is no available memory remaining after the - // update (more aggressive scaling). A scale-down factor of 0 disables - // removing workers, which can be beneficial for autoscaling a single job. - // See [How autoscaling - // works](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/autoscaling#how_autoscaling_works) - // for more information. - // - // Bounds: [0.0, 1.0]. - double scale_down_factor = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Minimum scale-up threshold as a fraction of total cluster size - // before scaling occurs. 
For example, in a 20-worker cluster, a threshold of - // 0.1 means the autoscaler must recommend at least a 2-worker scale-up for - // the cluster to scale. A threshold of 0 means the autoscaler will scale up - // on any recommended change. - // - // Bounds: [0.0, 1.0]. Default: 0.0. - double scale_up_min_worker_fraction = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Minimum scale-down threshold as a fraction of total cluster size - // before scaling occurs. For example, in a 20-worker cluster, a threshold of - // 0.1 means the autoscaler must recommend at least a 2 worker scale-down for - // the cluster to scale. A threshold of 0 means the autoscaler will scale down - // on any recommended change. - // - // Bounds: [0.0, 1.0]. Default: 0.0. - double scale_down_min_worker_fraction = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// Configuration for the size bounds of an instance group, including its -// proportional size to other groups. -message InstanceGroupAutoscalingPolicyConfig { - // Optional. Minimum number of instances for this group. - // - // Primary workers - Bounds: [2, max_instances]. Default: 2. - // Secondary workers - Bounds: [0, max_instances]. Default: 0. - int32 min_instances = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Maximum number of instances for this group. Required for primary - // workers. Note that by default, clusters will not use secondary workers. - // Required for secondary workers if the minimum secondary instances is set. - // - // Primary workers - Bounds: [min_instances, ). - // Secondary workers - Bounds: [min_instances, ). Default: 0. - int32 max_instances = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Weight for the instance group, which is used to determine the - // fraction of total workers in the cluster from this instance group. 
- // For example, if primary workers have weight 2, and secondary workers have - // weight 1, the cluster will have approximately 2 primary workers for each - // secondary worker. - // - // The cluster may not reach the specified balance if constrained - // by min/max bounds or other autoscaling settings. For example, if - // `max_instances` for secondary workers is 0, then only primary workers will - // be added. The cluster can also be out of balance when created. - // - // If weight is not set on any instance group, the cluster will default to - // equal weight for all groups: the cluster will attempt to maintain an equal - // number of workers in each group within the configured size bounds for each - // group. If weight is set for one group only, the cluster will default to - // zero weight on the unset group. For example if weight is set only on - // primary workers, the cluster will use primary workers only and no - // secondary workers. - int32 weight = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to create an autoscaling policy. -message CreateAutoscalingPolicyRequest { - // Required. The "resource name" of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.autoscalingPolicies.create`, the resource name - // of the region has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.autoscalingPolicies.create`, the resource name - // of the location has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/AutoscalingPolicy" - } - ]; - - // Required. The autoscaling policy to create. - AutoscalingPolicy policy = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to fetch an autoscaling policy. -message GetAutoscalingPolicyRequest { - // Required. 
The "resource name" of the autoscaling policy, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.autoscalingPolicies.get`, the resource name - // of the policy has the following format: - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - // - // * For `projects.locations.autoscalingPolicies.get`, the resource name - // of the policy has the following format: - // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/AutoscalingPolicy" - } - ]; -} - -// A request to update an autoscaling policy. -message UpdateAutoscalingPolicyRequest { - // Required. The updated autoscaling policy. - AutoscalingPolicy policy = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to delete an autoscaling policy. -// -// Autoscaling policies in use by one or more clusters will not be deleted. -message DeleteAutoscalingPolicyRequest { - // Required. The "resource name" of the autoscaling policy, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.autoscalingPolicies.delete`, the resource name - // of the policy has the following format: - // `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - // - // * For `projects.locations.autoscalingPolicies.delete`, the resource name - // of the policy has the following format: - // `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/AutoscalingPolicy" - } - ]; -} - -// A request to list autoscaling policies in a project. -message ListAutoscalingPoliciesRequest { - // Required. 
The "resource name" of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.autoscalingPolicies.list`, the resource name - // of the region has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.autoscalingPolicies.list`, the resource name - // of the location has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/AutoscalingPolicy" - } - ]; - - // Optional. The maximum number of results to return in each response. - // Must be less than or equal to 1000. Defaults to 100. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The page token, returned by a previous call, to request the - // next page of results. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// A response to a request to list autoscaling policies in a project. -message ListAutoscalingPoliciesResponse { - // Output only. Autoscaling policies list. - repeated AutoscalingPolicy policies = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. This token is included in the response if there are more - // results to fetch. - string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/batches.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/batches.proto deleted file mode 100644 index eafb4e35..00000000 --- a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/batches.proto +++ /dev/null @@ -1,372 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.dataproc.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/dataproc/v1/shared.proto"; -import "google/longrunning/operations.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "BatchesProto"; -option java_package = "com.google.cloud.dataproc.v1"; - -// The BatchController provides methods to manage batch workloads. -service BatchController { - option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a batch workload that executes asynchronously. - rpc CreateBatch(CreateBatchRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1/{parent=projects/*/locations/*}/batches" - body: "batch" - }; - option (google.api.method_signature) = "parent,batch,batch_id"; - option (google.longrunning.operation_info) = { - response_type: "Batch" - metadata_type: "google.cloud.dataproc.v1.BatchOperationMetadata" - }; - } - - // Gets the batch workload resource representation. 
- rpc GetBatch(GetBatchRequest) returns (Batch) { - option (google.api.http) = { - get: "/v1/{name=projects/*/locations/*/batches/*}" - }; - option (google.api.method_signature) = "name"; - } - - // Lists batch workloads. - rpc ListBatches(ListBatchesRequest) returns (ListBatchesResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*/locations/*}/batches" - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes the batch workload resource. If the batch is not in terminal state, - // the delete fails and the response returns `FAILED_PRECONDITION`. - rpc DeleteBatch(DeleteBatchRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/{name=projects/*/locations/*/batches/*}" - }; - option (google.api.method_signature) = "name"; - } -} - -// A request to create a batch workload. -message CreateBatchRequest { - // Required. The parent resource where this batch will be created. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/Batch" - } - ]; - - // Required. The batch to create. - Batch batch = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The ID to use for the batch, which will become the final component of - // the batch's resource name. - // - // This value must be 4-63 characters. Valid characters are `/[a-z][0-9]-/`. - string batch_id = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A unique ID used to identify the request. If the service - // receives two - // [CreateBatchRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateBatchRequest)s - // with the same request_id, the second request is ignored and the - // Operation that corresponds to the first Batch created and stored - // in the backend is returned. 
- // - // Recommendation: Set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The value must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to get the resource representation for a batch workload. -message GetBatchRequest { - // Required. The name of the batch to retrieve. - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/Batch" - } - ]; -} - -// A request to list batch workloads in a project. -message ListBatchesRequest { - // Required. The parent, which owns this collection of batches. - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/Batch" - } - ]; - - // Optional. The maximum number of batches to return in each response. - // The service may return fewer than this value. - // The default page size is 20; the maximum page size is 1000. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A page token received from a previous `ListBatches` call. - // Provide this token to retrieve the subsequent page. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// A list of batch workloads. -message ListBatchesResponse { - // The batches from the specified collection. - repeated Batch batches = 1; - - // A token, which can be sent as `page_token` to retrieve the next page. - // If this field is omitted, there are no subsequent pages. - string next_page_token = 2; -} - -// A request to delete a batch workload. -message DeleteBatchRequest { - // Required. The name of the batch resource to delete. 
- string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/Batch" - } - ]; -} - -// A representation of a batch workload in the service. -message Batch { - option (google.api.resource) = { - type: "dataproc.googleapis.com/Batch" - pattern: "projects/{project}/locations/{location}/batches/{batch}" - }; - - // Historical state information. - message StateHistory { - // Output only. The state of the batch at this point in history. - State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Details about the state at this point in history. - string state_message = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The time when the batch entered the historical state. - google.protobuf.Timestamp state_start_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - } - - // The batch state. - enum State { - // The batch state is unknown. - STATE_UNSPECIFIED = 0; - - // The batch is created before running. - PENDING = 1; - - // The batch is running. - RUNNING = 2; - - // The batch is cancelling. - CANCELLING = 3; - - // The batch cancellation was successful. - CANCELLED = 4; - - // The batch completed successfully. - SUCCEEDED = 5; - - // The batch is no longer running due to an error. - FAILED = 6; - } - - // Output only. The resource name of the batch. - string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. A batch UUID (Unique Universal Identifier). The service - // generates this value when it creates the batch. - string uuid = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The time when the batch was created. - google.protobuf.Timestamp create_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // The application/framework-specific portion of the batch configuration. - oneof batch_config { - // Optional. PySpark batch config. 
- PySparkBatch pyspark_batch = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Spark batch config. - SparkBatch spark_batch = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. SparkR batch config. - SparkRBatch spark_r_batch = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. SparkSql batch config. - SparkSqlBatch spark_sql_batch = 7 [(google.api.field_behavior) = OPTIONAL]; - } - - // Output only. Runtime information about batch execution. - RuntimeInfo runtime_info = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The state of the batch. - State state = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Batch state details, such as a failure - // description if the state is `FAILED`. - string state_message = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The time when the batch entered a current state. - google.protobuf.Timestamp state_time = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The email address of the user who created the batch. - string creator = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. The labels to associate with this batch. - // Label **keys** must contain 1 to 63 characters, and must conform to - // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). - // Label **values** may be empty, but, if present, must contain 1 to 63 - // characters, and must conform to [RFC - // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be - // associated with a batch. - map labels = 13 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Runtime configuration for the batch execution. - RuntimeConfig runtime_config = 14 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Environment configuration for the batch execution. - EnvironmentConfig environment_config = 15 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The resource name of the operation associated with this batch. 
- string operation = 16 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Historical state information for the batch. - repeated StateHistory state_history = 17 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A configuration for running an -// [Apache -// PySpark](https://spark.apache.org/docs/latest/api/python/getting_started/quickstart.html) -// batch workload. -message PySparkBatch { - // Required. The HCFS URI of the main Python file to use as the Spark driver. Must - // be a .py file. - string main_python_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The arguments to pass to the driver. Do not include arguments - // that can be set as batch properties, such as `--conf`, since a collision - // can occur that causes an incorrect batch submission. - repeated string args = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS file URIs of Python files to pass to the PySpark - // framework. Supported file types: `.py`, `.egg`, and `.zip`. - repeated string python_file_uris = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of jar files to add to the classpath of the - // Spark driver and tasks. - repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of files to be placed in the working directory of - // each executor. - repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of archives to be extracted into the working directory - // of each executor. Supported file types: - // `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`. - repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; -} - -// A configuration for running an [Apache Spark](http://spark.apache.org/) -// batch workload. -message SparkBatch { - // The specification of the main method to call to drive the Spark - // workload. Specify either the jar file that contains the main class or the - // main class name. 
To pass both a main jar and a main class in that jar, add - // the jar to `jar_file_uris`, and then specify the main class - // name in `main_class`. - oneof driver { - // Optional. The HCFS URI of the jar file that contains the main class. - string main_jar_file_uri = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The name of the driver main class. The jar file that contains the class - // must be in the classpath or specified in `jar_file_uris`. - string main_class = 2 [(google.api.field_behavior) = OPTIONAL]; - } - - // Optional. The arguments to pass to the driver. Do not include arguments - // that can be set as batch properties, such as `--conf`, since a collision - // can occur that causes an incorrect batch submission. - repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of jar files to add to the classpath of the - // Spark driver and tasks. - repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of files to be placed in the working directory of - // each executor. - repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of archives to be extracted into the working directory - // of each executor. Supported file types: - // `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`. - repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; -} - -// A configuration for running an -// [Apache SparkR](https://spark.apache.org/docs/latest/sparkr.html) -// batch workload. -message SparkRBatch { - // Required. The HCFS URI of the main R file to use as the driver. - // Must be a `.R` or `.r` file. - string main_r_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The arguments to pass to the Spark driver. Do not include arguments - // that can be set as batch properties, such as `--conf`, since a collision - // can occur that causes an incorrect batch submission. 
- repeated string args = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of files to be placed in the working directory of - // each executor. - repeated string file_uris = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of archives to be extracted into the working directory - // of each executor. Supported file types: - // `.jar`, `.tar`, `.tar.gz`, `.tgz`, and `.zip`. - repeated string archive_uris = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// A configuration for running -// [Apache Spark SQL](http://spark.apache.org/sql/) queries as a batch workload. -message SparkSqlBatch { - // Required. The HCFS URI of the script that contains Spark SQL queries to execute. - string query_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Mapping of query variable names to values (equivalent to the - // Spark SQL command: `SET name="value";`). - map query_variables = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. - repeated string jar_file_uris = 3 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/clusters.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/clusters.proto deleted file mode 100644 index 2b650c13..00000000 --- a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/clusters.proto +++ /dev/null @@ -1,1280 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.dataproc.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/dataproc/v1/shared.proto"; -import "google/longrunning/operations.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "ClustersProto"; -option java_package = "com.google.cloud.dataproc.v1"; -option (google.api.resource_definition) = { - type: "container.googleapis.com/Cluster" - pattern: "projects/{project}/locations/{location}/clusters/{cluster}" -}; -option (google.api.resource_definition) = { - type: "metastore.googleapis.com/Service" - pattern: "projects/{project}/locations/{location}/services/{service}" -}; - -// The ClusterControllerService provides methods to manage clusters -// of Compute Engine instances. -service ClusterController { - option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates a cluster in a project. The returned - // [Operation.metadata][google.longrunning.Operation.metadata] will be - // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). 
- rpc CreateCluster(CreateClusterRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1/projects/{project_id}/regions/{region}/clusters" - body: "cluster" - }; - option (google.api.method_signature) = "project_id,region,cluster"; - option (google.longrunning.operation_info) = { - response_type: "Cluster" - metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" - }; - } - - // Updates a cluster in a project. The returned - // [Operation.metadata][google.longrunning.Operation.metadata] will be - // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). - // The cluster must be in a [`RUNNING`][google.cloud.dataproc.v1.ClusterStatus.State] state or an error - // is returned. - rpc UpdateCluster(UpdateClusterRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - patch: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" - body: "cluster" - }; - option (google.api.method_signature) = "project_id,region,cluster_name,cluster,update_mask"; - option (google.longrunning.operation_info) = { - response_type: "Cluster" - metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" - }; - } - - // Stops a cluster in a project. - rpc StopCluster(StopClusterRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:stop" - body: "*" - }; - option (google.longrunning.operation_info) = { - response_type: "Cluster" - metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" - }; - } - - // Starts a cluster in a project. 
- rpc StartCluster(StartClusterRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:start" - body: "*" - }; - option (google.longrunning.operation_info) = { - response_type: "Cluster" - metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" - }; - } - - // Deletes a cluster in a project. The returned - // [Operation.metadata][google.longrunning.Operation.metadata] will be - // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). - rpc DeleteCluster(DeleteClusterRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - delete: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" - }; - option (google.api.method_signature) = "project_id,region,cluster_name"; - option (google.longrunning.operation_info) = { - response_type: "google.protobuf.Empty" - metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" - }; - } - - // Gets the resource representation for a cluster in a project. - rpc GetCluster(GetClusterRequest) returns (Cluster) { - option (google.api.http) = { - get: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}" - }; - option (google.api.method_signature) = "project_id,region,cluster_name"; - } - - // Lists all regions/{region}/clusters in a project alphabetically. - rpc ListClusters(ListClustersRequest) returns (ListClustersResponse) { - option (google.api.http) = { - get: "/v1/projects/{project_id}/regions/{region}/clusters" - }; - option (google.api.method_signature) = "project_id,region"; - option (google.api.method_signature) = "project_id,region,filter"; - } - - // Gets cluster diagnostic information. 
The returned - // [Operation.metadata][google.longrunning.Operation.metadata] will be - // [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). - // After the operation completes, - // [Operation.response][google.longrunning.Operation.response] - // contains - // [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults). - rpc DiagnoseCluster(DiagnoseClusterRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1/projects/{project_id}/regions/{region}/clusters/{cluster_name}:diagnose" - body: "*" - }; - option (google.api.method_signature) = "project_id,region,cluster_name"; - option (google.longrunning.operation_info) = { - response_type: "DiagnoseClusterResults" - metadata_type: "google.cloud.dataproc.v1.ClusterOperationMetadata" - }; - } -} - -// Describes the identifying information, config, and status of -// a Dataproc cluster -message Cluster { - // Required. The Google Cloud Platform project ID that the cluster belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. Cluster names within a project must be - // unique. Names of deleted clusters can be reused. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The cluster config for a cluster of Compute Engine Instances. - // Note that Dataproc may set default values, and values may change - // when clusters are updated. - ClusterConfig config = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The virtual cluster config, used when creating a Dataproc cluster that - // does not directly control the underlying compute resources, for example, - // when creating a [Dataproc-on-GKE - // cluster](https://cloud.google.com/dataproc/docs/concepts/jobs/dataproc-gke#create-a-dataproc-on-gke-cluster). 
- // Note that Dataproc may set default values, and values may change when - // clusters are updated. Exactly one of config or virtualClusterConfig must be - // specified. - VirtualClusterConfig virtual_cluster_config = 10 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The labels to associate with this cluster. - // Label **keys** must contain 1 to 63 characters, and must conform to - // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). - // Label **values** may be empty, but, if present, must contain 1 to 63 - // characters, and must conform to [RFC - // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be - // associated with a cluster. - map labels = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. Cluster status. - ClusterStatus status = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The previous cluster status. - repeated ClusterStatus status_history = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. A cluster UUID (Unique Universal Identifier). Dataproc - // generates this value when it creates the cluster. - string cluster_uuid = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Contains cluster daemon metrics such as HDFS and YARN stats. - // - // **Beta Feature**: This report is available for testing purposes only. It - // may be changed before final release. - ClusterMetrics metrics = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// The cluster config. -message ClusterConfig { - // Optional. A Cloud Storage bucket used to stage job - // dependencies, config files, and job driver console output. 
- // If you do not specify a staging bucket, Cloud - // Dataproc will determine a Cloud Storage location (US, - // ASIA, or EU) for your cluster's staging bucket according to the - // Compute Engine zone where your cluster is deployed, and then create - // and manage this project-level, per-location bucket (see - // [Dataproc staging and temp - // buckets](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)). - // **This field requires a Cloud Storage bucket name, not a `gs://...` URI to - // a Cloud Storage bucket.** - string config_bucket = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A Cloud Storage bucket used to store ephemeral cluster and jobs data, - // such as Spark and MapReduce history files. - // If you do not specify a temp bucket, - // Dataproc will determine a Cloud Storage location (US, - // ASIA, or EU) for your cluster's temp bucket according to the - // Compute Engine zone where your cluster is deployed, and then create - // and manage this project-level, per-location bucket. The default bucket has - // a TTL of 90 days, but you can use any TTL (or none) if you specify a - // bucket (see - // [Dataproc staging and temp - // buckets](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)). - // **This field requires a Cloud Storage bucket name, not a `gs://...` URI to - // a Cloud Storage bucket.** - string temp_bucket = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The shared Compute Engine config settings for - // all instances in a cluster. - GceClusterConfig gce_cluster_config = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine config settings for - // the cluster's master instance. - InstanceGroupConfig master_config = 9 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine config settings for - // the cluster's worker instances. 
- InstanceGroupConfig worker_config = 10 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine config settings for - // a cluster's secondary worker instances - InstanceGroupConfig secondary_worker_config = 12 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The config settings for cluster software. - SoftwareConfig software_config = 13 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Commands to execute on each node after config is - // completed. By default, executables are run on master and all worker nodes. - // You can test a node's `role` metadata to run an executable on - // a master or worker node, as shown below using `curl` (you can also use - // `wget`): - // - // ROLE=$(curl -H Metadata-Flavor:Google - // http://metadata/computeMetadata/v1/instance/attributes/dataproc-role) - // if [[ "${ROLE}" == 'Master' ]]; then - // ... master specific actions ... - // else - // ... worker specific actions ... - // fi - repeated NodeInitializationAction initialization_actions = 11 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Encryption settings for the cluster. - EncryptionConfig encryption_config = 15 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Autoscaling config for the policy associated with the cluster. - // Cluster does not autoscale if this field is unset. - AutoscalingConfig autoscaling_config = 18 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Security settings for the cluster. - SecurityConfig security_config = 16 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Lifecycle setting for the cluster. - LifecycleConfig lifecycle_config = 17 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Port/endpoint configuration for this cluster - EndpointConfig endpoint_config = 19 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Metastore configuration. 
- MetastoreConfig metastore_config = 20 [(google.api.field_behavior) = OPTIONAL]; -} - -// Dataproc cluster config for a cluster that does not directly control the -// underlying compute resources, such as a [Dataproc-on-GKE -// cluster](https://cloud.google.com/dataproc/docs/concepts/jobs/dataproc-gke#create-a-dataproc-on-gke-cluster). -message VirtualClusterConfig { - // Optional. A Storage bucket used to stage job - // dependencies, config files, and job driver console output. - // If you do not specify a staging bucket, Cloud - // Dataproc will determine a Cloud Storage location (US, - // ASIA, or EU) for your cluster's staging bucket according to the - // Compute Engine zone where your cluster is deployed, and then create - // and manage this project-level, per-location bucket (see - // [Dataproc staging and temp - // buckets](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/staging-bucket)). - // **This field requires a Cloud Storage bucket name, not a `gs://...` URI to - // a Cloud Storage bucket.** - string staging_bucket = 1 [(google.api.field_behavior) = OPTIONAL]; - - oneof infrastructure_config { - // Required. The configuration for running the Dataproc cluster on Kubernetes. - KubernetesClusterConfig kubernetes_cluster_config = 6 [(google.api.field_behavior) = REQUIRED]; - } - - // Optional. Configuration of auxiliary services used by this cluster. - AuxiliaryServicesConfig auxiliary_services_config = 7 [(google.api.field_behavior) = OPTIONAL]; -} - -// Auxiliary services configuration for a Cluster. -message AuxiliaryServicesConfig { - // Optional. The Hive Metastore configuration for this workload. - MetastoreConfig metastore_config = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Spark History Server configuration for the workload. 
- SparkHistoryServerConfig spark_history_server_config = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// Endpoint config for this cluster -message EndpointConfig { - // Output only. The map of port descriptions to URLs. Will only be populated - // if enable_http_port_access is true. - map http_ports = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. If true, enable http access to specific ports on the cluster - // from external sources. Defaults to false. - bool enable_http_port_access = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// Autoscaling Policy config associated with the cluster. -message AutoscalingConfig { - // Optional. The autoscaling policy used by the cluster. - // - // Only resource names including projectid and location (region) are valid. - // Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` - // * `projects/[project_id]/locations/[dataproc_region]/autoscalingPolicies/[policy_id]` - // - // Note that the policy must be in the same project and Dataproc region. - string policy_uri = 1 [(google.api.field_behavior) = OPTIONAL]; -} - -// Encryption settings for the cluster. -message EncryptionConfig { - // Optional. The Cloud KMS key name to use for PD disk encryption for all - // instances in the cluster. - string gce_pd_kms_key_name = 1 [(google.api.field_behavior) = OPTIONAL]; -} - -// Common config settings for resources of Compute Engine cluster -// instances, applicable to all instances in the cluster. -message GceClusterConfig { - // `PrivateIpv6GoogleAccess` controls whether and how Dataproc cluster nodes - // can communicate with Google Services through gRPC over IPv6. - // These values are directly mapped to corresponding values in the - // [Compute Engine Instance - // fields](https://cloud.google.com/compute/docs/reference/rest/v1/instances). 
- enum PrivateIpv6GoogleAccess { - // If unspecified, Compute Engine default behavior will apply, which - // is the same as [INHERIT_FROM_SUBNETWORK][google.cloud.dataproc.v1.GceClusterConfig.PrivateIpv6GoogleAccess.INHERIT_FROM_SUBNETWORK]. - PRIVATE_IPV6_GOOGLE_ACCESS_UNSPECIFIED = 0; - - // Private access to and from Google Services configuration - // inherited from the subnetwork configuration. This is the - // default Compute Engine behavior. - INHERIT_FROM_SUBNETWORK = 1; - - // Enables outbound private IPv6 access to Google Services from the Dataproc - // cluster. - OUTBOUND = 2; - - // Enables bidirectional private IPv6 access between Google Services and the - // Dataproc cluster. - BIDIRECTIONAL = 3; - } - - // Optional. The zone where the Compute Engine cluster will be located. - // On a create request, it is required in the "global" region. If omitted - // in a non-global Dataproc region, the service will pick a zone in the - // corresponding Compute Engine region. On a get request, zone will - // always be present. - // - // A full URL, partial URI, or short name are valid. Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/[zone]` - // * `projects/[project_id]/zones/[zone]` - // * `us-central1-f` - string zone_uri = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine network to be used for machine - // communications. Cannot be specified with subnetwork_uri. If neither - // `network_uri` nor `subnetwork_uri` is specified, the "default" network of - // the project is used, if it exists. Cannot be a "Custom Subnet Network" (see - // [Using Subnetworks](https://cloud.google.com/compute/docs/subnetworks) for - // more information). - // - // A full URL, partial URI, or short name are valid. 
Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/global/default` - // * `projects/[project_id]/regions/global/default` - // * `default` - string network_uri = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Compute Engine subnetwork to be used for machine - // communications. Cannot be specified with network_uri. - // - // A full URL, partial URI, or short name are valid. Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/regions/us-east1/subnetworks/sub0` - // * `projects/[project_id]/regions/us-east1/subnetworks/sub0` - // * `sub0` - string subnetwork_uri = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If true, all instances in the cluster will only have internal IP - // addresses. By default, clusters are not restricted to internal IP - // addresses, and will have ephemeral external IP addresses assigned to each - // instance. This `internal_ip_only` restriction can only be enabled for - // subnetwork enabled networks, and all off-cluster dependencies must be - // configured to be accessible without external IP addresses. - bool internal_ip_only = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The type of IPv6 access for a cluster. - PrivateIpv6GoogleAccess private_ipv6_google_access = 12 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The [Dataproc service - // account](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/service-accounts#service_accounts_in_dataproc) - // (also see [VM Data Plane - // identity](https://cloud.google.com/dataproc/docs/concepts/iam/dataproc-principals#vm_service_account_data_plane_identity)) - // used by Dataproc cluster VM instances to access Google Cloud Platform - // services. - // - // If not specified, the - // [Compute Engine default service - // account](https://cloud.google.com/compute/docs/access/service-accounts#default_service_account) - // is used. 
- string service_account = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The URIs of service account scopes to be included in - // Compute Engine instances. The following base set of scopes is always - // included: - // - // * https://www.googleapis.com/auth/cloud.useraccounts.readonly - // * https://www.googleapis.com/auth/devstorage.read_write - // * https://www.googleapis.com/auth/logging.write - // - // If no scopes are specified, the following defaults are also provided: - // - // * https://www.googleapis.com/auth/bigquery - // * https://www.googleapis.com/auth/bigtable.admin.table - // * https://www.googleapis.com/auth/bigtable.data - // * https://www.googleapis.com/auth/devstorage.full_control - repeated string service_account_scopes = 3 [(google.api.field_behavior) = OPTIONAL]; - - // The Compute Engine tags to add to all instances (see [Tagging - // instances](https://cloud.google.com/compute/docs/label-or-tag-resources#tags)). - repeated string tags = 4; - - // The Compute Engine metadata entries to add to all instances (see - // [Project and instance - // metadata](https://cloud.google.com/compute/docs/storing-retrieving-metadata#project_and_instance_metadata)). - map metadata = 5; - - // Optional. Reservation Affinity for consuming Zonal reservation. - ReservationAffinity reservation_affinity = 11 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Node Group Affinity for sole-tenant clusters. - NodeGroupAffinity node_group_affinity = 13 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Shielded Instance Config for clusters using [Compute Engine Shielded - // VMs](https://cloud.google.com/security/shielded-cloud/shielded-vm). - ShieldedInstanceConfig shielded_instance_config = 14 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Confidential Instance Config for clusters using [Confidential - // VMs](https://cloud.google.com/compute/confidential-vm/docs). 
- ConfidentialInstanceConfig confidential_instance_config = 15 [(google.api.field_behavior) = OPTIONAL]; -} - -// Node Group Affinity for clusters using sole-tenant node groups. -message NodeGroupAffinity { - // Required. The URI of a - // sole-tenant [node group - // resource](https://cloud.google.com/compute/docs/reference/rest/v1/nodeGroups) - // that the cluster will be created on. - // - // A full URL, partial URI, or node group name are valid. Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1` - // * `projects/[project_id]/zones/us-central1-a/nodeGroups/node-group-1` - // * `node-group-1` - string node_group_uri = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// Shielded Instance Config for clusters using [Compute Engine Shielded -// VMs](https://cloud.google.com/security/shielded-cloud/shielded-vm). -message ShieldedInstanceConfig { - // Optional. Defines whether instances have Secure Boot enabled. - bool enable_secure_boot = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Defines whether instances have the vTPM enabled. - bool enable_vtpm = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Defines whether instances have integrity monitoring enabled. - bool enable_integrity_monitoring = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// Confidential Instance Config for clusters using [Confidential -// VMs](https://cloud.google.com/compute/confidential-vm/docs) -message ConfidentialInstanceConfig { - // Optional. Defines whether the instance should have confidential compute enabled. - bool enable_confidential_compute = 1 [(google.api.field_behavior) = OPTIONAL]; -} - -// The config settings for Compute Engine resources in -// an instance group, such as a master or worker group. -message InstanceGroupConfig { - // Controls the use of - // [preemptible instances] - // (https://cloud.google.com/compute/docs/instances/preemptible) - // within the group. 
- enum Preemptibility { - // Preemptibility is unspecified, the system will choose the - // appropriate setting for each instance group. - PREEMPTIBILITY_UNSPECIFIED = 0; - - // Instances are non-preemptible. - // - // This option is allowed for all instance groups and is the only valid - // value for Master and Worker instance groups. - NON_PREEMPTIBLE = 1; - - // Instances are preemptible. - // - // This option is allowed only for secondary worker groups. - PREEMPTIBLE = 2; - } - - // Optional. The number of VM instances in the instance group. - // For [HA - // cluster](/dataproc/docs/concepts/configuring-clusters/high-availability) - // [master_config](#FIELDS.master_config) groups, **must be set to 3**. - // For standard cluster [master_config](#FIELDS.master_config) groups, - // **must be set to 1**. - int32 num_instances = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The list of instance names. Dataproc derives the names - // from `cluster_name`, `num_instances`, and the instance group. - repeated string instance_names = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. The Compute Engine image resource used for cluster instances. - // - // The URI can represent an image or image family. - // - // Image examples: - // - // * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/[image-id]` - // * `projects/[project_id]/global/images/[image-id]` - // * `image-id` - // - // Image family examples. Dataproc will use the most recent - // image from the family: - // - // * `https://www.googleapis.com/compute/beta/projects/[project_id]/global/images/family/[custom-image-family-name]` - // * `projects/[project_id]/global/images/family/[custom-image-family-name]` - // - // If the URI is unspecified, it will be inferred from - // `SoftwareConfig.image_version` or the system default. - string image_uri = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. 
The Compute Engine machine type used for cluster instances. - // - // A full URL, partial URI, or short name are valid. Examples: - // - // * `https://www.googleapis.com/compute/v1/projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` - // * `projects/[project_id]/zones/us-east1-a/machineTypes/n1-standard-2` - // * `n1-standard-2` - // - // **Auto Zone Exception**: If you are using the Dataproc - // [Auto Zone - // Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) - // feature, you must use the short name of the machine type - // resource, for example, `n1-standard-2`. - string machine_type_uri = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Disk option config settings. - DiskConfig disk_config = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. Specifies that this instance group contains preemptible - // instances. - bool is_preemptible = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. Specifies the preemptibility of the instance group. - // - // The default value for master and worker groups is - // `NON_PREEMPTIBLE`. This default cannot be changed. - // - // The default value for secondary instances is - // `PREEMPTIBLE`. - Preemptibility preemptibility = 10 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The config for Compute Engine Instance Group - // Manager that manages this group. - // This is only used for preemptible instance groups. - ManagedGroupConfig managed_group_config = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. The Compute Engine accelerator configuration for these - // instances. - repeated AcceleratorConfig accelerators = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Specifies the minimum cpu platform for the Instance Group. - // See [Dataproc -> Minimum CPU - // Platform](https://cloud.google.com/dataproc/docs/concepts/compute/dataproc-min-cpu). 
- string min_cpu_platform = 9 [(google.api.field_behavior) = OPTIONAL]; -} - -// Specifies the resources used to actively manage an instance group. -message ManagedGroupConfig { - // Output only. The name of the Instance Template used for the Managed - // Instance Group. - string instance_template_name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The name of the Instance Group Manager for this group. - string instance_group_manager_name = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Specifies the type and number of accelerator cards attached to the instances -// of an instance. See [GPUs on Compute -// Engine](https://cloud.google.com/compute/docs/gpus/). -message AcceleratorConfig { - // Full URL, partial URI, or short name of the accelerator type resource to - // expose to this instance. See - // [Compute Engine - // AcceleratorTypes](https://cloud.google.com/compute/docs/reference/beta/acceleratorTypes). - // - // Examples: - // - // * `https://www.googleapis.com/compute/beta/projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` - // * `projects/[project_id]/zones/us-east1-a/acceleratorTypes/nvidia-tesla-k80` - // * `nvidia-tesla-k80` - // - // **Auto Zone Exception**: If you are using the Dataproc - // [Auto Zone - // Placement](https://cloud.google.com/dataproc/docs/concepts/configuring-clusters/auto-zone#using_auto_zone_placement) - // feature, you must use the short name of the accelerator type - // resource, for example, `nvidia-tesla-k80`. - string accelerator_type_uri = 1; - - // The number of the accelerator cards of this type exposed to this instance. - int32 accelerator_count = 2; -} - -// Specifies the config of disk options for a group of VM instances. -message DiskConfig { - // Optional. Type of the boot disk (default is "pd-standard"). 
- // Valid values: "pd-balanced" (Persistent Disk Balanced Solid State Drive), - // "pd-ssd" (Persistent Disk Solid State Drive), - // or "pd-standard" (Persistent Disk Hard Disk Drive). - // See [Disk types](https://cloud.google.com/compute/docs/disks#disk-types). - string boot_disk_type = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Size in GB of the boot disk (default is 500GB). - int32 boot_disk_size_gb = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Number of attached SSDs, from 0 to 4 (default is 0). - // If SSDs are not attached, the boot disk is used to store runtime logs and - // [HDFS](https://hadoop.apache.org/docs/r1.2.1/hdfs_user_guide.html) data. - // If one or more SSDs are attached, this runtime bulk - // data is spread across them, and the boot disk contains only basic - // config and installed binaries. - int32 num_local_ssds = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Interface type of local SSDs (default is "scsi"). - // Valid values: "scsi" (Small Computer System Interface), - // "nvme" (Non-Volatile Memory Express). - // See [local SSD - // performance](https://cloud.google.com/compute/docs/disks/local-ssd#performance). - string local_ssd_interface = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// Specifies an executable to run on a fully configured node and a -// timeout period for executable completion. -message NodeInitializationAction { - // Required. Cloud Storage URI of executable file. - string executable_file = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Amount of time executable has to complete. Default is - // 10 minutes (see JSON representation of - // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). - // - // Cluster creation fails with an explanatory error message (the - // name of the executable that caused the error and the exceeded timeout - // period) if the executable is not completed at end of the timeout period. 
- google.protobuf.Duration execution_timeout = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// The status of a cluster and its instances. -message ClusterStatus { - // The cluster state. - enum State { - // The cluster state is unknown. - UNKNOWN = 0; - - // The cluster is being created and set up. It is not ready for use. - CREATING = 1; - - // The cluster is currently running and healthy. It is ready for use. - // - // **Note:** The cluster state changes from "creating" to "running" status - // after the master node(s), first two primary worker nodes (and the last - // primary worker node if primary workers > 2) are running. - RUNNING = 2; - - // The cluster encountered an error. It is not ready for use. - ERROR = 3; - - // The cluster has encountered an error while being updated. Jobs can - // be submitted to the cluster, but the cluster cannot be updated. - ERROR_DUE_TO_UPDATE = 9; - - // The cluster is being deleted. It cannot be used. - DELETING = 4; - - // The cluster is being updated. It continues to accept and process jobs. - UPDATING = 5; - - // The cluster is being stopped. It cannot be used. - STOPPING = 6; - - // The cluster is currently stopped. It is not ready for use. - STOPPED = 7; - - // The cluster is being started. It is not ready for use. - STARTING = 8; - } - - // The cluster substate. - enum Substate { - // The cluster substate is unknown. - UNSPECIFIED = 0; - - // The cluster is known to be in an unhealthy state - // (for example, critical daemons are not running or HDFS capacity is - // exhausted). - // - // Applies to RUNNING state. - UNHEALTHY = 1; - - // The agent-reported status is out of date (may occur if - // Dataproc loses communication with Agent). - // - // Applies to RUNNING state. - STALE_STATUS = 2; - } - - // Output only. The cluster's state. - State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. Output only. Details of cluster's state. 
- string detail = 2 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = OPTIONAL - ]; - - // Output only. Time when this state was entered (see JSON representation of - // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)). - google.protobuf.Timestamp state_start_time = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Additional state information that includes - // status reported by the agent. - Substate substate = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Security related configuration, including encryption, Kerberos, etc. -message SecurityConfig { - // Optional. Kerberos related configuration. - KerberosConfig kerberos_config = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Identity related configuration, including service account based - // secure multi-tenancy user mappings. - IdentityConfig identity_config = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// Specifies Kerberos related configuration. -message KerberosConfig { - // Optional. Flag to indicate whether to Kerberize the cluster (default: false). Set - // this field to true to enable Kerberos on a cluster. - bool enable_kerberos = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the root - // principal password. - string root_principal_password_uri = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The uri of the KMS key used to encrypt various sensitive - // files. - string kms_key_uri = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of the keystore file used for SSL - // encryption. If not provided, Dataproc will provide a self-signed - // certificate. - string keystore_uri = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of the truststore file used for SSL - // encryption. If not provided, Dataproc will provide a self-signed - // certificate. 
- string truststore_uri = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // password to the user provided keystore. For the self-signed certificate, - // this password is generated by Dataproc. - string keystore_password_uri = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // password to the user provided key. For the self-signed certificate, this - // password is generated by Dataproc. - string key_password_uri = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // password to the user provided truststore. For the self-signed certificate, - // this password is generated by Dataproc. - string truststore_password_uri = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The remote realm the Dataproc on-cluster KDC will trust, should - // the user enable cross realm trust. - string cross_realm_trust_realm = 9 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The KDC (IP or hostname) for the remote trusted realm in a cross - // realm trust relationship. - string cross_realm_trust_kdc = 10 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The admin server (IP or hostname) for the remote trusted realm in - // a cross realm trust relationship. - string cross_realm_trust_admin_server = 11 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // shared password between the on-cluster Kerberos realm and the remote - // trusted realm, in a cross realm trust relationship. - string cross_realm_trust_shared_password_uri = 12 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud Storage URI of a KMS encrypted file containing the - // master key of the KDC database. 
- string kdc_db_key_uri = 13 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The lifetime of the ticket granting ticket, in hours. - // If not specified, or user specifies 0, then default value 10 - // will be used. - int32 tgt_lifetime_hours = 14 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The name of the on-cluster Kerberos realm. - // If not specified, the uppercased domain of hostnames will be the realm. - string realm = 15 [(google.api.field_behavior) = OPTIONAL]; -} - -// Identity related configuration, including service account based -// secure multi-tenancy user mappings. -message IdentityConfig { - // Required. Map of user to service account. - map user_service_account_mapping = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// Specifies the selection and config of software inside the cluster. -message SoftwareConfig { - // Optional. The version of software inside the cluster. It must be one of the - // supported [Dataproc - // Versions](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#supported_dataproc_versions), - // such as "1.2" (including a subminor version, such as "1.2.29"), or the - // ["preview" - // version](https://cloud.google.com/dataproc/docs/concepts/versioning/dataproc-versions#other_versions). - // If unspecified, it defaults to the latest Debian version. - string image_version = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The properties to set on daemon config files. - // - // Property keys are specified in `prefix:property` format, for example - // `core:hadoop.tmp.dir`. 
The following are supported prefixes - // and their mappings: - // - // * capacity-scheduler: `capacity-scheduler.xml` - // * core: `core-site.xml` - // * distcp: `distcp-default.xml` - // * hdfs: `hdfs-site.xml` - // * hive: `hive-site.xml` - // * mapred: `mapred-site.xml` - // * pig: `pig.properties` - // * spark: `spark-defaults.conf` - // * yarn: `yarn-site.xml` - // - // For more information, see [Cluster - // properties](https://cloud.google.com/dataproc/docs/concepts/cluster-properties). - map properties = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The set of components to activate on the cluster. - repeated Component optional_components = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// Specifies the cluster auto-delete schedule configuration. -message LifecycleConfig { - // Optional. The duration to keep the cluster alive while idling (when no jobs - // are running). Passing this threshold will cause the cluster to be - // deleted. Minimum value is 5 minutes; maximum value is 14 days (see JSON - // representation of - // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). - google.protobuf.Duration idle_delete_ttl = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Either the exact time the cluster should be deleted at or - // the cluster maximum age. - oneof ttl { - // Optional. The time when cluster will be auto-deleted (see JSON representation of - // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)). - google.protobuf.Timestamp auto_delete_time = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The lifetime duration of cluster. The cluster will be - // auto-deleted at the end of this period. Minimum value is 10 minutes; - // maximum value is 14 days (see JSON representation of - // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). 
- google.protobuf.Duration auto_delete_ttl = 3 [(google.api.field_behavior) = OPTIONAL]; - } - - // Output only. The time when cluster became idle (most recent job finished) - // and became eligible for deletion due to idleness (see JSON representation - // of - // [Timestamp](https://developers.google.com/protocol-buffers/docs/proto3#json)). - google.protobuf.Timestamp idle_start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Specifies a Metastore configuration. -message MetastoreConfig { - // Required. Resource name of an existing Dataproc Metastore service. - // - // Example: - // - // * `projects/[project_id]/locations/[dataproc_region]/services/[service-name]` - string dataproc_metastore_service = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "metastore.googleapis.com/Service" - } - ]; -} - -// Contains cluster daemon metrics, such as HDFS and YARN stats. -// -// **Beta Feature**: This report is available for testing purposes only. It may -// be changed before final release. -message ClusterMetrics { - // The HDFS metrics. - map hdfs_metrics = 1; - - // The YARN metrics. - map yarn_metrics = 2; -} - -// A request to create a cluster. -message CreateClusterRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster to create. - Cluster cluster = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A unique ID used to identify the request. 
If the server receives two - // [CreateClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateClusterRequest)s - // with the same id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the backend - // is returned. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The ID must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Failure action when primary worker creation fails. - FailureAction action_on_failed_primary_workers = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to update a cluster. -message UpdateClusterRequest { - // Required. The ID of the Google Cloud Platform project the - // cluster belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 5 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The changes to the cluster. - Cluster cluster = 3 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Timeout for graceful YARN decomissioning. Graceful - // decommissioning allows removing nodes from the cluster without - // interrupting jobs in progress. Timeout specifies how long to wait for jobs - // in progress to finish before forcefully removing nodes (and potentially - // interrupting jobs). Default timeout is 0 (for forceful decommission), and - // the maximum allowed timeout is 1 day. (see JSON representation of - // [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). 
- // - // Only supported on Dataproc image versions 1.2 and higher. - google.protobuf.Duration graceful_decommission_timeout = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Specifies the path, relative to `Cluster`, of - // the field to update. For example, to change the number of workers - // in a cluster to 5, the `update_mask` parameter would be - // specified as `config.worker_config.num_instances`, - // and the `PATCH` request body would specify the new value, as follows: - // - // { - // "config":{ - // "workerConfig":{ - // "numInstances":"5" - // } - // } - // } - // Similarly, to change the number of preemptible workers in a cluster to 5, - // the `update_mask` parameter would be - // `config.secondary_worker_config.num_instances`, and the `PATCH` request - // body would be set as follows: - // - // { - // "config":{ - // "secondaryWorkerConfig":{ - // "numInstances":"5" - // } - // } - // } - // Note: Currently, only the following fields can be updated: - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - // - //
MaskPurpose
labelsUpdate labels
config.worker_config.num_instancesResize primary worker group
config.secondary_worker_config.num_instancesResize secondary worker group
config.autoscaling_config.policy_uriUse, stop using, or - // change autoscaling policies
- google.protobuf.FieldMask update_mask = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A unique ID used to identify the request. If the server - // receives two - // [UpdateClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.UpdateClusterRequest)s - // with the same id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the - // backend is returned. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The ID must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 7 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to stop a cluster. -message StopClusterRequest { - // Required. The ID of the Google Cloud Platform project the - // cluster belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 3 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Specifying the `cluster_uuid` means the RPC will fail - // (with error NOT_FOUND) if a cluster with the specified UUID does not exist. - string cluster_uuid = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A unique ID used to identify the request. If the server - // receives two - // [StopClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StopClusterRequest)s - // with the same id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the - // backend is returned. 
- // - // Recommendation: Set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The ID must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to start a cluster. -message StartClusterRequest { - // Required. The ID of the Google Cloud Platform project the - // cluster belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 3 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Specifying the `cluster_uuid` means the RPC will fail - // (with error NOT_FOUND) if a cluster with the specified UUID does not exist. - string cluster_uuid = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A unique ID used to identify the request. If the server - // receives two - // [StartClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StartClusterRequest)s - // with the same id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the - // backend is returned. - // - // Recommendation: Set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The ID must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to delete a cluster. -message DeleteClusterRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. 
- string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Specifying the `cluster_uuid` means the RPC should fail - // (with error NOT_FOUND) if cluster with specified UUID does not exist. - string cluster_uuid = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A unique ID used to identify the request. If the server - // receives two - // [DeleteClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.DeleteClusterRequest)s - // with the same id, then the second request will be ignored and the - // first [google.longrunning.Operation][google.longrunning.Operation] created and stored in the - // backend is returned. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The ID must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; -} - -// Request to get the resource representation for a cluster in a project. -message GetClusterRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to list the clusters in a project. -message ListClustersRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. 
- string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 4 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A filter constraining the clusters to list. Filters are - // case-sensitive and have the following syntax: - // - // field = value [AND [field = value]] ... - // - // where **field** is one of `status.state`, `clusterName`, or `labels.[KEY]`, - // and `[KEY]` is a label key. **value** can be `*` to match all values. - // `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, - // `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` - // contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` - // contains the `DELETING` and `ERROR` states. - // `clusterName` is the name of the cluster provided at creation time. - // Only the logical `AND` operator is supported; space-separated items are - // treated as having an implicit `AND` operator. - // - // Example filter: - // - // status.state = ACTIVE AND clusterName = mycluster - // AND labels.env = staging AND labels.starred = * - string filter = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The standard List page size. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The standard List page token. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// The list of all clusters in a project. -message ListClustersResponse { - // Output only. The clusters in the project. - repeated Cluster clusters = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. This token is included in the response if there are more - // results to fetch. To fetch additional results, provide this value as the - // `page_token` in a subsequent `ListClustersRequest`. - string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to collect cluster diagnostic information. 
-message DiagnoseClusterRequest { - // Required. The ID of the Google Cloud Platform project that the cluster - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster name. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// The location of diagnostic output. -message DiagnoseClusterResults { - // Output only. The Cloud Storage URI of the diagnostic output. - // The output report is a plain text file with a summary of collected - // diagnostics. - string output_uri = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Reservation Affinity for consuming Zonal reservation. -message ReservationAffinity { - // Indicates whether to consume capacity from an reservation or not. - enum Type { - TYPE_UNSPECIFIED = 0; - - // Do not consume from any allocated capacity. - NO_RESERVATION = 1; - - // Consume any reservation available. - ANY_RESERVATION = 2; - - // Must consume from a specific reservation. Must specify key value fields - // for specifying the reservations. - SPECIFIC_RESERVATION = 3; - } - - // Optional. Type of reservation to consume - Type consume_reservation_type = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Corresponds to the label key of reservation resource. - string key = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Corresponds to the label values of reservation resource. 
- repeated string values = 3 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/jobs.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/jobs.proto deleted file mode 100644 index 823a12cf..00000000 --- a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/jobs.proto +++ /dev/null @@ -1,924 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.dataproc.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/longrunning/operations.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/field_mask.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "JobsProto"; -option java_package = "com.google.cloud.dataproc.v1"; - -// The JobController provides methods to manage jobs. -service JobController { - option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Submits a job to a cluster. 
- rpc SubmitJob(SubmitJobRequest) returns (Job) { - option (google.api.http) = { - post: "/v1/projects/{project_id}/regions/{region}/jobs:submit" - body: "*" - }; - option (google.api.method_signature) = "project_id,region,job"; - } - - // Submits job to a cluster. - rpc SubmitJobAsOperation(SubmitJobRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1/projects/{project_id}/regions/{region}/jobs:submitAsOperation" - body: "*" - }; - option (google.api.method_signature) = "project_id, region, job"; - option (google.longrunning.operation_info) = { - response_type: "Job" - metadata_type: "JobMetadata" - }; - } - - // Gets the resource representation for a job in a project. - rpc GetJob(GetJobRequest) returns (Job) { - option (google.api.http) = { - get: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" - }; - option (google.api.method_signature) = "project_id,region,job_id"; - } - - // Lists regions/{region}/jobs in a project. - rpc ListJobs(ListJobsRequest) returns (ListJobsResponse) { - option (google.api.http) = { - get: "/v1/projects/{project_id}/regions/{region}/jobs" - }; - option (google.api.method_signature) = "project_id,region"; - option (google.api.method_signature) = "project_id,region,filter"; - } - - // Updates a job in a project. - rpc UpdateJob(UpdateJobRequest) returns (Job) { - option (google.api.http) = { - patch: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" - body: "job" - }; - } - - // Starts a job cancellation request. To access the job resource - // after cancellation, call - // [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) - // or - // [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). 
- rpc CancelJob(CancelJobRequest) returns (Job) { - option (google.api.http) = { - post: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}:cancel" - body: "*" - }; - option (google.api.method_signature) = "project_id,region,job_id"; - } - - // Deletes the job from the project. If the job is active, the delete fails, - // and the response returns `FAILED_PRECONDITION`. - rpc DeleteJob(DeleteJobRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/projects/{project_id}/regions/{region}/jobs/{job_id}" - }; - option (google.api.method_signature) = "project_id,region,job_id"; - } -} - -// The runtime logging config of the job. -message LoggingConfig { - // The Log4j level for job execution. When running an - // [Apache Hive](https://hive.apache.org/) job, Cloud - // Dataproc configures the Hive client to an equivalent verbosity level. - enum Level { - // Level is unspecified. Use default level for log4j. - LEVEL_UNSPECIFIED = 0; - - // Use ALL level for log4j. - ALL = 1; - - // Use TRACE level for log4j. - TRACE = 2; - - // Use DEBUG level for log4j. - DEBUG = 3; - - // Use INFO level for log4j. - INFO = 4; - - // Use WARN level for log4j. - WARN = 5; - - // Use ERROR level for log4j. - ERROR = 6; - - // Use FATAL level for log4j. - FATAL = 7; - - // Turn off log4j. - OFF = 8; - } - - // The per-package log levels for the driver. This may include - // "root" package name to configure rootLogger. - // Examples: - // 'com.google = FATAL', 'root = INFO', 'org.apache = DEBUG' - map driver_log_levels = 2; -} - -// A Dataproc job for running -// [Apache Hadoop -// MapReduce](https://hadoop.apache.org/docs/current/hadoop-mapreduce-client/hadoop-mapreduce-client-core/MapReduceTutorial.html) -// jobs on [Apache Hadoop -// YARN](https://hadoop.apache.org/docs/r2.7.1/hadoop-yarn/hadoop-yarn-site/YARN.html). -message HadoopJob { - // Required. Indicates the location of the driver's main class. 
Specify - // either the jar file that contains the main class or the main class name. - // To specify both, add the jar file to `jar_file_uris`, and then specify - // the main class name in this property. - oneof driver { - // The HCFS URI of the jar file containing the main class. - // Examples: - // 'gs://foo-bucket/analytics-binaries/extract-useful-metrics-mr.jar' - // 'hdfs:/tmp/test-samples/custom-wordcount.jar' - // 'file:///home/usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar' - string main_jar_file_uri = 1; - - // The name of the driver's main class. The jar file containing the class - // must be in the default CLASSPATH or specified in `jar_file_uris`. - string main_class = 2; - } - - // Optional. The arguments to pass to the driver. Do not - // include arguments, such as `-libjars` or `-Dfoo=bar`, that can be set as - // job properties, since a collision may occur that causes an incorrect job - // submission. - repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Jar file URIs to add to the CLASSPATHs of the - // Hadoop driver and tasks. - repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS (Hadoop Compatible Filesystem) URIs of files to be copied - // to the working directory of Hadoop drivers and distributed tasks. Useful - // for naively parallel tasks. - repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of archives to be extracted in the working directory of - // Hadoop drivers and tasks. Supported file types: - // .jar, .tar, .tar.gz, .tgz, or .zip. - repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names to values, used to configure Hadoop. - // Properties that conflict with values set by the Dataproc API may be - // overwritten. Can include properties set in /etc/hadoop/conf/*-site and - // classes in user code. 
- map properties = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; -} - -// A Dataproc job for running [Apache Spark](http://spark.apache.org/) -// applications on YARN. -message SparkJob { - // Required. The specification of the main method to call to drive the job. - // Specify either the jar file that contains the main class or the main class - // name. To pass both a main jar and a main class in that jar, add the jar to - // `CommonJob.jar_file_uris`, and then specify the main class name in - // `main_class`. - oneof driver { - // The HCFS URI of the jar file that contains the main class. - string main_jar_file_uri = 1; - - // The name of the driver's main class. The jar file that contains the class - // must be in the default CLASSPATH or specified in `jar_file_uris`. - string main_class = 2; - } - - // Optional. The arguments to pass to the driver. Do not include arguments, - // such as `--conf`, that can be set as job properties, since a collision may - // occur that causes an incorrect job submission. - repeated string args = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the - // Spark driver and tasks. - repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of files to be placed in the working directory of - // each executor. Useful for naively parallel tasks. - repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of archives to be extracted into the working directory - // of each executor. Supported file types: - // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names to values, used to configure Spark. 
- // Properties that conflict with values set by the Dataproc API may be - // overwritten. Can include properties set in - // /etc/spark/conf/spark-defaults.conf and classes in user code. - map properties = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; -} - -// A Dataproc job for running -// [Apache -// PySpark](https://spark.apache.org/docs/0.9.0/python-programming-guide.html) -// applications on YARN. -message PySparkJob { - // Required. The HCFS URI of the main Python file to use as the driver. Must - // be a .py file. - string main_python_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The arguments to pass to the driver. Do not include arguments, - // such as `--conf`, that can be set as job properties, since a collision may - // occur that causes an incorrect job submission. - repeated string args = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS file URIs of Python files to pass to the PySpark - // framework. Supported file types: .py, .egg, and .zip. - repeated string python_file_uris = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of jar files to add to the CLASSPATHs of the - // Python driver and tasks. - repeated string jar_file_uris = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of files to be placed in the working directory of - // each executor. Useful for naively parallel tasks. - repeated string file_uris = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of archives to be extracted into the working directory - // of each executor. Supported file types: - // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names to values, used to configure PySpark. 
- // Properties that conflict with values set by the Dataproc API may be - // overwritten. Can include properties set in - // /etc/spark/conf/spark-defaults.conf and classes in user code. - map properties = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 8 [(google.api.field_behavior) = OPTIONAL]; -} - -// A list of queries to run on a cluster. -message QueryList { - // Required. The queries to execute. You do not need to end a query expression - // with a semicolon. Multiple queries can be specified in one - // string by separating each with a semicolon. Here is an example of a - // Dataproc API snippet that uses a QueryList to specify a HiveJob: - // - // "hiveJob": { - // "queryList": { - // "queries": [ - // "query1", - // "query2", - // "query3;query4", - // ] - // } - // } - repeated string queries = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// A Dataproc job for running [Apache Hive](https://hive.apache.org/) -// queries on YARN. -message HiveJob { - // Required. The sequence of Hive queries to execute, specified as either - // an HCFS file URI or a list of queries. - oneof queries { - // The HCFS URI of the script that contains Hive queries. - string query_file_uri = 1; - - // A list of queries. - QueryList query_list = 2; - } - - // Optional. Whether to continue executing queries if a query fails. - // The default value is `false`. Setting to `true` can be useful when - // executing independent parallel queries. - bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Mapping of query variable names to values (equivalent to the - // Hive command: `SET name="value";`). - map script_variables = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names and values, used to configure Hive. - // Properties that conflict with values set by the Dataproc API may be - // overwritten. 
Can include properties set in /etc/hadoop/conf/*-site.xml, - // /etc/hive/conf/hive-site.xml, and classes in user code. - map properties = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of jar files to add to the CLASSPATH of the - // Hive server and Hadoop MapReduce (MR) tasks. Can contain Hive SerDes - // and UDFs. - repeated string jar_file_uris = 6 [(google.api.field_behavior) = OPTIONAL]; -} - -// A Dataproc job for running [Apache Spark -// SQL](http://spark.apache.org/sql/) queries. -message SparkSqlJob { - // Required. The sequence of Spark SQL queries to execute, specified as - // either an HCFS file URI or as a list of queries. - oneof queries { - // The HCFS URI of the script that contains SQL queries. - string query_file_uri = 1; - - // A list of queries. - QueryList query_list = 2; - } - - // Optional. Mapping of query variable names to values (equivalent to the - // Spark SQL command: SET `name="value";`). - map script_variables = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names to values, used to configure - // Spark SQL's SparkConf. Properties that conflict with values set by the - // Dataproc API may be overwritten. - map properties = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of jar files to be added to the Spark CLASSPATH. - repeated string jar_file_uris = 56 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 6 [(google.api.field_behavior) = OPTIONAL]; -} - -// A Dataproc job for running [Apache Pig](https://pig.apache.org/) -// queries on YARN. -message PigJob { - // Required. The sequence of Pig queries to execute, specified as an HCFS - // file URI or a list of queries. - oneof queries { - // The HCFS URI of the script that contains the Pig queries. - string query_file_uri = 1; - - // A list of queries. - QueryList query_list = 2; - } - - // Optional. 
Whether to continue executing queries if a query fails. - // The default value is `false`. Setting to `true` can be useful when - // executing independent parallel queries. - bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Mapping of query variable names to values (equivalent to the Pig - // command: `name=[value]`). - map script_variables = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names to values, used to configure Pig. - // Properties that conflict with values set by the Dataproc API may be - // overwritten. Can include properties set in /etc/hadoop/conf/*-site.xml, - // /etc/pig/conf/pig.properties, and classes in user code. - map properties = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of jar files to add to the CLASSPATH of - // the Pig Client and Hadoop MapReduce (MR) tasks. Can contain Pig UDFs. - repeated string jar_file_uris = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 7 [(google.api.field_behavior) = OPTIONAL]; -} - -// A Dataproc job for running -// [Apache SparkR](https://spark.apache.org/docs/latest/sparkr.html) -// applications on YARN. -message SparkRJob { - // Required. The HCFS URI of the main R file to use as the driver. - // Must be a .R file. - string main_r_file_uri = 1 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The arguments to pass to the driver. Do not include arguments, - // such as `--conf`, that can be set as job properties, since a collision may - // occur that causes an incorrect job submission. - repeated string args = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. HCFS URIs of files to be placed in the working directory of - // each executor. Useful for naively parallel tasks. - repeated string file_uris = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. 
HCFS URIs of archives to be extracted into the working directory - // of each executor. Supported file types: - // .jar, .tar, .tar.gz, .tgz, and .zip. - repeated string archive_uris = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names to values, used to configure SparkR. - // Properties that conflict with values set by the Dataproc API may be - // overwritten. Can include properties set in - // /etc/spark/conf/spark-defaults.conf and classes in user code. - map properties = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 6 [(google.api.field_behavior) = OPTIONAL]; -} - -// A Dataproc job for running [Presto](https://prestosql.io/) queries. -// **IMPORTANT**: The [Dataproc Presto Optional -// Component](https://cloud.google.com/dataproc/docs/concepts/components/presto) -// must be enabled when the cluster is created to submit a Presto job to the -// cluster. -message PrestoJob { - // Required. The sequence of Presto queries to execute, specified as - // either an HCFS file URI or as a list of queries. - oneof queries { - // The HCFS URI of the script that contains SQL queries. - string query_file_uri = 1; - - // A list of queries. - QueryList query_list = 2; - } - - // Optional. Whether to continue executing queries if a query fails. - // The default value is `false`. Setting to `true` can be useful when - // executing independent parallel queries. - bool continue_on_failure = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The format in which query output will be displayed. See the - // Presto documentation for supported output formats - string output_format = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Presto client tags to attach to this query - repeated string client_tags = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names to values. 
Used to set Presto - // [session properties](https://prestodb.io/docs/current/sql/set-session.html) - // Equivalent to using the --session flag in the Presto CLI - map properties = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The runtime log config for job execution. - LoggingConfig logging_config = 7 [(google.api.field_behavior) = OPTIONAL]; -} - -// Dataproc job config. -message JobPlacement { - // Required. The name of the cluster where the job will be submitted. - string cluster_name = 1 [(google.api.field_behavior) = REQUIRED]; - - // Output only. A cluster UUID generated by the Dataproc service when - // the job is submitted. - string cluster_uuid = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. Cluster labels to identify a cluster where the job will be submitted. - map cluster_labels = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// Dataproc job status. -message JobStatus { - // The job state. - enum State { - // The job state is unknown. - STATE_UNSPECIFIED = 0; - - // The job is pending; it has been submitted, but is not yet running. - PENDING = 1; - - // Job has been received by the service and completed initial setup; - // it will soon be submitted to the cluster. - SETUP_DONE = 8; - - // The job is running on the cluster. - RUNNING = 2; - - // A CancelJob request has been received, but is pending. - CANCEL_PENDING = 3; - - // Transient in-flight resources have been canceled, and the request to - // cancel the running job has been issued to the cluster. - CANCEL_STARTED = 7; - - // The job cancellation was successful. - CANCELLED = 4; - - // The job has completed successfully. - DONE = 5; - - // The job has completed, but encountered an error. - ERROR = 6; - - // Job attempt has failed. The detail field contains failure details for - // this attempt. - // - // Applies to restartable jobs only. - ATTEMPT_FAILURE = 9; - } - - // The job substate. - enum Substate { - // The job substate is unknown. 
- UNSPECIFIED = 0; - - // The Job is submitted to the agent. - // - // Applies to RUNNING state. - SUBMITTED = 1; - - // The Job has been received and is awaiting execution (it may be waiting - // for a condition to be met). See the "details" field for the reason for - // the delay. - // - // Applies to RUNNING state. - QUEUED = 2; - - // The agent-reported status is out of date, which may be caused by a - // loss of communication between the agent and Dataproc. If the - // agent does not send a timely update, the job will fail. - // - // Applies to RUNNING state. - STALE_STATUS = 3; - } - - // Output only. A state message specifying the overall job state. - State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. Output only. Job state details, such as an error - // description if the state is ERROR. - string details = 2 [ - (google.api.field_behavior) = OUTPUT_ONLY, - (google.api.field_behavior) = OPTIONAL - ]; - - // Output only. The time when this state was entered. - google.protobuf.Timestamp state_start_time = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Additional state information, which includes - // status reported by the agent. - Substate substate = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Encapsulates the full scoping used to reference a job. -message JobReference { - // Optional. The ID of the Google Cloud Platform project that the job belongs to. If - // specified, must match the request project ID. - string project_id = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The job ID, which must be unique within the project. - // - // The ID must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), or hyphens (-). The maximum length is 100 characters. - // - // If not specified by the caller, the job ID will be provided by the server. - string job_id = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A YARN application created by a job. 
Application information is a subset of -// org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto. -// -// **Beta Feature**: This report is available for testing purposes only. It may -// be changed before final release. -message YarnApplication { - // The application state, corresponding to - // YarnProtos.YarnApplicationStateProto. - enum State { - // Status is unspecified. - STATE_UNSPECIFIED = 0; - - // Status is NEW. - NEW = 1; - - // Status is NEW_SAVING. - NEW_SAVING = 2; - - // Status is SUBMITTED. - SUBMITTED = 3; - - // Status is ACCEPTED. - ACCEPTED = 4; - - // Status is RUNNING. - RUNNING = 5; - - // Status is FINISHED. - FINISHED = 6; - - // Status is FAILED. - FAILED = 7; - - // Status is KILLED. - KILLED = 8; - } - - // Required. The application name. - string name = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The application state. - State state = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The numerical progress of the application, from 1 to 100. - float progress = 3 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The HTTP URL of the ApplicationMaster, HistoryServer, or - // TimelineServer that provides application-specific information. The URL uses - // the internal hostname, and requires a proxy server for resolution and, - // possibly, access. - string tracking_url = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// A Dataproc job resource. -message Job { - // Optional. The fully qualified reference to the job, which can be used to - // obtain the equivalent REST path of the job resource. If this property - // is not specified when a job is created, the server generates a - // job_id. - JobReference reference = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. Job information, including how, when, and where to - // run the job. - JobPlacement placement = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The application/framework-specific portion of the job. 
- oneof type_job { - // Optional. Job is a Hadoop job. - HadoopJob hadoop_job = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a Spark job. - SparkJob spark_job = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a PySpark job. - PySparkJob pyspark_job = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a Hive job. - HiveJob hive_job = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a Pig job. - PigJob pig_job = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a SparkR job. - SparkRJob spark_r_job = 21 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a SparkSql job. - SparkSqlJob spark_sql_job = 12 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a Presto job. - PrestoJob presto_job = 23 [(google.api.field_behavior) = OPTIONAL]; - } - - // Output only. The job status. Additional application-specific - // status information may be contained in the type_job - // and yarn_applications fields. - JobStatus status = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The previous job status. - repeated JobStatus status_history = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The collection of YARN applications spun up by this job. - // - // **Beta** Feature: This report is available for testing purposes only. It - // may be changed before final release. - repeated YarnApplication yarn_applications = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. A URI pointing to the location of the stdout of the job's - // driver program. - string driver_output_resource_uri = 17 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. If present, the location of miscellaneous control files - // which may be used as part of job setup and handling. If not present, - // control files may be placed in the same location as `driver_output_uri`. 
- string driver_control_files_uri = 15 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. The labels to associate with this job. - // Label **keys** must contain 1 to 63 characters, and must conform to - // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). - // Label **values** may be empty, but, if present, must contain 1 to 63 - // characters, and must conform to [RFC - // 1035](https://www.ietf.org/rfc/rfc1035.txt). No more than 32 labels can be - // associated with a job. - map labels = 18 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job scheduling configuration. - JobScheduling scheduling = 20 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. A UUID that uniquely identifies a job within the project - // over time. This is in contrast to a user-settable reference.job_id that - // may be reused over time. - string job_uuid = 22 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Indicates whether the job is completed. If the value is `false`, - // the job is still in progress. If `true`, the job is completed, and - // `status.state` field will indicate if it was successful, failed, - // or cancelled. - bool done = 24 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Job scheduling options. -message JobScheduling { - // Optional. Maximum number of times per hour a driver may be restarted as - // a result of driver exiting with non-zero code before job is - // reported failed. - // - // A job may be reported as thrashing if driver exits with non-zero code - // 4 times within 10 minute window. - // - // Maximum value is 10. - // - // **Note:** Currently, this restartable job option is - // not supported in Dataproc - // [workflow - // template](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#adding_jobs_to_a_template) - // jobs. - int32 max_failures_per_hour = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. 
Maximum number of times in total a driver may be restarted as a result of - // driver exiting with non-zero code before job is reported failed. - // Maximum value is 240. - // - // **Note:** Currently, this restartable job option is - // not supported in Dataproc - // [workflow - // template](https://cloud.google.com/dataproc/docs/concepts/workflows/using-workflows#adding_jobs_to_a_template) - // jobs. - int32 max_failures_total = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to submit a job. -message SubmitJobRequest { - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The job resource. - Job job = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A unique id used to identify the request. If the server - // receives two - // [SubmitJobRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.SubmitJobRequest)s - // with the same id, then the second request will be ignored and the - // first [Job][google.cloud.dataproc.v1.Job] created and stored in the backend - // is returned. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// Job Operation metadata. -message JobMetadata { - // Output only. The job id. - string job_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Most recent job status. - JobStatus status = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Operation type. 
- string operation_type = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Job submission time. - google.protobuf.Timestamp start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to get the resource representation for a job in a project. -message GetJobRequest { - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The job ID. - string job_id = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to list jobs in a project. -message ListJobsRequest { - // A matcher that specifies categories of job states. - enum JobStateMatcher { - // Match all jobs, regardless of state. - ALL = 0; - - // Only match jobs in non-terminal states: PENDING, RUNNING, or - // CANCEL_PENDING. - ACTIVE = 1; - - // Only match jobs in terminal states: CANCELLED, DONE, or ERROR. - NON_ACTIVE = 2; - } - - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 6 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The number of results to return in each response. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The page token, returned by a previous call, to request the - // next page of results. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. If set, the returned jobs list includes only jobs that were - // submitted to the named cluster. - string cluster_name = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Specifies enumerated categories of jobs to list. - // (default = match ALL jobs). 
- // - // If `filter` is provided, `jobStateMatcher` will be ignored. - JobStateMatcher job_state_matcher = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A filter constraining the jobs to list. Filters are - // case-sensitive and have the following syntax: - // - // [field = value] AND [field [= value]] ... - // - // where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label - // key. **value** can be `*` to match all values. - // `status.state` can be either `ACTIVE` or `NON_ACTIVE`. - // Only the logical `AND` operator is supported; space-separated items are - // treated as having an implicit `AND` operator. - // - // Example filter: - // - // status.state = ACTIVE AND labels.env = staging AND labels.starred = * - string filter = 7 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to update a job. -message UpdateJobRequest { - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The job ID. - string job_id = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The changes to the job. - Job job = 4 [(google.api.field_behavior) = REQUIRED]; - - // Required. Specifies the path, relative to Job, of - // the field to update. For example, to update the labels of a Job the - // update_mask parameter would be specified as - // labels, and the `PATCH` request body would specify the new - // value. Note: Currently, labels is the only - // field that can be updated. - google.protobuf.FieldMask update_mask = 5 [(google.api.field_behavior) = REQUIRED]; -} - -// A list of jobs in a project. -message ListJobsResponse { - // Output only. Jobs list. - repeated Job jobs = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. 
This token is included in the response if there are more results - // to fetch. To fetch additional results, provide this value as the - // `page_token` in a subsequent ListJobsRequest. - string next_page_token = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to cancel a job. -message CancelJobRequest { - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The job ID. - string job_id = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to delete a job. -message DeleteJobRequest { - // Required. The ID of the Google Cloud Platform project that the job - // belongs to. - string project_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Dataproc region in which to handle the request. - string region = 3 [(google.api.field_behavior) = REQUIRED]; - - // Required. The job ID. - string job_id = 2 [(google.api.field_behavior) = REQUIRED]; -} diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/operations.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/operations.proto deleted file mode 100644 index e12bd299..00000000 --- a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/operations.proto +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.dataproc.v1; - -import "google/api/field_behavior.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "OperationsProto"; -option java_package = "com.google.cloud.dataproc.v1"; - -// Metadata describing the Batch operation. -message BatchOperationMetadata { - // Operation type for Batch resources - enum BatchOperationType { - // Batch operation type is unknown. - BATCH_OPERATION_TYPE_UNSPECIFIED = 0; - - // Batch operation type. - BATCH = 1; - } - - // Name of the batch for the operation. - string batch = 1; - - // Batch UUID for the operation. - string batch_uuid = 2; - - // The time when the operation was created. - google.protobuf.Timestamp create_time = 3; - - // The time when the operation finished. - google.protobuf.Timestamp done_time = 4; - - // The operation type. - BatchOperationType operation_type = 6; - - // Short description of the operation. - string description = 7; - - // Labels associated with the operation. - map labels = 8; - - // Warnings encountered during operation execution. - repeated string warnings = 9; -} - -// The status of the operation. -message ClusterOperationStatus { - // The operation state. - enum State { - // Unused. - UNKNOWN = 0; - - // The operation has been created. - PENDING = 1; - - // The operation is running. - RUNNING = 2; - - // The operation is done; either cancelled or completed. - DONE = 3; - } - - // Output only. A message containing the operation state. - State state = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. A message containing the detailed operation state. - string inner_state = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. 
A message containing any operation metadata details. - string details = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The time this state was entered. - google.protobuf.Timestamp state_start_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// Metadata describing the operation. -message ClusterOperationMetadata { - // Output only. Name of the cluster for the operation. - string cluster_name = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Cluster UUID for the operation. - string cluster_uuid = 8 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Current operation status. - ClusterOperationStatus status = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The previous operation status. - repeated ClusterOperationStatus status_history = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The operation type. - string operation_type = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Short description of operation. - string description = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Labels associated with the operation - map labels = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Errors encountered during operation execution. - repeated string warnings = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; -} diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/shared.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/shared.proto deleted file mode 100644 index 18796915..00000000 --- a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/shared.proto +++ /dev/null @@ -1,341 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -syntax = "proto3"; - -package google.cloud.dataproc.v1; - -import "google/api/field_behavior.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "SharedProto"; -option java_package = "com.google.cloud.dataproc.v1"; - -// Runtime configuration for a workload. -message RuntimeConfig { - // Optional. Version of the batch runtime. - string version = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Optional custom container image for the job runtime environment. If - // not specified, a default container image will be used. - string container_image = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A mapping of property names to values, which are used to configure workload - // execution. - map properties = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// Environment configuration for a workload. -message EnvironmentConfig { - // Optional. Execution configuration for a workload. - ExecutionConfig execution_config = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Peripherals configuration that workload has access to. - PeripheralsConfig peripherals_config = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// Execution configuration for a workload. -message ExecutionConfig { - // Optional. Service account that used to execute workload. - string service_account = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Network configuration for workload execution. - oneof network { - // Optional. 
Network URI to connect workload to. - string network_uri = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Subnetwork URI to connect workload to. - string subnetwork_uri = 5 [(google.api.field_behavior) = OPTIONAL]; - } - - // Optional. Tags used for network traffic control. - repeated string network_tags = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The Cloud KMS key to use for encryption. - string kms_key = 7 [(google.api.field_behavior) = OPTIONAL]; -} - -// Spark History Server configuration for the workload. -message SparkHistoryServerConfig { - // Optional. Resource name of an existing Dataproc Cluster to act as a Spark History - // Server for the workload. - // - // Example: - // - // * `projects/[project_id]/regions/[region]/clusters/[cluster_name]` - string dataproc_cluster = 1 [ - (google.api.field_behavior) = OPTIONAL - ]; -} - -// Auxiliary services configuration for a workload. -message PeripheralsConfig { - // Optional. Resource name of an existing Dataproc Metastore service. - // - // Example: - // - // * `projects/[project_id]/locations/[region]/services/[service_id]` - string metastore_service = 1 [ - (google.api.field_behavior) = OPTIONAL - ]; - - // Optional. The Spark History Server configuration for the workload. - SparkHistoryServerConfig spark_history_server_config = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// Runtime information about workload execution. -message RuntimeInfo { - // Output only. Map of remote access endpoints (such as web interfaces and APIs) to their - // URIs. - map endpoints = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. A URI pointing to the location of the stdout and stderr of the workload. - string output_uri = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. A URI pointing to the location of the diagnostics tarball. - string diagnostic_output_uri = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// The cluster's GKE config. 
-message GkeClusterConfig { - // Optional. A target GKE cluster to deploy to. It must be in the same project and - // region as the Dataproc cluster (the GKE cluster can be zonal or regional). - // Format: 'projects/{project}/locations/{location}/clusters/{cluster_id}' - string gke_cluster_target = 2 [ - (google.api.field_behavior) = OPTIONAL - ]; - - // Optional. GKE NodePools where workloads will be scheduled. At least one node pool - // must be assigned the 'default' role. Each role can be given to only a - // single NodePoolTarget. All NodePools must have the same location settings. - // If a nodePoolTarget is not specified, Dataproc constructs a default - // nodePoolTarget. - repeated GkeNodePoolTarget node_pool_target = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// The configuration for running the Dataproc cluster on Kubernetes. -message KubernetesClusterConfig { - // Optional. A namespace within the Kubernetes cluster to deploy into. If this namespace - // does not exist, it is created. If it exists, Dataproc - // verifies that another Dataproc VirtualCluster is not installed - // into it. If not specified, the name of the Dataproc Cluster is used. - string kubernetes_namespace = 1 [(google.api.field_behavior) = OPTIONAL]; - - oneof config { - // Required. The configuration for running the Dataproc cluster on GKE. - GkeClusterConfig gke_cluster_config = 2 [(google.api.field_behavior) = REQUIRED]; - } - - // Optional. The software configuration for this Dataproc cluster running on Kubernetes. - KubernetesSoftwareConfig kubernetes_software_config = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// The software configuration for this Dataproc cluster running on Kubernetes. -message KubernetesSoftwareConfig { - // The components that should be installed in this Dataproc cluster. The key - // must be a string from the KubernetesComponent enumeration. The value is - // the version of the software to be installed. 
- // At least one entry must be specified. - map component_version = 1; - - // The properties to set on daemon config files. - // - // Property keys are specified in `prefix:property` format, for example - // `spark:spark.kubernetes.container.image`. The following are supported - // prefixes and their mappings: - // - // * spark: `spark-defaults.conf` - // - // For more information, see [Cluster - // properties](https://cloud.google.com/dataproc/docs/concepts/cluster-properties). - map properties = 2; -} - -// GKE NodePools that Dataproc workloads run on. -message GkeNodePoolTarget { - // `Role` specifies whose tasks will run on the NodePool. The roles can be - // specific to workloads. Exactly one GkeNodePoolTarget within the - // VirtualCluster must have 'default' role, which is used to run all workloads - // that are not associated with a NodePool. - enum Role { - // Role is unspecified. - ROLE_UNSPECIFIED = 0; - - // Any roles that are not directly assigned to a NodePool run on the - // `default` role's NodePool. - DEFAULT = 1; - - // Run controllers and webhooks. - CONTROLLER = 2; - - // Run spark driver. - SPARK_DRIVER = 3; - - // Run spark executors. - SPARK_EXECUTOR = 4; - } - - // Required. The target GKE NodePool. - // Format: - // 'projects/{project}/locations/{location}/clusters/{cluster}/nodePools/{node_pool}' - string node_pool = 1 [ - (google.api.field_behavior) = REQUIRED - ]; - - // Required. The types of role for a GKE NodePool - repeated Role roles = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The configuration for the GKE NodePool. - // - // If specified, Dataproc attempts to create a NodePool with the - // specified shape. If one with the same name already exists, it is - // verified against all specified fields. If a field differs, the - // virtual cluster creation will fail. - // - // If omitted, any NodePool with the specified name is used. 
If a - // NodePool with the specified name does not exist, Dataproc create a NodePool - // with default values. - GkeNodePoolConfig node_pool_config = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// The configuration of a GKE NodePool used by a [Dataproc-on-GKE -// cluster](https://cloud.google.com/dataproc/docs/concepts/jobs/dataproc-gke#create-a-dataproc-on-gke-cluster). -message GkeNodePoolConfig { - // Parameters that describe cluster nodes. - message GkeNodeConfig { - // Optional. The name of a Compute Engine [machine - // type](https://cloud.google.com/compute/docs/machine-types). - string machine_type = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Whether the nodes are created as [preemptible VM - // instances](https://cloud.google.com/compute/docs/instances/preemptible). - bool preemptible = 10 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The number of local SSD disks to attach to the node, which is limited by - // the maximum number of disks allowable per zone (see [Adding Local - // SSDs](https://cloud.google.com/compute/docs/disks/local-ssd)). - int32 local_ssd_count = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A list of [hardware - // accelerators](https://cloud.google.com/compute/docs/gpus) to attach to - // each node. - repeated GkeNodePoolAcceleratorConfig accelerators = 11 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. [Minimum CPU - // platform](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform) - // to be used by this instance. The instance may be scheduled on the - // specified or a newer CPU platform. Specify the friendly names of CPU - // platforms, such as "Intel Haswell"` or Intel Sandy Bridge". - string min_cpu_platform = 13 [(google.api.field_behavior) = OPTIONAL]; - } - - // A GkeNodeConfigAcceleratorConfig represents a Hardware Accelerator request - // for a NodePool. 
- message GkeNodePoolAcceleratorConfig { - // The number of accelerator cards exposed to an instance. - int64 accelerator_count = 1; - - // The accelerator type resource namename (see GPUs on Compute Engine). - string accelerator_type = 2; - } - - // GkeNodePoolAutoscaling contains information the cluster autoscaler needs to - // adjust the size of the node pool to the current cluster usage. - message GkeNodePoolAutoscalingConfig { - // The minimum number of nodes in the NodePool. Must be >= 0 and <= - // max_node_count. - int32 min_node_count = 2; - - // The maximum number of nodes in the NodePool. Must be >= min_node_count. - // **Note:** Quota must be sufficient to scale up the cluster. - int32 max_node_count = 3; - } - - // Optional. The node pool configuration. - GkeNodeConfig config = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The list of Compute Engine - // [zones](https://cloud.google.com/compute/docs/zones#available) where - // NodePool's nodes will be located. - // - // **Note:** Currently, only one zone may be specified. - // - // If a location is not specified during NodePool creation, Dataproc will - // choose a location. - repeated string locations = 13 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The autoscaler configuration for this NodePool. The autoscaler is enabled - // only when a valid configuration is present. - GkeNodePoolAutoscalingConfig autoscaling = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// Cluster components that can be activated. -enum Component { - // Unspecified component. Specifying this will cause Cluster creation to fail. - COMPONENT_UNSPECIFIED = 0; - - // The Anaconda python distribution. The Anaconda component is not supported - // in the Dataproc - // 2.0 - // image. The 2.0 image is pre-installed with Miniconda. - ANACONDA = 5; - - // Docker - DOCKER = 13; - - // The Druid query engine. (alpha) - DRUID = 9; - - // Flink - FLINK = 14; - - // HBase. 
(beta) - HBASE = 11; - - // The Hive Web HCatalog (the REST service for accessing HCatalog). - HIVE_WEBHCAT = 3; - - // The Jupyter Notebook. - JUPYTER = 1; - - // The Presto query engine. - PRESTO = 6; - - // The Ranger service. - RANGER = 12; - - // The Solr service. - SOLR = 10; - - // The Zeppelin notebook. - ZEPPELIN = 4; - - // The Zookeeper service. - ZOOKEEPER = 8; -} - -// Actions in response to failure of a resource associated with a cluster. -enum FailureAction { - // When FailureAction is unspecified, failure action defaults to NO_ACTION. - FAILURE_ACTION_UNSPECIFIED = 0; - - // Take no action on failure to create a cluster resource. NO_ACTION is the - // default. - NO_ACTION = 1; - - // Delete the failed cluster resource. - DELETE = 2; -} diff --git a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/workflow_templates.proto b/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/workflow_templates.proto deleted file mode 100644 index 416ba26d..00000000 --- a/owl-bot-staging/v1/protos/google/cloud/dataproc/v1/workflow_templates.proto +++ /dev/null @@ -1,807 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -syntax = "proto3"; - -package google.cloud.dataproc.v1; - -import "google/api/annotations.proto"; -import "google/api/client.proto"; -import "google/api/field_behavior.proto"; -import "google/api/resource.proto"; -import "google/cloud/dataproc/v1/clusters.proto"; -import "google/cloud/dataproc/v1/jobs.proto"; -import "google/longrunning/operations.proto"; -import "google/protobuf/duration.proto"; -import "google/protobuf/empty.proto"; -import "google/protobuf/timestamp.proto"; - -option go_package = "google.golang.org/genproto/googleapis/cloud/dataproc/v1;dataproc"; -option java_multiple_files = true; -option java_outer_classname = "WorkflowTemplatesProto"; -option java_package = "com.google.cloud.dataproc.v1"; - -// The API interface for managing Workflow Templates in the -// Dataproc API. -service WorkflowTemplateService { - option (google.api.default_host) = "dataproc.googleapis.com"; - option (google.api.oauth_scopes) = "https://www.googleapis.com/auth/cloud-platform"; - - // Creates new workflow template. - rpc CreateWorkflowTemplate(CreateWorkflowTemplateRequest) returns (WorkflowTemplate) { - option (google.api.http) = { - post: "/v1/{parent=projects/*/locations/*}/workflowTemplates" - body: "template" - additional_bindings { - post: "/v1/{parent=projects/*/regions/*}/workflowTemplates" - body: "template" - } - }; - option (google.api.method_signature) = "parent,template"; - } - - // Retrieves the latest workflow template. - // - // Can retrieve previously instantiated template by specifying optional - // version parameter. - rpc GetWorkflowTemplate(GetWorkflowTemplateRequest) returns (WorkflowTemplate) { - option (google.api.http) = { - get: "/v1/{name=projects/*/locations/*/workflowTemplates/*}" - additional_bindings { - get: "/v1/{name=projects/*/regions/*/workflowTemplates/*}" - } - }; - option (google.api.method_signature) = "name"; - } - - // Instantiates a template and begins execution. 
- // - // The returned Operation can be used to track execution of - // workflow by polling - // [operations.get][google.longrunning.Operations.GetOperation]. - // The Operation will complete when entire workflow is finished. - // - // The running workflow can be aborted via - // [operations.cancel][google.longrunning.Operations.CancelOperation]. - // This will cause any inflight jobs to be cancelled and workflow-owned - // clusters to be deleted. - // - // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). - // Also see [Using - // WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). - // - // On successful completion, - // [Operation.response][google.longrunning.Operation.response] will be - // [Empty][google.protobuf.Empty]. - rpc InstantiateWorkflowTemplate(InstantiateWorkflowTemplateRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1/{name=projects/*/locations/*/workflowTemplates/*}:instantiate" - body: "*" - additional_bindings { - post: "/v1/{name=projects/*/regions/*/workflowTemplates/*}:instantiate" - body: "*" - } - }; - option (google.api.method_signature) = "name"; - option (google.api.method_signature) = "name,parameters"; - option (google.longrunning.operation_info) = { - response_type: "google.protobuf.Empty" - metadata_type: "WorkflowMetadata" - }; - } - - // Instantiates a template and begins execution. - // - // This method is equivalent to executing the sequence - // [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], - // [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. 
- // - // The returned Operation can be used to track execution of - // workflow by polling - // [operations.get][google.longrunning.Operations.GetOperation]. - // The Operation will complete when entire workflow is finished. - // - // The running workflow can be aborted via - // [operations.cancel][google.longrunning.Operations.CancelOperation]. - // This will cause any inflight jobs to be cancelled and workflow-owned - // clusters to be deleted. - // - // The [Operation.metadata][google.longrunning.Operation.metadata] will be - // [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). - // Also see [Using - // WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). - // - // On successful completion, - // [Operation.response][google.longrunning.Operation.response] will be - // [Empty][google.protobuf.Empty]. - rpc InstantiateInlineWorkflowTemplate(InstantiateInlineWorkflowTemplateRequest) returns (google.longrunning.Operation) { - option (google.api.http) = { - post: "/v1/{parent=projects/*/locations/*}/workflowTemplates:instantiateInline" - body: "template" - additional_bindings { - post: "/v1/{parent=projects/*/regions/*}/workflowTemplates:instantiateInline" - body: "template" - } - }; - option (google.api.method_signature) = "parent,template"; - option (google.longrunning.operation_info) = { - response_type: "google.protobuf.Empty" - metadata_type: "WorkflowMetadata" - }; - } - - // Updates (replaces) workflow template. The updated template - // must contain version that matches the current server version. 
- rpc UpdateWorkflowTemplate(UpdateWorkflowTemplateRequest) returns (WorkflowTemplate) { - option (google.api.http) = { - put: "/v1/{template.name=projects/*/locations/*/workflowTemplates/*}" - body: "template" - additional_bindings { - put: "/v1/{template.name=projects/*/regions/*/workflowTemplates/*}" - body: "template" - } - }; - option (google.api.method_signature) = "template"; - } - - // Lists workflows that match the specified filter in the request. - rpc ListWorkflowTemplates(ListWorkflowTemplatesRequest) returns (ListWorkflowTemplatesResponse) { - option (google.api.http) = { - get: "/v1/{parent=projects/*/locations/*}/workflowTemplates" - additional_bindings { - get: "/v1/{parent=projects/*/regions/*}/workflowTemplates" - } - }; - option (google.api.method_signature) = "parent"; - } - - // Deletes a workflow template. It does not cancel in-progress workflows. - rpc DeleteWorkflowTemplate(DeleteWorkflowTemplateRequest) returns (google.protobuf.Empty) { - option (google.api.http) = { - delete: "/v1/{name=projects/*/locations/*/workflowTemplates/*}" - additional_bindings { - delete: "/v1/{name=projects/*/regions/*/workflowTemplates/*}" - } - }; - option (google.api.method_signature) = "name"; - } -} - -// A Dataproc workflow template resource. -message WorkflowTemplate { - option (google.api.resource) = { - type: "dataproc.googleapis.com/WorkflowTemplate" - pattern: "projects/{project}/regions/{region}/workflowTemplates/{workflow_template}" - pattern: "projects/{project}/locations/{location}/workflowTemplates/{workflow_template}" - history: ORIGINALLY_SINGLE_PATTERN - }; - - string id = 2 [(google.api.field_behavior) = REQUIRED]; - - // Output only. The resource name of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names. 
- // - // * For `projects.regions.workflowTemplates`, the resource name of the - // template has the following format: - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - // - // * For `projects.locations.workflowTemplates`, the resource name of the - // template has the following format: - // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - string name = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. Used to perform a consistent read-modify-write. - // - // This field should be left blank for a `CreateWorkflowTemplate` request. It - // is required for an `UpdateWorkflowTemplate` request, and must match the - // current server version. A typical update template flow would fetch the - // current template with a `GetWorkflowTemplate` request, which will return - // the current template with the `version` field filled in with the - // current server version. The user updates other fields in the template, - // then returns it as part of the `UpdateWorkflowTemplate` request. - int32 version = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Output only. The time template was created. - google.protobuf.Timestamp create_time = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The time template was last updated. - google.protobuf.Timestamp update_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Optional. The labels to associate with this template. These labels - // will be propagated to all jobs and clusters created by the workflow - // instance. - // - // Label **keys** must contain 1 to 63 characters, and must conform to - // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). - // - // Label **values** may be empty, but, if present, must contain 1 to 63 - // characters, and must conform to - // [RFC 1035](https://www.ietf.org/rfc/rfc1035.txt). - // - // No more than 32 labels can be associated with a template. 
- map labels = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Required. WorkflowTemplate scheduling information. - WorkflowTemplatePlacement placement = 7 [(google.api.field_behavior) = REQUIRED]; - - // Required. The Directed Acyclic Graph of Jobs to submit. - repeated OrderedJob jobs = 8 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Template parameters whose values are substituted into the - // template. Values for parameters must be provided when the template is - // instantiated. - repeated TemplateParameter parameters = 9 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Timeout duration for the DAG of jobs, expressed in seconds (see - // [JSON representation of - // duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). - // The timeout duration must be from 10 minutes ("600s") to 24 hours - // ("86400s"). The timer begins when the first job is submitted. If the - // workflow is running at the end of the timeout period, any remaining jobs - // are cancelled, the workflow is ended, and if the workflow was running on a - // [managed - // cluster](/dataproc/docs/concepts/workflows/using-workflows#configuring_or_selecting_a_cluster), - // the cluster is deleted. - google.protobuf.Duration dag_timeout = 10 [(google.api.field_behavior) = OPTIONAL]; -} - -// Specifies workflow execution target. -// -// Either `managed_cluster` or `cluster_selector` is required. -message WorkflowTemplatePlacement { - // Required. Specifies where workflow executes; either on a managed - // cluster or an existing cluster chosen by labels. - oneof placement { - // A cluster that is managed by the workflow. - ManagedCluster managed_cluster = 1; - - // Optional. A selector that chooses target cluster for jobs based - // on metadata. - // - // The selector is evaluated at the time each job is submitted. - ClusterSelector cluster_selector = 2; - } -} - -// Cluster that is managed by the workflow. -message ManagedCluster { - // Required. 
The cluster name prefix. A unique cluster name will be formed by - // appending a random suffix. - // - // The name must contain only lower-case letters (a-z), numbers (0-9), - // and hyphens (-). Must begin with a letter. Cannot begin or end with - // hyphen. Must consist of between 2 and 35 characters. - string cluster_name = 2 [(google.api.field_behavior) = REQUIRED]; - - // Required. The cluster configuration. - ClusterConfig config = 3 [(google.api.field_behavior) = REQUIRED]; - - // Optional. The labels to associate with this cluster. - // - // Label keys must be between 1 and 63 characters long, and must conform to - // the following PCRE regular expression: - // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} - // - // Label values must be between 1 and 63 characters long, and must conform to - // the following PCRE regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} - // - // No more than 32 labels can be associated with a given cluster. - map labels = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// A selector that chooses target cluster for jobs based on metadata. -message ClusterSelector { - // Optional. The zone where workflow process executes. This parameter does not - // affect the selection of the cluster. - // - // If unspecified, the zone of the first cluster matching the selector - // is used. - string zone = 1 [(google.api.field_behavior) = OPTIONAL]; - - // Required. The cluster labels. Cluster must have all labels - // to match. - map cluster_labels = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A job executed by the workflow. -message OrderedJob { - // Required. The step id. The id must be unique among all jobs - // within the template. - // - // The step id is used as prefix for job id, as job - // `goog-dataproc-workflow-step-id` label, and in - // [prerequisiteStepIds][google.cloud.dataproc.v1.OrderedJob.prerequisite_step_ids] field from other - // steps. 
- // - // The id must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). Cannot begin or end with underscore - // or hyphen. Must consist of between 3 and 50 characters. - string step_id = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. The job definition. - oneof job_type { - // Optional. Job is a Hadoop job. - HadoopJob hadoop_job = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a Spark job. - SparkJob spark_job = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a PySpark job. - PySparkJob pyspark_job = 4 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a Hive job. - HiveJob hive_job = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a Pig job. - PigJob pig_job = 6 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a SparkR job. - SparkRJob spark_r_job = 11 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a SparkSql job. - SparkSqlJob spark_sql_job = 7 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job is a Presto job. - PrestoJob presto_job = 12 [(google.api.field_behavior) = OPTIONAL]; - } - - // Optional. The labels to associate with this job. - // - // Label keys must be between 1 and 63 characters long, and must conform to - // the following regular expression: - // [\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62} - // - // Label values must be between 1 and 63 characters long, and must conform to - // the following regular expression: [\p{Ll}\p{Lo}\p{N}_-]{0,63} - // - // No more than 32 labels can be associated with a given job. - map labels = 8 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Job scheduling configuration. - JobScheduling scheduling = 9 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The optional list of prerequisite job step_ids. - // If not specified, the job will start at the beginning of workflow. 
- repeated string prerequisite_step_ids = 10 [(google.api.field_behavior) = OPTIONAL]; -} - -// A configurable parameter that replaces one or more fields in the template. -// Parameterizable fields: -// - Labels -// - File uris -// - Job properties -// - Job arguments -// - Script variables -// - Main class (in HadoopJob and SparkJob) -// - Zone (in ClusterSelector) -message TemplateParameter { - // Required. Parameter name. - // The parameter name is used as the key, and paired with the - // parameter value, which are passed to the template when the template - // is instantiated. - // The name must contain only capital letters (A-Z), numbers (0-9), and - // underscores (_), and must not start with a number. The maximum length is - // 40 characters. - string name = 1 [(google.api.field_behavior) = REQUIRED]; - - // Required. Paths to all fields that the parameter replaces. - // A field is allowed to appear in at most one parameter's list of field - // paths. - // - // A field path is similar in syntax to a [google.protobuf.FieldMask][google.protobuf.FieldMask]. - // For example, a field path that references the zone field of a workflow - // template's cluster selector would be specified as - // `placement.clusterSelector.zone`. 
- // - // Also, field paths can reference fields using the following syntax: - // - // * Values in maps can be referenced by key: - // * labels['key'] - // * placement.clusterSelector.clusterLabels['key'] - // * placement.managedCluster.labels['key'] - // * placement.clusterSelector.clusterLabels['key'] - // * jobs['step-id'].labels['key'] - // - // * Jobs in the jobs list can be referenced by step-id: - // * jobs['step-id'].hadoopJob.mainJarFileUri - // * jobs['step-id'].hiveJob.queryFileUri - // * jobs['step-id'].pySparkJob.mainPythonFileUri - // * jobs['step-id'].hadoopJob.jarFileUris[0] - // * jobs['step-id'].hadoopJob.archiveUris[0] - // * jobs['step-id'].hadoopJob.fileUris[0] - // * jobs['step-id'].pySparkJob.pythonFileUris[0] - // - // * Items in repeated fields can be referenced by a zero-based index: - // * jobs['step-id'].sparkJob.args[0] - // - // * Other examples: - // * jobs['step-id'].hadoopJob.properties['key'] - // * jobs['step-id'].hadoopJob.args[0] - // * jobs['step-id'].hiveJob.scriptVariables['key'] - // * jobs['step-id'].hadoopJob.mainJarFileUri - // * placement.clusterSelector.zone - // - // It may not be possible to parameterize maps and repeated fields in their - // entirety since only individual map values and individual items in repeated - // fields can be referenced. For example, the following field paths are - // invalid: - // - // - placement.clusterSelector.clusterLabels - // - jobs['step-id'].sparkJob.args - repeated string fields = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. Brief description of the parameter. - // Must not exceed 1024 characters. - string description = 3 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Validation rules to be applied to this parameter's value. - ParameterValidation validation = 4 [(google.api.field_behavior) = OPTIONAL]; -} - -// Configuration for parameter validation. -message ParameterValidation { - // Required. The type of validation to be performed. 
- oneof validation_type { - // Validation based on regular expressions. - RegexValidation regex = 1; - - // Validation based on a list of allowed values. - ValueValidation values = 2; - } -} - -// Validation based on regular expressions. -message RegexValidation { - // Required. RE2 regular expressions used to validate the parameter's value. - // The value must match the regex in its entirety (substring - // matches are not sufficient). - repeated string regexes = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// Validation based on a list of allowed values. -message ValueValidation { - // Required. List of allowed values for the parameter. - repeated string values = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// A Dataproc workflow template resource. -message WorkflowMetadata { - // The operation state. - enum State { - // Unused. - UNKNOWN = 0; - - // The operation has been created. - PENDING = 1; - - // The operation is running. - RUNNING = 2; - - // The operation is done; either cancelled or completed. - DONE = 3; - } - - // Output only. The resource name of the workflow template as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.workflowTemplates`, the resource name of the - // template has the following format: - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - // - // * For `projects.locations.workflowTemplates`, the resource name of the - // template has the following format: - // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - string template = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The version of template at the time of - // workflow instantiation. - int32 version = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The create cluster operation metadata. - ClusterOperation create_cluster = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The workflow graph. 
- WorkflowGraph graph = 4 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The delete cluster operation metadata. - ClusterOperation delete_cluster = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The workflow state. - State state = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The name of the target cluster. - string cluster_name = 7 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Map from parameter names to values that were used for those parameters. - map parameters = 8; - - // Output only. Workflow start time. - google.protobuf.Timestamp start_time = 9 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Workflow end time. - google.protobuf.Timestamp end_time = 10 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The UUID of target cluster. - string cluster_uuid = 11 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The timeout duration for the DAG of jobs, expressed in seconds (see - // [JSON representation of - // duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). - google.protobuf.Duration dag_timeout = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. DAG start time, only set for workflows with [dag_timeout][google.cloud.dataproc.v1.WorkflowMetadata.dag_timeout] when DAG - // begins. - google.protobuf.Timestamp dag_start_time = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. DAG end time, only set for workflows with [dag_timeout][google.cloud.dataproc.v1.WorkflowMetadata.dag_timeout] when DAG ends. - google.protobuf.Timestamp dag_end_time = 14 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// The cluster operation triggered by a workflow. -message ClusterOperation { - // Output only. The id of the cluster operation. - string operation_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Error, if operation failed. 
- string error = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Indicates the operation is done. - bool done = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// The workflow graph. -message WorkflowGraph { - // Output only. The workflow nodes. - repeated WorkflowNode nodes = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// The workflow node. -message WorkflowNode { - // The workflow node state. - enum NodeState { - // State is unspecified. - NODE_STATE_UNSPECIFIED = 0; - - // The node is awaiting prerequisite node to finish. - BLOCKED = 1; - - // The node is runnable but not running. - RUNNABLE = 2; - - // The node is running. - RUNNING = 3; - - // The node completed successfully. - COMPLETED = 4; - - // The node failed. A node can be marked FAILED because - // its ancestor or peer failed. - FAILED = 5; - } - - // Output only. The name of the node. - string step_id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. Node's prerequisite nodes. - repeated string prerequisite_step_ids = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The job id; populated after the node enters RUNNING state. - string job_id = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The node state. - NodeState state = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. The error detail. - string error = 6 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to create a workflow template. -message CreateWorkflowTemplateRequest { - // Required. The resource name of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. 
- // - // * For `projects.regions.workflowTemplates.create`, the resource name of the - // region has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.workflowTemplates.create`, the resource name of - // the location has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Required. The Dataproc workflow template to create. - WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to fetch a workflow template. -message GetWorkflowTemplateRequest { - // Required. The resource name of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names. - // - // * For `projects.regions.workflowTemplates.get`, the resource name of the - // template has the following format: - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - // - // * For `projects.locations.workflowTemplates.get`, the resource name of the - // template has the following format: - // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Optional. The version of workflow template to retrieve. Only previously - // instantiated versions can be retrieved. - // - // If unspecified, retrieves the current version. - int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to instantiate a workflow template. -message InstantiateWorkflowTemplateRequest { - // Required. The resource name of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names. 
- // - // * For `projects.regions.workflowTemplates.instantiate`, the resource name - // of the template has the following format: - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - // - // * For `projects.locations.workflowTemplates.instantiate`, the resource name - // of the template has the following format: - // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Optional. The version of workflow template to instantiate. If specified, - // the workflow will be instantiated only if the current version of - // the workflow template has the supplied version. - // - // This option cannot be used to instantiate a previous version of - // workflow template. - int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. A tag that prevents multiple concurrent workflow - // instances with the same tag from running. This mitigates risk of - // concurrent instances started due to retries. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The tag must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 5 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. Map from parameter names to values that should be used for those - // parameters. Values may not exceed 1000 characters. - map parameters = 6 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to instantiate an inline workflow template. -message InstantiateInlineWorkflowTemplateRequest { - // Required. The resource name of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. 
- // - // * For `projects.regions.workflowTemplates,instantiateinline`, the resource - // name of the region has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.workflowTemplates.instantiateinline`, the - // resource name of the location has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Required. The workflow template to instantiate. - WorkflowTemplate template = 2 [(google.api.field_behavior) = REQUIRED]; - - // Optional. A tag that prevents multiple concurrent workflow - // instances with the same tag from running. This mitigates risk of - // concurrent instances started due to retries. - // - // It is recommended to always set this value to a - // [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - // - // The tag must contain only letters (a-z, A-Z), numbers (0-9), - // underscores (_), and hyphens (-). The maximum length is 40 characters. - string request_id = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// A request to update a workflow template. -message UpdateWorkflowTemplateRequest { - // Required. The updated workflow template. - // - // The `template.version` field must match the current version. - WorkflowTemplate template = 1 [(google.api.field_behavior) = REQUIRED]; -} - -// A request to list workflow templates in a project. -message ListWorkflowTemplatesRequest { - // Required. The resource name of the region or location, as described - // in https://cloud.google.com/apis/design/resource_names. 
- // - // * For `projects.regions.workflowTemplates,list`, the resource - // name of the region has the following format: - // `projects/{project_id}/regions/{region}` - // - // * For `projects.locations.workflowTemplates.list`, the - // resource name of the location has the following format: - // `projects/{project_id}/locations/{location}` - string parent = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - child_type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Optional. The maximum number of results to return in each response. - int32 page_size = 2 [(google.api.field_behavior) = OPTIONAL]; - - // Optional. The page token, returned by a previous call, to request the - // next page of results. - string page_token = 3 [(google.api.field_behavior) = OPTIONAL]; -} - -// A response to a request to list workflow templates in a project. -message ListWorkflowTemplatesResponse { - // Output only. WorkflowTemplates list. - repeated WorkflowTemplate templates = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; - - // Output only. This token is included in the response if there are more - // results to fetch. To fetch additional results, provide this value as the - // page_token in a subsequent ListWorkflowTemplatesRequest. - string next_page_token = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; -} - -// A request to delete a workflow template. -// -// Currently started workflows will remain running. -message DeleteWorkflowTemplateRequest { - // Required. The resource name of the workflow template, as described - // in https://cloud.google.com/apis/design/resource_names. 
- // - // * For `projects.regions.workflowTemplates.delete`, the resource name - // of the template has the following format: - // `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - // - // * For `projects.locations.workflowTemplates.instantiate`, the resource name - // of the template has the following format: - // `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - string name = 1 [ - (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { - type: "dataproc.googleapis.com/WorkflowTemplate" - } - ]; - - // Optional. The version of workflow template to delete. If specified, - // will only delete the template if the current server version matches - // specified version. - int32 version = 2 [(google.api.field_behavior) = OPTIONAL]; -} diff --git a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.create_autoscaling_policy.js b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.create_autoscaling_policy.js deleted file mode 100644 index 0d0ad0fc..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.create_autoscaling_policy.js +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. 
** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent, policy) { - // [START dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The "resource name" of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. - * * For `projects.regions.autoscalingPolicies.create`, the resource name - * of the region has the following format: - * `projects/{project_id}/regions/{region}` - * * For `projects.locations.autoscalingPolicies.create`, the resource name - * of the location has the following format: - * `projects/{project_id}/locations/{location}` - */ - // const parent = 'abc123' - /** - * Required. The autoscaling policy to create. - */ - // const policy = {} - - // Imports the Dataproc library - const {AutoscalingPolicyServiceClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new AutoscalingPolicyServiceClient(); - - async function callCreateAutoscalingPolicy() { - // Construct request - const request = { - parent, - policy, - }; - - // Run request - const response = await dataprocClient.createAutoscalingPolicy(request); - console.log(response); - } - - callCreateAutoscalingPolicy(); - // [END dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.delete_autoscaling_policy.js b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.delete_autoscaling_policy.js deleted file mode 100644 index 86e57f8b..00000000 --- 
a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.delete_autoscaling_policy.js +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The "resource name" of the autoscaling policy, as described - * in https://cloud.google.com/apis/design/resource_names. 
- * * For `projects.regions.autoscalingPolicies.delete`, the resource name - * of the policy has the following format: - * `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - * * For `projects.locations.autoscalingPolicies.delete`, the resource name - * of the policy has the following format: - * `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` - */ - // const name = 'abc123' - - // Imports the Dataproc library - const {AutoscalingPolicyServiceClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new AutoscalingPolicyServiceClient(); - - async function callDeleteAutoscalingPolicy() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await dataprocClient.deleteAutoscalingPolicy(request); - console.log(response); - } - - callDeleteAutoscalingPolicy(); - // [END dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.get_autoscaling_policy.js b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.get_autoscaling_policy.js deleted file mode 100644 index 76a64c9e..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.get_autoscaling_policy.js +++ /dev/null @@ -1,65 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The "resource name" of the autoscaling policy, as described - * in https://cloud.google.com/apis/design/resource_names. - * * For `projects.regions.autoscalingPolicies.get`, the resource name - * of the policy has the following format: - * `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - * * For `projects.locations.autoscalingPolicies.get`, the resource name - * of the policy has the following format: - * `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` - */ - // const name = 'abc123' - - // Imports the Dataproc library - const {AutoscalingPolicyServiceClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new AutoscalingPolicyServiceClient(); - - async function callGetAutoscalingPolicy() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await dataprocClient.getAutoscalingPolicy(request); - console.log(response); - } - - callGetAutoscalingPolicy(); - // [END dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_async] -} - -process.on('unhandledRejection', 
err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.list_autoscaling_policies.js b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.list_autoscaling_policies.js deleted file mode 100644 index 991caee2..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.list_autoscaling_policies.js +++ /dev/null @@ -1,77 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The "resource name" of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. 
- * * For `projects.regions.autoscalingPolicies.list`, the resource name - * of the region has the following format: - * `projects/{project_id}/regions/{region}` - * * For `projects.locations.autoscalingPolicies.list`, the resource name - * of the location has the following format: - * `projects/{project_id}/locations/{location}` - */ - // const parent = 'abc123' - /** - * Optional. The maximum number of results to return in each response. - * Must be less than or equal to 1000. Defaults to 100. - */ - // const pageSize = 1234 - /** - * Optional. The page token, returned by a previous call, to request the - * next page of results. - */ - // const pageToken = 'abc123' - - // Imports the Dataproc library - const {AutoscalingPolicyServiceClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new AutoscalingPolicyServiceClient(); - - async function callListAutoscalingPolicies() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await dataprocClient.listAutoscalingPoliciesAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListAutoscalingPolicies(); - // [END dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.update_autoscaling_policy.js b/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.update_autoscaling_policy.js deleted file mode 100644 index 95b50b8c..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/autoscaling_policy_service.update_autoscaling_policy.js +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(policy) { - // [START dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The updated autoscaling policy. - */ - // const policy = {} - - // Imports the Dataproc library - const {AutoscalingPolicyServiceClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new AutoscalingPolicyServiceClient(); - - async function callUpdateAutoscalingPolicy() { - // Construct request - const request = { - policy, - }; - - // Run request - const response = await dataprocClient.updateAutoscalingPolicy(request); - console.log(response); - } - - callUpdateAutoscalingPolicy(); - // [END dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/batch_controller.create_batch.js b/owl-bot-staging/v1/samples/generated/v1/batch_controller.create_batch.js deleted file mode 100644 index 398e78f6..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/batch_controller.create_batch.js +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 Google LLC -// -// 
Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent, batch) { - // [START dataproc_v1_generated_BatchController_CreateBatch_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The parent resource where this batch will be created. - */ - // const parent = 'abc123' - /** - * Required. The batch to create. - */ - // const batch = {} - /** - * Optional. The ID to use for the batch, which will become the final component of - * the batch's resource name. - * This value must be 4-63 characters. Valid characters are `/[a-z][0-9]-/`. - */ - // const batchId = 'abc123' - /** - * Optional. A unique ID used to identify the request. If the service - * receives two - * CreateBatchRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateBatchRequest)s - * with the same request_id, the second request is ignored and the - * Operation that corresponds to the first Batch created and stored - * in the backend is returned. - * Recommendation: Set this value to a - * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). 
- * The value must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. - */ - // const requestId = 'abc123' - - // Imports the Dataproc library - const {BatchControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new BatchControllerClient(); - - async function callCreateBatch() { - // Construct request - const request = { - parent, - batch, - }; - - // Run request - const [operation] = await dataprocClient.createBatch(request); - const [response] = await operation.promise(); - console.log(response); - } - - callCreateBatch(); - // [END dataproc_v1_generated_BatchController_CreateBatch_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/batch_controller.delete_batch.js b/owl-bot-staging/v1/samples/generated/v1/batch_controller.delete_batch.js deleted file mode 100644 index 66c9596e..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/batch_controller.delete_batch.js +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(name) { - // [START dataproc_v1_generated_BatchController_DeleteBatch_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the batch resource to delete. - */ - // const name = 'abc123' - - // Imports the Dataproc library - const {BatchControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new BatchControllerClient(); - - async function callDeleteBatch() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await dataprocClient.deleteBatch(request); - console.log(response); - } - - callDeleteBatch(); - // [END dataproc_v1_generated_BatchController_DeleteBatch_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/batch_controller.get_batch.js b/owl-bot-staging/v1/samples/generated/v1/batch_controller.get_batch.js deleted file mode 100644 index 2cb9bb34..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/batch_controller.get_batch.js +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. 
** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START dataproc_v1_generated_BatchController_GetBatch_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The name of the batch to retrieve. - */ - // const name = 'abc123' - - // Imports the Dataproc library - const {BatchControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new BatchControllerClient(); - - async function callGetBatch() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await dataprocClient.getBatch(request); - console.log(response); - } - - callGetBatch(); - // [END dataproc_v1_generated_BatchController_GetBatch_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/batch_controller.list_batches.js b/owl-bot-staging/v1/samples/generated/v1/batch_controller.list_batches.js deleted file mode 100644 index 4170a038..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/batch_controller.list_batches.js +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent) { - // [START dataproc_v1_generated_BatchController_ListBatches_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The parent, which owns this collection of batches. - */ - // const parent = 'abc123' - /** - * Optional. The maximum number of batches to return in each response. - * The service may return fewer than this value. - * The default page size is 20; the maximum page size is 1000. - */ - // const pageSize = 1234 - /** - * Optional. A page token received from a previous `ListBatches` call. - * Provide this token to retrieve the subsequent page. - */ - // const pageToken = 'abc123' - - // Imports the Dataproc library - const {BatchControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new BatchControllerClient(); - - async function callListBatches() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await dataprocClient.listBatchesAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListBatches(); - // [END dataproc_v1_generated_BatchController_ListBatches_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.create_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.create_cluster.js deleted file mode 100644 index d0623a49..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.create_cluster.js +++ /dev/null @@ -1,86 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 
2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(projectId, region, cluster) { - // [START dataproc_v1_generated_ClusterController_CreateCluster_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project that the cluster - * belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Required. The cluster to create. - */ - // const cluster = {} - /** - * Optional. A unique ID used to identify the request. If the server receives two - * CreateClusterRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateClusterRequest)s - * with the same id, then the second request will be ignored and the - * first google.longrunning.Operation google.longrunning.Operation created and stored in the backend - * is returned. - * It is recommended to always set this value to a - * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). - * The ID must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. 
- */ - // const requestId = 'abc123' - /** - * Optional. Failure action when primary worker creation fails. - */ - // const actionOnFailedPrimaryWorkers = {} - - // Imports the Dataproc library - const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new ClusterControllerClient(); - - async function callCreateCluster() { - // Construct request - const request = { - projectId, - region, - cluster, - }; - - // Run request - const [operation] = await dataprocClient.createCluster(request); - const [response] = await operation.promise(); - console.log(response); - } - - callCreateCluster(); - // [END dataproc_v1_generated_ClusterController_CreateCluster_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.delete_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.delete_cluster.js deleted file mode 100644 index a832f7d9..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.delete_cluster.js +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. 
** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(projectId, region, clusterName) { - // [START dataproc_v1_generated_ClusterController_DeleteCluster_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project that the cluster - * belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Required. The cluster name. - */ - // const clusterName = 'abc123' - /** - * Optional. Specifying the `cluster_uuid` means the RPC should fail - * (with error NOT_FOUND) if cluster with specified UUID does not exist. - */ - // const clusterUuid = 'abc123' - /** - * Optional. A unique ID used to identify the request. If the server - * receives two - * DeleteClusterRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.DeleteClusterRequest)s - * with the same id, then the second request will be ignored and the - * first google.longrunning.Operation google.longrunning.Operation created and stored in the - * backend is returned. - * It is recommended to always set this value to a - * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). - * The ID must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. 
- */ - // const requestId = 'abc123' - - // Imports the Dataproc library - const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new ClusterControllerClient(); - - async function callDeleteCluster() { - // Construct request - const request = { - projectId, - region, - clusterName, - }; - - // Run request - const [operation] = await dataprocClient.deleteCluster(request); - const [response] = await operation.promise(); - console.log(response); - } - - callDeleteCluster(); - // [END dataproc_v1_generated_ClusterController_DeleteCluster_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.diagnose_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.diagnose_cluster.js deleted file mode 100644 index 11c39954..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.diagnose_cluster.js +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(projectId, region, clusterName) { - // [START dataproc_v1_generated_ClusterController_DiagnoseCluster_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project that the cluster - * belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Required. The cluster name. - */ - // const clusterName = 'abc123' - - // Imports the Dataproc library - const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new ClusterControllerClient(); - - async function callDiagnoseCluster() { - // Construct request - const request = { - projectId, - region, - clusterName, - }; - - // Run request - const [operation] = await dataprocClient.diagnoseCluster(request); - const [response] = await operation.promise(); - console.log(response); - } - - callDiagnoseCluster(); - // [END dataproc_v1_generated_ClusterController_DiagnoseCluster_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.get_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.get_cluster.js deleted file mode 100644 index b6886881..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.get_cluster.js +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(projectId, region, clusterName) { - // [START dataproc_v1_generated_ClusterController_GetCluster_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project that the cluster - * belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Required. The cluster name. 
- */ - // const clusterName = 'abc123' - - // Imports the Dataproc library - const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new ClusterControllerClient(); - - async function callGetCluster() { - // Construct request - const request = { - projectId, - region, - clusterName, - }; - - // Run request - const response = await dataprocClient.getCluster(request); - console.log(response); - } - - callGetCluster(); - // [END dataproc_v1_generated_ClusterController_GetCluster_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.list_clusters.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.list_clusters.js deleted file mode 100644 index 00e7a70a..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.list_clusters.js +++ /dev/null @@ -1,92 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(projectId, region) { - // [START dataproc_v1_generated_ClusterController_ListClusters_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project that the cluster - * belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Optional. A filter constraining the clusters to list. Filters are - * case-sensitive and have the following syntax: - * field = value AND field = value ... - * where **field** is one of `status.state`, `clusterName`, or `labels.KEY`, - * and `[KEY]` is a label key. **value** can be `*` to match all values. - * `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, - * `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` - * contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` - * contains the `DELETING` and `ERROR` states. - * `clusterName` is the name of the cluster provided at creation time. - * Only the logical `AND` operator is supported; space-separated items are - * treated as having an implicit `AND` operator. - * Example filter: - * status.state = ACTIVE AND clusterName = mycluster - * AND labels.env = staging AND labels.starred = * - */ - // const filter = 'abc123' - /** - * Optional. The standard List page size. - */ - // const pageSize = 1234 - /** - * Optional. The standard List page token. 
- */ - // const pageToken = 'abc123' - - // Imports the Dataproc library - const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new ClusterControllerClient(); - - async function callListClusters() { - // Construct request - const request = { - projectId, - region, - }; - - // Run request - const iterable = await dataprocClient.listClustersAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListClusters(); - // [END dataproc_v1_generated_ClusterController_ListClusters_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.start_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.start_cluster.js deleted file mode 100644 index 9da8f138..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.start_cluster.js +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(projectId, region, clusterName) { - // [START dataproc_v1_generated_ClusterController_StartCluster_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project the - * cluster belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Required. The cluster name. - */ - // const clusterName = 'abc123' - /** - * Optional. Specifying the `cluster_uuid` means the RPC will fail - * (with error NOT_FOUND) if a cluster with the specified UUID does not exist. - */ - // const clusterUuid = 'abc123' - /** - * Optional. A unique ID used to identify the request. If the server - * receives two - * StartClusterRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StartClusterRequest)s - * with the same id, then the second request will be ignored and the - * first google.longrunning.Operation google.longrunning.Operation created and stored in the - * backend is returned. - * Recommendation: Set this value to a - * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). - * The ID must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. 
- */ - // const requestId = 'abc123' - - // Imports the Dataproc library - const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new ClusterControllerClient(); - - async function callStartCluster() { - // Construct request - const request = { - projectId, - region, - clusterName, - }; - - // Run request - const [operation] = await dataprocClient.startCluster(request); - const [response] = await operation.promise(); - console.log(response); - } - - callStartCluster(); - // [END dataproc_v1_generated_ClusterController_StartCluster_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.stop_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.stop_cluster.js deleted file mode 100644 index 5fe7e713..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.stop_cluster.js +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(projectId, region, clusterName) { - // [START dataproc_v1_generated_ClusterController_StopCluster_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project the - * cluster belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Required. The cluster name. - */ - // const clusterName = 'abc123' - /** - * Optional. Specifying the `cluster_uuid` means the RPC will fail - * (with error NOT_FOUND) if a cluster with the specified UUID does not exist. - */ - // const clusterUuid = 'abc123' - /** - * Optional. A unique ID used to identify the request. If the server - * receives two - * StopClusterRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StopClusterRequest)s - * with the same id, then the second request will be ignored and the - * first google.longrunning.Operation google.longrunning.Operation created and stored in the - * backend is returned. - * Recommendation: Set this value to a - * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). - * The ID must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. 
- */ - // const requestId = 'abc123' - - // Imports the Dataproc library - const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new ClusterControllerClient(); - - async function callStopCluster() { - // Construct request - const request = { - projectId, - region, - clusterName, - }; - - // Run request - const [operation] = await dataprocClient.stopCluster(request); - const [response] = await operation.promise(); - console.log(response); - } - - callStopCluster(); - // [END dataproc_v1_generated_ClusterController_StopCluster_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.update_cluster.js b/owl-bot-staging/v1/samples/generated/v1/cluster_controller.update_cluster.js deleted file mode 100644 index 89efb902..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/cluster_controller.update_cluster.js +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(projectId, region, clusterName, cluster, updateMask) { - // [START dataproc_v1_generated_ClusterController_UpdateCluster_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project the - * cluster belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Required. The cluster name. - */ - // const clusterName = 'abc123' - /** - * Required. The changes to the cluster. - */ - // const cluster = {} - /** - * Optional. Timeout for graceful YARN decomissioning. Graceful - * decommissioning allows removing nodes from the cluster without - * interrupting jobs in progress. Timeout specifies how long to wait for jobs - * in progress to finish before forcefully removing nodes (and potentially - * interrupting jobs). Default timeout is 0 (for forceful decommission), and - * the maximum allowed timeout is 1 day. (see JSON representation of - * Duration (https://developers.google.com/protocol-buffers/docs/proto3#json)). - * Only supported on Dataproc image versions 1.2 and higher. - */ - // const gracefulDecommissionTimeout = {} - /** - * Required. Specifies the path, relative to `Cluster`, of - * the field to update. 
For example, to change the number of workers - * in a cluster to 5, the `update_mask` parameter would be - * specified as `config.worker_config.num_instances`, - * and the `PATCH` request body would specify the new value, as follows: - * { - * "config":{ - * "workerConfig":{ - * "numInstances":"5" - * } - * } - * } - * Similarly, to change the number of preemptible workers in a cluster to 5, - * the `update_mask` parameter would be - * `config.secondary_worker_config.num_instances`, and the `PATCH` request - * body would be set as follows: - * { - * "config":{ - * "secondaryWorkerConfig":{ - * "numInstances":"5" - * } - * } - * } - * Note: Currently, only the following fields can be updated: - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - *
MaskPurpose
labelsUpdate labels
config.worker_config.num_instancesResize primary worker group
config.secondary_worker_config.num_instancesResize secondary worker group
config.autoscaling_config.policy_uriUse, stop using, or - * change autoscaling policies
- */ - // const updateMask = {} - /** - * Optional. A unique ID used to identify the request. If the server - * receives two - * UpdateClusterRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.UpdateClusterRequest)s - * with the same id, then the second request will be ignored and the - * first google.longrunning.Operation google.longrunning.Operation created and stored in the - * backend is returned. - * It is recommended to always set this value to a - * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). - * The ID must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. - */ - // const requestId = 'abc123' - - // Imports the Dataproc library - const {ClusterControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new ClusterControllerClient(); - - async function callUpdateCluster() { - // Construct request - const request = { - projectId, - region, - clusterName, - cluster, - updateMask, - }; - - // Run request - const [operation] = await dataprocClient.updateCluster(request); - const [response] = await operation.promise(); - console.log(response); - } - - callUpdateCluster(); - // [END dataproc_v1_generated_ClusterController_UpdateCluster_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.cancel_job.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.cancel_job.js deleted file mode 100644 index a51f789c..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/job_controller.cancel_job.js +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(projectId, region, jobId) { - // [START dataproc_v1_generated_JobController_CancelJob_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Required. The job ID. 
- */ - // const jobId = 'abc123' - - // Imports the Dataproc library - const {JobControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new JobControllerClient(); - - async function callCancelJob() { - // Construct request - const request = { - projectId, - region, - jobId, - }; - - // Run request - const response = await dataprocClient.cancelJob(request); - console.log(response); - } - - callCancelJob(); - // [END dataproc_v1_generated_JobController_CancelJob_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.delete_job.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.delete_job.js deleted file mode 100644 index 4f794f47..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/job_controller.delete_job.js +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(projectId, region, jobId) { - // [START dataproc_v1_generated_JobController_DeleteJob_async] - /** - * TODO(developer): Uncomment these variables before running the sample. 
- */ - /** - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Required. The job ID. - */ - // const jobId = 'abc123' - - // Imports the Dataproc library - const {JobControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new JobControllerClient(); - - async function callDeleteJob() { - // Construct request - const request = { - projectId, - region, - jobId, - }; - - // Run request - const response = await dataprocClient.deleteJob(request); - console.log(response); - } - - callDeleteJob(); - // [END dataproc_v1_generated_JobController_DeleteJob_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.get_job.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.get_job.js deleted file mode 100644 index 20135e2a..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/job_controller.get_job.js +++ /dev/null @@ -1,69 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. 
** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(projectId, region, jobId) { - // [START dataproc_v1_generated_JobController_GetJob_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Required. The job ID. - */ - // const jobId = 'abc123' - - // Imports the Dataproc library - const {JobControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new JobControllerClient(); - - async function callGetJob() { - // Construct request - const request = { - projectId, - region, - jobId, - }; - - // Run request - const response = await dataprocClient.getJob(request); - console.log(response); - } - - callGetJob(); - // [END dataproc_v1_generated_JobController_GetJob_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.list_jobs.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.list_jobs.js deleted file mode 100644 index fd0ddc7b..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/job_controller.list_jobs.js +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(projectId, region) { - // [START dataproc_v1_generated_JobController_ListJobs_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Optional. The number of results to return in each response. - */ - // const pageSize = 1234 - /** - * Optional. The page token, returned by a previous call, to request the - * next page of results. - */ - // const pageToken = 'abc123' - /** - * Optional. If set, the returned jobs list includes only jobs that were - * submitted to the named cluster. - */ - // const clusterName = 'abc123' - /** - * Optional. Specifies enumerated categories of jobs to list. - * (default = match ALL jobs). - * If `filter` is provided, `jobStateMatcher` will be ignored. - */ - // const jobStateMatcher = {} - /** - * Optional. A filter constraining the jobs to list. Filters are - * case-sensitive and have the following syntax: - * field = value AND field = value ... - * where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label - * key. **value** can be `*` to match all values. 
- * `status.state` can be either `ACTIVE` or `NON_ACTIVE`. - * Only the logical `AND` operator is supported; space-separated items are - * treated as having an implicit `AND` operator. - * Example filter: - * status.state = ACTIVE AND labels.env = staging AND labels.starred = * - */ - // const filter = 'abc123' - - // Imports the Dataproc library - const {JobControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new JobControllerClient(); - - async function callListJobs() { - // Construct request - const request = { - projectId, - region, - }; - - // Run request - const iterable = await dataprocClient.listJobsAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListJobs(); - // [END dataproc_v1_generated_JobController_ListJobs_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job.js deleted file mode 100644 index bc03f366..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job.js +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. 
** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(projectId, region, job) { - // [START dataproc_v1_generated_JobController_SubmitJob_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Required. The job resource. - */ - // const job = {} - /** - * Optional. A unique id used to identify the request. If the server - * receives two - * SubmitJobRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.SubmitJobRequest)s - * with the same id, then the second request will be ignored and the - * first Job google.cloud.dataproc.v1.Job created and stored in the backend - * is returned. - * It is recommended to always set this value to a - * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). - * The id must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. 
- */ - // const requestId = 'abc123' - - // Imports the Dataproc library - const {JobControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new JobControllerClient(); - - async function callSubmitJob() { - // Construct request - const request = { - projectId, - region, - job, - }; - - // Run request - const response = await dataprocClient.submitJob(request); - console.log(response); - } - - callSubmitJob(); - // [END dataproc_v1_generated_JobController_SubmitJob_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job_as_operation.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job_as_operation.js deleted file mode 100644 index 8f613395..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/job_controller.submit_job_as_operation.js +++ /dev/null @@ -1,83 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(projectId, region, job) { - // [START dataproc_v1_generated_JobController_SubmitJobAsOperation_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Required. The job resource. - */ - // const job = {} - /** - * Optional. A unique id used to identify the request. If the server - * receives two - * SubmitJobRequest (https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.SubmitJobRequest)s - * with the same id, then the second request will be ignored and the - * first Job google.cloud.dataproc.v1.Job created and stored in the backend - * is returned. - * It is recommended to always set this value to a - * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). - * The id must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. 
- */ - // const requestId = 'abc123' - - // Imports the Dataproc library - const {JobControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new JobControllerClient(); - - async function callSubmitJobAsOperation() { - // Construct request - const request = { - projectId, - region, - job, - }; - - // Run request - const [operation] = await dataprocClient.submitJobAsOperation(request); - const [response] = await operation.promise(); - console.log(response); - } - - callSubmitJobAsOperation(); - // [END dataproc_v1_generated_JobController_SubmitJobAsOperation_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/job_controller.update_job.js b/owl-bot-staging/v1/samples/generated/v1/job_controller.update_job.js deleted file mode 100644 index 0ba0c750..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/job_controller.update_job.js +++ /dev/null @@ -1,84 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(projectId, region, jobId, job, updateMask) { - // [START dataproc_v1_generated_JobController_UpdateJob_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - */ - // const projectId = 'abc123' - /** - * Required. The Dataproc region in which to handle the request. - */ - // const region = 'us-central1' - /** - * Required. The job ID. - */ - // const jobId = 'abc123' - /** - * Required. The changes to the job. - */ - // const job = {} - /** - * Required. Specifies the path, relative to Job, of - * the field to update. For example, to update the labels of a Job the - * update_mask parameter would be specified as - * labels, and the `PATCH` request body would specify the new - * value. Note: Currently, labels is the only - * field that can be updated. - */ - // const updateMask = {} - - // Imports the Dataproc library - const {JobControllerClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new JobControllerClient(); - - async function callUpdateJob() { - // Construct request - const request = { - projectId, - region, - jobId, - job, - updateMask, - }; - - // Run request - const response = await dataprocClient.updateJob(request); - console.log(response); - } - - callUpdateJob(); - // [END dataproc_v1_generated_JobController_UpdateJob_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/snippet_metadata.google.cloud.dataproc.v1.json b/owl-bot-staging/v1/samples/generated/v1/snippet_metadata.google.cloud.dataproc.v1.json deleted file mode 100644 index f5e99f6f..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/snippet_metadata.google.cloud.dataproc.v1.json +++ /dev/null @@ -1,1535 +0,0 @@ -{ - 
"clientLibrary": { - "name": "nodejs-dataproc", - "version": "0.1.0", - "language": "TYPESCRIPT", - "apis": [ - { - "id": "google.cloud.dataproc.v1", - "version": "v1" - } - ] - }, - "snippets": [ - { - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_async", - "title": "dataproc createAutoscalingPolicy Sample", - "origin": "API_DEFINITION", - "description": " Creates new autoscaling policy.", - "canonical": true, - "file": "autoscaling_policy_service.create_autoscaling_policy.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 62, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateAutoscalingPolicy", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.CreateAutoscalingPolicy", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "policy", - "type": ".google.cloud.dataproc.v1.AutoscalingPolicy" - } - ], - "resultType": ".google.cloud.dataproc.v1.AutoscalingPolicy", - "client": { - "shortName": "AutoscalingPolicyServiceClient", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyServiceClient" - }, - "method": { - "shortName": "CreateAutoscalingPolicy", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.CreateAutoscalingPolicy", - "service": { - "shortName": "AutoscalingPolicyService", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_async", - "title": "dataproc updateAutoscalingPolicy Sample", - "origin": "API_DEFINITION", - "description": " Updates (replaces) autoscaling policy. 
Disabled check for update_mask, because all updates will be full replacements.", - "canonical": true, - "file": "autoscaling_policy_service.update_autoscaling_policy.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 50, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "UpdateAutoscalingPolicy", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.UpdateAutoscalingPolicy", - "async": true, - "parameters": [ - { - "name": "policy", - "type": ".google.cloud.dataproc.v1.AutoscalingPolicy" - } - ], - "resultType": ".google.cloud.dataproc.v1.AutoscalingPolicy", - "client": { - "shortName": "AutoscalingPolicyServiceClient", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyServiceClient" - }, - "method": { - "shortName": "UpdateAutoscalingPolicy", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.UpdateAutoscalingPolicy", - "service": { - "shortName": "AutoscalingPolicyService", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_async", - "title": "dataproc getAutoscalingPolicy Sample", - "origin": "API_DEFINITION", - "description": " Retrieves autoscaling policy.", - "canonical": true, - "file": "autoscaling_policy_service.get_autoscaling_policy.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 57, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetAutoscalingPolicy", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.GetAutoscalingPolicy", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.dataproc.v1.AutoscalingPolicy", - "client": { - "shortName": "AutoscalingPolicyServiceClient", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyServiceClient" - }, - "method": { - "shortName": "GetAutoscalingPolicy", - "fullName": 
"google.cloud.dataproc.v1.AutoscalingPolicyService.GetAutoscalingPolicy", - "service": { - "shortName": "AutoscalingPolicyService", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_async", - "title": "dataproc listAutoscalingPolicies Sample", - "origin": "API_DEFINITION", - "description": " Lists autoscaling policies in the project.", - "canonical": true, - "file": "autoscaling_policy_service.list_autoscaling_policies.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 69, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListAutoscalingPolicies", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.ListAutoscalingPolicies", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.dataproc.v1.ListAutoscalingPoliciesResponse", - "client": { - "shortName": "AutoscalingPolicyServiceClient", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyServiceClient" - }, - "method": { - "shortName": "ListAutoscalingPolicies", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.ListAutoscalingPolicies", - "service": { - "shortName": "AutoscalingPolicyService", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_async", - "title": "dataproc deleteAutoscalingPolicy Sample", - "origin": "API_DEFINITION", - "description": " Deletes an autoscaling policy. 
It is an error to delete an autoscaling policy that is in use by one or more clusters.", - "canonical": true, - "file": "autoscaling_policy_service.delete_autoscaling_policy.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 57, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteAutoscalingPolicy", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.DeleteAutoscalingPolicy", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "AutoscalingPolicyServiceClient", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyServiceClient" - }, - "method": { - "shortName": "DeleteAutoscalingPolicy", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService.DeleteAutoscalingPolicy", - "service": { - "shortName": "AutoscalingPolicyService", - "fullName": "google.cloud.dataproc.v1.AutoscalingPolicyService" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_BatchController_CreateBatch_async", - "title": "dataproc createBatch Sample", - "origin": "API_DEFINITION", - "description": " Creates a batch workload that executes asynchronously.", - "canonical": true, - "file": "batch_controller.create_batch.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 75, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateBatch", - "fullName": "google.cloud.dataproc.v1.BatchController.CreateBatch", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "batch", - "type": ".google.cloud.dataproc.v1.Batch" - }, - { - "name": "batch_id", - "type": "TYPE_STRING" - }, - { - "name": "request_id", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.longrunning.Operation", - "client": { - "shortName": "BatchControllerClient", - "fullName": "google.cloud.dataproc.v1.BatchControllerClient" - }, - "method": { - "shortName": "CreateBatch", 
- "fullName": "google.cloud.dataproc.v1.BatchController.CreateBatch", - "service": { - "shortName": "BatchController", - "fullName": "google.cloud.dataproc.v1.BatchController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_BatchController_GetBatch_async", - "title": "dataproc getBatch Sample", - "origin": "API_DEFINITION", - "description": " Gets the batch workload resource representation.", - "canonical": true, - "file": "batch_controller.get_batch.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 50, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetBatch", - "fullName": "google.cloud.dataproc.v1.BatchController.GetBatch", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.dataproc.v1.Batch", - "client": { - "shortName": "BatchControllerClient", - "fullName": "google.cloud.dataproc.v1.BatchControllerClient" - }, - "method": { - "shortName": "GetBatch", - "fullName": "google.cloud.dataproc.v1.BatchController.GetBatch", - "service": { - "shortName": "BatchController", - "fullName": "google.cloud.dataproc.v1.BatchController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_BatchController_ListBatches_async", - "title": "dataproc listBatches Sample", - "origin": "API_DEFINITION", - "description": " Lists batch workloads.", - "canonical": true, - "file": "batch_controller.list_batches.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 63, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListBatches", - "fullName": "google.cloud.dataproc.v1.BatchController.ListBatches", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.dataproc.v1.ListBatchesResponse", - "client": { - "shortName": "BatchControllerClient", - 
"fullName": "google.cloud.dataproc.v1.BatchControllerClient" - }, - "method": { - "shortName": "ListBatches", - "fullName": "google.cloud.dataproc.v1.BatchController.ListBatches", - "service": { - "shortName": "BatchController", - "fullName": "google.cloud.dataproc.v1.BatchController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_BatchController_DeleteBatch_async", - "title": "dataproc deleteBatch Sample", - "origin": "API_DEFINITION", - "description": " Deletes the batch workload resource. If the batch is not in terminal state, the delete fails and the response returns `FAILED_PRECONDITION`.", - "canonical": true, - "file": "batch_controller.delete_batch.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 50, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteBatch", - "fullName": "google.cloud.dataproc.v1.BatchController.DeleteBatch", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "BatchControllerClient", - "fullName": "google.cloud.dataproc.v1.BatchControllerClient" - }, - "method": { - "shortName": "DeleteBatch", - "fullName": "google.cloud.dataproc.v1.BatchController.DeleteBatch", - "service": { - "shortName": "BatchController", - "fullName": "google.cloud.dataproc.v1.BatchController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_ClusterController_CreateCluster_async", - "title": "dataproc createCluster Sample", - "origin": "API_DEFINITION", - "description": " Creates a cluster in a project. 
The returned [Operation.metadata][google.longrunning.Operation.metadata] will be [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).", - "canonical": true, - "file": "cluster_controller.create_cluster.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 78, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.CreateCluster", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": "cluster", - "type": ".google.cloud.dataproc.v1.Cluster" - }, - { - "name": "request_id", - "type": "TYPE_STRING" - }, - { - "name": "action_on_failed_primary_workers", - "type": ".google.cloud.dataproc.v1.FailureAction" - } - ], - "resultType": ".google.longrunning.Operation", - "client": { - "shortName": "ClusterControllerClient", - "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" - }, - "method": { - "shortName": "CreateCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.CreateCluster", - "service": { - "shortName": "ClusterController", - "fullName": "google.cloud.dataproc.v1.ClusterController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_ClusterController_UpdateCluster_async", - "title": "dataproc updateCluster Sample", - "origin": "API_DEFINITION", - "description": " Updates a cluster in a project. The returned [Operation.metadata][google.longrunning.Operation.metadata] will be [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). 
The cluster must be in a [`RUNNING`][google.cloud.dataproc.v1.ClusterStatus.State] state or an error is returned.", - "canonical": true, - "file": "cluster_controller.update_cluster.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 143, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "UpdateCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.UpdateCluster", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": "cluster_name", - "type": "TYPE_STRING" - }, - { - "name": "cluster", - "type": ".google.cloud.dataproc.v1.Cluster" - }, - { - "name": "graceful_decommission_timeout", - "type": ".google.protobuf.Duration" - }, - { - "name": "update_mask", - "type": ".google.protobuf.FieldMask" - }, - { - "name": "request_id", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.longrunning.Operation", - "client": { - "shortName": "ClusterControllerClient", - "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" - }, - "method": { - "shortName": "UpdateCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.UpdateCluster", - "service": { - "shortName": "ClusterController", - "fullName": "google.cloud.dataproc.v1.ClusterController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_ClusterController_StopCluster_async", - "title": "dataproc stopCluster Sample", - "origin": "API_DEFINITION", - "description": " Stops a cluster in a project.", - "canonical": true, - "file": "cluster_controller.stop_cluster.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 80, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StopCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.StopCluster", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": 
"cluster_name", - "type": "TYPE_STRING" - }, - { - "name": "cluster_uuid", - "type": "TYPE_STRING" - }, - { - "name": "request_id", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.longrunning.Operation", - "client": { - "shortName": "ClusterControllerClient", - "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" - }, - "method": { - "shortName": "StopCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.StopCluster", - "service": { - "shortName": "ClusterController", - "fullName": "google.cloud.dataproc.v1.ClusterController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_ClusterController_StartCluster_async", - "title": "dataproc startCluster Sample", - "origin": "API_DEFINITION", - "description": " Starts a cluster in a project.", - "canonical": true, - "file": "cluster_controller.start_cluster.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 80, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "StartCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.StartCluster", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": "cluster_name", - "type": "TYPE_STRING" - }, - { - "name": "cluster_uuid", - "type": "TYPE_STRING" - }, - { - "name": "request_id", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.longrunning.Operation", - "client": { - "shortName": "ClusterControllerClient", - "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" - }, - "method": { - "shortName": "StartCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.StartCluster", - "service": { - "shortName": "ClusterController", - "fullName": "google.cloud.dataproc.v1.ClusterController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_ClusterController_DeleteCluster_async", - "title": "dataproc deleteCluster Sample", - "origin": "API_DEFINITION", - 
"description": " Deletes a cluster in a project. The returned [Operation.metadata][google.longrunning.Operation.metadata] will be [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata).", - "canonical": true, - "file": "cluster_controller.delete_cluster.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 80, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.DeleteCluster", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": "cluster_name", - "type": "TYPE_STRING" - }, - { - "name": "cluster_uuid", - "type": "TYPE_STRING" - }, - { - "name": "request_id", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.longrunning.Operation", - "client": { - "shortName": "ClusterControllerClient", - "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" - }, - "method": { - "shortName": "DeleteCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.DeleteCluster", - "service": { - "shortName": "ClusterController", - "fullName": "google.cloud.dataproc.v1.ClusterController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_ClusterController_GetCluster_async", - "title": "dataproc getCluster Sample", - "origin": "API_DEFINITION", - "description": " Gets the resource representation for a cluster in a project.", - "canonical": true, - "file": "cluster_controller.get_cluster.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 61, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.GetCluster", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": 
"cluster_name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.dataproc.v1.Cluster", - "client": { - "shortName": "ClusterControllerClient", - "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" - }, - "method": { - "shortName": "GetCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.GetCluster", - "service": { - "shortName": "ClusterController", - "fullName": "google.cloud.dataproc.v1.ClusterController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_ClusterController_ListClusters_async", - "title": "dataproc listClusters Sample", - "origin": "API_DEFINITION", - "description": " Lists all regions/{region}/clusters in a project alphabetically.", - "canonical": true, - "file": "cluster_controller.list_clusters.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 84, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListClusters", - "fullName": "google.cloud.dataproc.v1.ClusterController.ListClusters", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": "filter", - "type": "TYPE_STRING" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.dataproc.v1.ListClustersResponse", - "client": { - "shortName": "ClusterControllerClient", - "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" - }, - "method": { - "shortName": "ListClusters", - "fullName": "google.cloud.dataproc.v1.ClusterController.ListClusters", - "service": { - "shortName": "ClusterController", - "fullName": "google.cloud.dataproc.v1.ClusterController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_ClusterController_DiagnoseCluster_async", - "title": "dataproc diagnoseCluster Sample", - "origin": "API_DEFINITION", - "description": " Gets cluster diagnostic information. 
The returned [Operation.metadata][google.longrunning.Operation.metadata] will be [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). After the operation completes, [Operation.response][google.longrunning.Operation.response] contains [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults).", - "canonical": true, - "file": "cluster_controller.diagnose_cluster.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 62, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DiagnoseCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.DiagnoseCluster", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": "cluster_name", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.longrunning.Operation", - "client": { - "shortName": "ClusterControllerClient", - "fullName": "google.cloud.dataproc.v1.ClusterControllerClient" - }, - "method": { - "shortName": "DiagnoseCluster", - "fullName": "google.cloud.dataproc.v1.ClusterController.DiagnoseCluster", - "service": { - "shortName": "ClusterController", - "fullName": "google.cloud.dataproc.v1.ClusterController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_JobController_SubmitJob_async", - "title": "dataproc submitJob Sample", - "origin": "API_DEFINITION", - "description": " Submits a job to a cluster.", - "canonical": true, - "file": "job_controller.submit_job.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 74, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "SubmitJob", - "fullName": "google.cloud.dataproc.v1.JobController.SubmitJob", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, 
- { - "name": "job", - "type": ".google.cloud.dataproc.v1.Job" - }, - { - "name": "request_id", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.dataproc.v1.Job", - "client": { - "shortName": "JobControllerClient", - "fullName": "google.cloud.dataproc.v1.JobControllerClient" - }, - "method": { - "shortName": "SubmitJob", - "fullName": "google.cloud.dataproc.v1.JobController.SubmitJob", - "service": { - "shortName": "JobController", - "fullName": "google.cloud.dataproc.v1.JobController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_JobController_SubmitJobAsOperation_async", - "title": "dataproc submitJobAsOperation Sample", - "origin": "API_DEFINITION", - "description": " Submits job to a cluster.", - "canonical": true, - "file": "job_controller.submit_job_as_operation.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 75, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "SubmitJobAsOperation", - "fullName": "google.cloud.dataproc.v1.JobController.SubmitJobAsOperation", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": "job", - "type": ".google.cloud.dataproc.v1.Job" - }, - { - "name": "request_id", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.longrunning.Operation", - "client": { - "shortName": "JobControllerClient", - "fullName": "google.cloud.dataproc.v1.JobControllerClient" - }, - "method": { - "shortName": "SubmitJobAsOperation", - "fullName": "google.cloud.dataproc.v1.JobController.SubmitJobAsOperation", - "service": { - "shortName": "JobController", - "fullName": "google.cloud.dataproc.v1.JobController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_JobController_GetJob_async", - "title": "dataproc getJob Sample", - "origin": "API_DEFINITION", - "description": " Gets the resource representation for a job in a project.", - "canonical": true, - "file": 
"job_controller.get_job.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 61, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetJob", - "fullName": "google.cloud.dataproc.v1.JobController.GetJob", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": "job_id", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.dataproc.v1.Job", - "client": { - "shortName": "JobControllerClient", - "fullName": "google.cloud.dataproc.v1.JobControllerClient" - }, - "method": { - "shortName": "GetJob", - "fullName": "google.cloud.dataproc.v1.JobController.GetJob", - "service": { - "shortName": "JobController", - "fullName": "google.cloud.dataproc.v1.JobController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_JobController_ListJobs_async", - "title": "dataproc listJobs Sample", - "origin": "API_DEFINITION", - "description": " Lists regions/{region}/jobs in a project.", - "canonical": true, - "file": "job_controller.list_jobs.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 91, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListJobs", - "fullName": "google.cloud.dataproc.v1.JobController.ListJobs", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - }, - { - "name": "cluster_name", - "type": "TYPE_STRING" - }, - { - "name": "job_state_matcher", - "type": ".google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcher" - }, - { - "name": "filter", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.dataproc.v1.ListJobsResponse", - "client": { - "shortName": "JobControllerClient", - "fullName": "google.cloud.dataproc.v1.JobControllerClient" - }, - "method": { - 
"shortName": "ListJobs", - "fullName": "google.cloud.dataproc.v1.JobController.ListJobs", - "service": { - "shortName": "JobController", - "fullName": "google.cloud.dataproc.v1.JobController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_JobController_UpdateJob_async", - "title": "dataproc updateJob Sample", - "origin": "API_DEFINITION", - "description": " Updates a job in a project.", - "canonical": true, - "file": "job_controller.update_job.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 76, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "UpdateJob", - "fullName": "google.cloud.dataproc.v1.JobController.UpdateJob", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": "job_id", - "type": "TYPE_STRING" - }, - { - "name": "job", - "type": ".google.cloud.dataproc.v1.Job" - }, - { - "name": "update_mask", - "type": ".google.protobuf.FieldMask" - } - ], - "resultType": ".google.cloud.dataproc.v1.Job", - "client": { - "shortName": "JobControllerClient", - "fullName": "google.cloud.dataproc.v1.JobControllerClient" - }, - "method": { - "shortName": "UpdateJob", - "fullName": "google.cloud.dataproc.v1.JobController.UpdateJob", - "service": { - "shortName": "JobController", - "fullName": "google.cloud.dataproc.v1.JobController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_JobController_CancelJob_async", - "title": "dataproc cancelJob Sample", - "origin": "API_DEFINITION", - "description": " Starts a job cancellation request. 
To access the job resource after cancellation, call [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) or [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get).", - "canonical": true, - "file": "job_controller.cancel_job.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 61, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CancelJob", - "fullName": "google.cloud.dataproc.v1.JobController.CancelJob", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": "job_id", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.dataproc.v1.Job", - "client": { - "shortName": "JobControllerClient", - "fullName": "google.cloud.dataproc.v1.JobControllerClient" - }, - "method": { - "shortName": "CancelJob", - "fullName": "google.cloud.dataproc.v1.JobController.CancelJob", - "service": { - "shortName": "JobController", - "fullName": "google.cloud.dataproc.v1.JobController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_JobController_DeleteJob_async", - "title": "dataproc deleteJob Sample", - "origin": "API_DEFINITION", - "description": " Deletes the job from the project. 
If the job is active, the delete fails, and the response returns `FAILED_PRECONDITION`.", - "canonical": true, - "file": "job_controller.delete_job.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 61, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteJob", - "fullName": "google.cloud.dataproc.v1.JobController.DeleteJob", - "async": true, - "parameters": [ - { - "name": "project_id", - "type": "TYPE_STRING" - }, - { - "name": "region", - "type": "TYPE_STRING" - }, - { - "name": "job_id", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "JobControllerClient", - "fullName": "google.cloud.dataproc.v1.JobControllerClient" - }, - "method": { - "shortName": "DeleteJob", - "fullName": "google.cloud.dataproc.v1.JobController.DeleteJob", - "service": { - "shortName": "JobController", - "fullName": "google.cloud.dataproc.v1.JobController" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_async", - "title": "dataproc createWorkflowTemplate Sample", - "origin": "API_DEFINITION", - "description": " Creates new workflow template.", - "canonical": true, - "file": "workflow_template_service.create_workflow_template.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 62, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "CreateWorkflowTemplate", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "template", - "type": ".google.cloud.dataproc.v1.WorkflowTemplate" - } - ], - "resultType": ".google.cloud.dataproc.v1.WorkflowTemplate", - "client": { - "shortName": "WorkflowTemplateServiceClient", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateServiceClient" - }, - "method": { - "shortName": "CreateWorkflowTemplate", - "fullName": 
"google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate", - "service": { - "shortName": "WorkflowTemplateService", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_async", - "title": "dataproc getWorkflowTemplate Sample", - "origin": "API_DEFINITION", - "description": " Retrieves the latest workflow template. Can retrieve previously instantiated template by specifying optional version parameter.", - "canonical": true, - "file": "workflow_template_service.get_workflow_template.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 63, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "GetWorkflowTemplate", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.GetWorkflowTemplate", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "version", - "type": "TYPE_INT32" - } - ], - "resultType": ".google.cloud.dataproc.v1.WorkflowTemplate", - "client": { - "shortName": "WorkflowTemplateServiceClient", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateServiceClient" - }, - "method": { - "shortName": "GetWorkflowTemplate", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.GetWorkflowTemplate", - "service": { - "shortName": "WorkflowTemplateService", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async", - "title": "dataproc instantiateWorkflowTemplate Sample", - "origin": "API_DEFINITION", - "description": " Instantiates a template and begins execution. The returned Operation can be used to track execution of workflow by polling [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when entire workflow is finished. 
The running workflow can be aborted via [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. The [Operation.metadata][google.longrunning.Operation.metadata] will be [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). Also see [Using WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). On successful completion, [Operation.response][google.longrunning.Operation.response] will be [Empty][google.protobuf.Empty].", - "canonical": true, - "file": "workflow_template_service.instantiate_workflow_template.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 81, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "InstantiateWorkflowTemplate", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "version", - "type": "TYPE_INT32" - }, - { - "name": "request_id", - "type": "TYPE_STRING" - }, - { - "name": "parameters", - "type": "TYPE_MESSAGE[]" - } - ], - "resultType": ".google.longrunning.Operation", - "client": { - "shortName": "WorkflowTemplateServiceClient", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateServiceClient" - }, - "method": { - "shortName": "InstantiateWorkflowTemplate", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate", - "service": { - "shortName": "WorkflowTemplateService", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async", - "title": "dataproc instantiateInlineWorkflowTemplate Sample", - "origin": "API_DEFINITION", - "description": " Instantiates a template and begins 
execution. This method is equivalent to executing the sequence [CreateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate], [InstantiateWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate], [DeleteWorkflowTemplate][google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate]. The returned Operation can be used to track execution of workflow by polling [operations.get][google.longrunning.Operations.GetOperation]. The Operation will complete when entire workflow is finished. The running workflow can be aborted via [operations.cancel][google.longrunning.Operations.CancelOperation]. This will cause any inflight jobs to be cancelled and workflow-owned clusters to be deleted. The [Operation.metadata][google.longrunning.Operation.metadata] will be [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). Also see [Using WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). 
On successful completion, [Operation.response][google.longrunning.Operation.response] will be [Empty][google.protobuf.Empty].", - "canonical": true, - "file": "workflow_template_service.instantiate_inline_workflow_template.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 73, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "InstantiateInlineWorkflowTemplate", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateInlineWorkflowTemplate", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "template", - "type": ".google.cloud.dataproc.v1.WorkflowTemplate" - }, - { - "name": "request_id", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.longrunning.Operation", - "client": { - "shortName": "WorkflowTemplateServiceClient", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateServiceClient" - }, - "method": { - "shortName": "InstantiateInlineWorkflowTemplate", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateInlineWorkflowTemplate", - "service": { - "shortName": "WorkflowTemplateService", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_async", - "title": "dataproc updateWorkflowTemplate Sample", - "origin": "API_DEFINITION", - "description": " Updates (replaces) workflow template. 
The updated template must contain version that matches the current server version.", - "canonical": true, - "file": "workflow_template_service.update_workflow_template.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 51, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "UpdateWorkflowTemplate", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.UpdateWorkflowTemplate", - "async": true, - "parameters": [ - { - "name": "template", - "type": ".google.cloud.dataproc.v1.WorkflowTemplate" - } - ], - "resultType": ".google.cloud.dataproc.v1.WorkflowTemplate", - "client": { - "shortName": "WorkflowTemplateServiceClient", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateServiceClient" - }, - "method": { - "shortName": "UpdateWorkflowTemplate", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.UpdateWorkflowTemplate", - "service": { - "shortName": "WorkflowTemplateService", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_async", - "title": "dataproc listWorkflowTemplates Sample", - "origin": "API_DEFINITION", - "description": " Lists workflows that match the specified filter in the request.", - "canonical": true, - "file": "workflow_template_service.list_workflow_templates.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 68, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "ListWorkflowTemplates", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.ListWorkflowTemplates", - "async": true, - "parameters": [ - { - "name": "parent", - "type": "TYPE_STRING" - }, - { - "name": "page_size", - "type": "TYPE_INT32" - }, - { - "name": "page_token", - "type": "TYPE_STRING" - } - ], - "resultType": ".google.cloud.dataproc.v1.ListWorkflowTemplatesResponse", - "client": { - "shortName": "WorkflowTemplateServiceClient", - "fullName": 
"google.cloud.dataproc.v1.WorkflowTemplateServiceClient" - }, - "method": { - "shortName": "ListWorkflowTemplates", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.ListWorkflowTemplates", - "service": { - "shortName": "WorkflowTemplateService", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" - } - } - } - }, - { - "regionTag": "dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async", - "title": "dataproc deleteWorkflowTemplate Sample", - "origin": "API_DEFINITION", - "description": " Deletes a workflow template. It does not cancel in-progress workflows.", - "canonical": true, - "file": "workflow_template_service.delete_workflow_template.js", - "language": "JAVASCRIPT", - "segments": [ - { - "start": 25, - "end": 63, - "type": "FULL" - } - ], - "clientMethod": { - "shortName": "DeleteWorkflowTemplate", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate", - "async": true, - "parameters": [ - { - "name": "name", - "type": "TYPE_STRING" - }, - { - "name": "version", - "type": "TYPE_INT32" - } - ], - "resultType": ".google.protobuf.Empty", - "client": { - "shortName": "WorkflowTemplateServiceClient", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateServiceClient" - }, - "method": { - "shortName": "DeleteWorkflowTemplate", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate", - "service": { - "shortName": "WorkflowTemplateService", - "fullName": "google.cloud.dataproc.v1.WorkflowTemplateService" - } - } - } - } - ] -} diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.create_workflow_template.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.create_workflow_template.js deleted file mode 100644 index facd28fd..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.create_workflow_template.js +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed 
under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(parent, template) { - // [START dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The resource name of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. - * * For `projects.regions.workflowTemplates.create`, the resource name of the - * region has the following format: - * `projects/{project_id}/regions/{region}` - * * For `projects.locations.workflowTemplates.create`, the resource name of - * the location has the following format: - * `projects/{project_id}/locations/{location}` - */ - // const parent = 'abc123' - /** - * Required. The Dataproc workflow template to create. 
- */ - // const template = {} - - // Imports the Dataproc library - const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new WorkflowTemplateServiceClient(); - - async function callCreateWorkflowTemplate() { - // Construct request - const request = { - parent, - template, - }; - - // Run request - const response = await dataprocClient.createWorkflowTemplate(request); - console.log(response); - } - - callCreateWorkflowTemplate(); - // [END dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.delete_workflow_template.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.delete_workflow_template.js deleted file mode 100644 index 9a9dec08..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.delete_workflow_template.js +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(name) { - // [START dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The resource name of the workflow template, as described - * in https://cloud.google.com/apis/design/resource_names. - * * For `projects.regions.workflowTemplates.delete`, the resource name - * of the template has the following format: - * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - * * For `projects.locations.workflowTemplates.instantiate`, the resource name - * of the template has the following format: - * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - */ - // const name = 'abc123' - /** - * Optional. The version of workflow template to delete. If specified, - * will only delete the template if the current server version matches - * specified version. - */ - // const version = 1234 - - // Imports the Dataproc library - const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new WorkflowTemplateServiceClient(); - - async function callDeleteWorkflowTemplate() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await dataprocClient.deleteWorkflowTemplate(request); - console.log(response); - } - - callDeleteWorkflowTemplate(); - // [END dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.get_workflow_template.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.get_workflow_template.js deleted file mode 100644 index 0013a450..00000000 --- 
a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.get_workflow_template.js +++ /dev/null @@ -1,71 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The resource name of the workflow template, as described - * in https://cloud.google.com/apis/design/resource_names. - * * For `projects.regions.workflowTemplates.get`, the resource name of the - * template has the following format: - * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - * * For `projects.locations.workflowTemplates.get`, the resource name of the - * template has the following format: - * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - */ - // const name = 'abc123' - /** - * Optional. The version of workflow template to retrieve. Only previously - * instantiated versions can be retrieved. - * If unspecified, retrieves the current version. 
- */ - // const version = 1234 - - // Imports the Dataproc library - const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new WorkflowTemplateServiceClient(); - - async function callGetWorkflowTemplate() { - // Construct request - const request = { - name, - }; - - // Run request - const response = await dataprocClient.getWorkflowTemplate(request); - console.log(response); - } - - callGetWorkflowTemplate(); - // [END dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_inline_workflow_template.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_inline_workflow_template.js deleted file mode 100644 index 6aec41bc..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_inline_workflow_template.js +++ /dev/null @@ -1,81 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(parent, template) { - // [START dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The resource name of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. - * * For `projects.regions.workflowTemplates,instantiateinline`, the resource - * name of the region has the following format: - * `projects/{project_id}/regions/{region}` - * * For `projects.locations.workflowTemplates.instantiateinline`, the - * resource name of the location has the following format: - * `projects/{project_id}/locations/{location}` - */ - // const parent = 'abc123' - /** - * Required. The workflow template to instantiate. - */ - // const template = {} - /** - * Optional. A tag that prevents multiple concurrent workflow - * instances with the same tag from running. This mitigates risk of - * concurrent instances started due to retries. - * It is recommended to always set this value to a - * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). - * The tag must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. 
- */ - // const requestId = 'abc123' - - // Imports the Dataproc library - const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new WorkflowTemplateServiceClient(); - - async function callInstantiateInlineWorkflowTemplate() { - // Construct request - const request = { - parent, - template, - }; - - // Run request - const [operation] = await dataprocClient.instantiateInlineWorkflowTemplate(request); - const [response] = await operation.promise(); - console.log(response); - } - - callInstantiateInlineWorkflowTemplate(); - // [END dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_workflow_template.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_workflow_template.js deleted file mode 100644 index ee97d2a0..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.instantiate_workflow_template.js +++ /dev/null @@ -1,89 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. 
** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(name) { - // [START dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The resource name of the workflow template, as described - * in https://cloud.google.com/apis/design/resource_names. - * * For `projects.regions.workflowTemplates.instantiate`, the resource name - * of the template has the following format: - * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - * * For `projects.locations.workflowTemplates.instantiate`, the resource name - * of the template has the following format: - * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - */ - // const name = 'abc123' - /** - * Optional. The version of workflow template to instantiate. If specified, - * the workflow will be instantiated only if the current version of - * the workflow template has the supplied version. - * This option cannot be used to instantiate a previous version of - * workflow template. - */ - // const version = 1234 - /** - * Optional. A tag that prevents multiple concurrent workflow - * instances with the same tag from running. This mitigates risk of - * concurrent instances started due to retries. - * It is recommended to always set this value to a - * UUID (https://en.wikipedia.org/wiki/Universally_unique_identifier). - * The tag must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. - */ - // const requestId = 'abc123' - /** - * Optional. Map from parameter names to values that should be used for those - * parameters. Values may not exceed 1000 characters. 
- */ - // const parameters = 1234 - - // Imports the Dataproc library - const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new WorkflowTemplateServiceClient(); - - async function callInstantiateWorkflowTemplate() { - // Construct request - const request = { - name, - }; - - // Run request - const [operation] = await dataprocClient.instantiateWorkflowTemplate(request); - const [response] = await operation.promise(); - console.log(response); - } - - callInstantiateWorkflowTemplate(); - // [END dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.list_workflow_templates.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.list_workflow_templates.js deleted file mode 100644 index 0facd90a..00000000 --- a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.list_workflow_templates.js +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - - - -'use strict'; - -function main(parent) { - // [START dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The resource name of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. - * * For `projects.regions.workflowTemplates,list`, the resource - * name of the region has the following format: - * `projects/{project_id}/regions/{region}` - * * For `projects.locations.workflowTemplates.list`, the - * resource name of the location has the following format: - * `projects/{project_id}/locations/{location}` - */ - // const parent = 'abc123' - /** - * Optional. The maximum number of results to return in each response. - */ - // const pageSize = 1234 - /** - * Optional. The page token, returned by a previous call, to request the - * next page of results. - */ - // const pageToken = 'abc123' - - // Imports the Dataproc library - const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new WorkflowTemplateServiceClient(); - - async function callListWorkflowTemplates() { - // Construct request - const request = { - parent, - }; - - // Run request - const iterable = await dataprocClient.listWorkflowTemplatesAsync(request); - for await (const response of iterable) { - console.log(response); - } - } - - callListWorkflowTemplates(); - // [END dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.update_workflow_template.js b/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.update_workflow_template.js deleted file mode 100644 index c961fda8..00000000 --- 
a/owl-bot-staging/v1/samples/generated/v1/workflow_template_service.update_workflow_template.js +++ /dev/null @@ -1,59 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - - -'use strict'; - -function main(template) { - // [START dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_async] - /** - * TODO(developer): Uncomment these variables before running the sample. - */ - /** - * Required. The updated workflow template. - * The `template.version` field must match the current version. 
- */ - // const template = {} - - // Imports the Dataproc library - const {WorkflowTemplateServiceClient} = require('@google-cloud/dataproc').v1; - - // Instantiates a client - const dataprocClient = new WorkflowTemplateServiceClient(); - - async function callUpdateWorkflowTemplate() { - // Construct request - const request = { - template, - }; - - // Run request - const response = await dataprocClient.updateWorkflowTemplate(request); - console.log(response); - } - - callUpdateWorkflowTemplate(); - // [END dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_async] -} - -process.on('unhandledRejection', err => { - console.error(err.message); - process.exitCode = 1; -}); -main(...process.argv.slice(2)); diff --git a/owl-bot-staging/v1/src/index.ts b/owl-bot-staging/v1/src/index.ts deleted file mode 100644 index a4342783..00000000 --- a/owl-bot-staging/v1/src/index.ts +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import * as v1 from './v1'; -const AutoscalingPolicyServiceClient = v1.AutoscalingPolicyServiceClient; -type AutoscalingPolicyServiceClient = v1.AutoscalingPolicyServiceClient; -const BatchControllerClient = v1.BatchControllerClient; -type BatchControllerClient = v1.BatchControllerClient; -const ClusterControllerClient = v1.ClusterControllerClient; -type ClusterControllerClient = v1.ClusterControllerClient; -const JobControllerClient = v1.JobControllerClient; -type JobControllerClient = v1.JobControllerClient; -const WorkflowTemplateServiceClient = v1.WorkflowTemplateServiceClient; -type WorkflowTemplateServiceClient = v1.WorkflowTemplateServiceClient; -export {v1, AutoscalingPolicyServiceClient, BatchControllerClient, ClusterControllerClient, JobControllerClient, WorkflowTemplateServiceClient}; -export default {v1, AutoscalingPolicyServiceClient, BatchControllerClient, ClusterControllerClient, JobControllerClient, WorkflowTemplateServiceClient}; -import * as protos from '../protos/protos'; -export {protos} diff --git a/owl-bot-staging/v1/src/v1/autoscaling_policy_service_client.ts b/owl-bot-staging/v1/src/v1/autoscaling_policy_service_client.ts deleted file mode 100644 index 79c04832..00000000 --- a/owl-bot-staging/v1/src/v1/autoscaling_policy_service_client.ts +++ /dev/null @@ -1,1163 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. 
** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, PaginationCallback, GaxCall} from 'google-gax'; - -import { Transform } from 'stream'; -import { RequestType } from 'google-gax/build/src/apitypes'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v1/autoscaling_policy_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './autoscaling_policy_service_client_config.json'; - -const version = require('../../../package.json').version; - -/** - * The API interface for managing autoscaling policies in the - * Dataproc API. - * @class - * @memberof v1 - */ -export class AutoscalingPolicyServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - autoscalingPolicyServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of AutoscalingPolicyServiceClient. - * - * @param {object} [options] - The configuration object. 
- * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean} [options.fallback] - Use HTTP fallback mode. - * In fallback mode, a special browser-compatible transport implementation is used - * instead of gRPC transport. In browser context (if the `window` object is defined) - * the fallback mode is enabled automatically; set `options.fallback` to `false` - * if you need to override this behavior. 
- */ - constructor(opts?: ClientOptions) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof AutoscalingPolicyServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. 
- const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - batchPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/batches/{batch}' - ), - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - projectPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}' - ), - projectLocationAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}' - ), - projectLocationWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflowTemplates/{workflow_template}' - ), - projectRegionAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}' - ), - projectRegionWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/regions/{region}/workflowTemplates/{workflow_template}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). 
Denote the keys used for pagination and results. - this.descriptors.page = { - listAutoscalingPolicies: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'policies') - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.dataproc.v1.AutoscalingPolicyService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. - * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.autoscalingPolicyServiceStub) { - return this.autoscalingPolicyServiceStub; - } - - // Put together the "service stub" for - // google.cloud.dataproc.v1.AutoscalingPolicyService. - this.autoscalingPolicyServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? 
- (this._protos as protobuf.Root).lookupService('google.cloud.dataproc.v1.AutoscalingPolicyService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.dataproc.v1.AutoscalingPolicyService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const autoscalingPolicyServiceStubMethods = - ['createAutoscalingPolicy', 'updateAutoscalingPolicy', 'getAutoscalingPolicy', 'listAutoscalingPolicies', 'deleteAutoscalingPolicy']; - for (const methodName of autoscalingPolicyServiceStubMethods) { - const callPromise = this.autoscalingPolicyServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.autoscalingPolicyServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'dataproc.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'dataproc.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. 
- * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates new autoscaling policy. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The "resource name" of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. - * - * * For `projects.regions.autoscalingPolicies.create`, the resource name - * of the region has the following format: - * `projects/{project_id}/regions/{region}` - * - * * For `projects.locations.autoscalingPolicies.create`, the resource name - * of the location has the following format: - * `projects/{project_id}/locations/{location}` - * @param {google.cloud.dataproc.v1.AutoscalingPolicy} request.policy - * Required. The autoscaling policy to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [AutoscalingPolicy]{@link google.cloud.dataproc.v1.AutoscalingPolicy}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/autoscaling_policy_service.create_autoscaling_policy.js - * region_tag:dataproc_v1_generated_AutoscalingPolicyService_CreateAutoscalingPolicy_async - */ - createAutoscalingPolicy( - request?: protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest|undefined, {}|undefined - ]>; - createAutoscalingPolicy( - request: protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>): void; - createAutoscalingPolicy( - request: protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest, - callback: Callback< - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>): void; - createAutoscalingPolicy( - request?: protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.ICreateAutoscalingPolicyRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = 
options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createAutoscalingPolicy(request, options, callback); - } -/** - * Updates (replaces) autoscaling policy. - * - * Disabled check for update_mask, because all updates will be full - * replacements. - * - * @param {Object} request - * The request object that will be sent. - * @param {google.cloud.dataproc.v1.AutoscalingPolicy} request.policy - * Required. The updated autoscaling policy. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [AutoscalingPolicy]{@link google.cloud.dataproc.v1.AutoscalingPolicy}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/autoscaling_policy_service.update_autoscaling_policy.js - * region_tag:dataproc_v1_generated_AutoscalingPolicyService_UpdateAutoscalingPolicy_async - */ - updateAutoscalingPolicy( - request?: protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest|undefined, {}|undefined - ]>; - updateAutoscalingPolicy( - request: protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>): void; - updateAutoscalingPolicy( - request: protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest, - callback: Callback< - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>): void; - updateAutoscalingPolicy( - request?: protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.IUpdateAutoscalingPolicyRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = 
options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'policy.name': request.policy!.name || '', - }); - this.initialize(); - return this.innerApiCalls.updateAutoscalingPolicy(request, options, callback); - } -/** - * Retrieves autoscaling policy. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The "resource name" of the autoscaling policy, as described - * in https://cloud.google.com/apis/design/resource_names. - * - * * For `projects.regions.autoscalingPolicies.get`, the resource name - * of the policy has the following format: - * `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - * - * * For `projects.locations.autoscalingPolicies.get`, the resource name - * of the policy has the following format: - * `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [AutoscalingPolicy]{@link google.cloud.dataproc.v1.AutoscalingPolicy}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/autoscaling_policy_service.get_autoscaling_policy.js - * region_tag:dataproc_v1_generated_AutoscalingPolicyService_GetAutoscalingPolicy_async - */ - getAutoscalingPolicy( - request?: protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest|undefined, {}|undefined - ]>; - getAutoscalingPolicy( - request: protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>): void; - getAutoscalingPolicy( - request: protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest, - callback: Callback< - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>): void; - getAutoscalingPolicy( - request?: protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.dataproc.v1.IAutoscalingPolicy, - protos.google.cloud.dataproc.v1.IGetAutoscalingPolicyRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = 
options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getAutoscalingPolicy(request, options, callback); - } -/** - * Deletes an autoscaling policy. It is an error to delete an autoscaling - * policy that is in use by one or more clusters. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The "resource name" of the autoscaling policy, as described - * in https://cloud.google.com/apis/design/resource_names. - * - * * For `projects.regions.autoscalingPolicies.delete`, the resource name - * of the policy has the following format: - * `projects/{project_id}/regions/{region}/autoscalingPolicies/{policy_id}` - * - * * For `projects.locations.autoscalingPolicies.delete`, the resource name - * of the policy has the following format: - * `projects/{project_id}/locations/{location}/autoscalingPolicies/{policy_id}` - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/autoscaling_policy_service.delete_autoscaling_policy.js - * region_tag:dataproc_v1_generated_AutoscalingPolicyService_DeleteAutoscalingPolicy_async - */ - deleteAutoscalingPolicy( - request?: protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest|undefined, {}|undefined - ]>; - deleteAutoscalingPolicy( - request: protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>): void; - deleteAutoscalingPolicy( - request: protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>): void; - deleteAutoscalingPolicy( - request?: protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteAutoscalingPolicyRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; 
- options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteAutoscalingPolicy(request, options, callback); - } - - /** - * Lists autoscaling policies in the project. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The "resource name" of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. - * - * * For `projects.regions.autoscalingPolicies.list`, the resource name - * of the region has the following format: - * `projects/{project_id}/regions/{region}` - * - * * For `projects.locations.autoscalingPolicies.list`, the resource name - * of the location has the following format: - * `projects/{project_id}/locations/{location}` - * @param {number} [request.pageSize] - * Optional. The maximum number of results to return in each response. - * Must be less than or equal to 1000. Defaults to 100. - * @param {string} [request.pageToken] - * Optional. The page token, returned by a previous call, to request the - * next page of results. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [AutoscalingPolicy]{@link google.cloud.dataproc.v1.AutoscalingPolicy}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listAutoscalingPoliciesAsync()` - * method described below for async iteration which you can stop as needed. 
- * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listAutoscalingPolicies( - request?: protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IAutoscalingPolicy[], - protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest|null, - protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesResponse - ]>; - listAutoscalingPolicies( - request: protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, - protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesResponse|null|undefined, - protos.google.cloud.dataproc.v1.IAutoscalingPolicy>): void; - listAutoscalingPolicies( - request: protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, - callback: PaginationCallback< - protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, - protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesResponse|null|undefined, - protos.google.cloud.dataproc.v1.IAutoscalingPolicy>): void; - listAutoscalingPolicies( - request?: protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, - protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesResponse|null|undefined, - protos.google.cloud.dataproc.v1.IAutoscalingPolicy>, - callback?: PaginationCallback< - protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, - protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesResponse|null|undefined, - protos.google.cloud.dataproc.v1.IAutoscalingPolicy>): - Promise<[ - protos.google.cloud.dataproc.v1.IAutoscalingPolicy[], - protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest|null, - 
protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listAutoscalingPolicies(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The "resource name" of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. - * - * * For `projects.regions.autoscalingPolicies.list`, the resource name - * of the region has the following format: - * `projects/{project_id}/regions/{region}` - * - * * For `projects.locations.autoscalingPolicies.list`, the resource name - * of the location has the following format: - * `projects/{project_id}/locations/{location}` - * @param {number} [request.pageSize] - * Optional. The maximum number of results to return in each response. - * Must be less than or equal to 1000. Defaults to 100. - * @param {string} [request.pageToken] - * Optional. The page token, returned by a previous call, to request the - * next page of results. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
- * @returns {Stream} - * An object stream which emits an object representing [AutoscalingPolicy]{@link google.cloud.dataproc.v1.AutoscalingPolicy} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listAutoscalingPoliciesAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listAutoscalingPoliciesStream( - request?: protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listAutoscalingPolicies']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listAutoscalingPolicies.createStream( - this.innerApiCalls.listAutoscalingPolicies as gax.GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listAutoscalingPolicies`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The "resource name" of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. 
- * - * * For `projects.regions.autoscalingPolicies.list`, the resource name - * of the region has the following format: - * `projects/{project_id}/regions/{region}` - * - * * For `projects.locations.autoscalingPolicies.list`, the resource name - * of the location has the following format: - * `projects/{project_id}/locations/{location}` - * @param {number} [request.pageSize] - * Optional. The maximum number of results to return in each response. - * Must be less than or equal to 1000. Defaults to 100. - * @param {string} [request.pageToken] - * Optional. The page token, returned by a previous call, to request the - * next page of results. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [AutoscalingPolicy]{@link google.cloud.dataproc.v1.AutoscalingPolicy}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. 
- * @example include:samples/generated/v1/autoscaling_policy_service.list_autoscaling_policies.js - * region_tag:dataproc_v1_generated_AutoscalingPolicyService_ListAutoscalingPolicies_async - */ - listAutoscalingPoliciesAsync( - request?: protos.google.cloud.dataproc.v1.IListAutoscalingPoliciesRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listAutoscalingPolicies']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listAutoscalingPolicies.asyncIterate( - this.innerApiCalls['listAutoscalingPolicies'] as GaxCall, - request as unknown as RequestType, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified batch resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} batch - * @returns {string} Resource name string. - */ - batchPath(project:string,location:string,batch:string) { - return this.pathTemplates.batchPathTemplate.render({ - project: project, - location: location, - batch: batch, - }); - } - - /** - * Parse the project from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the project. - */ - matchProjectFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).project; - } - - /** - * Parse the location from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the location. 
- */ - matchLocationFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).location; - } - - /** - * Parse the batch from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the batch. - */ - matchBatchFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).batch; - } - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. - */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified project resource name string. - * - * @param {string} project - * @returns {string} Resource name string. - */ - projectPath(project:string) { - return this.pathTemplates.projectPathTemplate.render({ - project: project, - }); - } - - /** - * Parse the project from Project resource. - * - * @param {string} projectName - * A fully-qualified path representing Project resource. - * @returns {string} A string representing the project. 
- */ - matchProjectFromProjectName(projectName: string) { - return this.pathTemplates.projectPathTemplate.match(projectName).project; - } - - /** - * Return a fully-qualified projectLocationAutoscalingPolicy resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} autoscaling_policy - * @returns {string} Resource name string. - */ - projectLocationAutoscalingPolicyPath(project:string,location:string,autoscalingPolicy:string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render({ - project: project, - location: location, - autoscaling_policy: autoscalingPolicy, - }); - } - - /** - * Parse the project from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).project; - } - - /** - * Parse the location from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the location. - */ - matchLocationFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).location; - } - - /** - * Parse the autoscaling_policy from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the autoscaling_policy. 
- */ - matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).autoscaling_policy; - } - - /** - * Return a fully-qualified projectLocationWorkflowTemplate resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow_template - * @returns {string} Resource name string. - */ - projectLocationWorkflowTemplatePath(project:string,location:string,workflowTemplate:string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render({ - project: project, - location: location, - workflow_template: workflowTemplate, - }); - } - - /** - * Parse the project from ProjectLocationWorkflowTemplate resource. - * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).project; - } - - /** - * Parse the location from ProjectLocationWorkflowTemplate resource. - * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the location. - */ - matchLocationFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).location; - } - - /** - * Parse the workflow_template from ProjectLocationWorkflowTemplate resource. 
- * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the workflow_template. - */ - matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).workflow_template; - } - - /** - * Return a fully-qualified projectRegionAutoscalingPolicy resource name string. - * - * @param {string} project - * @param {string} region - * @param {string} autoscaling_policy - * @returns {string} Resource name string. - */ - projectRegionAutoscalingPolicyPath(project:string,region:string,autoscalingPolicy:string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render({ - project: project, - region: region, - autoscaling_policy: autoscalingPolicy, - }); - } - - /** - * Parse the project from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).project; - } - - /** - * Parse the region from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the region. 
- */ - matchRegionFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).region; - } - - /** - * Parse the autoscaling_policy from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the autoscaling_policy. - */ - matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).autoscaling_policy; - } - - /** - * Return a fully-qualified projectRegionWorkflowTemplate resource name string. - * - * @param {string} project - * @param {string} region - * @param {string} workflow_template - * @returns {string} Resource name string. - */ - projectRegionWorkflowTemplatePath(project:string,region:string,workflowTemplate:string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render({ - project: project, - region: region, - workflow_template: workflowTemplate, - }); - } - - /** - * Parse the project from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).project; - } - - /** - * Parse the region from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. 
- * @returns {string} A string representing the region. - */ - matchRegionFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).region; - } - - /** - * Parse the workflow_template from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. - * @returns {string} A string representing the workflow_template. - */ - matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).workflow_template; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
- */ - close(): Promise { - if (this.autoscalingPolicyServiceStub && !this._terminated) { - return this.autoscalingPolicyServiceStub.then(stub => { - this._terminated = true; - stub.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v1/src/v1/autoscaling_policy_service_client_config.json b/owl-bot-staging/v1/src/v1/autoscaling_policy_service_client_config.json deleted file mode 100644 index 09bd8922..00000000 --- a/owl-bot-staging/v1/src/v1/autoscaling_policy_service_client_config.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "interfaces": { - "google.cloud.dataproc.v1.AutoscalingPolicyService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateAutoscalingPolicy": { - "timeout_millis": 600000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "UpdateAutoscalingPolicy": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "GetAutoscalingPolicy": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "ListAutoscalingPolicies": { - "timeout_millis": 600000, - "retry_codes_name": "idempotent", - "retry_params_name": "default" - }, - "DeleteAutoscalingPolicy": { - "timeout_millis": 600000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/owl-bot-staging/v1/src/v1/autoscaling_policy_service_proto_list.json b/owl-bot-staging/v1/src/v1/autoscaling_policy_service_proto_list.json deleted file mode 100644 index b26a9be7..00000000 --- 
a/owl-bot-staging/v1/src/v1/autoscaling_policy_service_proto_list.json +++ /dev/null @@ -1,9 +0,0 @@ -[ - "../../protos/google/cloud/dataproc/v1/autoscaling_policies.proto", - "../../protos/google/cloud/dataproc/v1/batches.proto", - "../../protos/google/cloud/dataproc/v1/clusters.proto", - "../../protos/google/cloud/dataproc/v1/jobs.proto", - "../../protos/google/cloud/dataproc/v1/operations.proto", - "../../protos/google/cloud/dataproc/v1/shared.proto", - "../../protos/google/cloud/dataproc/v1/workflow_templates.proto" -] diff --git a/owl-bot-staging/v1/src/v1/batch_controller_client.ts b/owl-bot-staging/v1/src/v1/batch_controller_client.ts deleted file mode 100644 index 3c4c4168..00000000 --- a/owl-bot-staging/v1/src/v1/batch_controller_client.ts +++ /dev/null @@ -1,1100 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -/* global window */ -import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, LROperation, PaginationCallback, GaxCall} from 'google-gax'; - -import { Transform } from 'stream'; -import { RequestType } from 'google-gax/build/src/apitypes'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v1/batch_controller_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './batch_controller_client_config.json'; -import { operationsProtos } from 'google-gax'; -const version = require('../../../package.json').version; - -/** - * The BatchController provides methods to manage batch workloads. - * @class - * @memberof v1 - */ -export class BatchControllerClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - operationsClient: gax.OperationsClient; - batchControllerStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of BatchControllerClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). 
- * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean} [options.fallback] - Use HTTP fallback mode. - * In fallback mode, a special browser-compatible transport implementation is used - * instead of gRPC transport. In browser context (if the `window` object is defined) - * the fallback mode is enabled automatically; set `options.fallback` to `false` - * if you need to override this behavior. - */ - constructor(opts?: ClientOptions) { - // Ensure that options include all the required fields. 
- const staticMembers = this.constructor as typeof BatchControllerClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. 
- const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - batchPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/batches/{batch}' - ), - locationPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}' - ), - projectPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}' - ), - projectLocationAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}' - ), - projectLocationWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflowTemplates/{workflow_template}' - ), - projectRegionAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}' - ), - projectRegionWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/regions/{region}/workflowTemplates/{workflow_template}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). 
Denote the keys used for pagination and results. - this.descriptors.page = { - listBatches: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'batches') - }; - - const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos); - - // This API contains "long-running operations", which return a - // an Operation object that allows for tracking of the operation, - // rather than holding a request open. - - this.operationsClient = this._gaxModule.lro({ - auth: this.auth, - grpc: 'grpc' in this._gaxGrpc ? this._gaxGrpc.grpc : undefined - }).operationsClient(opts); - const createBatchResponse = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.Batch') as gax.protobuf.Type; - const createBatchMetadata = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.BatchOperationMetadata') as gax.protobuf.Type; - - this.descriptors.longrunning = { - createBatch: new this._gaxModule.LongrunningDescriptor( - this.operationsClient, - createBatchResponse.decode.bind(createBatchResponse), - createBatchMetadata.decode.bind(createBatchMetadata)) - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.dataproc.v1.BatchController', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. 
- * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.batchControllerStub) { - return this.batchControllerStub; - } - - // Put together the "service stub" for - // google.cloud.dataproc.v1.BatchController. - this.batchControllerStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.dataproc.v1.BatchController') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.dataproc.v1.BatchController, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const batchControllerStubMethods = - ['createBatch', 'getBatch', 'listBatches', 'deleteBatch']; - for (const methodName of batchControllerStubMethods) { - const callPromise = this.batchControllerStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - this.descriptors.longrunning[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.batchControllerStub; - } - - /** - * The DNS address for this API service. 
- * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'dataproc.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'dataproc.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Gets the batch workload resource representation. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The name of the batch to retrieve. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Batch]{@link google.cloud.dataproc.v1.Batch}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/batch_controller.get_batch.js - * region_tag:dataproc_v1_generated_BatchController_GetBatch_async - */ - getBatch( - request?: protos.google.cloud.dataproc.v1.IGetBatchRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IBatch, - protos.google.cloud.dataproc.v1.IGetBatchRequest|undefined, {}|undefined - ]>; - getBatch( - request: protos.google.cloud.dataproc.v1.IGetBatchRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.dataproc.v1.IBatch, - protos.google.cloud.dataproc.v1.IGetBatchRequest|null|undefined, - {}|null|undefined>): void; - getBatch( - request: protos.google.cloud.dataproc.v1.IGetBatchRequest, - callback: Callback< - protos.google.cloud.dataproc.v1.IBatch, - protos.google.cloud.dataproc.v1.IGetBatchRequest|null|undefined, - {}|null|undefined>): void; - getBatch( - request?: protos.google.cloud.dataproc.v1.IGetBatchRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.dataproc.v1.IBatch, - protos.google.cloud.dataproc.v1.IGetBatchRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.dataproc.v1.IBatch, - protos.google.cloud.dataproc.v1.IGetBatchRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.dataproc.v1.IBatch, - protos.google.cloud.dataproc.v1.IGetBatchRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getBatch(request, options, 
callback); - } -/** - * Deletes the batch workload resource. If the batch is not in terminal state, - * the delete fails and the response returns `FAILED_PRECONDITION`. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The name of the batch resource to delete. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v1/batch_controller.delete_batch.js - * region_tag:dataproc_v1_generated_BatchController_DeleteBatch_async - */ - deleteBatch( - request?: protos.google.cloud.dataproc.v1.IDeleteBatchRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteBatchRequest|undefined, {}|undefined - ]>; - deleteBatch( - request: protos.google.cloud.dataproc.v1.IDeleteBatchRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteBatchRequest|null|undefined, - {}|null|undefined>): void; - deleteBatch( - request: protos.google.cloud.dataproc.v1.IDeleteBatchRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteBatchRequest|null|undefined, - {}|null|undefined>): void; - deleteBatch( - request?: protos.google.cloud.dataproc.v1.IDeleteBatchRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteBatchRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - 
protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteBatchRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteBatchRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteBatch(request, options, callback); - } - -/** - * Creates a batch workload that executes asynchronously. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The parent resource where this batch will be created. - * @param {google.cloud.dataproc.v1.Batch} request.batch - * Required. The batch to create. - * @param {string} [request.batchId] - * Optional. The ID to use for the batch, which will become the final component of - * the batch's resource name. - * - * This value must be 4-63 characters. Valid characters are `/{@link 0-9|a-z}-/`. - * @param {string} [request.requestId] - * Optional. A unique ID used to identify the request. If the service - * receives two - * [CreateBatchRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateBatchRequest)s - * with the same request_id, the second request is ignored and the - * Operation that corresponds to the first Batch created and stored - * in the backend is returned. - * - * Recommendation: Set this value to a - * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). 
- * - * The value must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing - * a long running operation. Its `promise()` method returns a promise - * you can `await` for. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/batch_controller.create_batch.js - * region_tag:dataproc_v1_generated_BatchController_CreateBatch_async - */ - createBatch( - request?: protos.google.cloud.dataproc.v1.ICreateBatchRequest, - options?: CallOptions): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>; - createBatch( - request: protos.google.cloud.dataproc.v1.ICreateBatchRequest, - options: CallOptions, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - createBatch( - request: protos.google.cloud.dataproc.v1.ICreateBatchRequest, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - createBatch( - request?: protos.google.cloud.dataproc.v1.ICreateBatchRequest, - optionsOrCallback?: CallOptions|Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>, - callback?: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof 
optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createBatch(request, options, callback); - } -/** - * Check the status of the long running operation returned by `createBatch()`. - * @param {String} name - * The operation name that will be passed. - * @returns {Promise} - The promise which resolves to an object. - * The decoded operation object has result and metadata field to get information from. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/batch_controller.create_batch.js - * region_tag:dataproc_v1_generated_BatchController_CreateBatch_async - */ - async checkCreateBatchProgress(name: string): Promise>{ - const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); - const [operation] = await this.operationsClient.getOperation(request); - const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.createBatch, gax.createDefaultBackoffSettings()); - return decodeOperation as LROperation; - } - /** - * Lists batch workloads. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The parent, which owns this collection of batches. - * @param {number} [request.pageSize] - * Optional. The maximum number of batches to return in each response. - * The service may return fewer than this value. - * The default page size is 20; the maximum page size is 1000. 
- * @param {string} [request.pageToken] - * Optional. A page token received from a previous `ListBatches` call. - * Provide this token to retrieve the subsequent page. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [Batch]{@link google.cloud.dataproc.v1.Batch}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listBatchesAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listBatches( - request?: protos.google.cloud.dataproc.v1.IListBatchesRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IBatch[], - protos.google.cloud.dataproc.v1.IListBatchesRequest|null, - protos.google.cloud.dataproc.v1.IListBatchesResponse - ]>; - listBatches( - request: protos.google.cloud.dataproc.v1.IListBatchesRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.dataproc.v1.IListBatchesRequest, - protos.google.cloud.dataproc.v1.IListBatchesResponse|null|undefined, - protos.google.cloud.dataproc.v1.IBatch>): void; - listBatches( - request: protos.google.cloud.dataproc.v1.IListBatchesRequest, - callback: PaginationCallback< - protos.google.cloud.dataproc.v1.IListBatchesRequest, - protos.google.cloud.dataproc.v1.IListBatchesResponse|null|undefined, - protos.google.cloud.dataproc.v1.IBatch>): void; - listBatches( - request?: protos.google.cloud.dataproc.v1.IListBatchesRequest, - optionsOrCallback?: 
CallOptions|PaginationCallback< - protos.google.cloud.dataproc.v1.IListBatchesRequest, - protos.google.cloud.dataproc.v1.IListBatchesResponse|null|undefined, - protos.google.cloud.dataproc.v1.IBatch>, - callback?: PaginationCallback< - protos.google.cloud.dataproc.v1.IListBatchesRequest, - protos.google.cloud.dataproc.v1.IListBatchesResponse|null|undefined, - protos.google.cloud.dataproc.v1.IBatch>): - Promise<[ - protos.google.cloud.dataproc.v1.IBatch[], - protos.google.cloud.dataproc.v1.IListBatchesRequest|null, - protos.google.cloud.dataproc.v1.IListBatchesResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listBatches(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The parent, which owns this collection of batches. - * @param {number} [request.pageSize] - * Optional. The maximum number of batches to return in each response. - * The service may return fewer than this value. - * The default page size is 20; the maximum page size is 1000. - * @param {string} [request.pageToken] - * Optional. A page token received from a previous `ListBatches` call. - * Provide this token to retrieve the subsequent page. - * @param {object} [options] - * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [Batch]{@link google.cloud.dataproc.v1.Batch} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listBatchesAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listBatchesStream( - request?: protos.google.cloud.dataproc.v1.IListBatchesRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listBatches']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listBatches.createStream( - this.innerApiCalls.listBatches as gax.GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listBatches`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The parent, which owns this collection of batches. - * @param {number} [request.pageSize] - * Optional. The maximum number of batches to return in each response. - * The service may return fewer than this value. - * The default page size is 20; the maximum page size is 1000. 
- * @param {string} [request.pageToken] - * Optional. A page token received from a previous `ListBatches` call. - * Provide this token to retrieve the subsequent page. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [Batch]{@link google.cloud.dataproc.v1.Batch}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v1/batch_controller.list_batches.js - * region_tag:dataproc_v1_generated_BatchController_ListBatches_async - */ - listBatchesAsync( - request?: protos.google.cloud.dataproc.v1.IListBatchesRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listBatches']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listBatches.asyncIterate( - this.innerApiCalls['listBatches'] as GaxCall, - request as unknown as RequestType, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified batch resource name string. 
- * - * @param {string} project - * @param {string} location - * @param {string} batch - * @returns {string} Resource name string. - */ - batchPath(project:string,location:string,batch:string) { - return this.pathTemplates.batchPathTemplate.render({ - project: project, - location: location, - batch: batch, - }); - } - - /** - * Parse the project from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the project. - */ - matchProjectFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).project; - } - - /** - * Parse the location from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the location. - */ - matchLocationFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).location; - } - - /** - * Parse the batch from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the batch. - */ - matchBatchFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).batch; - } - - /** - * Return a fully-qualified location resource name string. - * - * @param {string} project - * @param {string} location - * @returns {string} Resource name string. - */ - locationPath(project:string,location:string) { - return this.pathTemplates.locationPathTemplate.render({ - project: project, - location: location, - }); - } - - /** - * Parse the project from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the project. 
- */ - matchProjectFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).project; - } - - /** - * Parse the location from Location resource. - * - * @param {string} locationName - * A fully-qualified path representing Location resource. - * @returns {string} A string representing the location. - */ - matchLocationFromLocationName(locationName: string) { - return this.pathTemplates.locationPathTemplate.match(locationName).location; - } - - /** - * Return a fully-qualified project resource name string. - * - * @param {string} project - * @returns {string} Resource name string. - */ - projectPath(project:string) { - return this.pathTemplates.projectPathTemplate.render({ - project: project, - }); - } - - /** - * Parse the project from Project resource. - * - * @param {string} projectName - * A fully-qualified path representing Project resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectName(projectName: string) { - return this.pathTemplates.projectPathTemplate.match(projectName).project; - } - - /** - * Return a fully-qualified projectLocationAutoscalingPolicy resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} autoscaling_policy - * @returns {string} Resource name string. - */ - projectLocationAutoscalingPolicyPath(project:string,location:string,autoscalingPolicy:string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render({ - project: project, - location: location, - autoscaling_policy: autoscalingPolicy, - }); - } - - /** - * Parse the project from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the project. 
- */ - matchProjectFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).project; - } - - /** - * Parse the location from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the location. - */ - matchLocationFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).location; - } - - /** - * Parse the autoscaling_policy from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the autoscaling_policy. - */ - matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).autoscaling_policy; - } - - /** - * Return a fully-qualified projectLocationWorkflowTemplate resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow_template - * @returns {string} Resource name string. - */ - projectLocationWorkflowTemplatePath(project:string,location:string,workflowTemplate:string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render({ - project: project, - location: location, - workflow_template: workflowTemplate, - }); - } - - /** - * Parse the project from ProjectLocationWorkflowTemplate resource. 
- * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).project; - } - - /** - * Parse the location from ProjectLocationWorkflowTemplate resource. - * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the location. - */ - matchLocationFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).location; - } - - /** - * Parse the workflow_template from ProjectLocationWorkflowTemplate resource. - * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the workflow_template. - */ - matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).workflow_template; - } - - /** - * Return a fully-qualified projectRegionAutoscalingPolicy resource name string. - * - * @param {string} project - * @param {string} region - * @param {string} autoscaling_policy - * @returns {string} Resource name string. 
- */ - projectRegionAutoscalingPolicyPath(project:string,region:string,autoscalingPolicy:string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render({ - project: project, - region: region, - autoscaling_policy: autoscalingPolicy, - }); - } - - /** - * Parse the project from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).project; - } - - /** - * Parse the region from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the region. - */ - matchRegionFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).region; - } - - /** - * Parse the autoscaling_policy from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the autoscaling_policy. - */ - matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).autoscaling_policy; - } - - /** - * Return a fully-qualified projectRegionWorkflowTemplate resource name string. 
- * - * @param {string} project - * @param {string} region - * @param {string} workflow_template - * @returns {string} Resource name string. - */ - projectRegionWorkflowTemplatePath(project:string,region:string,workflowTemplate:string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render({ - project: project, - region: region, - workflow_template: workflowTemplate, - }); - } - - /** - * Parse the project from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).project; - } - - /** - * Parse the region from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. - * @returns {string} A string representing the region. - */ - matchRegionFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).region; - } - - /** - * Parse the workflow_template from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. - * @returns {string} A string representing the workflow_template. - */ - matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).workflow_template; - } - - /** - * Terminate the gRPC channel and close the client. 
- * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. - */ - close(): Promise { - if (this.batchControllerStub && !this._terminated) { - return this.batchControllerStub.then(stub => { - this._terminated = true; - stub.close(); - this.operationsClient.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v1/src/v1/batch_controller_client_config.json b/owl-bot-staging/v1/src/v1/batch_controller_client_config.json deleted file mode 100644 index a451087c..00000000 --- a/owl-bot-staging/v1/src/v1/batch_controller_client_config.json +++ /dev/null @@ -1,42 +0,0 @@ -{ - "interfaces": { - "google.cloud.dataproc.v1.BatchController": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateBatch": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "GetBatch": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "ListBatches": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "DeleteBatch": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - } - } - } - } -} diff --git a/owl-bot-staging/v1/src/v1/batch_controller_proto_list.json b/owl-bot-staging/v1/src/v1/batch_controller_proto_list.json deleted file mode 100644 index b26a9be7..00000000 --- a/owl-bot-staging/v1/src/v1/batch_controller_proto_list.json +++ /dev/null @@ -1,9 +0,0 @@ -[ - "../../protos/google/cloud/dataproc/v1/autoscaling_policies.proto", - "../../protos/google/cloud/dataproc/v1/batches.proto", - 
"../../protos/google/cloud/dataproc/v1/clusters.proto", - "../../protos/google/cloud/dataproc/v1/jobs.proto", - "../../protos/google/cloud/dataproc/v1/operations.proto", - "../../protos/google/cloud/dataproc/v1/shared.proto", - "../../protos/google/cloud/dataproc/v1/workflow_templates.proto" -] diff --git a/owl-bot-staging/v1/src/v1/cluster_controller_client.ts b/owl-bot-staging/v1/src/v1/cluster_controller_client.ts deleted file mode 100644 index fce6fc31..00000000 --- a/owl-bot-staging/v1/src/v1/cluster_controller_client.ts +++ /dev/null @@ -1,1755 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, LROperation, PaginationCallback, GaxCall} from 'google-gax'; - -import { Transform } from 'stream'; -import { RequestType } from 'google-gax/build/src/apitypes'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v1/cluster_controller_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. 
- */ -import * as gapicConfig from './cluster_controller_client_config.json'; -import { operationsProtos } from 'google-gax'; -const version = require('../../../package.json').version; - -/** - * The ClusterControllerService provides methods to manage clusters - * of Compute Engine instances. - * @class - * @memberof v1 - */ -export class ClusterControllerClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - operationsClient: gax.OperationsClient; - clusterControllerStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of ClusterControllerClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. 
- * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean} [options.fallback] - Use HTTP fallback mode. - * In fallback mode, a special browser-compatible transport implementation is used - * instead of gRPC transport. In browser context (if the `window` object is defined) - * the fallback mode is enabled automatically; set `options.fallback` to `false` - * if you need to override this behavior. - */ - constructor(opts?: ClientOptions) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof ClusterControllerClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. 
- if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. - const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. 
- this.pathTemplates = { - batchPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/batches/{batch}' - ), - projectLocationAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}' - ), - projectLocationWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflowTemplates/{workflow_template}' - ), - projectRegionAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}' - ), - projectRegionWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/regions/{region}/workflowTemplates/{workflow_template}' - ), - servicePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/services/{service}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. - this.descriptors.page = { - listClusters: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'clusters') - }; - - const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos); - - // This API contains "long-running operations", which return a - // an Operation object that allows for tracking of the operation, - // rather than holding a request open. - - this.operationsClient = this._gaxModule.lro({ - auth: this.auth, - grpc: 'grpc' in this._gaxGrpc ? 
this._gaxGrpc.grpc : undefined - }).operationsClient(opts); - const createClusterResponse = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.Cluster') as gax.protobuf.Type; - const createClusterMetadata = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.ClusterOperationMetadata') as gax.protobuf.Type; - const updateClusterResponse = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.Cluster') as gax.protobuf.Type; - const updateClusterMetadata = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.ClusterOperationMetadata') as gax.protobuf.Type; - const stopClusterResponse = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.Cluster') as gax.protobuf.Type; - const stopClusterMetadata = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.ClusterOperationMetadata') as gax.protobuf.Type; - const startClusterResponse = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.Cluster') as gax.protobuf.Type; - const startClusterMetadata = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.ClusterOperationMetadata') as gax.protobuf.Type; - const deleteClusterResponse = protoFilesRoot.lookup( - '.google.protobuf.Empty') as gax.protobuf.Type; - const deleteClusterMetadata = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.ClusterOperationMetadata') as gax.protobuf.Type; - const diagnoseClusterResponse = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.DiagnoseClusterResults') as gax.protobuf.Type; - const diagnoseClusterMetadata = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.ClusterOperationMetadata') as gax.protobuf.Type; - - this.descriptors.longrunning = { - createCluster: new this._gaxModule.LongrunningDescriptor( - this.operationsClient, - createClusterResponse.decode.bind(createClusterResponse), - createClusterMetadata.decode.bind(createClusterMetadata)), - updateCluster: new this._gaxModule.LongrunningDescriptor( - this.operationsClient, - updateClusterResponse.decode.bind(updateClusterResponse), - 
updateClusterMetadata.decode.bind(updateClusterMetadata)), - stopCluster: new this._gaxModule.LongrunningDescriptor( - this.operationsClient, - stopClusterResponse.decode.bind(stopClusterResponse), - stopClusterMetadata.decode.bind(stopClusterMetadata)), - startCluster: new this._gaxModule.LongrunningDescriptor( - this.operationsClient, - startClusterResponse.decode.bind(startClusterResponse), - startClusterMetadata.decode.bind(startClusterMetadata)), - deleteCluster: new this._gaxModule.LongrunningDescriptor( - this.operationsClient, - deleteClusterResponse.decode.bind(deleteClusterResponse), - deleteClusterMetadata.decode.bind(deleteClusterMetadata)), - diagnoseCluster: new this._gaxModule.LongrunningDescriptor( - this.operationsClient, - diagnoseClusterResponse.decode.bind(diagnoseClusterResponse), - diagnoseClusterMetadata.decode.bind(diagnoseClusterMetadata)) - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.dataproc.v1.ClusterController', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. - * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. 
- */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.clusterControllerStub) { - return this.clusterControllerStub; - } - - // Put together the "service stub" for - // google.cloud.dataproc.v1.ClusterController. - this.clusterControllerStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.dataproc.v1.ClusterController') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.dataproc.v1.ClusterController, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. - const clusterControllerStubMethods = - ['createCluster', 'updateCluster', 'stopCluster', 'startCluster', 'deleteCluster', 'getCluster', 'listClusters', 'diagnoseCluster']; - for (const methodName of clusterControllerStubMethods) { - const callPromise = this.clusterControllerStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - this.descriptors.longrunning[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.clusterControllerStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'dataproc.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. 
- */ - static get apiEndpoint() { - return 'dataproc.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. - */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Gets the resource representation for a cluster in a project. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the cluster - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {string} request.clusterName - * Required. The cluster name. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Cluster]{@link google.cloud.dataproc.v1.Cluster}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/cluster_controller.get_cluster.js - * region_tag:dataproc_v1_generated_ClusterController_GetCluster_async - */ - getCluster( - request?: protos.google.cloud.dataproc.v1.IGetClusterRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.ICluster, - protos.google.cloud.dataproc.v1.IGetClusterRequest|undefined, {}|undefined - ]>; - getCluster( - request: protos.google.cloud.dataproc.v1.IGetClusterRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.dataproc.v1.ICluster, - protos.google.cloud.dataproc.v1.IGetClusterRequest|null|undefined, - {}|null|undefined>): void; - getCluster( - request: protos.google.cloud.dataproc.v1.IGetClusterRequest, - callback: Callback< - protos.google.cloud.dataproc.v1.ICluster, - protos.google.cloud.dataproc.v1.IGetClusterRequest|null|undefined, - {}|null|undefined>): void; - getCluster( - request?: protos.google.cloud.dataproc.v1.IGetClusterRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.dataproc.v1.ICluster, - protos.google.cloud.dataproc.v1.IGetClusterRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.dataproc.v1.ICluster, - protos.google.cloud.dataproc.v1.IGetClusterRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.dataproc.v1.ICluster, - protos.google.cloud.dataproc.v1.IGetClusterRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': 
request.region || '', - 'cluster_name': request.clusterName || '', - }); - this.initialize(); - return this.innerApiCalls.getCluster(request, options, callback); - } - -/** - * Creates a cluster in a project. The returned - * {@link google.longrunning.Operation.metadata|Operation.metadata} will be - * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the cluster - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {google.cloud.dataproc.v1.Cluster} request.cluster - * Required. The cluster to create. - * @param {string} [request.requestId] - * Optional. A unique ID used to identify the request. If the server receives two - * [CreateClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.CreateClusterRequest)s - * with the same id, then the second request will be ignored and the - * first {@link google.longrunning.Operation|google.longrunning.Operation} created and stored in the backend - * is returned. - * - * It is recommended to always set this value to a - * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - * - * The ID must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. - * @param {google.cloud.dataproc.v1.FailureAction} [request.actionOnFailedPrimaryWorkers] - * Optional. Failure action when primary worker creation fails. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. 
- * The first element of the array is an object representing - * a long running operation. Its `promise()` method returns a promise - * you can `await` for. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/cluster_controller.create_cluster.js - * region_tag:dataproc_v1_generated_ClusterController_CreateCluster_async - */ - createCluster( - request?: protos.google.cloud.dataproc.v1.ICreateClusterRequest, - options?: CallOptions): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>; - createCluster( - request: protos.google.cloud.dataproc.v1.ICreateClusterRequest, - options: CallOptions, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - createCluster( - request: protos.google.cloud.dataproc.v1.ICreateClusterRequest, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - createCluster( - request?: protos.google.cloud.dataproc.v1.ICreateClusterRequest, - optionsOrCallback?: CallOptions|Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>, - callback?: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - 
] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - }); - this.initialize(); - return this.innerApiCalls.createCluster(request, options, callback); - } -/** - * Check the status of the long running operation returned by `createCluster()`. - * @param {String} name - * The operation name that will be passed. - * @returns {Promise} - The promise which resolves to an object. - * The decoded operation object has result and metadata field to get information from. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/cluster_controller.create_cluster.js - * region_tag:dataproc_v1_generated_ClusterController_CreateCluster_async - */ - async checkCreateClusterProgress(name: string): Promise>{ - const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); - const [operation] = await this.operationsClient.getOperation(request); - const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.createCluster, gax.createDefaultBackoffSettings()); - return decodeOperation as LROperation; - } -/** - * Updates a cluster in a project. The returned - * {@link google.longrunning.Operation.metadata|Operation.metadata} will be - * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). - * The cluster must be in a {@link google.cloud.dataproc.v1.ClusterStatus.State|`RUNNING`} state or an error - * is returned. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project the - * cluster belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. 
- * @param {string} request.clusterName - * Required. The cluster name. - * @param {google.cloud.dataproc.v1.Cluster} request.cluster - * Required. The changes to the cluster. - * @param {google.protobuf.Duration} [request.gracefulDecommissionTimeout] - * Optional. Timeout for graceful YARN decomissioning. Graceful - * decommissioning allows removing nodes from the cluster without - * interrupting jobs in progress. Timeout specifies how long to wait for jobs - * in progress to finish before forcefully removing nodes (and potentially - * interrupting jobs). Default timeout is 0 (for forceful decommission), and - * the maximum allowed timeout is 1 day. (see JSON representation of - * [Duration](https://developers.google.com/protocol-buffers/docs/proto3#json)). - * - * Only supported on Dataproc image versions 1.2 and higher. - * @param {google.protobuf.FieldMask} request.updateMask - * Required. Specifies the path, relative to `Cluster`, of - * the field to update. For example, to change the number of workers - * in a cluster to 5, the `update_mask` parameter would be - * specified as `config.worker_config.num_instances`, - * and the `PATCH` request body would specify the new value, as follows: - * - * { - * "config":{ - * "workerConfig":{ - * "numInstances":"5" - * } - * } - * } - * Similarly, to change the number of preemptible workers in a cluster to 5, - * the `update_mask` parameter would be - * `config.secondary_worker_config.num_instances`, and the `PATCH` request - * body would be set as follows: - * - * { - * "config":{ - * "secondaryWorkerConfig":{ - * "numInstances":"5" - * } - * } - * } - * Note: Currently, only the following fields can be updated: - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - * - *
MaskPurpose
labelsUpdate labels
config.worker_config.num_instancesResize primary worker group
config.secondary_worker_config.num_instancesResize secondary worker group
config.autoscaling_config.policy_uriUse, stop using, or - * change autoscaling policies
- * @param {string} [request.requestId] - * Optional. A unique ID used to identify the request. If the server - * receives two - * [UpdateClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.UpdateClusterRequest)s - * with the same id, then the second request will be ignored and the - * first {@link google.longrunning.Operation|google.longrunning.Operation} created and stored in the - * backend is returned. - * - * It is recommended to always set this value to a - * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - * - * The ID must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing - * a long running operation. Its `promise()` method returns a promise - * you can `await` for. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. 
- * @example include:samples/generated/v1/cluster_controller.update_cluster.js - * region_tag:dataproc_v1_generated_ClusterController_UpdateCluster_async - */ - updateCluster( - request?: protos.google.cloud.dataproc.v1.IUpdateClusterRequest, - options?: CallOptions): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>; - updateCluster( - request: protos.google.cloud.dataproc.v1.IUpdateClusterRequest, - options: CallOptions, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - updateCluster( - request: protos.google.cloud.dataproc.v1.IUpdateClusterRequest, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - updateCluster( - request?: protos.google.cloud.dataproc.v1.IUpdateClusterRequest, - optionsOrCallback?: CallOptions|Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>, - callback?: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - 'cluster_name': request.clusterName || '', - }); - this.initialize(); - return this.innerApiCalls.updateCluster(request, options, callback); - } -/** - * Check the status of the long running operation returned by 
`updateCluster()`. - * @param {String} name - * The operation name that will be passed. - * @returns {Promise} - The promise which resolves to an object. - * The decoded operation object has result and metadata field to get information from. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/cluster_controller.update_cluster.js - * region_tag:dataproc_v1_generated_ClusterController_UpdateCluster_async - */ - async checkUpdateClusterProgress(name: string): Promise>{ - const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); - const [operation] = await this.operationsClient.getOperation(request); - const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.updateCluster, gax.createDefaultBackoffSettings()); - return decodeOperation as LROperation; - } -/** - * Stops a cluster in a project. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project the - * cluster belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {string} request.clusterName - * Required. The cluster name. - * @param {string} [request.clusterUuid] - * Optional. Specifying the `cluster_uuid` means the RPC will fail - * (with error NOT_FOUND) if a cluster with the specified UUID does not exist. - * @param {string} [request.requestId] - * Optional. A unique ID used to identify the request. 
If the server - * receives two - * [StopClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StopClusterRequest)s - * with the same id, then the second request will be ignored and the - * first {@link google.longrunning.Operation|google.longrunning.Operation} created and stored in the - * backend is returned. - * - * Recommendation: Set this value to a - * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - * - * The ID must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing - * a long running operation. Its `promise()` method returns a promise - * you can `await` for. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. 
- * @example include:samples/generated/v1/cluster_controller.stop_cluster.js - * region_tag:dataproc_v1_generated_ClusterController_StopCluster_async - */ - stopCluster( - request?: protos.google.cloud.dataproc.v1.IStopClusterRequest, - options?: CallOptions): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>; - stopCluster( - request: protos.google.cloud.dataproc.v1.IStopClusterRequest, - options: CallOptions, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - stopCluster( - request: protos.google.cloud.dataproc.v1.IStopClusterRequest, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - stopCluster( - request?: protos.google.cloud.dataproc.v1.IStopClusterRequest, - optionsOrCallback?: CallOptions|Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>, - callback?: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - 'cluster_name': request.clusterName || '', - }); - this.initialize(); - return this.innerApiCalls.stopCluster(request, options, callback); - } -/** - * Check the status of the long running operation returned by `stopCluster()`. 
- * @param {String} name - * The operation name that will be passed. - * @returns {Promise} - The promise which resolves to an object. - * The decoded operation object has result and metadata field to get information from. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/cluster_controller.stop_cluster.js - * region_tag:dataproc_v1_generated_ClusterController_StopCluster_async - */ - async checkStopClusterProgress(name: string): Promise>{ - const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); - const [operation] = await this.operationsClient.getOperation(request); - const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.stopCluster, gax.createDefaultBackoffSettings()); - return decodeOperation as LROperation; - } -/** - * Starts a cluster in a project. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project the - * cluster belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {string} request.clusterName - * Required. The cluster name. - * @param {string} [request.clusterUuid] - * Optional. Specifying the `cluster_uuid` means the RPC will fail - * (with error NOT_FOUND) if a cluster with the specified UUID does not exist. - * @param {string} [request.requestId] - * Optional. A unique ID used to identify the request. 
If the server - * receives two - * [StartClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.StartClusterRequest)s - * with the same id, then the second request will be ignored and the - * first {@link google.longrunning.Operation|google.longrunning.Operation} created and stored in the - * backend is returned. - * - * Recommendation: Set this value to a - * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - * - * The ID must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing - * a long running operation. Its `promise()` method returns a promise - * you can `await` for. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. 
- * @example include:samples/generated/v1/cluster_controller.start_cluster.js - * region_tag:dataproc_v1_generated_ClusterController_StartCluster_async - */ - startCluster( - request?: protos.google.cloud.dataproc.v1.IStartClusterRequest, - options?: CallOptions): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>; - startCluster( - request: protos.google.cloud.dataproc.v1.IStartClusterRequest, - options: CallOptions, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - startCluster( - request: protos.google.cloud.dataproc.v1.IStartClusterRequest, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - startCluster( - request?: protos.google.cloud.dataproc.v1.IStartClusterRequest, - optionsOrCallback?: CallOptions|Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>, - callback?: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - 'cluster_name': request.clusterName || '', - }); - this.initialize(); - return this.innerApiCalls.startCluster(request, options, callback); - } -/** - * Check the status of the long running operation returned by 
`startCluster()`. - * @param {String} name - * The operation name that will be passed. - * @returns {Promise} - The promise which resolves to an object. - * The decoded operation object has result and metadata field to get information from. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/cluster_controller.start_cluster.js - * region_tag:dataproc_v1_generated_ClusterController_StartCluster_async - */ - async checkStartClusterProgress(name: string): Promise>{ - const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); - const [operation] = await this.operationsClient.getOperation(request); - const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.startCluster, gax.createDefaultBackoffSettings()); - return decodeOperation as LROperation; - } -/** - * Deletes a cluster in a project. The returned - * {@link google.longrunning.Operation.metadata|Operation.metadata} will be - * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the cluster - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {string} request.clusterName - * Required. The cluster name. - * @param {string} [request.clusterUuid] - * Optional. Specifying the `cluster_uuid` means the RPC should fail - * (with error NOT_FOUND) if cluster with specified UUID does not exist. - * @param {string} [request.requestId] - * Optional. A unique ID used to identify the request. 
If the server - * receives two - * [DeleteClusterRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.DeleteClusterRequest)s - * with the same id, then the second request will be ignored and the - * first {@link google.longrunning.Operation|google.longrunning.Operation} created and stored in the - * backend is returned. - * - * It is recommended to always set this value to a - * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - * - * The ID must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing - * a long running operation. Its `promise()` method returns a promise - * you can `await` for. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. 
- * @example include:samples/generated/v1/cluster_controller.delete_cluster.js - * region_tag:dataproc_v1_generated_ClusterController_DeleteCluster_async - */ - deleteCluster( - request?: protos.google.cloud.dataproc.v1.IDeleteClusterRequest, - options?: CallOptions): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>; - deleteCluster( - request: protos.google.cloud.dataproc.v1.IDeleteClusterRequest, - options: CallOptions, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - deleteCluster( - request: protos.google.cloud.dataproc.v1.IDeleteClusterRequest, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - deleteCluster( - request?: protos.google.cloud.dataproc.v1.IDeleteClusterRequest, - optionsOrCallback?: CallOptions|Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>, - callback?: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - 'cluster_name': request.clusterName || '', - }); - this.initialize(); - return this.innerApiCalls.deleteCluster(request, options, callback); - } -/** - * Check the status of the long running operation returned by 
`deleteCluster()`. - * @param {String} name - * The operation name that will be passed. - * @returns {Promise} - The promise which resolves to an object. - * The decoded operation object has result and metadata field to get information from. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/cluster_controller.delete_cluster.js - * region_tag:dataproc_v1_generated_ClusterController_DeleteCluster_async - */ - async checkDeleteClusterProgress(name: string): Promise>{ - const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); - const [operation] = await this.operationsClient.getOperation(request); - const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.deleteCluster, gax.createDefaultBackoffSettings()); - return decodeOperation as LROperation; - } -/** - * Gets cluster diagnostic information. The returned - * {@link google.longrunning.Operation.metadata|Operation.metadata} will be - * [ClusterOperationMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#clusteroperationmetadata). - * After the operation completes, - * {@link google.longrunning.Operation.response|Operation.response} - * contains - * [DiagnoseClusterResults](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#diagnoseclusterresults). - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the cluster - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {string} request.clusterName - * Required. The cluster name. - * @param {object} [options] - * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing - * a long running operation. Its `promise()` method returns a promise - * you can `await` for. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/cluster_controller.diagnose_cluster.js - * region_tag:dataproc_v1_generated_ClusterController_DiagnoseCluster_async - */ - diagnoseCluster( - request?: protos.google.cloud.dataproc.v1.IDiagnoseClusterRequest, - options?: CallOptions): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>; - diagnoseCluster( - request: protos.google.cloud.dataproc.v1.IDiagnoseClusterRequest, - options: CallOptions, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - diagnoseCluster( - request: protos.google.cloud.dataproc.v1.IDiagnoseClusterRequest, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - diagnoseCluster( - request?: protos.google.cloud.dataproc.v1.IDiagnoseClusterRequest, - optionsOrCallback?: CallOptions|Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>, - callback?: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as 
CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - 'cluster_name': request.clusterName || '', - }); - this.initialize(); - return this.innerApiCalls.diagnoseCluster(request, options, callback); - } -/** - * Check the status of the long running operation returned by `diagnoseCluster()`. - * @param {String} name - * The operation name that will be passed. - * @returns {Promise} - The promise which resolves to an object. - * The decoded operation object has result and metadata field to get information from. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/cluster_controller.diagnose_cluster.js - * region_tag:dataproc_v1_generated_ClusterController_DiagnoseCluster_async - */ - async checkDiagnoseClusterProgress(name: string): Promise>{ - const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); - const [operation] = await this.operationsClient.getOperation(request); - const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.diagnoseCluster, gax.createDefaultBackoffSettings()); - return decodeOperation as LROperation; - } - /** - * Lists all regions/{region}/clusters in a project alphabetically. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the cluster - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {string} [request.filter] - * Optional. A filter constraining the clusters to list. 
Filters are - * case-sensitive and have the following syntax: - * - * field = value [AND [field = value]] ... - * - * where **field** is one of `status.state`, `clusterName`, or `labels.[KEY]`, - * and `[KEY]` is a label key. **value** can be `*` to match all values. - * `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, - * `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` - * contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` - * contains the `DELETING` and `ERROR` states. - * `clusterName` is the name of the cluster provided at creation time. - * Only the logical `AND` operator is supported; space-separated items are - * treated as having an implicit `AND` operator. - * - * Example filter: - * - * status.state = ACTIVE AND clusterName = mycluster - * AND labels.env = staging AND labels.starred = * - * @param {number} [request.pageSize] - * Optional. The standard List page size. - * @param {string} [request.pageToken] - * Optional. The standard List page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [Cluster]{@link google.cloud.dataproc.v1.Cluster}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listClustersAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. 
- */ - listClusters( - request?: protos.google.cloud.dataproc.v1.IListClustersRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.ICluster[], - protos.google.cloud.dataproc.v1.IListClustersRequest|null, - protos.google.cloud.dataproc.v1.IListClustersResponse - ]>; - listClusters( - request: protos.google.cloud.dataproc.v1.IListClustersRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.dataproc.v1.IListClustersRequest, - protos.google.cloud.dataproc.v1.IListClustersResponse|null|undefined, - protos.google.cloud.dataproc.v1.ICluster>): void; - listClusters( - request: protos.google.cloud.dataproc.v1.IListClustersRequest, - callback: PaginationCallback< - protos.google.cloud.dataproc.v1.IListClustersRequest, - protos.google.cloud.dataproc.v1.IListClustersResponse|null|undefined, - protos.google.cloud.dataproc.v1.ICluster>): void; - listClusters( - request?: protos.google.cloud.dataproc.v1.IListClustersRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.dataproc.v1.IListClustersRequest, - protos.google.cloud.dataproc.v1.IListClustersResponse|null|undefined, - protos.google.cloud.dataproc.v1.ICluster>, - callback?: PaginationCallback< - protos.google.cloud.dataproc.v1.IListClustersRequest, - protos.google.cloud.dataproc.v1.IListClustersResponse|null|undefined, - protos.google.cloud.dataproc.v1.ICluster>): - Promise<[ - protos.google.cloud.dataproc.v1.ICluster[], - protos.google.cloud.dataproc.v1.IListClustersRequest|null, - protos.google.cloud.dataproc.v1.IListClustersResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - 
options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - }); - this.initialize(); - return this.innerApiCalls.listClusters(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the cluster - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {string} [request.filter] - * Optional. A filter constraining the clusters to list. Filters are - * case-sensitive and have the following syntax: - * - * field = value [AND [field = value]] ... - * - * where **field** is one of `status.state`, `clusterName`, or `labels.[KEY]`, - * and `[KEY]` is a label key. **value** can be `*` to match all values. - * `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, - * `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` - * contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` - * contains the `DELETING` and `ERROR` states. - * `clusterName` is the name of the cluster provided at creation time. - * Only the logical `AND` operator is supported; space-separated items are - * treated as having an implicit `AND` operator. - * - * Example filter: - * - * status.state = ACTIVE AND clusterName = mycluster - * AND labels.env = staging AND labels.starred = * - * @param {number} [request.pageSize] - * Optional. The standard List page size. - * @param {string} [request.pageToken] - * Optional. The standard List page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
- * @returns {Stream} - * An object stream which emits an object representing [Cluster]{@link google.cloud.dataproc.v1.Cluster} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listClustersAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listClustersStream( - request?: protos.google.cloud.dataproc.v1.IListClustersRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - }); - const defaultCallSettings = this._defaults['listClusters']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listClusters.createStream( - this.innerApiCalls.listClusters as gax.GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listClusters`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the cluster - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {string} [request.filter] - * Optional. A filter constraining the clusters to list. 
Filters are - * case-sensitive and have the following syntax: - * - * field = value [AND [field = value]] ... - * - * where **field** is one of `status.state`, `clusterName`, or `labels.[KEY]`, - * and `[KEY]` is a label key. **value** can be `*` to match all values. - * `status.state` can be one of the following: `ACTIVE`, `INACTIVE`, - * `CREATING`, `RUNNING`, `ERROR`, `DELETING`, or `UPDATING`. `ACTIVE` - * contains the `CREATING`, `UPDATING`, and `RUNNING` states. `INACTIVE` - * contains the `DELETING` and `ERROR` states. - * `clusterName` is the name of the cluster provided at creation time. - * Only the logical `AND` operator is supported; space-separated items are - * treated as having an implicit `AND` operator. - * - * Example filter: - * - * status.state = ACTIVE AND clusterName = mycluster - * AND labels.env = staging AND labels.starred = * - * @param {number} [request.pageSize] - * Optional. The standard List page size. - * @param {string} [request.pageToken] - * Optional. The standard List page token. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [Cluster]{@link google.cloud.dataproc.v1.Cluster}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. 
- * @example include:samples/generated/v1/cluster_controller.list_clusters.js - * region_tag:dataproc_v1_generated_ClusterController_ListClusters_async - */ - listClustersAsync( - request?: protos.google.cloud.dataproc.v1.IListClustersRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - }); - const defaultCallSettings = this._defaults['listClusters']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listClusters.asyncIterate( - this.innerApiCalls['listClusters'] as GaxCall, - request as unknown as RequestType, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified batch resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} batch - * @returns {string} Resource name string. - */ - batchPath(project:string,location:string,batch:string) { - return this.pathTemplates.batchPathTemplate.render({ - project: project, - location: location, - batch: batch, - }); - } - - /** - * Parse the project from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the project. - */ - matchProjectFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).project; - } - - /** - * Parse the location from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the location. 
- */ - matchLocationFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).location; - } - - /** - * Parse the batch from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the batch. - */ - matchBatchFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).batch; - } - - /** - * Return a fully-qualified projectLocationAutoscalingPolicy resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} autoscaling_policy - * @returns {string} Resource name string. - */ - projectLocationAutoscalingPolicyPath(project:string,location:string,autoscalingPolicy:string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render({ - project: project, - location: location, - autoscaling_policy: autoscalingPolicy, - }); - } - - /** - * Parse the project from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).project; - } - - /** - * Parse the location from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the location. 
- */ - matchLocationFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).location; - } - - /** - * Parse the autoscaling_policy from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the autoscaling_policy. - */ - matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).autoscaling_policy; - } - - /** - * Return a fully-qualified projectLocationWorkflowTemplate resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow_template - * @returns {string} Resource name string. - */ - projectLocationWorkflowTemplatePath(project:string,location:string,workflowTemplate:string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render({ - project: project, - location: location, - workflow_template: workflowTemplate, - }); - } - - /** - * Parse the project from ProjectLocationWorkflowTemplate resource. - * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).project; - } - - /** - * Parse the location from ProjectLocationWorkflowTemplate resource. 
- * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the location. - */ - matchLocationFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).location; - } - - /** - * Parse the workflow_template from ProjectLocationWorkflowTemplate resource. - * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the workflow_template. - */ - matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).workflow_template; - } - - /** - * Return a fully-qualified projectRegionAutoscalingPolicy resource name string. - * - * @param {string} project - * @param {string} region - * @param {string} autoscaling_policy - * @returns {string} Resource name string. - */ - projectRegionAutoscalingPolicyPath(project:string,region:string,autoscalingPolicy:string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render({ - project: project, - region: region, - autoscaling_policy: autoscalingPolicy, - }); - } - - /** - * Parse the project from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the project. 
- */ - matchProjectFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).project; - } - - /** - * Parse the region from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the region. - */ - matchRegionFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).region; - } - - /** - * Parse the autoscaling_policy from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the autoscaling_policy. - */ - matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).autoscaling_policy; - } - - /** - * Return a fully-qualified projectRegionWorkflowTemplate resource name string. - * - * @param {string} project - * @param {string} region - * @param {string} workflow_template - * @returns {string} Resource name string. - */ - projectRegionWorkflowTemplatePath(project:string,region:string,workflowTemplate:string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render({ - project: project, - region: region, - workflow_template: workflowTemplate, - }); - } - - /** - * Parse the project from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. 
- * @returns {string} A string representing the project. - */ - matchProjectFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).project; - } - - /** - * Parse the region from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. - * @returns {string} A string representing the region. - */ - matchRegionFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).region; - } - - /** - * Parse the workflow_template from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. - * @returns {string} A string representing the workflow_template. - */ - matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).workflow_template; - } - - /** - * Return a fully-qualified service resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} service - * @returns {string} Resource name string. - */ - servicePath(project:string,location:string,service:string) { - return this.pathTemplates.servicePathTemplate.render({ - project: project, - location: location, - service: service, - }); - } - - /** - * Parse the project from Service resource. - * - * @param {string} serviceName - * A fully-qualified path representing Service resource. - * @returns {string} A string representing the project. 
- */ - matchProjectFromServiceName(serviceName: string) { - return this.pathTemplates.servicePathTemplate.match(serviceName).project; - } - - /** - * Parse the location from Service resource. - * - * @param {string} serviceName - * A fully-qualified path representing Service resource. - * @returns {string} A string representing the location. - */ - matchLocationFromServiceName(serviceName: string) { - return this.pathTemplates.servicePathTemplate.match(serviceName).location; - } - - /** - * Parse the service from Service resource. - * - * @param {string} serviceName - * A fully-qualified path representing Service resource. - * @returns {string} A string representing the service. - */ - matchServiceFromServiceName(serviceName: string) { - return this.pathTemplates.servicePathTemplate.match(serviceName).service; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
- */ - close(): Promise { - if (this.clusterControllerStub && !this._terminated) { - return this.clusterControllerStub.then(stub => { - this._terminated = true; - stub.close(); - this.operationsClient.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v1/src/v1/cluster_controller_client_config.json b/owl-bot-staging/v1/src/v1/cluster_controller_client_config.json deleted file mode 100644 index 6f5f5f3d..00000000 --- a/owl-bot-staging/v1/src/v1/cluster_controller_client_config.json +++ /dev/null @@ -1,72 +0,0 @@ -{ - "interfaces": { - "google.cloud.dataproc.v1.ClusterController": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ], - "deadline_exceeded_internal_unavailable": [ - "DEADLINE_EXCEEDED", - "INTERNAL", - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateCluster": { - "timeout_millis": 300000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - }, - "UpdateCluster": { - "timeout_millis": 300000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - }, - "StopCluster": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "StartCluster": { - "retry_codes_name": "non_idempotent", - "retry_params_name": "default" - }, - "DeleteCluster": { - "timeout_millis": 300000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - }, - "GetCluster": { - "timeout_millis": 300000, - "retry_codes_name": "deadline_exceeded_internal_unavailable", - "retry_params_name": "default" - }, - "ListClusters": { - "timeout_millis": 300000, - "retry_codes_name": 
"deadline_exceeded_internal_unavailable", - "retry_params_name": "default" - }, - "DiagnoseCluster": { - "timeout_millis": 300000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - } - } - } - } -} diff --git a/owl-bot-staging/v1/src/v1/cluster_controller_proto_list.json b/owl-bot-staging/v1/src/v1/cluster_controller_proto_list.json deleted file mode 100644 index b26a9be7..00000000 --- a/owl-bot-staging/v1/src/v1/cluster_controller_proto_list.json +++ /dev/null @@ -1,9 +0,0 @@ -[ - "../../protos/google/cloud/dataproc/v1/autoscaling_policies.proto", - "../../protos/google/cloud/dataproc/v1/batches.proto", - "../../protos/google/cloud/dataproc/v1/clusters.proto", - "../../protos/google/cloud/dataproc/v1/jobs.proto", - "../../protos/google/cloud/dataproc/v1/operations.proto", - "../../protos/google/cloud/dataproc/v1/shared.proto", - "../../protos/google/cloud/dataproc/v1/workflow_templates.proto" -] diff --git a/owl-bot-staging/v1/src/v1/gapic_metadata.json b/owl-bot-staging/v1/src/v1/gapic_metadata.json deleted file mode 100644 index 6f5c9ee3..00000000 --- a/owl-bot-staging/v1/src/v1/gapic_metadata.json +++ /dev/null @@ -1,409 +0,0 @@ -{ - "schema": "1.0", - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "typescript", - "protoPackage": "google.cloud.dataproc.v1", - "libraryPackage": "@google-cloud/dataproc", - "services": { - "AutoscalingPolicyService": { - "clients": { - "grpc": { - "libraryClient": "AutoscalingPolicyServiceClient", - "rpcs": { - "CreateAutoscalingPolicy": { - "methods": [ - "createAutoscalingPolicy" - ] - }, - "UpdateAutoscalingPolicy": { - "methods": [ - "updateAutoscalingPolicy" - ] - }, - "GetAutoscalingPolicy": { - "methods": [ - "getAutoscalingPolicy" - ] - }, - "DeleteAutoscalingPolicy": { - "methods": [ - "deleteAutoscalingPolicy" - ] - }, - "ListAutoscalingPolicies": { - "methods": [ - "listAutoscalingPolicies", - "listAutoscalingPoliciesStream", - 
"listAutoscalingPoliciesAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "AutoscalingPolicyServiceClient", - "rpcs": { - "CreateAutoscalingPolicy": { - "methods": [ - "createAutoscalingPolicy" - ] - }, - "UpdateAutoscalingPolicy": { - "methods": [ - "updateAutoscalingPolicy" - ] - }, - "GetAutoscalingPolicy": { - "methods": [ - "getAutoscalingPolicy" - ] - }, - "DeleteAutoscalingPolicy": { - "methods": [ - "deleteAutoscalingPolicy" - ] - }, - "ListAutoscalingPolicies": { - "methods": [ - "listAutoscalingPolicies", - "listAutoscalingPoliciesStream", - "listAutoscalingPoliciesAsync" - ] - } - } - } - } - }, - "BatchController": { - "clients": { - "grpc": { - "libraryClient": "BatchControllerClient", - "rpcs": { - "GetBatch": { - "methods": [ - "getBatch" - ] - }, - "DeleteBatch": { - "methods": [ - "deleteBatch" - ] - }, - "CreateBatch": { - "methods": [ - "createBatch" - ] - }, - "ListBatches": { - "methods": [ - "listBatches", - "listBatchesStream", - "listBatchesAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "BatchControllerClient", - "rpcs": { - "GetBatch": { - "methods": [ - "getBatch" - ] - }, - "DeleteBatch": { - "methods": [ - "deleteBatch" - ] - }, - "CreateBatch": { - "methods": [ - "createBatch" - ] - }, - "ListBatches": { - "methods": [ - "listBatches", - "listBatchesStream", - "listBatchesAsync" - ] - } - } - } - } - }, - "ClusterController": { - "clients": { - "grpc": { - "libraryClient": "ClusterControllerClient", - "rpcs": { - "GetCluster": { - "methods": [ - "getCluster" - ] - }, - "CreateCluster": { - "methods": [ - "createCluster" - ] - }, - "UpdateCluster": { - "methods": [ - "updateCluster" - ] - }, - "StopCluster": { - "methods": [ - "stopCluster" - ] - }, - "StartCluster": { - "methods": [ - "startCluster" - ] - }, - "DeleteCluster": { - "methods": [ - "deleteCluster" - ] - }, - "DiagnoseCluster": { - "methods": [ - "diagnoseCluster" - ] - }, - "ListClusters": { - "methods": [ - "listClusters", - 
"listClustersStream", - "listClustersAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "ClusterControllerClient", - "rpcs": { - "GetCluster": { - "methods": [ - "getCluster" - ] - }, - "CreateCluster": { - "methods": [ - "createCluster" - ] - }, - "UpdateCluster": { - "methods": [ - "updateCluster" - ] - }, - "StopCluster": { - "methods": [ - "stopCluster" - ] - }, - "StartCluster": { - "methods": [ - "startCluster" - ] - }, - "DeleteCluster": { - "methods": [ - "deleteCluster" - ] - }, - "DiagnoseCluster": { - "methods": [ - "diagnoseCluster" - ] - }, - "ListClusters": { - "methods": [ - "listClusters", - "listClustersStream", - "listClustersAsync" - ] - } - } - } - } - }, - "JobController": { - "clients": { - "grpc": { - "libraryClient": "JobControllerClient", - "rpcs": { - "SubmitJob": { - "methods": [ - "submitJob" - ] - }, - "GetJob": { - "methods": [ - "getJob" - ] - }, - "UpdateJob": { - "methods": [ - "updateJob" - ] - }, - "CancelJob": { - "methods": [ - "cancelJob" - ] - }, - "DeleteJob": { - "methods": [ - "deleteJob" - ] - }, - "SubmitJobAsOperation": { - "methods": [ - "submitJobAsOperation" - ] - }, - "ListJobs": { - "methods": [ - "listJobs", - "listJobsStream", - "listJobsAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "JobControllerClient", - "rpcs": { - "SubmitJob": { - "methods": [ - "submitJob" - ] - }, - "GetJob": { - "methods": [ - "getJob" - ] - }, - "UpdateJob": { - "methods": [ - "updateJob" - ] - }, - "CancelJob": { - "methods": [ - "cancelJob" - ] - }, - "DeleteJob": { - "methods": [ - "deleteJob" - ] - }, - "SubmitJobAsOperation": { - "methods": [ - "submitJobAsOperation" - ] - }, - "ListJobs": { - "methods": [ - "listJobs", - "listJobsStream", - "listJobsAsync" - ] - } - } - } - } - }, - "WorkflowTemplateService": { - "clients": { - "grpc": { - "libraryClient": "WorkflowTemplateServiceClient", - "rpcs": { - "CreateWorkflowTemplate": { - "methods": [ - "createWorkflowTemplate" - ] - }, - 
"GetWorkflowTemplate": { - "methods": [ - "getWorkflowTemplate" - ] - }, - "UpdateWorkflowTemplate": { - "methods": [ - "updateWorkflowTemplate" - ] - }, - "DeleteWorkflowTemplate": { - "methods": [ - "deleteWorkflowTemplate" - ] - }, - "InstantiateWorkflowTemplate": { - "methods": [ - "instantiateWorkflowTemplate" - ] - }, - "InstantiateInlineWorkflowTemplate": { - "methods": [ - "instantiateInlineWorkflowTemplate" - ] - }, - "ListWorkflowTemplates": { - "methods": [ - "listWorkflowTemplates", - "listWorkflowTemplatesStream", - "listWorkflowTemplatesAsync" - ] - } - } - }, - "grpc-fallback": { - "libraryClient": "WorkflowTemplateServiceClient", - "rpcs": { - "CreateWorkflowTemplate": { - "methods": [ - "createWorkflowTemplate" - ] - }, - "GetWorkflowTemplate": { - "methods": [ - "getWorkflowTemplate" - ] - }, - "UpdateWorkflowTemplate": { - "methods": [ - "updateWorkflowTemplate" - ] - }, - "DeleteWorkflowTemplate": { - "methods": [ - "deleteWorkflowTemplate" - ] - }, - "InstantiateWorkflowTemplate": { - "methods": [ - "instantiateWorkflowTemplate" - ] - }, - "InstantiateInlineWorkflowTemplate": { - "methods": [ - "instantiateInlineWorkflowTemplate" - ] - }, - "ListWorkflowTemplates": { - "methods": [ - "listWorkflowTemplates", - "listWorkflowTemplatesStream", - "listWorkflowTemplatesAsync" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v1/src/v1/index.ts b/owl-bot-staging/v1/src/v1/index.ts deleted file mode 100644 index 098c50ae..00000000 --- a/owl-bot-staging/v1/src/v1/index.ts +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -export {AutoscalingPolicyServiceClient} from './autoscaling_policy_service_client'; -export {BatchControllerClient} from './batch_controller_client'; -export {ClusterControllerClient} from './cluster_controller_client'; -export {JobControllerClient} from './job_controller_client'; -export {WorkflowTemplateServiceClient} from './workflow_template_service_client'; diff --git a/owl-bot-staging/v1/src/v1/job_controller_client.ts b/owl-bot-staging/v1/src/v1/job_controller_client.ts deleted file mode 100644 index 164b97ba..00000000 --- a/owl-bot-staging/v1/src/v1/job_controller_client.ts +++ /dev/null @@ -1,1382 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. 
** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -/* global window */ -import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, LROperation, PaginationCallback, GaxCall} from 'google-gax'; - -import { Transform } from 'stream'; -import { RequestType } from 'google-gax/build/src/apitypes'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v1/job_controller_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './job_controller_client_config.json'; -import { operationsProtos } from 'google-gax'; -const version = require('../../../package.json').version; - -/** - * The JobController provides methods to manage jobs. - * @class - * @memberof v1 - */ -export class JobControllerClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - operationsClient: gax.OperationsClient; - jobControllerStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of JobControllerClient. - * - * @param {object} [options] - The configuration object. 
- * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). - * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean} [options.fallback] - Use HTTP fallback mode. - * In fallback mode, a special browser-compatible transport implementation is used - * instead of gRPC transport. In browser context (if the `window` object is defined) - * the fallback mode is enabled automatically; set `options.fallback` to `false` - * if you need to override this behavior. 
- */ - constructor(opts?: ClientOptions) { - // Ensure that options include all the required fields. - const staticMembers = this.constructor as typeof JobControllerClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. 
- const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - batchPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/batches/{batch}' - ), - projectLocationAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}' - ), - projectLocationWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflowTemplates/{workflow_template}' - ), - projectRegionAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}' - ), - projectRegionWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/regions/{region}/workflowTemplates/{workflow_template}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). Denote the keys used for pagination and results. 
- this.descriptors.page = { - listJobs: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'jobs') - }; - - const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos); - - // This API contains "long-running operations", which return a - // an Operation object that allows for tracking of the operation, - // rather than holding a request open. - - this.operationsClient = this._gaxModule.lro({ - auth: this.auth, - grpc: 'grpc' in this._gaxGrpc ? this._gaxGrpc.grpc : undefined - }).operationsClient(opts); - const submitJobAsOperationResponse = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.Job') as gax.protobuf.Type; - const submitJobAsOperationMetadata = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.JobMetadata') as gax.protobuf.Type; - - this.descriptors.longrunning = { - submitJobAsOperation: new this._gaxModule.LongrunningDescriptor( - this.operationsClient, - submitJobAsOperationResponse.decode.bind(submitJobAsOperationResponse), - submitJobAsOperationMetadata.decode.bind(submitJobAsOperationMetadata)) - }; - - // Put together the default options sent with requests. - this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.dataproc.v1.JobController', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. 
- * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.jobControllerStub) { - return this.jobControllerStub; - } - - // Put together the "service stub" for - // google.cloud.dataproc.v1.JobController. - this.jobControllerStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.dataproc.v1.JobController') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.dataproc.v1.JobController, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. 
- const jobControllerStubMethods = - ['submitJob', 'submitJobAsOperation', 'getJob', 'listJobs', 'updateJob', 'cancelJob', 'deleteJob']; - for (const methodName of jobControllerStubMethods) { - const callPromise = this.jobControllerStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - this.descriptors.longrunning[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.jobControllerStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'dataproc.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'dataproc.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. 
- */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Submits a job to a cluster. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {google.cloud.dataproc.v1.Job} request.job - * Required. The job resource. - * @param {string} [request.requestId] - * Optional. A unique id used to identify the request. If the server - * receives two - * [SubmitJobRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.SubmitJobRequest)s - * with the same id, then the second request will be ignored and the - * first {@link google.cloud.dataproc.v1.Job|Job} created and stored in the backend - * is returned. - * - * It is recommended to always set this value to a - * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - * - * The id must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Job]{@link google.cloud.dataproc.v1.Job}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/job_controller.submit_job.js - * region_tag:dataproc_v1_generated_JobController_SubmitJob_async - */ - submitJob( - request?: protos.google.cloud.dataproc.v1.ISubmitJobRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.ISubmitJobRequest|undefined, {}|undefined - ]>; - submitJob( - request: protos.google.cloud.dataproc.v1.ISubmitJobRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.ISubmitJobRequest|null|undefined, - {}|null|undefined>): void; - submitJob( - request: protos.google.cloud.dataproc.v1.ISubmitJobRequest, - callback: Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.ISubmitJobRequest|null|undefined, - {}|null|undefined>): void; - submitJob( - request?: protos.google.cloud.dataproc.v1.ISubmitJobRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.ISubmitJobRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.ISubmitJobRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.ISubmitJobRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - }); - this.initialize(); - return 
this.innerApiCalls.submitJob(request, options, callback); - } -/** - * Gets the resource representation for a job in a project. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {string} request.jobId - * Required. The job ID. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Job]{@link google.cloud.dataproc.v1.Job}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v1/job_controller.get_job.js - * region_tag:dataproc_v1_generated_JobController_GetJob_async - */ - getJob( - request?: protos.google.cloud.dataproc.v1.IGetJobRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.IGetJobRequest|undefined, {}|undefined - ]>; - getJob( - request: protos.google.cloud.dataproc.v1.IGetJobRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.IGetJobRequest|null|undefined, - {}|null|undefined>): void; - getJob( - request: protos.google.cloud.dataproc.v1.IGetJobRequest, - callback: Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.IGetJobRequest|null|undefined, - {}|null|undefined>): void; - getJob( - request?: protos.google.cloud.dataproc.v1.IGetJobRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.dataproc.v1.IJob, - 
protos.google.cloud.dataproc.v1.IGetJobRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.IGetJobRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.IGetJobRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - 'job_id': request.jobId || '', - }); - this.initialize(); - return this.innerApiCalls.getJob(request, options, callback); - } -/** - * Updates a job in a project. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {string} request.jobId - * Required. The job ID. - * @param {google.cloud.dataproc.v1.Job} request.job - * Required. The changes to the job. - * @param {google.protobuf.FieldMask} request.updateMask - * Required. Specifies the path, relative to Job, of - * the field to update. For example, to update the labels of a Job the - * update_mask parameter would be specified as - * labels, and the `PATCH` request body would specify the new - * value. Note: Currently, labels is the only - * field that can be updated. - * @param {object} [options] - * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Job]{@link google.cloud.dataproc.v1.Job}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. - * @example include:samples/generated/v1/job_controller.update_job.js - * region_tag:dataproc_v1_generated_JobController_UpdateJob_async - */ - updateJob( - request?: protos.google.cloud.dataproc.v1.IUpdateJobRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.IUpdateJobRequest|undefined, {}|undefined - ]>; - updateJob( - request: protos.google.cloud.dataproc.v1.IUpdateJobRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.IUpdateJobRequest|null|undefined, - {}|null|undefined>): void; - updateJob( - request: protos.google.cloud.dataproc.v1.IUpdateJobRequest, - callback: Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.IUpdateJobRequest|null|undefined, - {}|null|undefined>): void; - updateJob( - request?: protos.google.cloud.dataproc.v1.IUpdateJobRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.IUpdateJobRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.IUpdateJobRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.IUpdateJobRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - 
callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - 'job_id': request.jobId || '', - }); - this.initialize(); - return this.innerApiCalls.updateJob(request, options, callback); - } -/** - * Starts a job cancellation request. To access the job resource - * after cancellation, call - * [regions/{region}/jobs.list](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/list) - * or - * [regions/{region}/jobs.get](https://cloud.google.com/dataproc/docs/reference/rest/v1/projects.regions.jobs/get). - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {string} request.jobId - * Required. The job ID. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Job]{@link google.cloud.dataproc.v1.Job}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/job_controller.cancel_job.js - * region_tag:dataproc_v1_generated_JobController_CancelJob_async - */ - cancelJob( - request?: protos.google.cloud.dataproc.v1.ICancelJobRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.ICancelJobRequest|undefined, {}|undefined - ]>; - cancelJob( - request: protos.google.cloud.dataproc.v1.ICancelJobRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.ICancelJobRequest|null|undefined, - {}|null|undefined>): void; - cancelJob( - request: protos.google.cloud.dataproc.v1.ICancelJobRequest, - callback: Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.ICancelJobRequest|null|undefined, - {}|null|undefined>): void; - cancelJob( - request?: protos.google.cloud.dataproc.v1.ICancelJobRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.ICancelJobRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.ICancelJobRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.dataproc.v1.IJob, - protos.google.cloud.dataproc.v1.ICancelJobRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - 'job_id': request.jobId || '', - }); 
- this.initialize(); - return this.innerApiCalls.cancelJob(request, options, callback); - } -/** - * Deletes the job from the project. If the job is active, the delete fails, - * and the response returns `FAILED_PRECONDITION`. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {string} request.jobId - * Required. The job ID. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/job_controller.delete_job.js - * region_tag:dataproc_v1_generated_JobController_DeleteJob_async - */ - deleteJob( - request?: protos.google.cloud.dataproc.v1.IDeleteJobRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteJobRequest|undefined, {}|undefined - ]>; - deleteJob( - request: protos.google.cloud.dataproc.v1.IDeleteJobRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteJobRequest|null|undefined, - {}|null|undefined>): void; - deleteJob( - request: protos.google.cloud.dataproc.v1.IDeleteJobRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteJobRequest|null|undefined, - {}|null|undefined>): void; - deleteJob( - request?: protos.google.cloud.dataproc.v1.IDeleteJobRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteJobRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteJobRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteJobRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - 'job_id': request.jobId || '', - }); - this.initialize(); - return 
this.innerApiCalls.deleteJob(request, options, callback); - } - -/** - * Submits job to a cluster. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {google.cloud.dataproc.v1.Job} request.job - * Required. The job resource. - * @param {string} [request.requestId] - * Optional. A unique id used to identify the request. If the server - * receives two - * [SubmitJobRequest](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#google.cloud.dataproc.v1.SubmitJobRequest)s - * with the same id, then the second request will be ignored and the - * first {@link google.cloud.dataproc.v1.Job|Job} created and stored in the backend - * is returned. - * - * It is recommended to always set this value to a - * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - * - * The id must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing - * a long running operation. Its `promise()` method returns a promise - * you can `await` for. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. 
- * @example include:samples/generated/v1/job_controller.submit_job_as_operation.js - * region_tag:dataproc_v1_generated_JobController_SubmitJobAsOperation_async - */ - submitJobAsOperation( - request?: protos.google.cloud.dataproc.v1.ISubmitJobRequest, - options?: CallOptions): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>; - submitJobAsOperation( - request: protos.google.cloud.dataproc.v1.ISubmitJobRequest, - options: CallOptions, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - submitJobAsOperation( - request: protos.google.cloud.dataproc.v1.ISubmitJobRequest, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - submitJobAsOperation( - request?: protos.google.cloud.dataproc.v1.ISubmitJobRequest, - optionsOrCallback?: CallOptions|Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>, - callback?: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - }); - this.initialize(); - return this.innerApiCalls.submitJobAsOperation(request, options, callback); - } -/** - * Check the status of the long running operation returned by 
`submitJobAsOperation()`. - * @param {String} name - * The operation name that will be passed. - * @returns {Promise} - The promise which resolves to an object. - * The decoded operation object has result and metadata field to get information from. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/job_controller.submit_job_as_operation.js - * region_tag:dataproc_v1_generated_JobController_SubmitJobAsOperation_async - */ - async checkSubmitJobAsOperationProgress(name: string): Promise>{ - const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); - const [operation] = await this.operationsClient.getOperation(request); - const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.submitJobAsOperation, gax.createDefaultBackoffSettings()); - return decodeOperation as LROperation; - } - /** - * Lists regions/{region}/jobs in a project. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {number} [request.pageSize] - * Optional. The number of results to return in each response. - * @param {string} [request.pageToken] - * Optional. The page token, returned by a previous call, to request the - * next page of results. - * @param {string} [request.clusterName] - * Optional. If set, the returned jobs list includes only jobs that were - * submitted to the named cluster. - * @param {google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcher} [request.jobStateMatcher] - * Optional. Specifies enumerated categories of jobs to list. - * (default = match ALL jobs). 
- * - * If `filter` is provided, `jobStateMatcher` will be ignored. - * @param {string} [request.filter] - * Optional. A filter constraining the jobs to list. Filters are - * case-sensitive and have the following syntax: - * - * [field = value] AND [field [= value]] ... - * - * where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label - * key. **value** can be `*` to match all values. - * `status.state` can be either `ACTIVE` or `NON_ACTIVE`. - * Only the logical `AND` operator is supported; space-separated items are - * treated as having an implicit `AND` operator. - * - * Example filter: - * - * status.state = ACTIVE AND labels.env = staging AND labels.starred = * - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [Job]{@link google.cloud.dataproc.v1.Job}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listJobsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. 
- */ - listJobs( - request?: protos.google.cloud.dataproc.v1.IListJobsRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IJob[], - protos.google.cloud.dataproc.v1.IListJobsRequest|null, - protos.google.cloud.dataproc.v1.IListJobsResponse - ]>; - listJobs( - request: protos.google.cloud.dataproc.v1.IListJobsRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.dataproc.v1.IListJobsRequest, - protos.google.cloud.dataproc.v1.IListJobsResponse|null|undefined, - protos.google.cloud.dataproc.v1.IJob>): void; - listJobs( - request: protos.google.cloud.dataproc.v1.IListJobsRequest, - callback: PaginationCallback< - protos.google.cloud.dataproc.v1.IListJobsRequest, - protos.google.cloud.dataproc.v1.IListJobsResponse|null|undefined, - protos.google.cloud.dataproc.v1.IJob>): void; - listJobs( - request?: protos.google.cloud.dataproc.v1.IListJobsRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.dataproc.v1.IListJobsRequest, - protos.google.cloud.dataproc.v1.IListJobsResponse|null|undefined, - protos.google.cloud.dataproc.v1.IJob>, - callback?: PaginationCallback< - protos.google.cloud.dataproc.v1.IListJobsRequest, - protos.google.cloud.dataproc.v1.IListJobsResponse|null|undefined, - protos.google.cloud.dataproc.v1.IJob>): - Promise<[ - protos.google.cloud.dataproc.v1.IJob[], - protos.google.cloud.dataproc.v1.IListJobsRequest|null, - protos.google.cloud.dataproc.v1.IListJobsResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': 
request.projectId || '', - 'region': request.region || '', - }); - this.initialize(); - return this.innerApiCalls.listJobs(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {number} [request.pageSize] - * Optional. The number of results to return in each response. - * @param {string} [request.pageToken] - * Optional. The page token, returned by a previous call, to request the - * next page of results. - * @param {string} [request.clusterName] - * Optional. If set, the returned jobs list includes only jobs that were - * submitted to the named cluster. - * @param {google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcher} [request.jobStateMatcher] - * Optional. Specifies enumerated categories of jobs to list. - * (default = match ALL jobs). - * - * If `filter` is provided, `jobStateMatcher` will be ignored. - * @param {string} [request.filter] - * Optional. A filter constraining the jobs to list. Filters are - * case-sensitive and have the following syntax: - * - * [field = value] AND [field [= value]] ... - * - * where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label - * key. **value** can be `*` to match all values. - * `status.state` can be either `ACTIVE` or `NON_ACTIVE`. - * Only the logical `AND` operator is supported; space-separated items are - * treated as having an implicit `AND` operator. - * - * Example filter: - * - * status.state = ACTIVE AND labels.env = staging AND labels.starred = * - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
- * @returns {Stream} - * An object stream which emits an object representing [Job]{@link google.cloud.dataproc.v1.Job} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listJobsAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listJobsStream( - request?: protos.google.cloud.dataproc.v1.IListJobsRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - }); - const defaultCallSettings = this._defaults['listJobs']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listJobs.createStream( - this.innerApiCalls.listJobs as gax.GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listJobs`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.projectId - * Required. The ID of the Google Cloud Platform project that the job - * belongs to. - * @param {string} request.region - * Required. The Dataproc region in which to handle the request. - * @param {number} [request.pageSize] - * Optional. The number of results to return in each response. - * @param {string} [request.pageToken] - * Optional. 
The page token, returned by a previous call, to request the - * next page of results. - * @param {string} [request.clusterName] - * Optional. If set, the returned jobs list includes only jobs that were - * submitted to the named cluster. - * @param {google.cloud.dataproc.v1.ListJobsRequest.JobStateMatcher} [request.jobStateMatcher] - * Optional. Specifies enumerated categories of jobs to list. - * (default = match ALL jobs). - * - * If `filter` is provided, `jobStateMatcher` will be ignored. - * @param {string} [request.filter] - * Optional. A filter constraining the jobs to list. Filters are - * case-sensitive and have the following syntax: - * - * [field = value] AND [field [= value]] ... - * - * where **field** is `status.state` or `labels.[KEY]`, and `[KEY]` is a label - * key. **value** can be `*` to match all values. - * `status.state` can be either `ACTIVE` or `NON_ACTIVE`. - * Only the logical `AND` operator is supported; space-separated items are - * treated as having an implicit `AND` operator. - * - * Example filter: - * - * status.state = ACTIVE AND labels.env = staging AND labels.starred = * - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [Job]{@link google.cloud.dataproc.v1.Job}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. 
- * @example include:samples/generated/v1/job_controller.list_jobs.js - * region_tag:dataproc_v1_generated_JobController_ListJobs_async - */ - listJobsAsync( - request?: protos.google.cloud.dataproc.v1.IListJobsRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'project_id': request.projectId || '', - 'region': request.region || '', - }); - const defaultCallSettings = this._defaults['listJobs']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listJobs.asyncIterate( - this.innerApiCalls['listJobs'] as GaxCall, - request as unknown as RequestType, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified batch resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} batch - * @returns {string} Resource name string. - */ - batchPath(project:string,location:string,batch:string) { - return this.pathTemplates.batchPathTemplate.render({ - project: project, - location: location, - batch: batch, - }); - } - - /** - * Parse the project from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the project. - */ - matchProjectFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).project; - } - - /** - * Parse the location from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the location. 
- */ - matchLocationFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).location; - } - - /** - * Parse the batch from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the batch. - */ - matchBatchFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).batch; - } - - /** - * Return a fully-qualified projectLocationAutoscalingPolicy resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} autoscaling_policy - * @returns {string} Resource name string. - */ - projectLocationAutoscalingPolicyPath(project:string,location:string,autoscalingPolicy:string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render({ - project: project, - location: location, - autoscaling_policy: autoscalingPolicy, - }); - } - - /** - * Parse the project from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).project; - } - - /** - * Parse the location from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the location. 
- */ - matchLocationFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).location; - } - - /** - * Parse the autoscaling_policy from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the autoscaling_policy. - */ - matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).autoscaling_policy; - } - - /** - * Return a fully-qualified projectLocationWorkflowTemplate resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow_template - * @returns {string} Resource name string. - */ - projectLocationWorkflowTemplatePath(project:string,location:string,workflowTemplate:string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render({ - project: project, - location: location, - workflow_template: workflowTemplate, - }); - } - - /** - * Parse the project from ProjectLocationWorkflowTemplate resource. - * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).project; - } - - /** - * Parse the location from ProjectLocationWorkflowTemplate resource. 
- * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the location. - */ - matchLocationFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).location; - } - - /** - * Parse the workflow_template from ProjectLocationWorkflowTemplate resource. - * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the workflow_template. - */ - matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).workflow_template; - } - - /** - * Return a fully-qualified projectRegionAutoscalingPolicy resource name string. - * - * @param {string} project - * @param {string} region - * @param {string} autoscaling_policy - * @returns {string} Resource name string. - */ - projectRegionAutoscalingPolicyPath(project:string,region:string,autoscalingPolicy:string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render({ - project: project, - region: region, - autoscaling_policy: autoscalingPolicy, - }); - } - - /** - * Parse the project from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the project. 
- */ - matchProjectFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).project; - } - - /** - * Parse the region from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the region. - */ - matchRegionFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).region; - } - - /** - * Parse the autoscaling_policy from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the autoscaling_policy. - */ - matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).autoscaling_policy; - } - - /** - * Return a fully-qualified projectRegionWorkflowTemplate resource name string. - * - * @param {string} project - * @param {string} region - * @param {string} workflow_template - * @returns {string} Resource name string. - */ - projectRegionWorkflowTemplatePath(project:string,region:string,workflowTemplate:string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render({ - project: project, - region: region, - workflow_template: workflowTemplate, - }); - } - - /** - * Parse the project from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. 
- * @returns {string} A string representing the project. - */ - matchProjectFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).project; - } - - /** - * Parse the region from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. - * @returns {string} A string representing the region. - */ - matchRegionFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).region; - } - - /** - * Parse the workflow_template from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. - * @returns {string} A string representing the workflow_template. - */ - matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).workflow_template; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. - * @returns {Promise} A promise that resolves when the client is closed. 
- */ - close(): Promise { - if (this.jobControllerStub && !this._terminated) { - return this.jobControllerStub.then(stub => { - this._terminated = true; - stub.close(); - this.operationsClient.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v1/src/v1/job_controller_client_config.json b/owl-bot-staging/v1/src/v1/job_controller_client_config.json deleted file mode 100644 index 5d757110..00000000 --- a/owl-bot-staging/v1/src/v1/job_controller_client_config.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "interfaces": { - "google.cloud.dataproc.v1.JobController": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ], - "deadline_exceeded_internal_unavailable": [ - "DEADLINE_EXCEEDED", - "INTERNAL", - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "SubmitJob": { - "timeout_millis": 900000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - }, - "SubmitJobAsOperation": { - "timeout_millis": 900000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - }, - "GetJob": { - "timeout_millis": 900000, - "retry_codes_name": "deadline_exceeded_internal_unavailable", - "retry_params_name": "default" - }, - "ListJobs": { - "timeout_millis": 900000, - "retry_codes_name": "deadline_exceeded_internal_unavailable", - "retry_params_name": "default" - }, - "UpdateJob": { - "timeout_millis": 900000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - }, - "CancelJob": { - "timeout_millis": 900000, - "retry_codes_name": "deadline_exceeded_internal_unavailable", - "retry_params_name": "default" - }, - "DeleteJob": { - "timeout_millis": 
900000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - } - } - } - } -} diff --git a/owl-bot-staging/v1/src/v1/job_controller_proto_list.json b/owl-bot-staging/v1/src/v1/job_controller_proto_list.json deleted file mode 100644 index b26a9be7..00000000 --- a/owl-bot-staging/v1/src/v1/job_controller_proto_list.json +++ /dev/null @@ -1,9 +0,0 @@ -[ - "../../protos/google/cloud/dataproc/v1/autoscaling_policies.proto", - "../../protos/google/cloud/dataproc/v1/batches.proto", - "../../protos/google/cloud/dataproc/v1/clusters.proto", - "../../protos/google/cloud/dataproc/v1/jobs.proto", - "../../protos/google/cloud/dataproc/v1/operations.proto", - "../../protos/google/cloud/dataproc/v1/shared.proto", - "../../protos/google/cloud/dataproc/v1/workflow_templates.proto" -] diff --git a/owl-bot-staging/v1/src/v1/workflow_template_service_client.ts b/owl-bot-staging/v1/src/v1/workflow_template_service_client.ts deleted file mode 100644 index 89902bc6..00000000 --- a/owl-bot-staging/v1/src/v1/workflow_template_service_client.ts +++ /dev/null @@ -1,1478 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -/* global window */ -import * as gax from 'google-gax'; -import {Callback, CallOptions, Descriptors, ClientOptions, LROperation, PaginationCallback, GaxCall} from 'google-gax'; - -import { Transform } from 'stream'; -import { RequestType } from 'google-gax/build/src/apitypes'; -import * as protos from '../../protos/protos'; -import jsonProtos = require('../../protos/protos.json'); -/** - * Client JSON configuration object, loaded from - * `src/v1/workflow_template_service_client_config.json`. - * This file defines retry strategy and timeouts for all API methods in this library. - */ -import * as gapicConfig from './workflow_template_service_client_config.json'; -import { operationsProtos } from 'google-gax'; -const version = require('../../../package.json').version; - -/** - * The API interface for managing Workflow Templates in the - * Dataproc API. - * @class - * @memberof v1 - */ -export class WorkflowTemplateServiceClient { - private _terminated = false; - private _opts: ClientOptions; - private _providedCustomServicePath: boolean; - private _gaxModule: typeof gax | typeof gax.fallback; - private _gaxGrpc: gax.GrpcClient | gax.fallback.GrpcClient; - private _protos: {}; - private _defaults: {[method: string]: gax.CallSettings}; - auth: gax.GoogleAuth; - descriptors: Descriptors = { - page: {}, - stream: {}, - longrunning: {}, - batching: {}, - }; - warn: (code: string, message: string, warnType?: string) => void; - innerApiCalls: {[name: string]: Function}; - pathTemplates: {[name: string]: gax.PathTemplate}; - operationsClient: gax.OperationsClient; - workflowTemplateServiceStub?: Promise<{[name: string]: Function}>; - - /** - * Construct an instance of WorkflowTemplateServiceClient. - * - * @param {object} [options] - The configuration object. - * The options accepted by the constructor are described in detail - * in [this document](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#creating-the-client-instance). 
- * The common options are: - * @param {object} [options.credentials] - Credentials object. - * @param {string} [options.credentials.client_email] - * @param {string} [options.credentials.private_key] - * @param {string} [options.email] - Account email address. Required when - * using a .pem or .p12 keyFilename. - * @param {string} [options.keyFilename] - Full path to the a .json, .pem, or - * .p12 key downloaded from the Google Developers Console. If you provide - * a path to a JSON file, the projectId option below is not necessary. - * NOTE: .pem and .p12 require you to specify options.email as well. - * @param {number} [options.port] - The port on which to connect to - * the remote host. - * @param {string} [options.projectId] - The project ID from the Google - * Developer's Console, e.g. 'grape-spaceship-123'. We will also check - * the environment variable GCLOUD_PROJECT for your project ID. If your - * app is running in an environment which supports - * {@link https://developers.google.com/identity/protocols/application-default-credentials Application Default Credentials}, - * your project ID will be detected automatically. - * @param {string} [options.apiEndpoint] - The domain name of the - * API remote host. - * @param {gax.ClientConfig} [options.clientConfig] - Client configuration override. - * Follows the structure of {@link gapicConfig}. - * @param {boolean} [options.fallback] - Use HTTP fallback mode. - * In fallback mode, a special browser-compatible transport implementation is used - * instead of gRPC transport. In browser context (if the `window` object is defined) - * the fallback mode is enabled automatically; set `options.fallback` to `false` - * if you need to override this behavior. - */ - constructor(opts?: ClientOptions) { - // Ensure that options include all the required fields. 
- const staticMembers = this.constructor as typeof WorkflowTemplateServiceClient; - const servicePath = opts?.servicePath || opts?.apiEndpoint || staticMembers.servicePath; - this._providedCustomServicePath = !!(opts?.servicePath || opts?.apiEndpoint); - const port = opts?.port || staticMembers.port; - const clientConfig = opts?.clientConfig ?? {}; - const fallback = opts?.fallback ?? (typeof window !== 'undefined' && typeof window?.fetch === 'function'); - opts = Object.assign({servicePath, port, clientConfig, fallback}, opts); - - // If scopes are unset in options and we're connecting to a non-default endpoint, set scopes just in case. - if (servicePath !== staticMembers.servicePath && !('scopes' in opts)) { - opts['scopes'] = staticMembers.scopes; - } - - // Choose either gRPC or proto-over-HTTP implementation of google-gax. - this._gaxModule = opts.fallback ? gax.fallback : gax; - - // Create a `gaxGrpc` object, with any grpc-specific options sent to the client. - this._gaxGrpc = new this._gaxModule.GrpcClient(opts); - - // Save options to use in initialize() method. - this._opts = opts; - - // Save the auth object to the client, for use by other methods. - this.auth = (this._gaxGrpc.auth as gax.GoogleAuth); - - // Set useJWTAccessWithScope on the auth object. - this.auth.useJWTAccessWithScope = true; - - // Set defaultServicePath on the auth object. - this.auth.defaultServicePath = staticMembers.servicePath; - - // Set the default scopes in auth client if needed. - if (servicePath === staticMembers.servicePath) { - this.auth.defaultScopes = staticMembers.scopes; - } - - // Determine the client header string. 
- const clientHeader = [ - `gax/${this._gaxModule.version}`, - `gapic/${version}`, - ]; - if (typeof process !== 'undefined' && 'versions' in process) { - clientHeader.push(`gl-node/${process.versions.node}`); - } else { - clientHeader.push(`gl-web/${this._gaxModule.version}`); - } - if (!opts.fallback) { - clientHeader.push(`grpc/${this._gaxGrpc.grpcVersion}`); - } else if (opts.fallback === 'rest' ) { - clientHeader.push(`rest/${this._gaxGrpc.grpcVersion}`); - } - if (opts.libName && opts.libVersion) { - clientHeader.push(`${opts.libName}/${opts.libVersion}`); - } - // Load the applicable protos. - this._protos = this._gaxGrpc.loadProtoJSON(jsonProtos); - - // This API contains "path templates"; forward-slash-separated - // identifiers to uniquely identify resources within the API. - // Create useful helper objects for these. - this.pathTemplates = { - batchPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/batches/{batch}' - ), - projectPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}' - ), - projectLocationAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/autoscalingPolicies/{autoscaling_policy}' - ), - projectLocationWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/locations/{location}/workflowTemplates/{workflow_template}' - ), - projectRegionAutoscalingPolicyPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/regions/{region}/autoscalingPolicies/{autoscaling_policy}' - ), - projectRegionWorkflowTemplatePathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/regions/{region}/workflowTemplates/{workflow_template}' - ), - regionPathTemplate: new this._gaxModule.PathTemplate( - 'projects/{project}/regions/{region}' - ), - }; - - // Some of the methods on this service return "paged" results, - // (e.g. 50 results at a time, with tokens to get subsequent - // pages). 
Denote the keys used for pagination and results. - this.descriptors.page = { - listWorkflowTemplates: - new this._gaxModule.PageDescriptor('pageToken', 'nextPageToken', 'templates') - }; - - const protoFilesRoot = this._gaxModule.protobuf.Root.fromJSON(jsonProtos); - - // This API contains "long-running operations", which return a - // an Operation object that allows for tracking of the operation, - // rather than holding a request open. - - this.operationsClient = this._gaxModule.lro({ - auth: this.auth, - grpc: 'grpc' in this._gaxGrpc ? this._gaxGrpc.grpc : undefined - }).operationsClient(opts); - const instantiateWorkflowTemplateResponse = protoFilesRoot.lookup( - '.google.protobuf.Empty') as gax.protobuf.Type; - const instantiateWorkflowTemplateMetadata = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.WorkflowMetadata') as gax.protobuf.Type; - const instantiateInlineWorkflowTemplateResponse = protoFilesRoot.lookup( - '.google.protobuf.Empty') as gax.protobuf.Type; - const instantiateInlineWorkflowTemplateMetadata = protoFilesRoot.lookup( - '.google.cloud.dataproc.v1.WorkflowMetadata') as gax.protobuf.Type; - - this.descriptors.longrunning = { - instantiateWorkflowTemplate: new this._gaxModule.LongrunningDescriptor( - this.operationsClient, - instantiateWorkflowTemplateResponse.decode.bind(instantiateWorkflowTemplateResponse), - instantiateWorkflowTemplateMetadata.decode.bind(instantiateWorkflowTemplateMetadata)), - instantiateInlineWorkflowTemplate: new this._gaxModule.LongrunningDescriptor( - this.operationsClient, - instantiateInlineWorkflowTemplateResponse.decode.bind(instantiateInlineWorkflowTemplateResponse), - instantiateInlineWorkflowTemplateMetadata.decode.bind(instantiateInlineWorkflowTemplateMetadata)) - }; - - // Put together the default options sent with requests. 
- this._defaults = this._gaxGrpc.constructSettings( - 'google.cloud.dataproc.v1.WorkflowTemplateService', gapicConfig as gax.ClientConfig, - opts.clientConfig || {}, {'x-goog-api-client': clientHeader.join(' ')}); - - // Set up a dictionary of "inner API calls"; the core implementation - // of calling the API is handled in `google-gax`, with this code - // merely providing the destination and request information. - this.innerApiCalls = {}; - - // Add a warn function to the client constructor so it can be easily tested. - this.warn = gax.warn; - } - - /** - * Initialize the client. - * Performs asynchronous operations (such as authentication) and prepares the client. - * This function will be called automatically when any class method is called for the - * first time, but if you need to initialize it before calling an actual method, - * feel free to call initialize() directly. - * - * You can await on this method if you want to make sure the client is initialized. - * - * @returns {Promise} A promise that resolves to an authenticated service stub. - */ - initialize() { - // If the client stub promise is already initialized, return immediately. - if (this.workflowTemplateServiceStub) { - return this.workflowTemplateServiceStub; - } - - // Put together the "service stub" for - // google.cloud.dataproc.v1.WorkflowTemplateService. - this.workflowTemplateServiceStub = this._gaxGrpc.createStub( - this._opts.fallback ? - (this._protos as protobuf.Root).lookupService('google.cloud.dataproc.v1.WorkflowTemplateService') : - // eslint-disable-next-line @typescript-eslint/no-explicit-any - (this._protos as any).google.cloud.dataproc.v1.WorkflowTemplateService, - this._opts, this._providedCustomServicePath) as Promise<{[method: string]: Function}>; - - // Iterate over each of the methods that the service provides - // and create an API call method for each. 
- const workflowTemplateServiceStubMethods = - ['createWorkflowTemplate', 'getWorkflowTemplate', 'instantiateWorkflowTemplate', 'instantiateInlineWorkflowTemplate', 'updateWorkflowTemplate', 'listWorkflowTemplates', 'deleteWorkflowTemplate']; - for (const methodName of workflowTemplateServiceStubMethods) { - const callPromise = this.workflowTemplateServiceStub.then( - stub => (...args: Array<{}>) => { - if (this._terminated) { - return Promise.reject('The client has already been closed.'); - } - const func = stub[methodName]; - return func.apply(stub, args); - }, - (err: Error|null|undefined) => () => { - throw err; - }); - - const descriptor = - this.descriptors.page[methodName] || - this.descriptors.longrunning[methodName] || - undefined; - const apiCall = this._gaxModule.createApiCall( - callPromise, - this._defaults[methodName], - descriptor - ); - - this.innerApiCalls[methodName] = apiCall; - } - - return this.workflowTemplateServiceStub; - } - - /** - * The DNS address for this API service. - * @returns {string} The DNS address for this service. - */ - static get servicePath() { - return 'dataproc.googleapis.com'; - } - - /** - * The DNS address for this API service - same as servicePath(), - * exists for compatibility reasons. - * @returns {string} The DNS address for this service. - */ - static get apiEndpoint() { - return 'dataproc.googleapis.com'; - } - - /** - * The port for this API service. - * @returns {number} The default port for this service. - */ - static get port() { - return 443; - } - - /** - * The scopes needed to make gRPC calls for every method defined - * in this service. - * @returns {string[]} List of default scopes. - */ - static get scopes() { - return [ - 'https://www.googleapis.com/auth/cloud-platform' - ]; - } - - getProjectId(): Promise; - getProjectId(callback: Callback): void; - /** - * Return the project ID used by this class. - * @returns {Promise} A promise that resolves to string containing the project ID. 
- */ - getProjectId(callback?: Callback): - Promise|void { - if (callback) { - this.auth.getProjectId(callback); - return; - } - return this.auth.getProjectId(); - } - - // ------------------- - // -- Service calls -- - // ------------------- -/** - * Creates new workflow template. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The resource name of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. - * - * * For `projects.regions.workflowTemplates.create`, the resource name of the - * region has the following format: - * `projects/{project_id}/regions/{region}` - * - * * For `projects.locations.workflowTemplates.create`, the resource name of - * the location has the following format: - * `projects/{project_id}/locations/{location}` - * @param {google.cloud.dataproc.v1.WorkflowTemplate} request.template - * Required. The Dataproc workflow template to create. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [WorkflowTemplate]{@link google.cloud.dataproc.v1.WorkflowTemplate}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/workflow_template_service.create_workflow_template.js - * region_tag:dataproc_v1_generated_WorkflowTemplateService_CreateWorkflowTemplate_async - */ - createWorkflowTemplate( - request?: protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest|undefined, {}|undefined - ]>; - createWorkflowTemplate( - request: protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest|null|undefined, - {}|null|undefined>): void; - createWorkflowTemplate( - request: protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest, - callback: Callback< - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest|null|undefined, - {}|null|undefined>): void; - createWorkflowTemplate( - request?: protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.ICreateWorkflowTemplateRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - 
options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.createWorkflowTemplate(request, options, callback); - } -/** - * Retrieves the latest workflow template. - * - * Can retrieve previously instantiated template by specifying optional - * version parameter. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The resource name of the workflow template, as described - * in https://cloud.google.com/apis/design/resource_names. - * - * * For `projects.regions.workflowTemplates.get`, the resource name of the - * template has the following format: - * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - * - * * For `projects.locations.workflowTemplates.get`, the resource name of the - * template has the following format: - * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - * @param {number} [request.version] - * Optional. The version of workflow template to retrieve. Only previously - * instantiated versions can be retrieved. - * - * If unspecified, retrieves the current version. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [WorkflowTemplate]{@link google.cloud.dataproc.v1.WorkflowTemplate}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/workflow_template_service.get_workflow_template.js - * region_tag:dataproc_v1_generated_WorkflowTemplateService_GetWorkflowTemplate_async - */ - getWorkflowTemplate( - request?: protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest|undefined, {}|undefined - ]>; - getWorkflowTemplate( - request: protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest|null|undefined, - {}|null|undefined>): void; - getWorkflowTemplate( - request: protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest, - callback: Callback< - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest|null|undefined, - {}|null|undefined>): void; - getWorkflowTemplate( - request?: protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.IGetWorkflowTemplateRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - 
options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.getWorkflowTemplate(request, options, callback); - } -/** - * Updates (replaces) workflow template. The updated template - * must contain version that matches the current server version. - * - * @param {Object} request - * The request object that will be sent. - * @param {google.cloud.dataproc.v1.WorkflowTemplate} request.template - * Required. The updated workflow template. - * - * The `template.version` field must match the current version. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [WorkflowTemplate]{@link google.cloud.dataproc.v1.WorkflowTemplate}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/workflow_template_service.update_workflow_template.js - * region_tag:dataproc_v1_generated_WorkflowTemplateService_UpdateWorkflowTemplate_async - */ - updateWorkflowTemplate( - request?: protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest|undefined, {}|undefined - ]>; - updateWorkflowTemplate( - request: protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest, - options: CallOptions, - callback: Callback< - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest|null|undefined, - {}|null|undefined>): void; - updateWorkflowTemplate( - request: protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest, - callback: Callback< - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest|null|undefined, - {}|null|undefined>): void; - updateWorkflowTemplate( - request?: protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.cloud.dataproc.v1.IWorkflowTemplate, - protos.google.cloud.dataproc.v1.IUpdateWorkflowTemplateRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - 
options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'template.name': request.template!.name || '', - }); - this.initialize(); - return this.innerApiCalls.updateWorkflowTemplate(request, options, callback); - } -/** - * Deletes a workflow template. It does not cancel in-progress workflows. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The resource name of the workflow template, as described - * in https://cloud.google.com/apis/design/resource_names. - * - * * For `projects.regions.workflowTemplates.delete`, the resource name - * of the template has the following format: - * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - * - * * For `projects.locations.workflowTemplates.instantiate`, the resource name - * of the template has the following format: - * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - * @param {number} [request.version] - * Optional. The version of workflow template to delete. If specified, - * will only delete the template if the current server version matches - * specified version. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing [Empty]{@link google.protobuf.Empty}. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#regular-methods) - * for more details and examples. 
- * @example include:samples/generated/v1/workflow_template_service.delete_workflow_template.js - * region_tag:dataproc_v1_generated_WorkflowTemplateService_DeleteWorkflowTemplate_async - */ - deleteWorkflowTemplate( - request?: protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest, - options?: CallOptions): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest|undefined, {}|undefined - ]>; - deleteWorkflowTemplate( - request: protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest, - options: CallOptions, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest|null|undefined, - {}|null|undefined>): void; - deleteWorkflowTemplate( - request: protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest, - callback: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest|null|undefined, - {}|null|undefined>): void; - deleteWorkflowTemplate( - request?: protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest, - optionsOrCallback?: CallOptions|Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest|null|undefined, - {}|null|undefined>, - callback?: Callback< - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest|null|undefined, - {}|null|undefined>): - Promise<[ - protos.google.protobuf.IEmpty, - protos.google.cloud.dataproc.v1.IDeleteWorkflowTemplateRequest|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - 
options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.deleteWorkflowTemplate(request, options, callback); - } - -/** - * Instantiates a template and begins execution. - * - * The returned Operation can be used to track execution of - * workflow by polling - * {@link google.longrunning.Operations.GetOperation|operations.get}. - * The Operation will complete when entire workflow is finished. - * - * The running workflow can be aborted via - * {@link google.longrunning.Operations.CancelOperation|operations.cancel}. - * This will cause any inflight jobs to be cancelled and workflow-owned - * clusters to be deleted. - * - * The {@link google.longrunning.Operation.metadata|Operation.metadata} will be - * [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). - * Also see [Using - * WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). - * - * On successful completion, - * {@link google.longrunning.Operation.response|Operation.response} will be - * {@link google.protobuf.Empty|Empty}. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.name - * Required. The resource name of the workflow template, as described - * in https://cloud.google.com/apis/design/resource_names. - * - * * For `projects.regions.workflowTemplates.instantiate`, the resource name - * of the template has the following format: - * `projects/{project_id}/regions/{region}/workflowTemplates/{template_id}` - * - * * For `projects.locations.workflowTemplates.instantiate`, the resource name - * of the template has the following format: - * `projects/{project_id}/locations/{location}/workflowTemplates/{template_id}` - * @param {number} [request.version] - * Optional. The version of workflow template to instantiate. 
If specified, - * the workflow will be instantiated only if the current version of - * the workflow template has the supplied version. - * - * This option cannot be used to instantiate a previous version of - * workflow template. - * @param {string} [request.requestId] - * Optional. A tag that prevents multiple concurrent workflow - * instances with the same tag from running. This mitigates risk of - * concurrent instances started due to retries. - * - * It is recommended to always set this value to a - * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - * - * The tag must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. - * @param {number[]} [request.parameters] - * Optional. Map from parameter names to values that should be used for those - * parameters. Values may not exceed 1000 characters. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing - * a long running operation. Its `promise()` method returns a promise - * you can `await` for. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. 
- * @example include:samples/generated/v1/workflow_template_service.instantiate_workflow_template.js - * region_tag:dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async - */ - instantiateWorkflowTemplate( - request?: protos.google.cloud.dataproc.v1.IInstantiateWorkflowTemplateRequest, - options?: CallOptions): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>; - instantiateWorkflowTemplate( - request: protos.google.cloud.dataproc.v1.IInstantiateWorkflowTemplateRequest, - options: CallOptions, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - instantiateWorkflowTemplate( - request: protos.google.cloud.dataproc.v1.IInstantiateWorkflowTemplateRequest, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - instantiateWorkflowTemplate( - request?: protos.google.cloud.dataproc.v1.IInstantiateWorkflowTemplateRequest, - optionsOrCallback?: CallOptions|Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>, - callback?: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'name': request.name || '', - }); - this.initialize(); - return this.innerApiCalls.instantiateWorkflowTemplate(request, options, 
callback); - } -/** - * Check the status of the long running operation returned by `instantiateWorkflowTemplate()`. - * @param {String} name - * The operation name that will be passed. - * @returns {Promise} - The promise which resolves to an object. - * The decoded operation object has result and metadata field to get information from. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/workflow_template_service.instantiate_workflow_template.js - * region_tag:dataproc_v1_generated_WorkflowTemplateService_InstantiateWorkflowTemplate_async - */ - async checkInstantiateWorkflowTemplateProgress(name: string): Promise>{ - const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); - const [operation] = await this.operationsClient.getOperation(request); - const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.instantiateWorkflowTemplate, gax.createDefaultBackoffSettings()); - return decodeOperation as LROperation; - } -/** - * Instantiates a template and begins execution. - * - * This method is equivalent to executing the sequence - * {@link google.cloud.dataproc.v1.WorkflowTemplateService.CreateWorkflowTemplate|CreateWorkflowTemplate}, {@link google.cloud.dataproc.v1.WorkflowTemplateService.InstantiateWorkflowTemplate|InstantiateWorkflowTemplate}, - * {@link google.cloud.dataproc.v1.WorkflowTemplateService.DeleteWorkflowTemplate|DeleteWorkflowTemplate}. - * - * The returned Operation can be used to track execution of - * workflow by polling - * {@link google.longrunning.Operations.GetOperation|operations.get}. - * The Operation will complete when entire workflow is finished. - * - * The running workflow can be aborted via - * {@link google.longrunning.Operations.CancelOperation|operations.cancel}. 
- * This will cause any inflight jobs to be cancelled and workflow-owned - * clusters to be deleted. - * - * The {@link google.longrunning.Operation.metadata|Operation.metadata} will be - * [WorkflowMetadata](https://cloud.google.com/dataproc/docs/reference/rpc/google.cloud.dataproc.v1#workflowmetadata). - * Also see [Using - * WorkflowMetadata](https://cloud.google.com/dataproc/docs/concepts/workflows/debugging#using_workflowmetadata). - * - * On successful completion, - * {@link google.longrunning.Operation.response|Operation.response} will be - * {@link google.protobuf.Empty|Empty}. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The resource name of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. - * - * * For `projects.regions.workflowTemplates,instantiateinline`, the resource - * name of the region has the following format: - * `projects/{project_id}/regions/{region}` - * - * * For `projects.locations.workflowTemplates.instantiateinline`, the - * resource name of the location has the following format: - * `projects/{project_id}/locations/{location}` - * @param {google.cloud.dataproc.v1.WorkflowTemplate} request.template - * Required. The workflow template to instantiate. - * @param {string} [request.requestId] - * Optional. A tag that prevents multiple concurrent workflow - * instances with the same tag from running. This mitigates risk of - * concurrent instances started due to retries. - * - * It is recommended to always set this value to a - * [UUID](https://en.wikipedia.org/wiki/Universally_unique_identifier). - * - * The tag must contain only letters (a-z, A-Z), numbers (0-9), - * underscores (_), and hyphens (-). The maximum length is 40 characters. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. 
- * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is an object representing - * a long running operation. Its `promise()` method returns a promise - * you can `await` for. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/workflow_template_service.instantiate_inline_workflow_template.js - * region_tag:dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async - */ - instantiateInlineWorkflowTemplate( - request?: protos.google.cloud.dataproc.v1.IInstantiateInlineWorkflowTemplateRequest, - options?: CallOptions): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>; - instantiateInlineWorkflowTemplate( - request: protos.google.cloud.dataproc.v1.IInstantiateInlineWorkflowTemplateRequest, - options: CallOptions, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - instantiateInlineWorkflowTemplate( - request: protos.google.cloud.dataproc.v1.IInstantiateInlineWorkflowTemplateRequest, - callback: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): void; - instantiateInlineWorkflowTemplate( - request?: protos.google.cloud.dataproc.v1.IInstantiateInlineWorkflowTemplateRequest, - optionsOrCallback?: CallOptions|Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>, - callback?: Callback< - LROperation, - protos.google.longrunning.IOperation|null|undefined, - {}|null|undefined>): - Promise<[ - LROperation, - protos.google.longrunning.IOperation|undefined, {}|undefined - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = 
optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.instantiateInlineWorkflowTemplate(request, options, callback); - } -/** - * Check the status of the long running operation returned by `instantiateInlineWorkflowTemplate()`. - * @param {String} name - * The operation name that will be passed. - * @returns {Promise} - The promise which resolves to an object. - * The decoded operation object has result and metadata field to get information from. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#long-running-operations) - * for more details and examples. - * @example include:samples/generated/v1/workflow_template_service.instantiate_inline_workflow_template.js - * region_tag:dataproc_v1_generated_WorkflowTemplateService_InstantiateInlineWorkflowTemplate_async - */ - async checkInstantiateInlineWorkflowTemplateProgress(name: string): Promise>{ - const request = new operationsProtos.google.longrunning.GetOperationRequest({name}); - const [operation] = await this.operationsClient.getOperation(request); - const decodeOperation = new gax.Operation(operation, this.descriptors.longrunning.instantiateInlineWorkflowTemplate, gax.createDefaultBackoffSettings()); - return decodeOperation as LROperation; - } - /** - * Lists workflows that match the specified filter in the request. - * - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The resource name of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. 
- * - * * For `projects.regions.workflowTemplates,list`, the resource - * name of the region has the following format: - * `projects/{project_id}/regions/{region}` - * - * * For `projects.locations.workflowTemplates.list`, the - * resource name of the location has the following format: - * `projects/{project_id}/locations/{location}` - * @param {number} [request.pageSize] - * Optional. The maximum number of results to return in each response. - * @param {string} [request.pageToken] - * Optional. The page token, returned by a previous call, to request the - * next page of results. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Promise} - The promise which resolves to an array. - * The first element of the array is Array of [WorkflowTemplate]{@link google.cloud.dataproc.v1.WorkflowTemplate}. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed and will merge results from all the pages into this array. - * Note that it can affect your quota. - * We recommend using `listWorkflowTemplatesAsync()` - * method described below for async iteration which you can stop as needed. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. 
- */ - listWorkflowTemplates( - request?: protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, - options?: CallOptions): - Promise<[ - protos.google.cloud.dataproc.v1.IWorkflowTemplate[], - protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest|null, - protos.google.cloud.dataproc.v1.IListWorkflowTemplatesResponse - ]>; - listWorkflowTemplates( - request: protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, - options: CallOptions, - callback: PaginationCallback< - protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, - protos.google.cloud.dataproc.v1.IListWorkflowTemplatesResponse|null|undefined, - protos.google.cloud.dataproc.v1.IWorkflowTemplate>): void; - listWorkflowTemplates( - request: protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, - callback: PaginationCallback< - protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, - protos.google.cloud.dataproc.v1.IListWorkflowTemplatesResponse|null|undefined, - protos.google.cloud.dataproc.v1.IWorkflowTemplate>): void; - listWorkflowTemplates( - request?: protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, - optionsOrCallback?: CallOptions|PaginationCallback< - protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, - protos.google.cloud.dataproc.v1.IListWorkflowTemplatesResponse|null|undefined, - protos.google.cloud.dataproc.v1.IWorkflowTemplate>, - callback?: PaginationCallback< - protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, - protos.google.cloud.dataproc.v1.IListWorkflowTemplatesResponse|null|undefined, - protos.google.cloud.dataproc.v1.IWorkflowTemplate>): - Promise<[ - protos.google.cloud.dataproc.v1.IWorkflowTemplate[], - protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest|null, - protos.google.cloud.dataproc.v1.IListWorkflowTemplatesResponse - ]>|void { - request = request || {}; - let options: CallOptions; - if (typeof optionsOrCallback === 'function' && callback === undefined) { - callback = 
optionsOrCallback; - options = {}; - } - else { - options = optionsOrCallback as CallOptions; - } - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - this.initialize(); - return this.innerApiCalls.listWorkflowTemplates(request, options, callback); - } - -/** - * Equivalent to `method.name.toCamelCase()`, but returns a NodeJS Stream object. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The resource name of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. - * - * * For `projects.regions.workflowTemplates,list`, the resource - * name of the region has the following format: - * `projects/{project_id}/regions/{region}` - * - * * For `projects.locations.workflowTemplates.list`, the - * resource name of the location has the following format: - * `projects/{project_id}/locations/{location}` - * @param {number} [request.pageSize] - * Optional. The maximum number of results to return in each response. - * @param {string} [request.pageToken] - * Optional. The page token, returned by a previous call, to request the - * next page of results. - * @param {object} [options] - * Call options. See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Stream} - * An object stream which emits an object representing [WorkflowTemplate]{@link google.cloud.dataproc.v1.WorkflowTemplate} on 'data' event. - * The client library will perform auto-pagination by default: it will call the API as many - * times as needed. Note that it can affect your quota. - * We recommend using `listWorkflowTemplatesAsync()` - * method described below for async iteration which you can stop as needed. 
- * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - */ - listWorkflowTemplatesStream( - request?: protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, - options?: CallOptions): - Transform{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listWorkflowTemplates']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listWorkflowTemplates.createStream( - this.innerApiCalls.listWorkflowTemplates as gax.GaxCall, - request, - callSettings - ); - } - -/** - * Equivalent to `listWorkflowTemplates`, but returns an iterable object. - * - * `for`-`await`-`of` syntax is used with the iterable to get response elements on-demand. - * @param {Object} request - * The request object that will be sent. - * @param {string} request.parent - * Required. The resource name of the region or location, as described - * in https://cloud.google.com/apis/design/resource_names. - * - * * For `projects.regions.workflowTemplates,list`, the resource - * name of the region has the following format: - * `projects/{project_id}/regions/{region}` - * - * * For `projects.locations.workflowTemplates.list`, the - * resource name of the location has the following format: - * `projects/{project_id}/locations/{location}` - * @param {number} [request.pageSize] - * Optional. The maximum number of results to return in each response. - * @param {string} [request.pageToken] - * Optional. The page token, returned by a previous call, to request the - * next page of results. - * @param {object} [options] - * Call options. 
See {@link https://googleapis.dev/nodejs/google-gax/latest/interfaces/CallOptions.html|CallOptions} for more details. - * @returns {Object} - * An iterable Object that allows [async iteration](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Iteration_protocols). - * When you iterate the returned iterable, each element will be an object representing - * [WorkflowTemplate]{@link google.cloud.dataproc.v1.WorkflowTemplate}. The API will be called under the hood as needed, once per the page, - * so you can stop the iteration when you don't need more results. - * Please see the - * [documentation](https://github.com/googleapis/gax-nodejs/blob/master/client-libraries.md#auto-pagination) - * for more details and examples. - * @example include:samples/generated/v1/workflow_template_service.list_workflow_templates.js - * region_tag:dataproc_v1_generated_WorkflowTemplateService_ListWorkflowTemplates_async - */ - listWorkflowTemplatesAsync( - request?: protos.google.cloud.dataproc.v1.IListWorkflowTemplatesRequest, - options?: CallOptions): - AsyncIterable{ - request = request || {}; - options = options || {}; - options.otherArgs = options.otherArgs || {}; - options.otherArgs.headers = options.otherArgs.headers || {}; - options.otherArgs.headers[ - 'x-goog-request-params' - ] = gax.routingHeader.fromParams({ - 'parent': request.parent || '', - }); - const defaultCallSettings = this._defaults['listWorkflowTemplates']; - const callSettings = defaultCallSettings.merge(options); - this.initialize(); - return this.descriptors.page.listWorkflowTemplates.asyncIterate( - this.innerApiCalls['listWorkflowTemplates'] as GaxCall, - request as unknown as RequestType, - callSettings - ) as AsyncIterable; - } - // -------------------- - // -- Path templates -- - // -------------------- - - /** - * Return a fully-qualified batch resource name string. 
- * - * @param {string} project - * @param {string} location - * @param {string} batch - * @returns {string} Resource name string. - */ - batchPath(project:string,location:string,batch:string) { - return this.pathTemplates.batchPathTemplate.render({ - project: project, - location: location, - batch: batch, - }); - } - - /** - * Parse the project from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the project. - */ - matchProjectFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).project; - } - - /** - * Parse the location from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the location. - */ - matchLocationFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).location; - } - - /** - * Parse the batch from Batch resource. - * - * @param {string} batchName - * A fully-qualified path representing Batch resource. - * @returns {string} A string representing the batch. - */ - matchBatchFromBatchName(batchName: string) { - return this.pathTemplates.batchPathTemplate.match(batchName).batch; - } - - /** - * Return a fully-qualified project resource name string. - * - * @param {string} project - * @returns {string} Resource name string. - */ - projectPath(project:string) { - return this.pathTemplates.projectPathTemplate.render({ - project: project, - }); - } - - /** - * Parse the project from Project resource. - * - * @param {string} projectName - * A fully-qualified path representing Project resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectName(projectName: string) { - return this.pathTemplates.projectPathTemplate.match(projectName).project; - } - - /** - * Return a fully-qualified projectLocationAutoscalingPolicy resource name string. 
- * - * @param {string} project - * @param {string} location - * @param {string} autoscaling_policy - * @returns {string} Resource name string. - */ - projectLocationAutoscalingPolicyPath(project:string,location:string,autoscalingPolicy:string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render({ - project: project, - location: location, - autoscaling_policy: autoscalingPolicy, - }); - } - - /** - * Parse the project from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).project; - } - - /** - * Parse the location from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the location. - */ - matchLocationFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).location; - } - - /** - * Parse the autoscaling_policy from ProjectLocationAutoscalingPolicy resource. - * - * @param {string} projectLocationAutoscalingPolicyName - * A fully-qualified path representing project_location_autoscaling_policy resource. - * @returns {string} A string representing the autoscaling_policy. 
- */ - matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(projectLocationAutoscalingPolicyName: string) { - return this.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match(projectLocationAutoscalingPolicyName).autoscaling_policy; - } - - /** - * Return a fully-qualified projectLocationWorkflowTemplate resource name string. - * - * @param {string} project - * @param {string} location - * @param {string} workflow_template - * @returns {string} Resource name string. - */ - projectLocationWorkflowTemplatePath(project:string,location:string,workflowTemplate:string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render({ - project: project, - location: location, - workflow_template: workflowTemplate, - }); - } - - /** - * Parse the project from ProjectLocationWorkflowTemplate resource. - * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).project; - } - - /** - * Parse the location from ProjectLocationWorkflowTemplate resource. - * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the location. - */ - matchLocationFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).location; - } - - /** - * Parse the workflow_template from ProjectLocationWorkflowTemplate resource. 
- * - * @param {string} projectLocationWorkflowTemplateName - * A fully-qualified path representing project_location_workflow_template resource. - * @returns {string} A string representing the workflow_template. - */ - matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(projectLocationWorkflowTemplateName: string) { - return this.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match(projectLocationWorkflowTemplateName).workflow_template; - } - - /** - * Return a fully-qualified projectRegionAutoscalingPolicy resource name string. - * - * @param {string} project - * @param {string} region - * @param {string} autoscaling_policy - * @returns {string} Resource name string. - */ - projectRegionAutoscalingPolicyPath(project:string,region:string,autoscalingPolicy:string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render({ - project: project, - region: region, - autoscaling_policy: autoscalingPolicy, - }); - } - - /** - * Parse the project from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).project; - } - - /** - * Parse the region from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the region. 
- */ - matchRegionFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).region; - } - - /** - * Parse the autoscaling_policy from ProjectRegionAutoscalingPolicy resource. - * - * @param {string} projectRegionAutoscalingPolicyName - * A fully-qualified path representing project_region_autoscaling_policy resource. - * @returns {string} A string representing the autoscaling_policy. - */ - matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(projectRegionAutoscalingPolicyName: string) { - return this.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match(projectRegionAutoscalingPolicyName).autoscaling_policy; - } - - /** - * Return a fully-qualified projectRegionWorkflowTemplate resource name string. - * - * @param {string} project - * @param {string} region - * @param {string} workflow_template - * @returns {string} Resource name string. - */ - projectRegionWorkflowTemplatePath(project:string,region:string,workflowTemplate:string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render({ - project: project, - region: region, - workflow_template: workflowTemplate, - }); - } - - /** - * Parse the project from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. - * @returns {string} A string representing the project. - */ - matchProjectFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).project; - } - - /** - * Parse the region from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. 
- * @returns {string} A string representing the region. - */ - matchRegionFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).region; - } - - /** - * Parse the workflow_template from ProjectRegionWorkflowTemplate resource. - * - * @param {string} projectRegionWorkflowTemplateName - * A fully-qualified path representing project_region_workflow_template resource. - * @returns {string} A string representing the workflow_template. - */ - matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(projectRegionWorkflowTemplateName: string) { - return this.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match(projectRegionWorkflowTemplateName).workflow_template; - } - - /** - * Return a fully-qualified region resource name string. - * - * @param {string} project - * @param {string} region - * @returns {string} Resource name string. - */ - regionPath(project:string,region:string) { - return this.pathTemplates.regionPathTemplate.render({ - project: project, - region: region, - }); - } - - /** - * Parse the project from Region resource. - * - * @param {string} regionName - * A fully-qualified path representing Region resource. - * @returns {string} A string representing the project. - */ - matchProjectFromRegionName(regionName: string) { - return this.pathTemplates.regionPathTemplate.match(regionName).project; - } - - /** - * Parse the region from Region resource. - * - * @param {string} regionName - * A fully-qualified path representing Region resource. - * @returns {string} A string representing the region. - */ - matchRegionFromRegionName(regionName: string) { - return this.pathTemplates.regionPathTemplate.match(regionName).region; - } - - /** - * Terminate the gRPC channel and close the client. - * - * The client will no longer be usable and all future behavior is undefined. 
- * @returns {Promise} A promise that resolves when the client is closed. - */ - close(): Promise { - if (this.workflowTemplateServiceStub && !this._terminated) { - return this.workflowTemplateServiceStub.then(stub => { - this._terminated = true; - stub.close(); - this.operationsClient.close(); - }); - } - return Promise.resolve(); - } -} diff --git a/owl-bot-staging/v1/src/v1/workflow_template_service_client_config.json b/owl-bot-staging/v1/src/v1/workflow_template_service_client_config.json deleted file mode 100644 index 62d3aa9b..00000000 --- a/owl-bot-staging/v1/src/v1/workflow_template_service_client_config.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - "interfaces": { - "google.cloud.dataproc.v1.WorkflowTemplateService": { - "retry_codes": { - "non_idempotent": [], - "idempotent": [ - "DEADLINE_EXCEEDED", - "UNAVAILABLE" - ], - "unavailable": [ - "UNAVAILABLE" - ], - "deadline_exceeded_internal_unavailable": [ - "DEADLINE_EXCEEDED", - "INTERNAL", - "UNAVAILABLE" - ] - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 60000, - "rpc_timeout_multiplier": 1, - "max_rpc_timeout_millis": 60000, - "total_timeout_millis": 600000 - } - }, - "methods": { - "CreateWorkflowTemplate": { - "timeout_millis": 600000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - }, - "GetWorkflowTemplate": { - "timeout_millis": 600000, - "retry_codes_name": "deadline_exceeded_internal_unavailable", - "retry_params_name": "default" - }, - "InstantiateWorkflowTemplate": { - "timeout_millis": 600000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - }, - "InstantiateInlineWorkflowTemplate": { - "timeout_millis": 600000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - }, - "UpdateWorkflowTemplate": { - "timeout_millis": 600000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - }, - 
"ListWorkflowTemplates": { - "timeout_millis": 600000, - "retry_codes_name": "deadline_exceeded_internal_unavailable", - "retry_params_name": "default" - }, - "DeleteWorkflowTemplate": { - "timeout_millis": 600000, - "retry_codes_name": "unavailable", - "retry_params_name": "default" - } - } - } - } -} diff --git a/owl-bot-staging/v1/src/v1/workflow_template_service_proto_list.json b/owl-bot-staging/v1/src/v1/workflow_template_service_proto_list.json deleted file mode 100644 index b26a9be7..00000000 --- a/owl-bot-staging/v1/src/v1/workflow_template_service_proto_list.json +++ /dev/null @@ -1,9 +0,0 @@ -[ - "../../protos/google/cloud/dataproc/v1/autoscaling_policies.proto", - "../../protos/google/cloud/dataproc/v1/batches.proto", - "../../protos/google/cloud/dataproc/v1/clusters.proto", - "../../protos/google/cloud/dataproc/v1/jobs.proto", - "../../protos/google/cloud/dataproc/v1/operations.proto", - "../../protos/google/cloud/dataproc/v1/shared.proto", - "../../protos/google/cloud/dataproc/v1/workflow_templates.proto" -] diff --git a/owl-bot-staging/v1/system-test/fixtures/sample/src/index.js b/owl-bot-staging/v1/system-test/fixtures/sample/src/index.js deleted file mode 100644 index 8835b621..00000000 --- a/owl-bot-staging/v1/system-test/fixtures/sample/src/index.js +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. 
** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - - -/* eslint-disable node/no-missing-require, no-unused-vars */ -const dataproc = require('@google-cloud/dataproc'); - -function main() { - const autoscalingPolicyServiceClient = new dataproc.AutoscalingPolicyServiceClient(); - const batchControllerClient = new dataproc.BatchControllerClient(); - const clusterControllerClient = new dataproc.ClusterControllerClient(); - const jobControllerClient = new dataproc.JobControllerClient(); - const workflowTemplateServiceClient = new dataproc.WorkflowTemplateServiceClient(); -} - -main(); diff --git a/owl-bot-staging/v1/system-test/fixtures/sample/src/index.ts b/owl-bot-staging/v1/system-test/fixtures/sample/src/index.ts deleted file mode 100644 index 83479d35..00000000 --- a/owl-bot-staging/v1/system-test/fixtures/sample/src/index.ts +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import {AutoscalingPolicyServiceClient, BatchControllerClient, ClusterControllerClient, JobControllerClient, WorkflowTemplateServiceClient} from '@google-cloud/dataproc'; - -// check that the client class type name can be used -function doStuffWithAutoscalingPolicyServiceClient(client: AutoscalingPolicyServiceClient) { - client.close(); -} -function doStuffWithBatchControllerClient(client: BatchControllerClient) { - client.close(); -} -function doStuffWithClusterControllerClient(client: ClusterControllerClient) { - client.close(); -} -function doStuffWithJobControllerClient(client: JobControllerClient) { - client.close(); -} -function doStuffWithWorkflowTemplateServiceClient(client: WorkflowTemplateServiceClient) { - client.close(); -} - -function main() { - // check that the client instance can be created - const autoscalingPolicyServiceClient = new AutoscalingPolicyServiceClient(); - doStuffWithAutoscalingPolicyServiceClient(autoscalingPolicyServiceClient); - // check that the client instance can be created - const batchControllerClient = new BatchControllerClient(); - doStuffWithBatchControllerClient(batchControllerClient); - // check that the client instance can be created - const clusterControllerClient = new ClusterControllerClient(); - doStuffWithClusterControllerClient(clusterControllerClient); - // check that the client instance can be created - const jobControllerClient = new JobControllerClient(); - doStuffWithJobControllerClient(jobControllerClient); - // check that the client instance can be created - const workflowTemplateServiceClient = new WorkflowTemplateServiceClient(); - doStuffWithWorkflowTemplateServiceClient(workflowTemplateServiceClient); -} - -main(); diff --git a/owl-bot-staging/v1/system-test/install.ts b/owl-bot-staging/v1/system-test/install.ts deleted file mode 100644 index 8ec45222..00000000 --- a/owl-bot-staging/v1/system-test/install.ts +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the 
Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import { packNTest } from 'pack-n-play'; -import { readFileSync } from 'fs'; -import { describe, it } from 'mocha'; - -describe('📦 pack-n-play test', () => { - - it('TypeScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'TypeScript user can use the type definitions', - ts: readFileSync('./system-test/fixtures/sample/src/index.ts').toString() - } - }; - await packNTest(options); - }); - - it('JavaScript code', async function() { - this.timeout(300000); - const options = { - packageDir: process.cwd(), - sample: { - description: 'JavaScript user can use the library', - ts: readFileSync('./system-test/fixtures/sample/src/index.js').toString() - } - }; - await packNTest(options); - }); - -}); diff --git a/owl-bot-staging/v1/test/gapic_autoscaling_policy_service_v1.ts b/owl-bot-staging/v1/test/gapic_autoscaling_policy_service_v1.ts deleted file mode 100644 index 13b33f6b..00000000 --- a/owl-bot-staging/v1/test/gapic_autoscaling_policy_service_v1.ts +++ /dev/null @@ -1,1098 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import { describe, it } from 'mocha'; -import * as autoscalingpolicyserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v1.AutoscalingPolicyServiceClient', () => { - it('has servicePath', () => { - const servicePath = autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new 
autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.autoscalingPolicyServiceStub, undefined); - await client.initialize(); - assert(client.autoscalingPolicyServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.autoscalingPolicyServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.autoscalingPolicyServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: 
Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - - describe('createAutoscalingPolicy', () => { - it('invokes createAutoscalingPolicy without error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()); - client.innerApiCalls.createAutoscalingPolicy = stubSimpleCall(expectedResponse); - const [response] = await client.createAutoscalingPolicy(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createAutoscalingPolicy as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createAutoscalingPolicy without error using callback', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()); - 
client.innerApiCalls.createAutoscalingPolicy = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createAutoscalingPolicy( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IAutoscalingPolicy|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createAutoscalingPolicy as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createAutoscalingPolicy with error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.createAutoscalingPolicy = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.createAutoscalingPolicy(request), expectedError); - assert((client.innerApiCalls.createAutoscalingPolicy as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createAutoscalingPolicy with closed client', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateAutoscalingPolicyRequest()); - request.parent = ''; - const expectedError = new Error('The client has 
already been closed.'); - client.close(); - await assert.rejects(client.createAutoscalingPolicy(request), expectedError); - }); - }); - - describe('updateAutoscalingPolicy', () => { - it('invokes updateAutoscalingPolicy without error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest()); - request.policy = {}; - request.policy.name = ''; - const expectedHeaderRequestParams = "policy.name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()); - client.innerApiCalls.updateAutoscalingPolicy = stubSimpleCall(expectedResponse); - const [response] = await client.updateAutoscalingPolicy(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.updateAutoscalingPolicy as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes updateAutoscalingPolicy without error using callback', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest()); - request.policy = {}; - request.policy.name = ''; - const expectedHeaderRequestParams = "policy.name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()); - 
client.innerApiCalls.updateAutoscalingPolicy = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.updateAutoscalingPolicy( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IAutoscalingPolicy|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.updateAutoscalingPolicy as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes updateAutoscalingPolicy with error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest()); - request.policy = {}; - request.policy.name = ''; - const expectedHeaderRequestParams = "policy.name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.updateAutoscalingPolicy = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.updateAutoscalingPolicy(request), expectedError); - assert((client.innerApiCalls.updateAutoscalingPolicy as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes updateAutoscalingPolicy with closed client', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateAutoscalingPolicyRequest()); - request.policy = {}; - 
request.policy.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.updateAutoscalingPolicy(request), expectedError); - }); - }); - - describe('getAutoscalingPolicy', () => { - it('invokes getAutoscalingPolicy without error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetAutoscalingPolicyRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()); - client.innerApiCalls.getAutoscalingPolicy = stubSimpleCall(expectedResponse); - const [response] = await client.getAutoscalingPolicy(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getAutoscalingPolicy as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getAutoscalingPolicy without error using callback', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetAutoscalingPolicyRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()); - 
client.innerApiCalls.getAutoscalingPolicy = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getAutoscalingPolicy( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IAutoscalingPolicy|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getAutoscalingPolicy as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getAutoscalingPolicy with error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetAutoscalingPolicyRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getAutoscalingPolicy = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getAutoscalingPolicy(request), expectedError); - assert((client.innerApiCalls.getAutoscalingPolicy as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getAutoscalingPolicy with closed client', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetAutoscalingPolicyRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - 
client.close(); - await assert.rejects(client.getAutoscalingPolicy(request), expectedError); - }); - }); - - describe('deleteAutoscalingPolicy', () => { - it('invokes deleteAutoscalingPolicy without error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteAutoscalingPolicyRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteAutoscalingPolicy = stubSimpleCall(expectedResponse); - const [response] = await client.deleteAutoscalingPolicy(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteAutoscalingPolicy as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteAutoscalingPolicy without error using callback', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteAutoscalingPolicyRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteAutoscalingPolicy = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - 
client.deleteAutoscalingPolicy( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteAutoscalingPolicy as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteAutoscalingPolicy with error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteAutoscalingPolicyRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteAutoscalingPolicy = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteAutoscalingPolicy(request), expectedError); - assert((client.innerApiCalls.deleteAutoscalingPolicy as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteAutoscalingPolicy with closed client', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteAutoscalingPolicyRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteAutoscalingPolicy(request), expectedError); - }); - }); - - describe('listAutoscalingPolicies', () 
=> { - it('invokes listAutoscalingPolicies without error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), - ]; - client.innerApiCalls.listAutoscalingPolicies = stubSimpleCall(expectedResponse); - const [response] = await client.listAutoscalingPolicies(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listAutoscalingPolicies as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listAutoscalingPolicies without error using callback', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), - 
generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), - ]; - client.innerApiCalls.listAutoscalingPolicies = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listAutoscalingPolicies( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IAutoscalingPolicy[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listAutoscalingPolicies as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listAutoscalingPolicies with error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listAutoscalingPolicies = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listAutoscalingPolicies(request), expectedError); - assert((client.innerApiCalls.listAutoscalingPolicies as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listAutoscalingPoliciesStream without error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new 
protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), - ]; - client.descriptors.page.listAutoscalingPolicies.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listAutoscalingPoliciesStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.dataproc.v1.AutoscalingPolicy[] = []; - stream.on('data', (response: protos.google.cloud.dataproc.v1.AutoscalingPolicy) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listAutoscalingPolicies.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listAutoscalingPolicies, request)); - assert.strictEqual( - (client.descriptors.page.listAutoscalingPolicies.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listAutoscalingPoliciesStream with error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listAutoscalingPolicies.createStream = 
stubPageStreamingCall(undefined, expectedError); - const stream = client.listAutoscalingPoliciesStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.dataproc.v1.AutoscalingPolicy[] = []; - stream.on('data', (response: protos.google.cloud.dataproc.v1.AutoscalingPolicy) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listAutoscalingPolicies.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listAutoscalingPolicies, request)); - assert.strictEqual( - (client.descriptors.page.listAutoscalingPolicies.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listAutoscalingPolicies without error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.AutoscalingPolicy()), - ]; - client.descriptors.page.listAutoscalingPolicies.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.dataproc.v1.IAutoscalingPolicy[] = []; - const iterable = client.listAutoscalingPoliciesAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - 
assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listAutoscalingPolicies.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listAutoscalingPolicies.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listAutoscalingPolicies with error', async () => { - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListAutoscalingPoliciesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listAutoscalingPolicies.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listAutoscalingPoliciesAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.dataproc.v1.IAutoscalingPolicy[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listAutoscalingPolicies.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listAutoscalingPolicies.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('batch', () => { - const fakePath = "/rendered/path/batch"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - batch: "batchValue", - }; - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', 
private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.batchPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.batchPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('batchPath', () => { - const result = client.batchPath("projectValue", "locationValue", "batchValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.batchPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromBatchName', () => { - const result = client.matchProjectFromBatchName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromBatchName', () => { - const result = client.matchLocationFromBatchName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchBatchFromBatchName', () => { - const result = client.matchBatchFromBatchName(fakePath); - assert.strictEqual(result, "batchValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - 
assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('project', () => { - const fakePath = "/rendered/path/project"; - const expectedParameters = { - project: "projectValue", - }; - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectPath', () => { - const result = client.projectPath("projectValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectName', () => { - const result = client.matchProjectFromProjectName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectLocationAutoscalingPolicy', () => { - const fakePath = "/rendered/path/projectLocationAutoscalingPolicy"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - 
autoscaling_policy: "autoscalingPolicyValue", - }; - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectLocationAutoscalingPolicyPath', () => { - const result = client.projectLocationAutoscalingPolicyPath("projectValue", "locationValue", "autoscalingPolicyValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchProjectFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchLocationFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "autoscalingPolicyValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectLocationWorkflowTemplate', () => { - const fakePath = 
"/rendered/path/projectLocationWorkflowTemplate"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow_template: "workflowTemplateValue", - }; - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectLocationWorkflowTemplatePath', () => { - const result = client.projectLocationWorkflowTemplatePath("projectValue", "locationValue", "workflowTemplateValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchProjectFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchLocationFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowTemplateFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "workflowTemplateValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - 
.getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectRegionAutoscalingPolicy', () => { - const fakePath = "/rendered/path/projectRegionAutoscalingPolicy"; - const expectedParameters = { - project: "projectValue", - region: "regionValue", - autoscaling_policy: "autoscalingPolicyValue", - }; - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectRegionAutoscalingPolicyPath', () => { - const result = client.projectRegionAutoscalingPolicyPath("projectValue", "regionValue", "autoscalingPolicyValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchProjectFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchRegionFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchRegionFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "regionValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "autoscalingPolicyValue"); 
- assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectRegionWorkflowTemplate', () => { - const fakePath = "/rendered/path/projectRegionWorkflowTemplate"; - const expectedParameters = { - project: "projectValue", - region: "regionValue", - workflow_template: "workflowTemplateValue", - }; - const client = new autoscalingpolicyserviceModule.v1.AutoscalingPolicyServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectRegionWorkflowTemplatePath', () => { - const result = client.projectRegionWorkflowTemplatePath("projectValue", "regionValue", "workflowTemplateValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchProjectFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchRegionFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchRegionFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "regionValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowTemplateFromProjectRegionWorkflowTemplateName', () => { - const result = 
client.matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "workflowTemplateValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v1/test/gapic_batch_controller_v1.ts b/owl-bot-staging/v1/test/gapic_batch_controller_v1.ts deleted file mode 100644 index 7cf9c2bd..00000000 --- a/owl-bot-staging/v1/test/gapic_batch_controller_v1.ts +++ /dev/null @@ -1,1060 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. 
** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import { describe, it } from 'mocha'; -import * as batchcontrollerModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf, LROperation, operationsProtos} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubLongRunningCall(response?: ResponseType, callError?: Error, lroError?: Error) { - const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); - const mockOperation = { - promise: innerStub, - }; - return callError ? sinon.stub().rejects(callError) : sinon.stub().resolves([mockOperation]); -} - -function stubLongRunningCallWithCallback(response?: ResponseType, callError?: Error, lroError?: Error) { - const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); - const mockOperation = { - promise: innerStub, - }; - return callError ? sinon.stub().callsArgWith(2, callError) : sinon.stub().callsArgWith(2, null, mockOperation); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v1.BatchControllerClient', () => { - it('has servicePath', () => { - const servicePath = batchcontrollerModule.v1.BatchControllerClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = batchcontrollerModule.v1.BatchControllerClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = batchcontrollerModule.v1.BatchControllerClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new batchcontrollerModule.v1.BatchControllerClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 
'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.batchControllerStub, undefined); - await client.initialize(); - assert(client.batchControllerStub); - }); - - it('has close method for the initialized client', done => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.batchControllerStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.batchControllerStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - - describe('getBatch', () => { - 
it('invokes getBatch without error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetBatchRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()); - client.innerApiCalls.getBatch = stubSimpleCall(expectedResponse); - const [response] = await client.getBatch(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getBatch as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getBatch without error using callback', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetBatchRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()); - client.innerApiCalls.getBatch = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getBatch( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IBatch|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - 
assert((client.innerApiCalls.getBatch as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getBatch with error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetBatchRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getBatch = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getBatch(request), expectedError); - assert((client.innerApiCalls.getBatch as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getBatch with closed client', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetBatchRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getBatch(request), expectedError); - }); - }); - - describe('deleteBatch', () => { - it('invokes deleteBatch without error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteBatchRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - 
headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteBatch = stubSimpleCall(expectedResponse); - const [response] = await client.deleteBatch(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteBatch as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteBatch without error using callback', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteBatchRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteBatch = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteBatch( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteBatch as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteBatch with error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteBatchRequest()); - 
request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteBatch = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteBatch(request), expectedError); - assert((client.innerApiCalls.deleteBatch as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteBatch with closed client', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteBatchRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteBatch(request), expectedError); - }); - }); - - describe('createBatch', () => { - it('invokes createBatch without error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateBatchRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.createBatch = stubLongRunningCall(expectedResponse); - const [operation] = await client.createBatch(request); - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createBatch as 
SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createBatch without error using callback', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateBatchRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.createBatch = stubLongRunningCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createBatch( - request, - (err?: Error|null, - result?: LROperation|null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const operation = await promise as LROperation; - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createBatch as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createBatch with call error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateBatchRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.createBatch = stubLongRunningCall(undefined, 
expectedError); - await assert.rejects(client.createBatch(request), expectedError); - assert((client.innerApiCalls.createBatch as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createBatch with LRO error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateBatchRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.createBatch = stubLongRunningCall(undefined, undefined, expectedError); - const [operation] = await client.createBatch(request); - await assert.rejects(operation.promise(), expectedError); - assert((client.innerApiCalls.createBatch as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes checkCreateBatchProgress without error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); - expectedResponse.name = 'test'; - expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; - expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} - - client.operationsClient.getOperation = stubSimpleCall(expectedResponse); - const decodedOperation = await client.checkCreateBatchProgress(expectedResponse.name); - assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); - assert(decodedOperation.metadata); - assert((client.operationsClient.getOperation as 
SinonStub).getCall(0)); - }); - - it('invokes checkCreateBatchProgress with error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedError = new Error('expected'); - - client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.checkCreateBatchProgress(''), expectedError); - assert((client.operationsClient.getOperation as SinonStub) - .getCall(0)); - }); - }); - - describe('listBatches', () => { - it('invokes listBatches without error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), - ]; - client.innerApiCalls.listBatches = stubSimpleCall(expectedResponse); - const [response] = await client.listBatches(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listBatches as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listBatches without error using callback', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = 
generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), - ]; - client.innerApiCalls.listBatches = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listBatches( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IBatch[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listBatches as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listBatches with error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listBatches = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listBatches(request), expectedError); - assert((client.innerApiCalls.listBatches as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listBatchesStream without error', async () 
=> { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), - ]; - client.descriptors.page.listBatches.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listBatchesStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.dataproc.v1.Batch[] = []; - stream.on('data', (response: protos.google.cloud.dataproc.v1.Batch) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listBatches.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listBatches, request)); - assert.strictEqual( - (client.descriptors.page.listBatches.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listBatchesStream with error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - 
client.descriptors.page.listBatches.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listBatchesStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.dataproc.v1.Batch[] = []; - stream.on('data', (response: protos.google.cloud.dataproc.v1.Batch) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listBatches.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listBatches, request)); - assert.strictEqual( - (client.descriptors.page.listBatches.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listBatches without error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Batch()), - ]; - client.descriptors.page.listBatches.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.dataproc.v1.IBatch[] = []; - const iterable = client.listBatchesAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listBatches.asyncIterate as SinonStub) - 
.getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listBatches.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listBatches with error', async () => { - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListBatchesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listBatches.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listBatchesAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.dataproc.v1.IBatch[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listBatches.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listBatches.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('batch', () => { - const fakePath = "/rendered/path/batch"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - batch: "batchValue", - }; - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.batchPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.batchPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('batchPath', () => { - const result = 
client.batchPath("projectValue", "locationValue", "batchValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.batchPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromBatchName', () => { - const result = client.matchProjectFromBatchName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromBatchName', () => { - const result = client.matchLocationFromBatchName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchBatchFromBatchName', () => { - const result = client.matchBatchFromBatchName(fakePath); - assert.strictEqual(result, "batchValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('location', () => { - const fakePath = "/rendered/path/location"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - }; - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.locationPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.locationPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('locationPath', () => { - const result = client.locationPath("projectValue", "locationValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.locationPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromLocationName', () => { - const result = client.matchProjectFromLocationName(fakePath); - assert.strictEqual(result, "projectValue"); - 
assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromLocationName', () => { - const result = client.matchLocationFromLocationName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.locationPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('project', () => { - const fakePath = "/rendered/path/project"; - const expectedParameters = { - project: "projectValue", - }; - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectPath', () => { - const result = client.projectPath("projectValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectName', () => { - const result = client.matchProjectFromProjectName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectLocationAutoscalingPolicy', () => { - const fakePath = "/rendered/path/projectLocationAutoscalingPolicy"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - autoscaling_policy: "autoscalingPolicyValue", - }; - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render = - sinon.stub().returns(fakePath); - 
client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectLocationAutoscalingPolicyPath', () => { - const result = client.projectLocationAutoscalingPolicyPath("projectValue", "locationValue", "autoscalingPolicyValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchProjectFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchLocationFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "autoscalingPolicyValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectLocationWorkflowTemplate', () => { - const fakePath = "/rendered/path/projectLocationWorkflowTemplate"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow_template: "workflowTemplateValue", - }; - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - 
client.initialize(); - client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectLocationWorkflowTemplatePath', () => { - const result = client.projectLocationWorkflowTemplatePath("projectValue", "locationValue", "workflowTemplateValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchProjectFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchLocationFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowTemplateFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "workflowTemplateValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectRegionAutoscalingPolicy', () => { - const fakePath = "/rendered/path/projectRegionAutoscalingPolicy"; - const expectedParameters = { - project: "projectValue", - region: "regionValue", - autoscaling_policy: "autoscalingPolicyValue", - }; - const client = new 
batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectRegionAutoscalingPolicyPath', () => { - const result = client.projectRegionAutoscalingPolicyPath("projectValue", "regionValue", "autoscalingPolicyValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchProjectFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchRegionFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchRegionFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "regionValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "autoscalingPolicyValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectRegionWorkflowTemplate', () => { - const fakePath = "/rendered/path/projectRegionWorkflowTemplate"; - const expectedParameters = { - project: "projectValue", - region: 
"regionValue", - workflow_template: "workflowTemplateValue", - }; - const client = new batchcontrollerModule.v1.BatchControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectRegionWorkflowTemplatePath', () => { - const result = client.projectRegionWorkflowTemplatePath("projectValue", "regionValue", "workflowTemplateValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchProjectFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchRegionFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchRegionFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "regionValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowTemplateFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "workflowTemplateValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v1/test/gapic_cluster_controller_v1.ts 
b/owl-bot-staging/v1/test/gapic_cluster_controller_v1.ts deleted file mode 100644 index cc84ac17..00000000 --- a/owl-bot-staging/v1/test/gapic_cluster_controller_v1.ts +++ /dev/null @@ -1,1720 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import { describe, it } from 'mocha'; -import * as clustercontrollerModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf, LROperation, operationsProtos} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? 
sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubLongRunningCall(response?: ResponseType, callError?: Error, lroError?: Error) { - const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); - const mockOperation = { - promise: innerStub, - }; - return callError ? sinon.stub().rejects(callError) : sinon.stub().resolves([mockOperation]); -} - -function stubLongRunningCallWithCallback(response?: ResponseType, callError?: Error, lroError?: Error) { - const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); - const mockOperation = { - promise: innerStub, - }; - return callError ? sinon.stub().callsArgWith(2, callError) : sinon.stub().callsArgWith(2, null, mockOperation); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v1.ClusterControllerClient', () => { - it('has servicePath', () => { - const servicePath = clustercontrollerModule.v1.ClusterControllerClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = clustercontrollerModule.v1.ClusterControllerClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = clustercontrollerModule.v1.ClusterControllerClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: 
{client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.clusterControllerStub, undefined); - await client.initialize(); - assert(client.clusterControllerStub); - }); - - it('has close method for the initialized client', done => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.clusterControllerStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.clusterControllerStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - 
assert.strictEqual(result, fakeProjectId); - }); - - describe('getCluster', () => { - it('invokes getCluster without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()); - client.innerApiCalls.getCluster = stubSimpleCall(expectedResponse); - const [response] = await client.getCluster(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getCluster without error using callback', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()); - client.innerApiCalls.getCluster = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getCluster( - request, 
- (err?: Error|null, result?: protos.google.cloud.dataproc.v1.ICluster|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getCluster with error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getCluster = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getCluster(request), expectedError); - assert((client.innerApiCalls.getCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getCluster with closed client', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getCluster(request), expectedError); - }); - }); - - describe('createCluster', () => { - it('invokes createCluster without error', 
async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateClusterRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.createCluster = stubLongRunningCall(expectedResponse); - const [operation] = await client.createCluster(request); - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createCluster without error using callback', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateClusterRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.createCluster = stubLongRunningCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createCluster( - request, - (err?: Error|null, - result?: LROperation|null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - 
const operation = await promise as LROperation; - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createCluster with call error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateClusterRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.createCluster = stubLongRunningCall(undefined, expectedError); - await assert.rejects(client.createCluster(request), expectedError); - assert((client.innerApiCalls.createCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createCluster with LRO error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateClusterRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.createCluster = stubLongRunningCall(undefined, undefined, expectedError); - const [operation] = await client.createCluster(request); 
- await assert.rejects(operation.promise(), expectedError); - assert((client.innerApiCalls.createCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes checkCreateClusterProgress without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); - expectedResponse.name = 'test'; - expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; - expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} - - client.operationsClient.getOperation = stubSimpleCall(expectedResponse); - const decodedOperation = await client.checkCreateClusterProgress(expectedResponse.name); - assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); - assert(decodedOperation.metadata); - assert((client.operationsClient.getOperation as SinonStub).getCall(0)); - }); - - it('invokes checkCreateClusterProgress with error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedError = new Error('expected'); - - client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.checkCreateClusterProgress(''), expectedError); - assert((client.operationsClient.getOperation as SinonStub) - .getCall(0)); - }); - }); - - describe('updateCluster', () => { - it('invokes updateCluster without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new 
protos.google.cloud.dataproc.v1.UpdateClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.updateCluster = stubLongRunningCall(expectedResponse); - const [operation] = await client.updateCluster(request); - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.updateCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes updateCluster without error using callback', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.updateCluster = stubLongRunningCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.updateCluster( - request, - (err?: Error|null, - result?: LROperation|null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const operation = await promise as LROperation; - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - 
assert((client.innerApiCalls.updateCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes updateCluster with call error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.updateCluster = stubLongRunningCall(undefined, expectedError); - await assert.rejects(client.updateCluster(request), expectedError); - assert((client.innerApiCalls.updateCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes updateCluster with LRO error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.updateCluster = stubLongRunningCall(undefined, undefined, expectedError); - const [operation] = await client.updateCluster(request); - await assert.rejects(operation.promise(), expectedError); - 
assert((client.innerApiCalls.updateCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes checkUpdateClusterProgress without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); - expectedResponse.name = 'test'; - expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; - expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} - - client.operationsClient.getOperation = stubSimpleCall(expectedResponse); - const decodedOperation = await client.checkUpdateClusterProgress(expectedResponse.name); - assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); - assert(decodedOperation.metadata); - assert((client.operationsClient.getOperation as SinonStub).getCall(0)); - }); - - it('invokes checkUpdateClusterProgress with error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedError = new Error('expected'); - - client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.checkUpdateClusterProgress(''), expectedError); - assert((client.operationsClient.getOperation as SinonStub) - .getCall(0)); - }); - }); - - describe('stopCluster', () => { - it('invokes stopCluster without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StopClusterRequest()); - request.projectId = ''; - 
request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.stopCluster = stubLongRunningCall(expectedResponse); - const [operation] = await client.stopCluster(request); - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.stopCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes stopCluster without error using callback', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StopClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.stopCluster = stubLongRunningCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.stopCluster( - request, - (err?: Error|null, - result?: LROperation|null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const operation = await promise as LROperation; - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.stopCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, 
callback defined above */)); - }); - - it('invokes stopCluster with call error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StopClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.stopCluster = stubLongRunningCall(undefined, expectedError); - await assert.rejects(client.stopCluster(request), expectedError); - assert((client.innerApiCalls.stopCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes stopCluster with LRO error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StopClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.stopCluster = stubLongRunningCall(undefined, undefined, expectedError); - const [operation] = await client.stopCluster(request); - await assert.rejects(operation.promise(), expectedError); - assert((client.innerApiCalls.stopCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - 
it('invokes checkStopClusterProgress without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); - expectedResponse.name = 'test'; - expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; - expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} - - client.operationsClient.getOperation = stubSimpleCall(expectedResponse); - const decodedOperation = await client.checkStopClusterProgress(expectedResponse.name); - assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); - assert(decodedOperation.metadata); - assert((client.operationsClient.getOperation as SinonStub).getCall(0)); - }); - - it('invokes checkStopClusterProgress with error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedError = new Error('expected'); - - client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.checkStopClusterProgress(''), expectedError); - assert((client.operationsClient.getOperation as SinonStub) - .getCall(0)); - }); - }); - - describe('startCluster', () => { - it('invokes startCluster without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StartClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = 
{ - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.startCluster = stubLongRunningCall(expectedResponse); - const [operation] = await client.startCluster(request); - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startCluster without error using callback', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StartClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.startCluster = stubLongRunningCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.startCluster( - request, - (err?: Error|null, - result?: LROperation|null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const operation = await promise as LROperation; - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.startCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes startCluster with call error', async () => { - const client = new 
clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StartClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startCluster = stubLongRunningCall(undefined, expectedError); - await assert.rejects(client.startCluster(request), expectedError); - assert((client.innerApiCalls.startCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes startCluster with LRO error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.StartClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.startCluster = stubLongRunningCall(undefined, undefined, expectedError); - const [operation] = await client.startCluster(request); - await assert.rejects(operation.promise(), expectedError); - assert((client.innerApiCalls.startCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes checkStartClusterProgress without error', async () => { - const client = new 
clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); - expectedResponse.name = 'test'; - expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; - expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} - - client.operationsClient.getOperation = stubSimpleCall(expectedResponse); - const decodedOperation = await client.checkStartClusterProgress(expectedResponse.name); - assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); - assert(decodedOperation.metadata); - assert((client.operationsClient.getOperation as SinonStub).getCall(0)); - }); - - it('invokes checkStartClusterProgress with error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedError = new Error('expected'); - - client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.checkStartClusterProgress(''), expectedError); - assert((client.operationsClient.getOperation as SinonStub) - .getCall(0)); - }); - }); - - describe('deleteCluster', () => { - it('invokes deleteCluster without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': 
expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.deleteCluster = stubLongRunningCall(expectedResponse); - const [operation] = await client.deleteCluster(request); - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteCluster without error using callback', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.deleteCluster = stubLongRunningCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteCluster( - request, - (err?: Error|null, - result?: LROperation|null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const operation = await promise as LROperation; - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteCluster with call error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: 
{client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteCluster = stubLongRunningCall(undefined, expectedError); - await assert.rejects(client.deleteCluster(request), expectedError); - assert((client.innerApiCalls.deleteCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteCluster with LRO error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteCluster = stubLongRunningCall(undefined, undefined, expectedError); - const [operation] = await client.deleteCluster(request); - await assert.rejects(operation.promise(), expectedError); - assert((client.innerApiCalls.deleteCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes checkDeleteClusterProgress without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: 
{client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); - expectedResponse.name = 'test'; - expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; - expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} - - client.operationsClient.getOperation = stubSimpleCall(expectedResponse); - const decodedOperation = await client.checkDeleteClusterProgress(expectedResponse.name); - assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); - assert(decodedOperation.metadata); - assert((client.operationsClient.getOperation as SinonStub).getCall(0)); - }); - - it('invokes checkDeleteClusterProgress with error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedError = new Error('expected'); - - client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.checkDeleteClusterProgress(''), expectedError); - assert((client.operationsClient.getOperation as SinonStub) - .getCall(0)); - }); - }); - - describe('diagnoseCluster', () => { - it('invokes diagnoseCluster without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DiagnoseClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = 
generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.diagnoseCluster = stubLongRunningCall(expectedResponse); - const [operation] = await client.diagnoseCluster(request); - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.diagnoseCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes diagnoseCluster without error using callback', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DiagnoseClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.diagnoseCluster = stubLongRunningCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.diagnoseCluster( - request, - (err?: Error|null, - result?: LROperation|null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const operation = await promise as LROperation; - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.diagnoseCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes diagnoseCluster with call error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - 
projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DiagnoseClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.diagnoseCluster = stubLongRunningCall(undefined, expectedError); - await assert.rejects(client.diagnoseCluster(request), expectedError); - assert((client.innerApiCalls.diagnoseCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes diagnoseCluster with LRO error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DiagnoseClusterRequest()); - request.projectId = ''; - request.region = ''; - request.clusterName = ''; - const expectedHeaderRequestParams = "project_id=®ion=&cluster_name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.diagnoseCluster = stubLongRunningCall(undefined, undefined, expectedError); - const [operation] = await client.diagnoseCluster(request); - await assert.rejects(operation.promise(), expectedError); - assert((client.innerApiCalls.diagnoseCluster as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes checkDiagnoseClusterProgress without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 
'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); - expectedResponse.name = 'test'; - expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; - expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} - - client.operationsClient.getOperation = stubSimpleCall(expectedResponse); - const decodedOperation = await client.checkDiagnoseClusterProgress(expectedResponse.name); - assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); - assert(decodedOperation.metadata); - assert((client.operationsClient.getOperation as SinonStub).getCall(0)); - }); - - it('invokes checkDiagnoseClusterProgress with error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedError = new Error('expected'); - - client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.checkDiagnoseClusterProgress(''), expectedError); - assert((client.operationsClient.getOperation as SinonStub) - .getCall(0)); - }); - }); - - describe('listClusters', () => { - it('invokes listClusters without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), - 
generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), - ]; - client.innerApiCalls.listClusters = stubSimpleCall(expectedResponse); - const [response] = await client.listClusters(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listClusters as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listClusters without error using callback', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), - ]; - client.innerApiCalls.listClusters = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listClusters( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.ICluster[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listClusters as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listClusters with error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - 
credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listClusters = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listClusters(request), expectedError); - assert((client.innerApiCalls.listClusters as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listClustersStream without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), - ]; - client.descriptors.page.listClusters.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listClustersStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.dataproc.v1.Cluster[] = []; - stream.on('data', (response: protos.google.cloud.dataproc.v1.Cluster) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = 
await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listClusters.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listClusters, request)); - assert.strictEqual( - (client.descriptors.page.listClusters.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listClustersStream with error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedError = new Error('expected'); - client.descriptors.page.listClusters.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listClustersStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.dataproc.v1.Cluster[] = []; - stream.on('data', (response: protos.google.cloud.dataproc.v1.Cluster) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listClusters.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listClusters, request)); - assert.strictEqual( - (client.descriptors.page.listClusters.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listClusters without error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 
'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), - ]; - client.descriptors.page.listClusters.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.dataproc.v1.ICluster[] = []; - const iterable = client.listClustersAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listClusters.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listClusters.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listClusters with error', async () => { - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListClustersRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion=";const expectedError = new Error('expected'); - client.descriptors.page.listClusters.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listClustersAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.dataproc.v1.ICluster[] = []; - for await 
(const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listClusters.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listClusters.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('batch', () => { - const fakePath = "/rendered/path/batch"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - batch: "batchValue", - }; - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.batchPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.batchPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('batchPath', () => { - const result = client.batchPath("projectValue", "locationValue", "batchValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.batchPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromBatchName', () => { - const result = client.matchProjectFromBatchName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromBatchName', () => { - const result = client.matchLocationFromBatchName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchBatchFromBatchName', () => { - const result = client.matchBatchFromBatchName(fakePath); - assert.strictEqual(result, "batchValue"); - assert((client.pathTemplates.batchPathTemplate.match as 
SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectLocationAutoscalingPolicy', () => { - const fakePath = "/rendered/path/projectLocationAutoscalingPolicy"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - autoscaling_policy: "autoscalingPolicyValue", - }; - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectLocationAutoscalingPolicyPath', () => { - const result = client.projectLocationAutoscalingPolicyPath("projectValue", "locationValue", "autoscalingPolicyValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchProjectFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchLocationFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(fakePath); - 
assert.strictEqual(result, "autoscalingPolicyValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectLocationWorkflowTemplate', () => { - const fakePath = "/rendered/path/projectLocationWorkflowTemplate"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow_template: "workflowTemplateValue", - }; - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectLocationWorkflowTemplatePath', () => { - const result = client.projectLocationWorkflowTemplatePath("projectValue", "locationValue", "workflowTemplateValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchProjectFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchLocationFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowTemplateFromProjectLocationWorkflowTemplateName', 
() => { - const result = client.matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "workflowTemplateValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectRegionAutoscalingPolicy', () => { - const fakePath = "/rendered/path/projectRegionAutoscalingPolicy"; - const expectedParameters = { - project: "projectValue", - region: "regionValue", - autoscaling_policy: "autoscalingPolicyValue", - }; - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectRegionAutoscalingPolicyPath', () => { - const result = client.projectRegionAutoscalingPolicyPath("projectValue", "regionValue", "autoscalingPolicyValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchProjectFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchRegionFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchRegionFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "regionValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - 
.getCall(-1).calledWith(fakePath)); - }); - - it('matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "autoscalingPolicyValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectRegionWorkflowTemplate', () => { - const fakePath = "/rendered/path/projectRegionWorkflowTemplate"; - const expectedParameters = { - project: "projectValue", - region: "regionValue", - workflow_template: "workflowTemplateValue", - }; - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectRegionWorkflowTemplatePath', () => { - const result = client.projectRegionWorkflowTemplatePath("projectValue", "regionValue", "workflowTemplateValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchProjectFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchRegionFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchRegionFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "regionValue"); - 
assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowTemplateFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "workflowTemplateValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('service', () => { - const fakePath = "/rendered/path/service"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - service: "serviceValue", - }; - const client = new clustercontrollerModule.v1.ClusterControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.servicePathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.servicePathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('servicePath', () => { - const result = client.servicePath("projectValue", "locationValue", "serviceValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.servicePathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromServiceName', () => { - const result = client.matchProjectFromServiceName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.servicePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromServiceName', () => { - const result = client.matchLocationFromServiceName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.servicePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchServiceFromServiceName', () => { - const result = 
client.matchServiceFromServiceName(fakePath); - assert.strictEqual(result, "serviceValue"); - assert((client.pathTemplates.servicePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v1/test/gapic_job_controller_v1.ts b/owl-bot-staging/v1/test/gapic_job_controller_v1.ts deleted file mode 100644 index 0241f3ac..00000000 --- a/owl-bot-staging/v1/test/gapic_job_controller_v1.ts +++ /dev/null @@ -1,1330 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import { describe, it } from 'mocha'; -import * as jobcontrollerModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf, LROperation, operationsProtos} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? 
sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubLongRunningCall(response?: ResponseType, callError?: Error, lroError?: Error) { - const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); - const mockOperation = { - promise: innerStub, - }; - return callError ? sinon.stub().rejects(callError) : sinon.stub().resolves([mockOperation]); -} - -function stubLongRunningCallWithCallback(response?: ResponseType, callError?: Error, lroError?: Error) { - const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); - const mockOperation = { - promise: innerStub, - }; - return callError ? sinon.stub().callsArgWith(2, callError) : sinon.stub().callsArgWith(2, null, mockOperation); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? 
sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v1.JobControllerClient', () => { - it('has servicePath', () => { - const servicePath = jobcontrollerModule.v1.JobControllerClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = jobcontrollerModule.v1.JobControllerClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = jobcontrollerModule.v1.JobControllerClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new jobcontrollerModule.v1.JobControllerClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 
'bogus', - }); - assert.strictEqual(client.jobControllerStub, undefined); - await client.initialize(); - assert(client.jobControllerStub); - }); - - it('has close method for the initialized client', done => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.jobControllerStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.jobControllerStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - - describe('submitJob', () => { - it('invokes submitJob without error', async 
() => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); - client.innerApiCalls.submitJob = stubSimpleCall(expectedResponse); - const [response] = await client.submitJob(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.submitJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes submitJob without error using callback', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); - client.innerApiCalls.submitJob = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.submitJob( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IJob|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - 
assert((client.innerApiCalls.submitJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes submitJob with error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.submitJob = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.submitJob(request), expectedError); - assert((client.innerApiCalls.submitJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes submitJob with closed client', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); - request.projectId = ''; - request.region = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.submitJob(request), expectedError); - }); - }); - - describe('getJob', () => { - it('invokes getJob without error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = 
''; - const expectedHeaderRequestParams = "project_id=®ion=&job_id="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); - client.innerApiCalls.getJob = stubSimpleCall(expectedResponse); - const [response] = await client.getJob(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getJob without error using callback', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedHeaderRequestParams = "project_id=®ion=&job_id="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); - client.innerApiCalls.getJob = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getJob( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IJob|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getJob with error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, 
- projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedHeaderRequestParams = "project_id=®ion=&job_id="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getJob = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getJob(request), expectedError); - assert((client.innerApiCalls.getJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getJob with closed client', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getJob(request), expectedError); - }); - }); - - describe('updateJob', () => { - it('invokes updateJob without error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedHeaderRequestParams = "project_id=®ion=&job_id="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new 
protos.google.cloud.dataproc.v1.Job()); - client.innerApiCalls.updateJob = stubSimpleCall(expectedResponse); - const [response] = await client.updateJob(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.updateJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes updateJob without error using callback', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedHeaderRequestParams = "project_id=®ion=&job_id="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); - client.innerApiCalls.updateJob = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.updateJob( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IJob|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.updateJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes updateJob with error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - 
const expectedHeaderRequestParams = "project_id=®ion=&job_id="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.updateJob = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.updateJob(request), expectedError); - assert((client.innerApiCalls.updateJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes updateJob with closed client', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.updateJob(request), expectedError); - }); - }); - - describe('cancelJob', () => { - it('invokes cancelJob without error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CancelJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedHeaderRequestParams = "project_id=®ion=&job_id="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); - client.innerApiCalls.cancelJob = stubSimpleCall(expectedResponse); - const [response] = await client.cancelJob(request); - assert.deepStrictEqual(response, expectedResponse); - 
assert((client.innerApiCalls.cancelJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes cancelJob without error using callback', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CancelJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedHeaderRequestParams = "project_id=®ion=&job_id="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()); - client.innerApiCalls.cancelJob = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.cancelJob( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IJob|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.cancelJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes cancelJob with error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CancelJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedHeaderRequestParams = "project_id=®ion=&job_id="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new 
Error('expected'); - client.innerApiCalls.cancelJob = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.cancelJob(request), expectedError); - assert((client.innerApiCalls.cancelJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes cancelJob with closed client', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CancelJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.cancelJob(request), expectedError); - }); - }); - - describe('deleteJob', () => { - it('invokes deleteJob without error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedHeaderRequestParams = "project_id=®ion=&job_id="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteJob = stubSimpleCall(expectedResponse); - const [response] = await client.deleteJob(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteJob without error using callback', async () => { - const client = new 
jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedHeaderRequestParams = "project_id=®ion=&job_id="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteJob = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteJob( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteJob as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteJob with error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedHeaderRequestParams = "project_id=®ion=&job_id="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteJob = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteJob(request), expectedError); - assert((client.innerApiCalls.deleteJob as SinonStub) - 
.getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteJob with closed client', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteJobRequest()); - request.projectId = ''; - request.region = ''; - request.jobId = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteJob(request), expectedError); - }); - }); - - describe('submitJobAsOperation', () => { - it('invokes submitJobAsOperation without error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.submitJobAsOperation = stubLongRunningCall(expectedResponse); - const [operation] = await client.submitJobAsOperation(request); - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.submitJobAsOperation as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes submitJobAsOperation without error using callback', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - 
client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.submitJobAsOperation = stubLongRunningCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.submitJobAsOperation( - request, - (err?: Error|null, - result?: LROperation|null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const operation = await promise as LROperation; - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.submitJobAsOperation as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes submitJobAsOperation with call error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.submitJobAsOperation = stubLongRunningCall(undefined, expectedError); - await assert.rejects(client.submitJobAsOperation(request), expectedError); - assert((client.innerApiCalls.submitJobAsOperation as SinonStub) - .getCall(0).calledWith(request, expectedOptions, 
undefined)); - }); - - it('invokes submitJobAsOperation with LRO error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.SubmitJobRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.submitJobAsOperation = stubLongRunningCall(undefined, undefined, expectedError); - const [operation] = await client.submitJobAsOperation(request); - await assert.rejects(operation.promise(), expectedError); - assert((client.innerApiCalls.submitJobAsOperation as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes checkSubmitJobAsOperationProgress without error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); - expectedResponse.name = 'test'; - expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; - expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} - - client.operationsClient.getOperation = stubSimpleCall(expectedResponse); - const decodedOperation = await client.checkSubmitJobAsOperationProgress(expectedResponse.name); - assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); - assert(decodedOperation.metadata); - assert((client.operationsClient.getOperation as SinonStub).getCall(0)); - }); - - it('invokes checkSubmitJobAsOperationProgress with error', async () => { - const 
client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedError = new Error('expected'); - - client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.checkSubmitJobAsOperationProgress(''), expectedError); - assert((client.operationsClient.getOperation as SinonStub) - .getCall(0)); - }); - }); - - describe('listJobs', () => { - it('invokes listJobs without error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), - ]; - client.innerApiCalls.listJobs = stubSimpleCall(expectedResponse); - const [response] = await client.listJobs(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listJobs as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listJobs without error using callback', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); - request.projectId = ''; - request.region = 
''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), - ]; - client.innerApiCalls.listJobs = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listJobs( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IJob[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listJobs as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listJobs with error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listJobs = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listJobs(request), expectedError); - assert((client.innerApiCalls.listJobs as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listJobsStream without error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: 
{client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), - ]; - client.descriptors.page.listJobs.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listJobsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.dataproc.v1.Job[] = []; - stream.on('data', (response: protos.google.cloud.dataproc.v1.Job) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listJobs.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listJobs, request)); - assert.strictEqual( - (client.descriptors.page.listJobs.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listJobsStream with error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedError = new Error('expected'); - client.descriptors.page.listJobs.createStream = 
stubPageStreamingCall(undefined, expectedError); - const stream = client.listJobsStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.dataproc.v1.Job[] = []; - stream.on('data', (response: protos.google.cloud.dataproc.v1.Job) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listJobs.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listJobs, request)); - assert.strictEqual( - (client.descriptors.page.listJobs.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listJobs without error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), - ]; - client.descriptors.page.listJobs.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.dataproc.v1.IJob[] = []; - const iterable = client.listJobsAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listJobs.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - 
(client.descriptors.page.listJobs.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listJobs with error', async () => { - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListJobsRequest()); - request.projectId = ''; - request.region = ''; - const expectedHeaderRequestParams = "project_id=®ion=";const expectedError = new Error('expected'); - client.descriptors.page.listJobs.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listJobsAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.dataproc.v1.IJob[] = []; - for await (const resource of iterable) { - responses.push(resource!); - } - }); - assert.deepStrictEqual( - (client.descriptors.page.listJobs.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listJobs.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('batch', () => { - const fakePath = "/rendered/path/batch"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - batch: "batchValue", - }; - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.batchPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.batchPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('batchPath', () => { - const result = client.batchPath("projectValue", "locationValue", 
"batchValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.batchPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromBatchName', () => { - const result = client.matchProjectFromBatchName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromBatchName', () => { - const result = client.matchLocationFromBatchName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchBatchFromBatchName', () => { - const result = client.matchBatchFromBatchName(fakePath); - assert.strictEqual(result, "batchValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectLocationAutoscalingPolicy', () => { - const fakePath = "/rendered/path/projectLocationAutoscalingPolicy"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - autoscaling_policy: "autoscalingPolicyValue", - }; - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectLocationAutoscalingPolicyPath', () => { - const result = client.projectLocationAutoscalingPolicyPath("projectValue", "locationValue", "autoscalingPolicyValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render as SinonStub) - 
.getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchProjectFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchLocationFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "autoscalingPolicyValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectLocationWorkflowTemplate', () => { - const fakePath = "/rendered/path/projectLocationWorkflowTemplate"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow_template: "workflowTemplateValue", - }; - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectLocationWorkflowTemplatePath', () => { - const result = client.projectLocationWorkflowTemplatePath("projectValue", "locationValue", "workflowTemplateValue"); - assert.strictEqual(result, 
fakePath); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchProjectFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchLocationFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowTemplateFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "workflowTemplateValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectRegionAutoscalingPolicy', () => { - const fakePath = "/rendered/path/projectRegionAutoscalingPolicy"; - const expectedParameters = { - project: "projectValue", - region: "regionValue", - autoscaling_policy: "autoscalingPolicyValue", - }; - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectRegionAutoscalingPolicyPath', () => { - const result = 
client.projectRegionAutoscalingPolicyPath("projectValue", "regionValue", "autoscalingPolicyValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchProjectFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchRegionFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchRegionFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "regionValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "autoscalingPolicyValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectRegionWorkflowTemplate', () => { - const fakePath = "/rendered/path/projectRegionWorkflowTemplate"; - const expectedParameters = { - project: "projectValue", - region: "regionValue", - workflow_template: "workflowTemplateValue", - }; - const client = new jobcontrollerModule.v1.JobControllerClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match = - sinon.stub().returns(expectedParameters); - 
- it('projectRegionWorkflowTemplatePath', () => { - const result = client.projectRegionWorkflowTemplatePath("projectValue", "regionValue", "workflowTemplateValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchProjectFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchRegionFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchRegionFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "regionValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowTemplateFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "workflowTemplateValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v1/test/gapic_workflow_template_service_v1.ts b/owl-bot-staging/v1/test/gapic_workflow_template_service_v1.ts deleted file mode 100644 index b7e7828b..00000000 --- a/owl-bot-staging/v1/test/gapic_workflow_template_service_v1.ts +++ /dev/null @@ -1,1402 +0,0 @@ -// Copyright 2022 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// -// ** This file is automatically generated by gapic-generator-typescript. ** -// ** https://github.com/googleapis/gapic-generator-typescript ** -// ** All changes to this file may be overwritten. ** - -import * as protos from '../protos/protos'; -import * as assert from 'assert'; -import * as sinon from 'sinon'; -import {SinonStub} from 'sinon'; -import { describe, it } from 'mocha'; -import * as workflowtemplateserviceModule from '../src'; - -import {PassThrough} from 'stream'; - -import {protobuf, LROperation, operationsProtos} from 'google-gax'; - -function generateSampleMessage(instance: T) { - const filledObject = (instance.constructor as typeof protobuf.Message) - .toObject(instance as protobuf.Message, {defaults: true}); - return (instance.constructor as typeof protobuf.Message).fromObject(filledObject) as T; -} - -function stubSimpleCall(response?: ResponseType, error?: Error) { - return error ? sinon.stub().rejects(error) : sinon.stub().resolves([response]); -} - -function stubSimpleCallWithCallback(response?: ResponseType, error?: Error) { - return error ? sinon.stub().callsArgWith(2, error) : sinon.stub().callsArgWith(2, null, response); -} - -function stubLongRunningCall(response?: ResponseType, callError?: Error, lroError?: Error) { - const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); - const mockOperation = { - promise: innerStub, - }; - return callError ? 
sinon.stub().rejects(callError) : sinon.stub().resolves([mockOperation]); -} - -function stubLongRunningCallWithCallback(response?: ResponseType, callError?: Error, lroError?: Error) { - const innerStub = lroError ? sinon.stub().rejects(lroError) : sinon.stub().resolves([response]); - const mockOperation = { - promise: innerStub, - }; - return callError ? sinon.stub().callsArgWith(2, callError) : sinon.stub().callsArgWith(2, null, mockOperation); -} - -function stubPageStreamingCall(responses?: ResponseType[], error?: Error) { - const pagingStub = sinon.stub(); - if (responses) { - for (let i = 0; i < responses.length; ++i) { - pagingStub.onCall(i).callsArgWith(2, null, responses[i]); - } - } - const transformStub = error ? sinon.stub().callsArgWith(2, error) : pagingStub; - const mockStream = new PassThrough({ - objectMode: true, - transform: transformStub, - }); - // trigger as many responses as needed - if (responses) { - for (let i = 0; i < responses.length; ++i) { - setImmediate(() => { mockStream.write({}); }); - } - setImmediate(() => { mockStream.end(); }); - } else { - setImmediate(() => { mockStream.write({}); }); - setImmediate(() => { mockStream.end(); }); - } - return sinon.stub().returns(mockStream); -} - -function stubAsyncIterationCall(responses?: ResponseType[], error?: Error) { - let counter = 0; - const asyncIterable = { - [Symbol.asyncIterator]() { - return { - async next() { - if (error) { - return Promise.reject(error); - } - if (counter >= responses!.length) { - return Promise.resolve({done: true, value: undefined}); - } - return Promise.resolve({done: false, value: responses![counter++]}); - } - }; - } - }; - return sinon.stub().returns(asyncIterable); -} - -describe('v1.WorkflowTemplateServiceClient', () => { - it('has servicePath', () => { - const servicePath = workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient.servicePath; - assert(servicePath); - }); - - it('has apiEndpoint', () => { - const apiEndpoint = 
workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient.apiEndpoint; - assert(apiEndpoint); - }); - - it('has port', () => { - const port = workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient.port; - assert(port); - assert(typeof port === 'number'); - }); - - it('should create a client with no option', () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient(); - assert(client); - }); - - it('should create a client with gRPC fallback', () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - fallback: true, - }); - assert(client); - }); - - it('has initialize method and supports deferred initialization', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.workflowTemplateServiceStub, undefined); - await client.initialize(); - assert(client.workflowTemplateServiceStub); - }); - - it('has close method for the initialized client', done => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - assert(client.workflowTemplateServiceStub); - client.close().then(() => { - done(); - }); - }); - - it('has close method for the non-initialized client', done => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - assert.strictEqual(client.workflowTemplateServiceStub, undefined); - client.close().then(() => { - done(); - }); - }); - - it('has getProjectId method', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 
'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().resolves(fakeProjectId); - const result = await client.getProjectId(); - assert.strictEqual(result, fakeProjectId); - assert((client.auth.getProjectId as SinonStub).calledWithExactly()); - }); - - it('has getProjectId method with callback', async () => { - const fakeProjectId = 'fake-project-id'; - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.auth.getProjectId = sinon.stub().callsArgWith(0, null, fakeProjectId); - const promise = new Promise((resolve, reject) => { - client.getProjectId((err?: Error|null, projectId?: string|null) => { - if (err) { - reject(err); - } else { - resolve(projectId); - } - }); - }); - const result = await promise; - assert.strictEqual(result, fakeProjectId); - }); - - describe('createWorkflowTemplate', () => { - it('invokes createWorkflowTemplate without error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()); - client.innerApiCalls.createWorkflowTemplate = stubSimpleCall(expectedResponse); - const [response] = await client.createWorkflowTemplate(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - 
it('invokes createWorkflowTemplate without error using callback', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()); - client.innerApiCalls.createWorkflowTemplate = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.createWorkflowTemplate( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IWorkflowTemplate|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.createWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes createWorkflowTemplate with error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.createWorkflowTemplate = stubSimpleCall(undefined, 
expectedError); - await assert.rejects(client.createWorkflowTemplate(request), expectedError); - assert((client.innerApiCalls.createWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes createWorkflowTemplate with closed client', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.CreateWorkflowTemplateRequest()); - request.parent = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.createWorkflowTemplate(request), expectedError); - }); - }); - - describe('getWorkflowTemplate', () => { - it('invokes getWorkflowTemplate without error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetWorkflowTemplateRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()); - client.innerApiCalls.getWorkflowTemplate = stubSimpleCall(expectedResponse); - const [response] = await client.getWorkflowTemplate(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getWorkflowTemplate without error using callback', async () => { - const client = new 
workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetWorkflowTemplateRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()); - client.innerApiCalls.getWorkflowTemplate = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.getWorkflowTemplate( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IWorkflowTemplate|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.getWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes getWorkflowTemplate with error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetWorkflowTemplateRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.getWorkflowTemplate = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.getWorkflowTemplate(request), expectedError); - 
assert((client.innerApiCalls.getWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes getWorkflowTemplate with closed client', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.GetWorkflowTemplateRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.getWorkflowTemplate(request), expectedError); - }); - }); - - describe('updateWorkflowTemplate', () => { - it('invokes updateWorkflowTemplate without error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest()); - request.template = {}; - request.template.name = ''; - const expectedHeaderRequestParams = "template.name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()); - client.innerApiCalls.updateWorkflowTemplate = stubSimpleCall(expectedResponse); - const [response] = await client.updateWorkflowTemplate(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.updateWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes updateWorkflowTemplate without error using callback', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - 
credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest()); - request.template = {}; - request.template.name = ''; - const expectedHeaderRequestParams = "template.name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()); - client.innerApiCalls.updateWorkflowTemplate = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.updateWorkflowTemplate( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IWorkflowTemplate|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.updateWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes updateWorkflowTemplate with error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest()); - request.template = {}; - request.template.name = ''; - const expectedHeaderRequestParams = "template.name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.updateWorkflowTemplate = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.updateWorkflowTemplate(request), expectedError); - 
assert((client.innerApiCalls.updateWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes updateWorkflowTemplate with closed client', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.UpdateWorkflowTemplateRequest()); - request.template = {}; - request.template.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.updateWorkflowTemplate(request), expectedError); - }); - }); - - describe('deleteWorkflowTemplate', () => { - it('invokes deleteWorkflowTemplate without error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteWorkflowTemplate = stubSimpleCall(expectedResponse); - const [response] = await client.deleteWorkflowTemplate(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes deleteWorkflowTemplate without error using callback', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: 
{client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.protobuf.Empty()); - client.innerApiCalls.deleteWorkflowTemplate = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.deleteWorkflowTemplate( - request, - (err?: Error|null, result?: protos.google.protobuf.IEmpty|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.deleteWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes deleteWorkflowTemplate with error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.deleteWorkflowTemplate = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.deleteWorkflowTemplate(request), expectedError); - assert((client.innerApiCalls.deleteWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - 
it('invokes deleteWorkflowTemplate with closed client', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.DeleteWorkflowTemplateRequest()); - request.name = ''; - const expectedError = new Error('The client has already been closed.'); - client.close(); - await assert.rejects(client.deleteWorkflowTemplate(request), expectedError); - }); - }); - - describe('instantiateWorkflowTemplate', () => { - it('invokes instantiateWorkflowTemplate without error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.instantiateWorkflowTemplate = stubLongRunningCall(expectedResponse); - const [operation] = await client.instantiateWorkflowTemplate(request); - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.instantiateWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes instantiateWorkflowTemplate without error using callback', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - 
client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.instantiateWorkflowTemplate = stubLongRunningCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.instantiateWorkflowTemplate( - request, - (err?: Error|null, - result?: LROperation|null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const operation = await promise as LROperation; - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.instantiateWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes instantiateWorkflowTemplate with call error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.instantiateWorkflowTemplate = stubLongRunningCall(undefined, expectedError); - await assert.rejects(client.instantiateWorkflowTemplate(request), expectedError); - assert((client.innerApiCalls.instantiateWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, 
expectedOptions, undefined)); - }); - - it('invokes instantiateWorkflowTemplate with LRO error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateWorkflowTemplateRequest()); - request.name = ''; - const expectedHeaderRequestParams = "name="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.instantiateWorkflowTemplate = stubLongRunningCall(undefined, undefined, expectedError); - const [operation] = await client.instantiateWorkflowTemplate(request); - await assert.rejects(operation.promise(), expectedError); - assert((client.innerApiCalls.instantiateWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes checkInstantiateWorkflowTemplateProgress without error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); - expectedResponse.name = 'test'; - expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; - expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} - - client.operationsClient.getOperation = stubSimpleCall(expectedResponse); - const decodedOperation = await client.checkInstantiateWorkflowTemplateProgress(expectedResponse.name); - assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); - assert(decodedOperation.metadata); - assert((client.operationsClient.getOperation as SinonStub).getCall(0)); - }); - - 
it('invokes checkInstantiateWorkflowTemplateProgress with error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedError = new Error('expected'); - - client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.checkInstantiateWorkflowTemplateProgress(''), expectedError); - assert((client.operationsClient.getOperation as SinonStub) - .getCall(0)); - }); - }); - - describe('instantiateInlineWorkflowTemplate', () => { - it('invokes instantiateInlineWorkflowTemplate without error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.instantiateInlineWorkflowTemplate = stubLongRunningCall(expectedResponse); - const [operation] = await client.instantiateInlineWorkflowTemplate(request); - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.instantiateInlineWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes instantiateInlineWorkflowTemplate without error using callback', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', 
private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = generateSampleMessage(new protos.google.longrunning.Operation()); - client.innerApiCalls.instantiateInlineWorkflowTemplate = stubLongRunningCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.instantiateInlineWorkflowTemplate( - request, - (err?: Error|null, - result?: LROperation|null - ) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const operation = await promise as LROperation; - const [response] = await operation.promise(); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.instantiateInlineWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes instantiateInlineWorkflowTemplate with call error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.instantiateInlineWorkflowTemplate = stubLongRunningCall(undefined, expectedError); - await assert.rejects(client.instantiateInlineWorkflowTemplate(request), 
expectedError); - assert((client.innerApiCalls.instantiateInlineWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes instantiateInlineWorkflowTemplate with LRO error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.InstantiateInlineWorkflowTemplateRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.instantiateInlineWorkflowTemplate = stubLongRunningCall(undefined, undefined, expectedError); - const [operation] = await client.instantiateInlineWorkflowTemplate(request); - await assert.rejects(operation.promise(), expectedError); - assert((client.innerApiCalls.instantiateInlineWorkflowTemplate as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes checkInstantiateInlineWorkflowTemplateProgress without error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedResponse = generateSampleMessage(new operationsProtos.google.longrunning.Operation()); - expectedResponse.name = 'test'; - expectedResponse.response = {type_url: 'url', value: Buffer.from('')}; - expectedResponse.metadata = {type_url: 'url', value: Buffer.from('')} - - client.operationsClient.getOperation = stubSimpleCall(expectedResponse); - const decodedOperation = await client.checkInstantiateInlineWorkflowTemplateProgress(expectedResponse.name); - 
assert.deepStrictEqual(decodedOperation.name, expectedResponse.name); - assert(decodedOperation.metadata); - assert((client.operationsClient.getOperation as SinonStub).getCall(0)); - }); - - it('invokes checkInstantiateInlineWorkflowTemplateProgress with error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const expectedError = new Error('expected'); - - client.operationsClient.getOperation = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.checkInstantiateInlineWorkflowTemplateProgress(''), expectedError); - assert((client.operationsClient.getOperation as SinonStub) - .getCall(0)); - }); - }); - - describe('listWorkflowTemplates', () => { - it('invokes listWorkflowTemplates without error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), - ]; - client.innerApiCalls.listWorkflowTemplates = stubSimpleCall(expectedResponse); - const [response] = await client.listWorkflowTemplates(request); - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listWorkflowTemplates as SinonStub) - .getCall(0).calledWith(request, 
expectedOptions, undefined)); - }); - - it('invokes listWorkflowTemplates without error using callback', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), - ]; - client.innerApiCalls.listWorkflowTemplates = stubSimpleCallWithCallback(expectedResponse); - const promise = new Promise((resolve, reject) => { - client.listWorkflowTemplates( - request, - (err?: Error|null, result?: protos.google.cloud.dataproc.v1.IWorkflowTemplate[]|null) => { - if (err) { - reject(err); - } else { - resolve(result); - } - }); - }); - const response = await promise; - assert.deepStrictEqual(response, expectedResponse); - assert((client.innerApiCalls.listWorkflowTemplates as SinonStub) - .getCall(0).calledWith(request, expectedOptions /*, callback defined above */)); - }); - - it('invokes listWorkflowTemplates with error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedOptions = { - otherArgs: { - 
headers: { - 'x-goog-request-params': expectedHeaderRequestParams, - }, - }, - }; - const expectedError = new Error('expected'); - client.innerApiCalls.listWorkflowTemplates = stubSimpleCall(undefined, expectedError); - await assert.rejects(client.listWorkflowTemplates(request), expectedError); - assert((client.innerApiCalls.listWorkflowTemplates as SinonStub) - .getCall(0).calledWith(request, expectedOptions, undefined)); - }); - - it('invokes listWorkflowTemplatesStream without error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), - ]; - client.descriptors.page.listWorkflowTemplates.createStream = stubPageStreamingCall(expectedResponse); - const stream = client.listWorkflowTemplatesStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.dataproc.v1.WorkflowTemplate[] = []; - stream.on('data', (response: protos.google.cloud.dataproc.v1.WorkflowTemplate) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - const responses = await promise; - assert.deepStrictEqual(responses, expectedResponse); - assert((client.descriptors.page.listWorkflowTemplates.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listWorkflowTemplates, request)); - assert.strictEqual( - 
(client.descriptors.page.listWorkflowTemplates.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('invokes listWorkflowTemplatesStream with error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedError = new Error('expected'); - client.descriptors.page.listWorkflowTemplates.createStream = stubPageStreamingCall(undefined, expectedError); - const stream = client.listWorkflowTemplatesStream(request); - const promise = new Promise((resolve, reject) => { - const responses: protos.google.cloud.dataproc.v1.WorkflowTemplate[] = []; - stream.on('data', (response: protos.google.cloud.dataproc.v1.WorkflowTemplate) => { - responses.push(response); - }); - stream.on('end', () => { - resolve(responses); - }); - stream.on('error', (err: Error) => { - reject(err); - }); - }); - await assert.rejects(promise, expectedError); - assert((client.descriptors.page.listWorkflowTemplates.createStream as SinonStub) - .getCall(0).calledWith(client.innerApiCalls.listWorkflowTemplates, request)); - assert.strictEqual( - (client.descriptors.page.listWorkflowTemplates.createStream as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listWorkflowTemplates without error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new 
protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent="; - const expectedResponse = [ - generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), - generateSampleMessage(new protos.google.cloud.dataproc.v1.WorkflowTemplate()), - ]; - client.descriptors.page.listWorkflowTemplates.asyncIterate = stubAsyncIterationCall(expectedResponse); - const responses: protos.google.cloud.dataproc.v1.IWorkflowTemplate[] = []; - const iterable = client.listWorkflowTemplatesAsync(request); - for await (const resource of iterable) { - responses.push(resource!); - } - assert.deepStrictEqual(responses, expectedResponse); - assert.deepStrictEqual( - (client.descriptors.page.listWorkflowTemplates.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listWorkflowTemplates.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - - it('uses async iteration with listWorkflowTemplates with error', async () => { - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - const request = generateSampleMessage(new protos.google.cloud.dataproc.v1.ListWorkflowTemplatesRequest()); - request.parent = ''; - const expectedHeaderRequestParams = "parent=";const expectedError = new Error('expected'); - client.descriptors.page.listWorkflowTemplates.asyncIterate = stubAsyncIterationCall(undefined, expectedError); - const iterable = client.listWorkflowTemplatesAsync(request); - await assert.rejects(async () => { - const responses: protos.google.cloud.dataproc.v1.IWorkflowTemplate[] = []; - for await (const resource of iterable) { - responses.push(resource!); 
- } - }); - assert.deepStrictEqual( - (client.descriptors.page.listWorkflowTemplates.asyncIterate as SinonStub) - .getCall(0).args[1], request); - assert.strictEqual( - (client.descriptors.page.listWorkflowTemplates.asyncIterate as SinonStub) - .getCall(0).args[2].otherArgs.headers['x-goog-request-params'], - expectedHeaderRequestParams - ); - }); - }); - - describe('Path templates', () => { - - describe('batch', () => { - const fakePath = "/rendered/path/batch"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - batch: "batchValue", - }; - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.batchPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.batchPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('batchPath', () => { - const result = client.batchPath("projectValue", "locationValue", "batchValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.batchPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromBatchName', () => { - const result = client.matchProjectFromBatchName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromBatchName', () => { - const result = client.matchLocationFromBatchName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchBatchFromBatchName', () => { - const result = client.matchBatchFromBatchName(fakePath); - assert.strictEqual(result, "batchValue"); - assert((client.pathTemplates.batchPathTemplate.match as SinonStub) - 
.getCall(-1).calledWith(fakePath)); - }); - }); - - describe('project', () => { - const fakePath = "/rendered/path/project"; - const expectedParameters = { - project: "projectValue", - }; - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectPath', () => { - const result = client.projectPath("projectValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectName', () => { - const result = client.matchProjectFromProjectName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectLocationAutoscalingPolicy', () => { - const fakePath = "/rendered/path/projectLocationAutoscalingPolicy"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - autoscaling_policy: "autoscalingPolicyValue", - }; - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectLocationAutoscalingPolicyPath', () => { - const result = client.projectLocationAutoscalingPolicyPath("projectValue", "locationValue", "autoscalingPolicyValue"); - assert.strictEqual(result, 
fakePath); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchProjectFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchLocationFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName', () => { - const result = client.matchAutoscalingPolicyFromProjectLocationAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "autoscalingPolicyValue"); - assert((client.pathTemplates.projectLocationAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectLocationWorkflowTemplate', () => { - const fakePath = "/rendered/path/projectLocationWorkflowTemplate"; - const expectedParameters = { - project: "projectValue", - location: "locationValue", - workflow_template: "workflowTemplateValue", - }; - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectLocationWorkflowTemplatePath', () => { - const result = 
client.projectLocationWorkflowTemplatePath("projectValue", "locationValue", "workflowTemplateValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchProjectFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchLocationFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchLocationFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "locationValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowTemplateFromProjectLocationWorkflowTemplateName', () => { - const result = client.matchWorkflowTemplateFromProjectLocationWorkflowTemplateName(fakePath); - assert.strictEqual(result, "workflowTemplateValue"); - assert((client.pathTemplates.projectLocationWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectRegionAutoscalingPolicy', () => { - const fakePath = "/rendered/path/projectRegionAutoscalingPolicy"; - const expectedParameters = { - project: "projectValue", - region: "regionValue", - autoscaling_policy: "autoscalingPolicyValue", - }; - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render = - sinon.stub().returns(fakePath); - client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match = - 
sinon.stub().returns(expectedParameters); - - it('projectRegionAutoscalingPolicyPath', () => { - const result = client.projectRegionAutoscalingPolicyPath("projectValue", "regionValue", "autoscalingPolicyValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchProjectFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchRegionFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchRegionFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "regionValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName', () => { - const result = client.matchAutoscalingPolicyFromProjectRegionAutoscalingPolicyName(fakePath); - assert.strictEqual(result, "autoscalingPolicyValue"); - assert((client.pathTemplates.projectRegionAutoscalingPolicyPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('projectRegionWorkflowTemplate', () => { - const fakePath = "/rendered/path/projectRegionWorkflowTemplate"; - const expectedParameters = { - project: "projectValue", - region: "regionValue", - workflow_template: "workflowTemplateValue", - }; - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render = - 
sinon.stub().returns(fakePath); - client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('projectRegionWorkflowTemplatePath', () => { - const result = client.projectRegionWorkflowTemplatePath("projectValue", "regionValue", "workflowTemplateValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchProjectFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchRegionFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchRegionFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "regionValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchWorkflowTemplateFromProjectRegionWorkflowTemplateName', () => { - const result = client.matchWorkflowTemplateFromProjectRegionWorkflowTemplateName(fakePath); - assert.strictEqual(result, "workflowTemplateValue"); - assert((client.pathTemplates.projectRegionWorkflowTemplatePathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - - describe('region', () => { - const fakePath = "/rendered/path/region"; - const expectedParameters = { - project: "projectValue", - region: "regionValue", - }; - const client = new workflowtemplateserviceModule.v1.WorkflowTemplateServiceClient({ - credentials: {client_email: 'bogus', private_key: 'bogus'}, - projectId: 'bogus', - }); - client.initialize(); - client.pathTemplates.regionPathTemplate.render = - sinon.stub().returns(fakePath); - 
client.pathTemplates.regionPathTemplate.match = - sinon.stub().returns(expectedParameters); - - it('regionPath', () => { - const result = client.regionPath("projectValue", "regionValue"); - assert.strictEqual(result, fakePath); - assert((client.pathTemplates.regionPathTemplate.render as SinonStub) - .getCall(-1).calledWith(expectedParameters)); - }); - - it('matchProjectFromRegionName', () => { - const result = client.matchProjectFromRegionName(fakePath); - assert.strictEqual(result, "projectValue"); - assert((client.pathTemplates.regionPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - - it('matchRegionFromRegionName', () => { - const result = client.matchRegionFromRegionName(fakePath); - assert.strictEqual(result, "regionValue"); - assert((client.pathTemplates.regionPathTemplate.match as SinonStub) - .getCall(-1).calledWith(fakePath)); - }); - }); - }); -}); diff --git a/owl-bot-staging/v1/tsconfig.json b/owl-bot-staging/v1/tsconfig.json deleted file mode 100644 index c78f1c88..00000000 --- a/owl-bot-staging/v1/tsconfig.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "extends": "./node_modules/gts/tsconfig-google.json", - "compilerOptions": { - "rootDir": ".", - "outDir": "build", - "resolveJsonModule": true, - "lib": [ - "es2018", - "dom" - ] - }, - "include": [ - "src/*.ts", - "src/**/*.ts", - "test/*.ts", - "test/**/*.ts", - "system-test/*.ts" - ] -} diff --git a/owl-bot-staging/v1/webpack.config.js b/owl-bot-staging/v1/webpack.config.js deleted file mode 100644 index 050b272c..00000000 --- a/owl-bot-staging/v1/webpack.config.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright 2021 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// https://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -const path = require('path'); - -module.exports = { - entry: './src/index.ts', - output: { - library: 'dataproc', - filename: './dataproc.js', - }, - node: { - child_process: 'empty', - fs: 'empty', - crypto: 'empty', - }, - resolve: { - alias: { - '../../../package.json': path.resolve(__dirname, 'package.json'), - }, - extensions: ['.js', '.json', '.ts'], - }, - module: { - rules: [ - { - test: /\.tsx?$/, - use: 'ts-loader', - exclude: /node_modules/ - }, - { - test: /node_modules[\\/]@grpc[\\/]grpc-js/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]grpc/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]retry-request/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]https?-proxy-agent/, - use: 'null-loader' - }, - { - test: /node_modules[\\/]gtoken/, - use: 'null-loader' - }, - ], - }, - mode: 'production', -}; diff --git a/src/v1/cluster_controller_client.ts b/src/v1/cluster_controller_client.ts index d403133a..55978ccb 100644 --- a/src/v1/cluster_controller_client.ts +++ b/src/v1/cluster_controller_client.ts @@ -511,6 +511,8 @@ export class ClusterControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', + cluster_name: request.clusterName || '', }); this.initialize(); return this.innerApiCalls.getCluster(request, options, callback); @@ -636,6 +638,7 @@ export class ClusterControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId 
|| '', + region: request.region || '', }); this.initialize(); return this.innerApiCalls.createCluster(request, options, callback); @@ -859,6 +862,8 @@ export class ClusterControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', + cluster_name: request.clusterName || '', }); this.initialize(); return this.innerApiCalls.updateCluster(request, options, callback); @@ -1017,6 +1022,8 @@ export class ClusterControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', + cluster_name: request.clusterName || '', }); this.initialize(); return this.innerApiCalls.stopCluster(request, options, callback); @@ -1175,6 +1182,8 @@ export class ClusterControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', + cluster_name: request.clusterName || '', }); this.initialize(); return this.innerApiCalls.startCluster(request, options, callback); @@ -1335,6 +1344,8 @@ export class ClusterControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', + cluster_name: request.clusterName || '', }); this.initialize(); return this.innerApiCalls.deleteCluster(request, options, callback); @@ -1483,6 +1494,8 @@ export class ClusterControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', + cluster_name: request.clusterName || '', }); this.initialize(); return this.innerApiCalls.diagnoseCluster(request, options, callback); @@ -1632,6 +1645,7 @@ export class ClusterControllerClient { options.otherArgs.headers['x-goog-request-params'] = 
gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', }); this.initialize(); return this.innerApiCalls.listClusters(request, options, callback); @@ -1693,6 +1707,7 @@ export class ClusterControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', }); const defaultCallSettings = this._defaults['listClusters']; const callSettings = defaultCallSettings.merge(options); @@ -1763,6 +1778,7 @@ export class ClusterControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', }); const defaultCallSettings = this._defaults['listClusters']; const callSettings = defaultCallSettings.merge(options); diff --git a/src/v1/job_controller_client.ts b/src/v1/job_controller_client.ts index 291fbd17..6dcbf9f9 100644 --- a/src/v1/job_controller_client.ts +++ b/src/v1/job_controller_client.ts @@ -464,6 +464,7 @@ export class JobControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', }); this.initialize(); return this.innerApiCalls.submitJob(request, options, callback); @@ -552,6 +553,8 @@ export class JobControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', + job_id: request.jobId || '', }); this.initialize(); return this.innerApiCalls.getJob(request, options, callback); @@ -649,6 +652,8 @@ export class JobControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', + job_id: request.jobId || '', }); this.initialize(); return this.innerApiCalls.updateJob(request, options, callback); @@ 
-741,6 +746,8 @@ export class JobControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', + job_id: request.jobId || '', }); this.initialize(); return this.innerApiCalls.cancelJob(request, options, callback); @@ -830,6 +837,8 @@ export class JobControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', + job_id: request.jobId || '', }); this.initialize(); return this.innerApiCalls.deleteJob(request, options, callback); @@ -952,6 +961,7 @@ export class JobControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', }); this.initialize(); return this.innerApiCalls.submitJobAsOperation(request, options, callback); @@ -1103,6 +1113,7 @@ export class JobControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', }); this.initialize(); return this.innerApiCalls.listJobs(request, options, callback); @@ -1168,6 +1179,7 @@ export class JobControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', }); const defaultCallSettings = this._defaults['listJobs']; const callSettings = defaultCallSettings.merge(options); @@ -1242,6 +1254,7 @@ export class JobControllerClient { options.otherArgs.headers['x-goog-request-params'] = gax.routingHeader.fromParams({ project_id: request.projectId || '', + region: request.region || '', }); const defaultCallSettings = this._defaults['listJobs']; const callSettings = defaultCallSettings.merge(options); diff --git a/test/gapic_cluster_controller_v1.ts b/test/gapic_cluster_controller_v1.ts 
index 5e578696..8950e4b1 100644 --- a/test/gapic_cluster_controller_v1.ts +++ b/test/gapic_cluster_controller_v1.ts @@ -253,7 +253,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.GetClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -284,7 +286,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.GetClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -331,7 +335,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.GetClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -362,6 +368,8 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.GetClusterRequest() ); request.projectId = ''; + request.region = ''; + request.clusterName = ''; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.getCluster(request), expectedError); @@ -379,7 +387,8 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.CreateClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -412,7 +421,8 @@ describe('v1.ClusterControllerClient', () => 
{ new protos.google.cloud.dataproc.v1.CreateClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -466,7 +476,8 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.CreateClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -497,7 +508,8 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.CreateClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -573,7 +585,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.UpdateClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -606,7 +620,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.UpdateClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -660,7 +676,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.UpdateClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 
'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -691,7 +709,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.UpdateClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -767,7 +787,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.StopClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -799,7 +821,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.StopClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -853,7 +877,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.StopClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -884,7 +910,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.StopClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -957,7 +985,9 @@ 
describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.StartClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -989,7 +1019,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.StartClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -1043,7 +1075,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.StartClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -1074,7 +1108,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.StartClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -1147,7 +1183,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.DeleteClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -1180,7 +1218,9 @@ describe('v1.ClusterControllerClient', () => { new 
protos.google.cloud.dataproc.v1.DeleteClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -1234,7 +1274,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.DeleteClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -1265,7 +1307,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.DeleteClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -1341,7 +1385,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.DiagnoseClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -1374,7 +1420,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.DiagnoseClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -1428,7 +1476,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.DiagnoseClusterRequest() ); request.projectId = ''; - 
const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -1459,7 +1509,9 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.DiagnoseClusterRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.clusterName = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&cluster_name='; const expectedOptions = { otherArgs: { headers: { @@ -1535,7 +1587,8 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.ListClustersRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -1568,7 +1621,8 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.ListClustersRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -1617,7 +1671,8 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.ListClustersRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -1648,7 +1703,8 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.ListClustersRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedResponse = [ generateSampleMessage(new 
protos.google.cloud.dataproc.v1.Cluster()), generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), @@ -1697,7 +1753,8 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.ListClustersRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedError = new Error('expected'); client.descriptors.page.listClusters.createStream = stubPageStreamingCall( undefined, @@ -1743,7 +1800,8 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.ListClustersRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedResponse = [ generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), generateSampleMessage(new protos.google.cloud.dataproc.v1.Cluster()), @@ -1781,7 +1839,8 @@ describe('v1.ClusterControllerClient', () => { new protos.google.cloud.dataproc.v1.ListClustersRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedError = new Error('expected'); client.descriptors.page.listClusters.asyncIterate = stubAsyncIterationCall(undefined, expectedError); diff --git a/test/gapic_job_controller_v1.ts b/test/gapic_job_controller_v1.ts index 35009677..1ea33e2a 100644 --- a/test/gapic_job_controller_v1.ts +++ b/test/gapic_job_controller_v1.ts @@ -251,7 +251,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.SubmitJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -282,7 +283,8 @@ describe('v1.JobControllerClient', () => 
{ new protos.google.cloud.dataproc.v1.SubmitJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -329,7 +331,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.SubmitJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -357,6 +360,7 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.SubmitJobRequest() ); request.projectId = ''; + request.region = ''; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.submitJob(request), expectedError); @@ -374,7 +378,9 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.GetJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&job_id='; const expectedOptions = { otherArgs: { headers: { @@ -405,7 +411,9 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.GetJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&job_id='; const expectedOptions = { otherArgs: { headers: { @@ -452,7 +460,9 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.GetJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&job_id='; const expectedOptions = { otherArgs: { headers: { @@ -480,6 
+490,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.GetJobRequest() ); request.projectId = ''; + request.region = ''; + request.jobId = ''; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.getJob(request), expectedError); @@ -497,7 +509,9 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.UpdateJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&job_id='; const expectedOptions = { otherArgs: { headers: { @@ -528,7 +542,9 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.UpdateJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&job_id='; const expectedOptions = { otherArgs: { headers: { @@ -575,7 +591,9 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.UpdateJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&job_id='; const expectedOptions = { otherArgs: { headers: { @@ -603,6 +621,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.UpdateJobRequest() ); request.projectId = ''; + request.region = ''; + request.jobId = ''; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.updateJob(request), expectedError); @@ -620,7 +640,9 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.CancelJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.jobId = ''; + const 
expectedHeaderRequestParams = 'project_id=®ion=&job_id='; const expectedOptions = { otherArgs: { headers: { @@ -651,7 +673,9 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.CancelJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&job_id='; const expectedOptions = { otherArgs: { headers: { @@ -698,7 +722,9 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.CancelJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&job_id='; const expectedOptions = { otherArgs: { headers: { @@ -726,6 +752,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.CancelJobRequest() ); request.projectId = ''; + request.region = ''; + request.jobId = ''; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.cancelJob(request), expectedError); @@ -743,7 +771,9 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.DeleteJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&job_id='; const expectedOptions = { otherArgs: { headers: { @@ -774,7 +804,9 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.DeleteJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&job_id='; const expectedOptions = { otherArgs: { headers: { @@ -821,7 +853,9 @@ describe('v1.JobControllerClient', () => { new 
protos.google.cloud.dataproc.v1.DeleteJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + request.jobId = ''; + const expectedHeaderRequestParams = 'project_id=®ion=&job_id='; const expectedOptions = { otherArgs: { headers: { @@ -849,6 +883,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.DeleteJobRequest() ); request.projectId = ''; + request.region = ''; + request.jobId = ''; const expectedError = new Error('The client has already been closed.'); client.close(); await assert.rejects(client.deleteJob(request), expectedError); @@ -866,7 +902,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.SubmitJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -899,7 +936,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.SubmitJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -953,7 +991,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.SubmitJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -984,7 +1023,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.SubmitJobRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -1060,7 +1100,8 @@ 
describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.ListJobsRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -1093,7 +1134,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.ListJobsRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -1142,7 +1184,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.ListJobsRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedOptions = { otherArgs: { headers: { @@ -1170,7 +1213,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.ListJobsRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedResponse = [ generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), @@ -1215,7 +1259,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.ListJobsRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedError = new Error('expected'); client.descriptors.page.listJobs.createStream = stubPageStreamingCall( undefined, @@ -1257,7 +1302,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.ListJobsRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 
'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedResponse = [ generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), generateSampleMessage(new protos.google.cloud.dataproc.v1.Job()), @@ -1293,7 +1339,8 @@ describe('v1.JobControllerClient', () => { new protos.google.cloud.dataproc.v1.ListJobsRequest() ); request.projectId = ''; - const expectedHeaderRequestParams = 'project_id='; + request.region = ''; + const expectedHeaderRequestParams = 'project_id=®ion='; const expectedError = new Error('expected'); client.descriptors.page.listJobs.asyncIterate = stubAsyncIterationCall( undefined,