Skip to content

Commit

Permalink
docs: add sample for update transfer config (#46)
Browse files Browse the repository at this point in the history
* docs: add sample for update transfer config

* docs: make id more unique and pass all errors

* fix: nit
  • Loading branch information
HemangChothani authored Sep 15, 2020
1 parent 6859385 commit 8aa4609
Show file tree
Hide file tree
Showing 4 changed files with 159 additions and 57 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import datetime
import os
import uuid

import google.auth
import google.cloud.bigquery
import pytest


@pytest.fixture
def project_id():
    """Return the Google Cloud project ID configured for the tests."""
    environment = os.environ
    return environment["GOOGLE_CLOUD_PROJECT"]


@pytest.fixture(scope="module")
def credentials():
    """Return default credentials scoped to the cloud-platform scope.

    If using a service account, the BQ DTS robot associated with your
    project requires the roles/iam.serviceAccountShortTermTokenMinter
    permission to act on behalf of the account.
    """
    scopes = ["https://www.googleapis.com/auth/cloud-platform"]
    found_credentials, _project = google.auth.default(scopes)
    return found_credentials


@pytest.fixture(scope="module")
def bqdts_client(credentials):
    """Return a module-scoped BigQuery Data Transfer Service client."""
    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer_v1.DataTransferServiceClient(
        credentials=credentials
    )
    return client


@pytest.fixture(scope="module")
def bigquery_client(credentials):
    """Return a module-scoped BigQuery client using the shared credentials."""
    client = google.cloud.bigquery.Client(credentials=credentials)
    return client


@pytest.fixture(scope="module")
def dataset_id(bigquery_client):
    """Create a scratch dataset, yield its ID, and drop it on teardown.

    Ensures the test account has owner permissions on the dataset by
    creating one from scratch.
    """
    timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    random_suffix = uuid.uuid4().hex[:8]
    temp_ds_id = "bqdts_{}_{}".format(timestamp, random_suffix)
    bigquery_client.create_dataset(temp_ds_id)
    yield temp_ds_id
    bigquery_client.delete_dataset(temp_ds_id)


@pytest.fixture
def to_delete(bqdts_client):
    """Yield a list; transfer configs appended to it are deleted afterwards."""
    resource_names = []
    yield resource_names

    for name in resource_names:
        try:
            bqdts_client.delete_transfer_config(name)
        except Exception:
            # Best-effort cleanup: ignore failures (e.g. already deleted).
            pass
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
Expand All @@ -14,65 +14,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import time

import google.api_core.exceptions
import google.auth
import google.cloud.bigquery
import pytest

from .. import create_scheduled_query


@pytest.fixture
def project_id():
    """Project ID for the system tests, taken from the environment."""
    env_key = "GOOGLE_CLOUD_PROJECT"
    return os.environ[env_key]


@pytest.fixture(scope="module")
def credentials():
    """Return application-default credentials for the cloud-platform scope.

    If using a service account, the BQ DTS robot associated with your
    project requires the roles/iam.serviceAccountShortTermTokenMinter
    permission to act on behalf of the account.
    """
    requested_scopes = ["https://www.googleapis.com/auth/cloud-platform"]
    default_creds, _unused_project = google.auth.default(requested_scopes)
    return default_creds


@pytest.fixture(scope="module")
def bqdts_client(credentials):
    """Return a Data Transfer Service client shared across the module."""
    from google.cloud import bigquery_datatransfer_v1

    service_client = bigquery_datatransfer_v1.DataTransferServiceClient(
        credentials=credentials
    )
    return service_client


@pytest.fixture(scope="module")
def bigquery_client(credentials):
    """Return a BigQuery client built from the shared test credentials."""
    return google.cloud.bigquery.Client(credentials=credentials)


@pytest.fixture(scope="module")
def dataset_id(bigquery_client):
    """Create a scratch dataset, yield its ID, then delete it.

    Ensure the test account has owner permissions on the dataset by
    creating one from scratch.
    """
    import uuid

    # BUG FIX: the original built the ID from time.process_time(), which is
    # CPU time since process start — tiny, low-resolution, and nearly the
    # same on every run, so dataset IDs collided across (and within) test
    # runs. A random UUID suffix makes the ID unique.
    temp_ds_id = "bqdts_{}".format(uuid.uuid4().hex[:16])
    bigquery_client.create_dataset(temp_ds_id)
    yield temp_ds_id
    bigquery_client.delete_dataset(temp_ds_id)


@pytest.fixture
def to_delete(bqdts_client):
    """Yield a list of transfer config names to delete during teardown."""
    doomed_names = []
    yield doomed_names

    for doomed_name in doomed_names:
        try:
            bqdts_client.delete_transfer_config(doomed_name)
        except google.api_core.exceptions.NotFound:
            # Config was already removed; nothing left to clean up.
            pass


def test_sample(project_id, dataset_id, capsys, to_delete):
config_name = create_scheduled_query.sample_create_transfer_config(
project_id, dataset_id
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .. import create_scheduled_query, update_transfer_config


def test_update_config_sample(project_id, dataset_id, capsys, to_delete):
    """Create a scheduled query, update its display name, and verify output."""
    config_name = create_scheduled_query.sample_create_transfer_config(
        project_id, dataset_id
    )

    new_display_name = "Transfer config updated"
    updated_config = update_transfer_config.sample_update_transfer_config(
        config_name, new_display_name
    )
    to_delete.append(updated_config.name)

    captured_out, _captured_err = capsys.readouterr()
    assert updated_config.name in captured_out
    assert updated_config.display_name == new_display_name
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
# -*- coding: utf-8 -*-
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# To install the latest published package dependency, execute the following:
# pip install google-cloud-bigquery-datatransfer


def sample_update_transfer_config(config_name, display_name):
    """Update the display name of an existing transfer config.

    Args:
        config_name (str): Full resource name of the transfer config,
            e.g. "projects/{project}/transferConfigs/{config_id}".
        display_name (str): New user-friendly display name for the config.

    Returns:
        The updated transfer config object returned by the service.
    """
    # [START bigquerydatatransfer_update_transfer_config]
    from google.cloud import bigquery_datatransfer_v1

    client = bigquery_datatransfer_v1.DataTransferServiceClient()
    # TODO(developer): Set the config_name which user wants to update.
    # config_name = "your-created-transfer-config-name"

    # TODO(developer): Set the display_name of transfer_config.
    # display_name = "your-updated-display-name"

    transfer_config = client.get_transfer_config(name=config_name)
    transfer_config.display_name = display_name
    # Update mask restricts the update to the display_name field only.
    field_mask = {"paths": ["display_name"]}
    response = client.update_transfer_config(transfer_config, field_mask)

    print("Transfer config updated for '{}'".format(response.name))
    # [END bigquerydatatransfer_update_transfer_config]
    # Return the updated config for testing purposes, so that it can be deleted.
    return response


def main():
    """CLI entry point: update a transfer config's display name.

    Parses the transfer config resource name and the new display name
    from the command line, then invokes sample_update_transfer_config.
    """
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--transfer_config_name",
        type=str,
        default="your-created-transfer-config-name",
    )
    # BUG FIX: sample_update_transfer_config requires a display_name
    # argument; the original called it with only the config name, which
    # raised a TypeError before any API call was made.
    parser.add_argument(
        "--display_name",
        type=str,
        default="your-updated-display-name",
    )
    args = parser.parse_args()

    sample_update_transfer_config(args.transfer_config_name, args.display_name)


if __name__ == "__main__":
    main()

0 comments on commit 8aa4609

Please sign in to comment.