Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Regenerate Dataflow Python client #31997

Merged
merged 1 commit into from
Jul 29, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
@@ -1,8 +1,5 @@
"""Generated client library for dataflow version v1b3."""
# NOTE: This file is autogenerated and should not be edited by hand.

from __future__ import absolute_import

from apitools.base.py import base_api
from . import dataflow_v1b3_messages as messages

Expand All @@ -17,9 +14,7 @@ class DataflowV1b3(base_api.BaseApiClient):
_PACKAGE = 'dataflow'
_SCOPES = [
'https://www.googleapis.com/auth/cloud-platform',
'https://www.googleapis.com/auth/compute',
'https://www.googleapis.com/auth/compute.readonly',
'https://www.googleapis.com/auth/userinfo.email'
'https://www.googleapis.com/auth/compute'
]
_VERSION = 'v1b3'
_CLIENT_ID = '1042881264118.apps.googleusercontent.com'
Expand Down Expand Up @@ -75,7 +70,6 @@ def __init__(
self.projects_locations_jobs = self.ProjectsLocationsJobsService(self)
self.projects_locations_snapshots = self.ProjectsLocationsSnapshotsService(
self)
self.projects_locations_sql = self.ProjectsLocationsSqlService(self)
self.projects_locations_templates = self.ProjectsLocationsTemplatesService(
self)
self.projects_locations = self.ProjectsLocationsService(self)
Expand Down Expand Up @@ -254,7 +248,7 @@ def __init__(self, client):
self._upload_configs = {}

def Aggregated(self, request, global_params=None):
r"""List the jobs of a project across all regions.
r"""List the jobs of a project across all regions. **Note:** This method doesn't support filtering the list of jobs by name.

Args:
request: (DataflowProjectsJobsAggregatedRequest) input message
Expand All @@ -270,7 +264,8 @@ def Aggregated(self, request, global_params=None):
method_id='dataflow.projects.jobs.aggregated',
ordered_params=['projectId'],
path_params=['projectId'],
query_params=['filter', 'location', 'pageSize', 'pageToken', 'view'],
query_params=
['filter', 'location', 'name', 'pageSize', 'pageToken', 'view'],
relative_path='v1b3/projects/{projectId}/jobs:aggregated',
request_field='',
request_type_name='DataflowProjectsJobsAggregatedRequest',
Expand All @@ -279,7 +274,7 @@ def Aggregated(self, request, global_params=None):
)

def Create(self, request, global_params=None):
r"""Creates a Cloud Dataflow job. To create a job, we recommend using `projects.locations.jobs.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.create` is not recommended, as your job will always start in `us-central1`.
r"""Creates a Cloud Dataflow job. To create a job, we recommend using `projects.locations.jobs.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.create` is not recommended, as your job will always start in `us-central1`. Do not enter confidential information when you supply string values using the API.

Args:
request: (DataflowProjectsJobsCreateRequest) input message
Expand Down Expand Up @@ -354,7 +349,7 @@ def GetMetrics(self, request, global_params=None):
)

def List(self, request, global_params=None):
r"""List the jobs of a project. To list the jobs of a project in a region, we recommend using `projects.locations.jobs.list` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). To list the all jobs across all regions, use `projects.jobs.aggregated`. Using `projects.jobs.list` is not recommended, as you can only get the list of jobs that are running in `us-central1`.
r"""List the jobs of a project. To list the jobs of a project in a region, we recommend using `projects.locations.jobs.list` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). To list the all jobs across all regions, use `projects.jobs.aggregated`. Using `projects.jobs.list` is not recommended, because you can only get the list of jobs that are running in `us-central1`. `projects.locations.jobs.list` and `projects.jobs.list` support filtering the list of jobs by name. Filtering by name isn't supported by `projects.jobs.aggregated`.

Args:
request: (DataflowProjectsJobsListRequest) input message
Expand All @@ -370,7 +365,8 @@ def List(self, request, global_params=None):
method_id='dataflow.projects.jobs.list',
ordered_params=['projectId'],
path_params=['projectId'],
query_params=['filter', 'location', 'pageSize', 'pageToken', 'view'],
query_params=
['filter', 'location', 'name', 'pageSize', 'pageToken', 'view'],
relative_path='v1b3/projects/{projectId}/jobs',
request_field='',
request_type_name='DataflowProjectsJobsListRequest',
Expand Down Expand Up @@ -420,7 +416,7 @@ def Update(self, request, global_params=None):
method_id='dataflow.projects.jobs.update',
ordered_params=['projectId', 'jobId'],
path_params=['jobId', 'projectId'],
query_params=['location'],
query_params=['location', 'updateMask'],
relative_path='v1b3/projects/{projectId}/jobs/{jobId}',
request_field='job',
request_type_name='DataflowProjectsJobsUpdateRequest',
Expand Down Expand Up @@ -611,7 +607,7 @@ def __init__(self, client):
self._upload_configs = {}

def GetExecutionDetails(self, request, global_params=None):
r"""Request detailed information about the execution status of a stage of the job.
r"""Request detailed information about the execution status of a stage of the job. EXPERIMENTAL. This API is subject to change or removal without notice.

Args:
request: (DataflowProjectsLocationsJobsStagesGetExecutionDetailsRequest) input message
Expand Down Expand Up @@ -710,7 +706,7 @@ def __init__(self, client):
self._upload_configs = {}

def Create(self, request, global_params=None):
r"""Creates a Cloud Dataflow job. To create a job, we recommend using `projects.locations.jobs.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.create` is not recommended, as your job will always start in `us-central1`.
r"""Creates a Cloud Dataflow job. To create a job, we recommend using `projects.locations.jobs.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.jobs.create` is not recommended, as your job will always start in `us-central1`. Do not enter confidential information when you supply string values using the API.

Args:
request: (DataflowProjectsLocationsJobsCreateRequest) input message
Expand Down Expand Up @@ -761,7 +757,7 @@ def Get(self, request, global_params=None):
)

def GetExecutionDetails(self, request, global_params=None):
r"""Request detailed information about the execution status of the job.
r"""Request detailed information about the execution status of the job. EXPERIMENTAL. This API is subject to change or removal without notice.

Args:
request: (DataflowProjectsLocationsJobsGetExecutionDetailsRequest) input message
Expand Down Expand Up @@ -814,7 +810,7 @@ def GetMetrics(self, request, global_params=None):
)

def List(self, request, global_params=None):
r"""List the jobs of a project. To list the jobs of a project in a region, we recommend using `projects.locations.jobs.list` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). To list the all jobs across all regions, use `projects.jobs.aggregated`. Using `projects.jobs.list` is not recommended, as you can only get the list of jobs that are running in `us-central1`.
r"""List the jobs of a project. To list the jobs of a project in a region, we recommend using `projects.locations.jobs.list` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). To list the all jobs across all regions, use `projects.jobs.aggregated`. Using `projects.jobs.list` is not recommended, because you can only get the list of jobs that are running in `us-central1`. `projects.locations.jobs.list` and `projects.jobs.list` support filtering the list of jobs by name. Filtering by name isn't supported by `projects.jobs.aggregated`.

Args:
request: (DataflowProjectsLocationsJobsListRequest) input message
Expand All @@ -830,7 +826,7 @@ def List(self, request, global_params=None):
method_id='dataflow.projects.locations.jobs.list',
ordered_params=['projectId', 'location'],
path_params=['location', 'projectId'],
query_params=['filter', 'pageSize', 'pageToken', 'view'],
query_params=['filter', 'name', 'pageSize', 'pageToken', 'view'],
relative_path='v1b3/projects/{projectId}/locations/{location}/jobs',
request_field='',
request_type_name='DataflowProjectsLocationsJobsListRequest',
Expand Down Expand Up @@ -881,7 +877,7 @@ def Update(self, request, global_params=None):
method_id='dataflow.projects.locations.jobs.update',
ordered_params=['projectId', 'location', 'jobId'],
path_params=['jobId', 'location', 'projectId'],
query_params=[],
query_params=['updateMask'],
relative_path=
'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}',
request_field='job',
Expand Down Expand Up @@ -978,41 +974,6 @@ def List(self, request, global_params=None):
supports_download=False,
)

class ProjectsLocationsSqlService(base_api.BaseApiService):
  """Service class for the projects_locations_sql resource."""

  _NAME = 'projects_locations_sql'

  def __init__(self, client):
    super(DataflowV1b3.ProjectsLocationsSqlService, self).__init__(client)
    self._upload_configs = {}

  def Validate(self, request, global_params=None):
    r"""Validates a GoogleSQL query for Cloud Dataflow syntax.

    Always confirms that the given query parses correctly; when schema
    information can be looked up from DataCatalog, also validates that the
    query analyzes properly.

    Args:
      request: (DataflowProjectsLocationsSqlValidateRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments
    Returns:
      (ValidateResponse) The response message.
    """
    # Fetch the method configuration and dispatch in a single call.
    return self._RunMethod(
        self.GetMethodConfig('Validate'),
        request,
        global_params=global_params)

  Validate.method_config = lambda: base_api.ApiMethodInfo(
      http_method='GET',
      method_id='dataflow.projects.locations.sql.validate',
      ordered_params=['projectId', 'location'],
      path_params=['location', 'projectId'],
      query_params=['query'],
      relative_path='v1b3/projects/{projectId}/locations/{location}/sql:validate',
      request_field='',
      request_type_name='DataflowProjectsLocationsSqlValidateRequest',
      response_type_name='ValidateResponse',
      supports_download=False,
  )

class ProjectsLocationsTemplatesService(base_api.BaseApiService):
"""Service class for the projects_locations_templates resource."""

Expand All @@ -1024,7 +985,7 @@ def __init__(self, client):
self._upload_configs = {}

def Create(self, request, global_params=None):
r"""Creates a Cloud Dataflow job from a template.
r"""Creates a Cloud Dataflow job from a template. Do not enter confidential information when you supply string values using the API. To create a job, we recommend using `projects.locations.templates.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.templates.create` is not recommended, because your job will always start in `us-central1`.

Args:
request: (DataflowProjectsLocationsTemplatesCreateRequest) input message
Expand All @@ -1050,7 +1011,7 @@ def Create(self, request, global_params=None):
)

def Get(self, request, global_params=None):
r"""Get the template associated with a template.
r"""Get the template associated with a template. To get the template, we recommend using `projects.locations.templates.get` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.templates.get` is not recommended, because only templates that are running in `us-central1` are retrieved.

Args:
request: (DataflowProjectsLocationsTemplatesGetRequest) input message
Expand All @@ -1076,7 +1037,7 @@ def Get(self, request, global_params=None):
)

def Launch(self, request, global_params=None):
r"""Launch a template.
r"""Launches a template. To launch a template, we recommend using `projects.locations.templates.launch` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.templates.launch` is not recommended, because jobs launched from the template will always start in `us-central1`.

Args:
request: (DataflowProjectsLocationsTemplatesLaunchRequest) input message
Expand Down Expand Up @@ -1210,7 +1171,7 @@ def __init__(self, client):
self._upload_configs = {}

def Create(self, request, global_params=None):
r"""Creates a Cloud Dataflow job from a template.
r"""Creates a Cloud Dataflow job from a template. Do not enter confidential information when you supply string values using the API. To create a job, we recommend using `projects.locations.templates.create` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.templates.create` is not recommended, because your job will always start in `us-central1`.

Args:
request: (DataflowProjectsTemplatesCreateRequest) input message
Expand All @@ -1235,7 +1196,7 @@ def Create(self, request, global_params=None):
)

def Get(self, request, global_params=None):
r"""Get the template associated with a template.
r"""Get the template associated with a template. To get the template, we recommend using `projects.locations.templates.get` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.templates.get` is not recommended, because only templates that are running in `us-central1` are retrieved.

Args:
request: (DataflowProjectsTemplatesGetRequest) input message
Expand All @@ -1260,7 +1221,7 @@ def Get(self, request, global_params=None):
)

def Launch(self, request, global_params=None):
r"""Launch a template.
r"""Launches a template. To launch a template, we recommend using `projects.locations.templates.launch` with a [regional endpoint] (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints). Using `projects.templates.launch` is not recommended, because jobs launched from the template will always start in `us-central1`.

Args:
request: (DataflowProjectsTemplatesLaunchRequest) input message
Expand Down
Loading
Loading