beam-commits mailing list archives

From "ASF GitHub Bot (JIRA)" <j...@apache.org>
Subject [jira] [Work logged] (BEAM-5460) Update Dataflow Python API client
Date Fri, 28 Sep 2018 20:16:02 GMT

     [ https://issues.apache.org/jira/browse/BEAM-5460?focusedWorklogId=149451&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-149451 ]

ASF GitHub Bot logged work on BEAM-5460:
----------------------------------------

                Author: ASF GitHub Bot
            Created on: 28/Sep/18 20:16
            Start Date: 28/Sep/18 20:16
    Worklog Time Spent: 10m 
      Work Description: aaltay closed pull request #6460: [BEAM-5460] Update Python Dataflow API client
URL: https://github.com/apache/beam/pull/6460
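
This PR regenerates the Dataflow v1b3 client and message classes with a newer
apitools code generator. Two structural changes stand out in the diff below:
the client constructor gains a response_encoding argument (forwarded to the
apitools base client so HTTP response bodies can be decoded to text rather
than returned as raw bytes), and each generated service method now carries
its ApiMethodInfo as a method_config lambda instead of the service building a
_method_configs dict in __init__. The sketch below is illustrative only and
is not part of the PR; the 'utf8' value and the offline construction via
get_credentials=False are assumptions chosen for the example.

    # Illustrative sketch (not from the PR): exercising the regenerated client.
    from apache_beam.runners.dataflow.internal.clients.dataflow import (
        dataflow_v1b3_client as dataflow_client)

    # New keyword argument: response_encoding is passed through to the
    # apitools base client; 'utf8' is an example value, not a requirement.
    client = dataflow_client.DataflowV1b3(
        get_credentials=False,  # skip credential lookup for this offline demo
        response_encoding='utf8')

    # Method configs now live on the generated methods as lambdas rather
    # than in a _method_configs dict populated by the service constructor.
    config = client.projects_jobs_messages.List.method_config()
    assert config.http_method == u'GET'
    assert config.relative_path == (
        u'v1b3/projects/{projectId}/jobs/{jobId}/messages')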
 
This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:

diff --git a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py
index 02b424eb994..ce0d625fcda 100644
--- a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py
+++ b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py
@@ -45,7 +45,7 @@ def __init__(self, url='', credentials=None,
                get_credentials=True, http=None, model=None,
                log_request=False, log_response=False,
                credentials_args=None, default_global_params=None,
-               additional_http_headers=None):
+               additional_http_headers=None, response_encoding=None):
     """Create a new dataflow handle."""
     url = url or self.BASE_URL
     super(DataflowV1b3, self).__init__(
@@ -54,7 +54,8 @@ def __init__(self, url='', credentials=None,
         log_request=log_request, log_response=log_response,
         credentials_args=credentials_args,
         default_global_params=default_global_params,
-        additional_http_headers=additional_http_headers)
+        additional_http_headers=additional_http_headers,
+        response_encoding=response_encoding)
     self.projects_jobs_debug = self.ProjectsJobsDebugService(self)
     self.projects_jobs_messages = self.ProjectsJobsMessagesService(self)
     self.projects_jobs_workItems = self.ProjectsJobsWorkItemsService(self)
@@ -75,38 +76,11 @@ class ProjectsJobsDebugService(base_api.BaseApiService):
 
     def __init__(self, client):
       super(DataflowV1b3.ProjectsJobsDebugService, self).__init__(client)
-      self._method_configs = {
-          'GetConfig': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.jobs.debug.getConfig',
-              ordered_params=[u'projectId', u'jobId'],
-              path_params=[u'jobId', u'projectId'],
-              query_params=[],
-              relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/debug/getConfig',
-              request_field=u'getDebugConfigRequest',
-              request_type_name=u'DataflowProjectsJobsDebugGetConfigRequest',
-              response_type_name=u'GetDebugConfigResponse',
-              supports_download=False,
-          ),
-          'SendCapture': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.jobs.debug.sendCapture',
-              ordered_params=[u'projectId', u'jobId'],
-              path_params=[u'jobId', u'projectId'],
-              query_params=[],
-              relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/debug/sendCapture',
-              request_field=u'sendDebugCaptureRequest',
-              request_type_name=u'DataflowProjectsJobsDebugSendCaptureRequest',
-              response_type_name=u'SendDebugCaptureResponse',
-              supports_download=False,
-          ),
-          }
-
       self._upload_configs = {
           }
 
     def GetConfig(self, request, global_params=None):
-      """Get encoded debug configuration for component. Not cacheable.
+      r"""Get encoded debug configuration for component. Not cacheable.
 
       Args:
         request: (DataflowProjectsJobsDebugGetConfigRequest) input message
@@ -118,8 +92,21 @@ def GetConfig(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    GetConfig.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.jobs.debug.getConfig',
+        ordered_params=[u'projectId', u'jobId'],
+        path_params=[u'jobId', u'projectId'],
+        query_params=[],
+        relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/debug/getConfig',
+        request_field=u'get_debug_config_request',
+        request_type_name=u'DataflowProjectsJobsDebugGetConfigRequest',
+        response_type_name=u'GetDebugConfigResponse',
+        supports_download=False,
+    )
+
     def SendCapture(self, request, global_params=None):
-      """Send encoded debug capture data for component.
+      r"""Send encoded debug capture data for component.
 
       Args:
         request: (DataflowProjectsJobsDebugSendCaptureRequest) input message
@@ -131,6 +118,19 @@ def SendCapture(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    SendCapture.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.jobs.debug.sendCapture',
+        ordered_params=[u'projectId', u'jobId'],
+        path_params=[u'jobId', u'projectId'],
+        query_params=[],
+        relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/debug/sendCapture',
+        request_field=u'send_debug_capture_request',
+        request_type_name=u'DataflowProjectsJobsDebugSendCaptureRequest',
+        response_type_name=u'SendDebugCaptureResponse',
+        supports_download=False,
+    )
+
   class ProjectsJobsMessagesService(base_api.BaseApiService):
     """Service class for the projects_jobs_messages resource."""
 
@@ -138,26 +138,11 @@ class ProjectsJobsMessagesService(base_api.BaseApiService):
 
     def __init__(self, client):
       super(DataflowV1b3.ProjectsJobsMessagesService, self).__init__(client)
-      self._method_configs = {
-          'List': base_api.ApiMethodInfo(
-              http_method=u'GET',
-              method_id=u'dataflow.projects.jobs.messages.list',
-              ordered_params=[u'projectId', u'jobId'],
-              path_params=[u'jobId', u'projectId'],
-              query_params=[u'endTime', u'location', u'minimumImportance', u'pageSize', u'pageToken', u'startTime'],
-              relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/messages',
-              request_field='',
-              request_type_name=u'DataflowProjectsJobsMessagesListRequest',
-              response_type_name=u'ListJobMessagesResponse',
-              supports_download=False,
-          ),
-          }
-
       self._upload_configs = {
           }
 
     def List(self, request, global_params=None):
-      """Request the job status.
+      r"""Request the job status.
 
       Args:
         request: (DataflowProjectsJobsMessagesListRequest) input message
@@ -169,6 +154,19 @@ def List(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dataflow.projects.jobs.messages.list',
+        ordered_params=[u'projectId', u'jobId'],
+        path_params=[u'jobId', u'projectId'],
+        query_params=[u'endTime', u'location', u'minimumImportance', u'pageSize', u'pageToken', u'startTime'],
+        relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/messages',
+        request_field='',
+        request_type_name=u'DataflowProjectsJobsMessagesListRequest',
+        response_type_name=u'ListJobMessagesResponse',
+        supports_download=False,
+    )
+
   class ProjectsJobsWorkItemsService(base_api.BaseApiService):
     """Service class for the projects_jobs_workItems resource."""
 
@@ -176,38 +174,11 @@ class ProjectsJobsWorkItemsService(base_api.BaseApiService):
 
     def __init__(self, client):
       super(DataflowV1b3.ProjectsJobsWorkItemsService, self).__init__(client)
-      self._method_configs = {
-          'Lease': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.jobs.workItems.lease',
-              ordered_params=[u'projectId', u'jobId'],
-              path_params=[u'jobId', u'projectId'],
-              query_params=[],
-              relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/workItems:lease',
-              request_field=u'leaseWorkItemRequest',
-              request_type_name=u'DataflowProjectsJobsWorkItemsLeaseRequest',
-              response_type_name=u'LeaseWorkItemResponse',
-              supports_download=False,
-          ),
-          'ReportStatus': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.jobs.workItems.reportStatus',
-              ordered_params=[u'projectId', u'jobId'],
-              path_params=[u'jobId', u'projectId'],
-              query_params=[],
-              relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/workItems:reportStatus',
-              request_field=u'reportWorkItemStatusRequest',
-              request_type_name=u'DataflowProjectsJobsWorkItemsReportStatusRequest',
-              response_type_name=u'ReportWorkItemStatusResponse',
-              supports_download=False,
-          ),
-          }
-
       self._upload_configs = {
           }
 
     def Lease(self, request, global_params=None):
-      """Leases a dataflow WorkItem to run.
+      r"""Leases a dataflow WorkItem to run.
 
       Args:
         request: (DataflowProjectsJobsWorkItemsLeaseRequest) input message
@@ -219,8 +190,21 @@ def Lease(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Lease.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.jobs.workItems.lease',
+        ordered_params=[u'projectId', u'jobId'],
+        path_params=[u'jobId', u'projectId'],
+        query_params=[],
+        relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/workItems:lease',
+        request_field=u'lease_work_item_request',
+        request_type_name=u'DataflowProjectsJobsWorkItemsLeaseRequest',
+        response_type_name=u'LeaseWorkItemResponse',
+        supports_download=False,
+    )
+
     def ReportStatus(self, request, global_params=None):
-      """Reports the status of dataflow WorkItems leased by a worker.
+      r"""Reports the status of dataflow WorkItems leased by a worker.
 
       Args:
         request: (DataflowProjectsJobsWorkItemsReportStatusRequest) input message
@@ -232,6 +216,19 @@ def ReportStatus(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    ReportStatus.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.jobs.workItems.reportStatus',
+        ordered_params=[u'projectId', u'jobId'],
+        path_params=[u'jobId', u'projectId'],
+        query_params=[],
+        relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/workItems:reportStatus',
+        request_field=u'report_work_item_status_request',
+        request_type_name=u'DataflowProjectsJobsWorkItemsReportStatusRequest',
+        response_type_name=u'ReportWorkItemStatusResponse',
+        supports_download=False,
+    )
+
   class ProjectsJobsService(base_api.BaseApiService):
     """Service class for the projects_jobs resource."""
 
@@ -239,86 +236,11 @@ class ProjectsJobsService(base_api.BaseApiService):
 
     def __init__(self, client):
       super(DataflowV1b3.ProjectsJobsService, self).__init__(client)
-      self._method_configs = {
-          'Aggregated': base_api.ApiMethodInfo(
-              http_method=u'GET',
-              method_id=u'dataflow.projects.jobs.aggregated',
-              ordered_params=[u'projectId'],
-              path_params=[u'projectId'],
-              query_params=[u'filter', u'location', u'pageSize', u'pageToken', u'view'],
-              relative_path=u'v1b3/projects/{projectId}/jobs:aggregated',
-              request_field='',
-              request_type_name=u'DataflowProjectsJobsAggregatedRequest',
-              response_type_name=u'ListJobsResponse',
-              supports_download=False,
-          ),
-          'Create': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.jobs.create',
-              ordered_params=[u'projectId'],
-              path_params=[u'projectId'],
-              query_params=[u'location', u'replaceJobId', u'view'],
-              relative_path=u'v1b3/projects/{projectId}/jobs',
-              request_field=u'job',
-              request_type_name=u'DataflowProjectsJobsCreateRequest',
-              response_type_name=u'Job',
-              supports_download=False,
-          ),
-          'Get': base_api.ApiMethodInfo(
-              http_method=u'GET',
-              method_id=u'dataflow.projects.jobs.get',
-              ordered_params=[u'projectId', u'jobId'],
-              path_params=[u'jobId', u'projectId'],
-              query_params=[u'location', u'view'],
-              relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}',
-              request_field='',
-              request_type_name=u'DataflowProjectsJobsGetRequest',
-              response_type_name=u'Job',
-              supports_download=False,
-          ),
-          'GetMetrics': base_api.ApiMethodInfo(
-              http_method=u'GET',
-              method_id=u'dataflow.projects.jobs.getMetrics',
-              ordered_params=[u'projectId', u'jobId'],
-              path_params=[u'jobId', u'projectId'],
-              query_params=[u'location', u'startTime'],
-              relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/metrics',
-              request_field='',
-              request_type_name=u'DataflowProjectsJobsGetMetricsRequest',
-              response_type_name=u'JobMetrics',
-              supports_download=False,
-          ),
-          'List': base_api.ApiMethodInfo(
-              http_method=u'GET',
-              method_id=u'dataflow.projects.jobs.list',
-              ordered_params=[u'projectId'],
-              path_params=[u'projectId'],
-              query_params=[u'filter', u'location', u'pageSize', u'pageToken', u'view'],
-              relative_path=u'v1b3/projects/{projectId}/jobs',
-              request_field='',
-              request_type_name=u'DataflowProjectsJobsListRequest',
-              response_type_name=u'ListJobsResponse',
-              supports_download=False,
-          ),
-          'Update': base_api.ApiMethodInfo(
-              http_method=u'PUT',
-              method_id=u'dataflow.projects.jobs.update',
-              ordered_params=[u'projectId', u'jobId'],
-              path_params=[u'jobId', u'projectId'],
-              query_params=[u'location'],
-              relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}',
-              request_field=u'job',
-              request_type_name=u'DataflowProjectsJobsUpdateRequest',
-              response_type_name=u'Job',
-              supports_download=False,
-          ),
-          }
-
       self._upload_configs = {
           }
 
     def Aggregated(self, request, global_params=None):
-      """List the jobs of a project across all regions.
+      r"""List the jobs of a project across all regions.
 
       Args:
         request: (DataflowProjectsJobsAggregatedRequest) input message
@@ -330,8 +252,21 @@ def Aggregated(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Aggregated.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dataflow.projects.jobs.aggregated',
+        ordered_params=[u'projectId'],
+        path_params=[u'projectId'],
+        query_params=[u'filter', u'location', u'pageSize', u'pageToken', u'view'],
+        relative_path=u'v1b3/projects/{projectId}/jobs:aggregated',
+        request_field='',
+        request_type_name=u'DataflowProjectsJobsAggregatedRequest',
+        response_type_name=u'ListJobsResponse',
+        supports_download=False,
+    )
+
     def Create(self, request, global_params=None):
-      """Creates a Cloud Dataflow job.
+      r"""Creates a Cloud Dataflow job.
 
       Args:
         request: (DataflowProjectsJobsCreateRequest) input message
@@ -343,8 +278,21 @@ def Create(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Create.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.jobs.create',
+        ordered_params=[u'projectId'],
+        path_params=[u'projectId'],
+        query_params=[u'location', u'replaceJobId', u'view'],
+        relative_path=u'v1b3/projects/{projectId}/jobs',
+        request_field=u'job',
+        request_type_name=u'DataflowProjectsJobsCreateRequest',
+        response_type_name=u'Job',
+        supports_download=False,
+    )
+
     def Get(self, request, global_params=None):
-      """Gets the state of the specified Cloud Dataflow job.
+      r"""Gets the state of the specified Cloud Dataflow job.
 
       Args:
         request: (DataflowProjectsJobsGetRequest) input message
@@ -356,8 +304,21 @@ def Get(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dataflow.projects.jobs.get',
+        ordered_params=[u'projectId', u'jobId'],
+        path_params=[u'jobId', u'projectId'],
+        query_params=[u'location', u'view'],
+        relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}',
+        request_field='',
+        request_type_name=u'DataflowProjectsJobsGetRequest',
+        response_type_name=u'Job',
+        supports_download=False,
+    )
+
     def GetMetrics(self, request, global_params=None):
-      """Request the job status.
+      r"""Request the job status.
 
       Args:
         request: (DataflowProjectsJobsGetMetricsRequest) input message
@@ -369,8 +330,21 @@ def GetMetrics(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    GetMetrics.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dataflow.projects.jobs.getMetrics',
+        ordered_params=[u'projectId', u'jobId'],
+        path_params=[u'jobId', u'projectId'],
+        query_params=[u'location', u'startTime'],
+        relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/metrics',
+        request_field='',
+        request_type_name=u'DataflowProjectsJobsGetMetricsRequest',
+        response_type_name=u'JobMetrics',
+        supports_download=False,
+    )
+
     def List(self, request, global_params=None):
-      """List the jobs of a project in a given region.
+      r"""List the jobs of a project in a given region.
 
       Args:
         request: (DataflowProjectsJobsListRequest) input message
@@ -382,8 +356,21 @@ def List(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dataflow.projects.jobs.list',
+        ordered_params=[u'projectId'],
+        path_params=[u'projectId'],
+        query_params=[u'filter', u'location', u'pageSize', u'pageToken', u'view'],
+        relative_path=u'v1b3/projects/{projectId}/jobs',
+        request_field='',
+        request_type_name=u'DataflowProjectsJobsListRequest',
+        response_type_name=u'ListJobsResponse',
+        supports_download=False,
+    )
+
     def Update(self, request, global_params=None):
-      """Updates the state of an existing Cloud Dataflow job.
+      r"""Updates the state of an existing Cloud Dataflow job.
 
       Args:
         request: (DataflowProjectsJobsUpdateRequest) input message
@@ -395,6 +382,19 @@ def Update(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'dataflow.projects.jobs.update',
+        ordered_params=[u'projectId', u'jobId'],
+        path_params=[u'jobId', u'projectId'],
+        query_params=[u'location'],
+        relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}',
+        request_field=u'job',
+        request_type_name=u'DataflowProjectsJobsUpdateRequest',
+        response_type_name=u'Job',
+        supports_download=False,
+    )
+
   class ProjectsLocationsJobsDebugService(base_api.BaseApiService):
     """Service class for the projects_locations_jobs_debug resource."""
 
@@ -402,38 +402,11 @@ class ProjectsLocationsJobsDebugService(base_api.BaseApiService):
 
     def __init__(self, client):
       super(DataflowV1b3.ProjectsLocationsJobsDebugService, self).__init__(client)
-      self._method_configs = {
-          'GetConfig': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.locations.jobs.debug.getConfig',
-              ordered_params=[u'projectId', u'location', u'jobId'],
-              path_params=[u'jobId', u'location', u'projectId'],
-              query_params=[],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/debug/getConfig',
-              request_field=u'getDebugConfigRequest',
-              request_type_name=u'DataflowProjectsLocationsJobsDebugGetConfigRequest',
-              response_type_name=u'GetDebugConfigResponse',
-              supports_download=False,
-          ),
-          'SendCapture': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.locations.jobs.debug.sendCapture',
-              ordered_params=[u'projectId', u'location', u'jobId'],
-              path_params=[u'jobId', u'location', u'projectId'],
-              query_params=[],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/debug/sendCapture',
-              request_field=u'sendDebugCaptureRequest',
-              request_type_name=u'DataflowProjectsLocationsJobsDebugSendCaptureRequest',
-              response_type_name=u'SendDebugCaptureResponse',
-              supports_download=False,
-          ),
-          }
-
       self._upload_configs = {
           }
 
     def GetConfig(self, request, global_params=None):
-      """Get encoded debug configuration for component. Not cacheable.
+      r"""Get encoded debug configuration for component. Not cacheable.
 
       Args:
         request: (DataflowProjectsLocationsJobsDebugGetConfigRequest) input message
@@ -445,8 +418,21 @@ def GetConfig(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    GetConfig.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.locations.jobs.debug.getConfig',
+        ordered_params=[u'projectId', u'location', u'jobId'],
+        path_params=[u'jobId', u'location', u'projectId'],
+        query_params=[],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/debug/getConfig',
+        request_field=u'get_debug_config_request',
+        request_type_name=u'DataflowProjectsLocationsJobsDebugGetConfigRequest',
+        response_type_name=u'GetDebugConfigResponse',
+        supports_download=False,
+    )
+
     def SendCapture(self, request, global_params=None):
-      """Send encoded debug capture data for component.
+      r"""Send encoded debug capture data for component.
 
       Args:
         request: (DataflowProjectsLocationsJobsDebugSendCaptureRequest) input message
@@ -458,6 +444,19 @@ def SendCapture(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    SendCapture.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.locations.jobs.debug.sendCapture',
+        ordered_params=[u'projectId', u'location', u'jobId'],
+        path_params=[u'jobId', u'location', u'projectId'],
+        query_params=[],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/debug/sendCapture',
+        request_field=u'send_debug_capture_request',
+        request_type_name=u'DataflowProjectsLocationsJobsDebugSendCaptureRequest',
+        response_type_name=u'SendDebugCaptureResponse',
+        supports_download=False,
+    )
+
   class ProjectsLocationsJobsMessagesService(base_api.BaseApiService):
     """Service class for the projects_locations_jobs_messages resource."""
 
@@ -465,26 +464,11 @@ class ProjectsLocationsJobsMessagesService(base_api.BaseApiService):
 
     def __init__(self, client):
       super(DataflowV1b3.ProjectsLocationsJobsMessagesService, self).__init__(client)
-      self._method_configs = {
-          'List': base_api.ApiMethodInfo(
-              http_method=u'GET',
-              method_id=u'dataflow.projects.locations.jobs.messages.list',
-              ordered_params=[u'projectId', u'location', u'jobId'],
-              path_params=[u'jobId', u'location', u'projectId'],
-              query_params=[u'endTime', u'minimumImportance', u'pageSize', u'pageToken', u'startTime'],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/messages',
-              request_field='',
-              request_type_name=u'DataflowProjectsLocationsJobsMessagesListRequest',
-              response_type_name=u'ListJobMessagesResponse',
-              supports_download=False,
-          ),
-          }
-
       self._upload_configs = {
           }
 
     def List(self, request, global_params=None):
-      """Request the job status.
+      r"""Request the job status.
 
       Args:
         request: (DataflowProjectsLocationsJobsMessagesListRequest) input message
@@ -496,6 +480,19 @@ def List(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dataflow.projects.locations.jobs.messages.list',
+        ordered_params=[u'projectId', u'location', u'jobId'],
+        path_params=[u'jobId', u'location', u'projectId'],
+        query_params=[u'endTime', u'minimumImportance', u'pageSize', u'pageToken', u'startTime'],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/messages',
+        request_field='',
+        request_type_name=u'DataflowProjectsLocationsJobsMessagesListRequest',
+        response_type_name=u'ListJobMessagesResponse',
+        supports_download=False,
+    )
+
   class ProjectsLocationsJobsWorkItemsService(base_api.BaseApiService):
     """Service class for the projects_locations_jobs_workItems resource."""
 
@@ -503,38 +500,11 @@ class ProjectsLocationsJobsWorkItemsService(base_api.BaseApiService):
 
     def __init__(self, client):
       super(DataflowV1b3.ProjectsLocationsJobsWorkItemsService, self).__init__(client)
-      self._method_configs = {
-          'Lease': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.locations.jobs.workItems.lease',
-              ordered_params=[u'projectId', u'location', u'jobId'],
-              path_params=[u'jobId', u'location', u'projectId'],
-              query_params=[],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:lease',
-              request_field=u'leaseWorkItemRequest',
-              request_type_name=u'DataflowProjectsLocationsJobsWorkItemsLeaseRequest',
-              response_type_name=u'LeaseWorkItemResponse',
-              supports_download=False,
-          ),
-          'ReportStatus': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.locations.jobs.workItems.reportStatus',
-              ordered_params=[u'projectId', u'location', u'jobId'],
-              path_params=[u'jobId', u'location', u'projectId'],
-              query_params=[],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:reportStatus',
-              request_field=u'reportWorkItemStatusRequest',
-              request_type_name=u'DataflowProjectsLocationsJobsWorkItemsReportStatusRequest',
-              response_type_name=u'ReportWorkItemStatusResponse',
-              supports_download=False,
-          ),
-          }
-
       self._upload_configs = {
           }
 
     def Lease(self, request, global_params=None):
-      """Leases a dataflow WorkItem to run.
+      r"""Leases a dataflow WorkItem to run.
 
       Args:
         request: (DataflowProjectsLocationsJobsWorkItemsLeaseRequest) input message
@@ -546,8 +516,21 @@ def Lease(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Lease.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.locations.jobs.workItems.lease',
+        ordered_params=[u'projectId', u'location', u'jobId'],
+        path_params=[u'jobId', u'location', u'projectId'],
+        query_params=[],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:lease',
+        request_field=u'lease_work_item_request',
+        request_type_name=u'DataflowProjectsLocationsJobsWorkItemsLeaseRequest',
+        response_type_name=u'LeaseWorkItemResponse',
+        supports_download=False,
+    )
+
     def ReportStatus(self, request, global_params=None):
-      """Reports the status of dataflow WorkItems leased by a worker.
+      r"""Reports the status of dataflow WorkItems leased by a worker.
 
       Args:
         request: (DataflowProjectsLocationsJobsWorkItemsReportStatusRequest) input message
@@ -559,6 +542,19 @@ def ReportStatus(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    ReportStatus.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.locations.jobs.workItems.reportStatus',
+        ordered_params=[u'projectId', u'location', u'jobId'],
+        path_params=[u'jobId', u'location', u'projectId'],
+        query_params=[],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:reportStatus',
+        request_field=u'report_work_item_status_request',
+        request_type_name=u'DataflowProjectsLocationsJobsWorkItemsReportStatusRequest',
+        response_type_name=u'ReportWorkItemStatusResponse',
+        supports_download=False,
+    )
+
   class ProjectsLocationsJobsService(base_api.BaseApiService):
     """Service class for the projects_locations_jobs resource."""
 
@@ -566,74 +562,11 @@ class ProjectsLocationsJobsService(base_api.BaseApiService):
 
     def __init__(self, client):
       super(DataflowV1b3.ProjectsLocationsJobsService, self).__init__(client)
-      self._method_configs = {
-          'Create': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.locations.jobs.create',
-              ordered_params=[u'projectId', u'location'],
-              path_params=[u'location', u'projectId'],
-              query_params=[u'replaceJobId', u'view'],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs',
-              request_field=u'job',
-              request_type_name=u'DataflowProjectsLocationsJobsCreateRequest',
-              response_type_name=u'Job',
-              supports_download=False,
-          ),
-          'Get': base_api.ApiMethodInfo(
-              http_method=u'GET',
-              method_id=u'dataflow.projects.locations.jobs.get',
-              ordered_params=[u'projectId', u'location', u'jobId'],
-              path_params=[u'jobId', u'location', u'projectId'],
-              query_params=[u'view'],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}',
-              request_field='',
-              request_type_name=u'DataflowProjectsLocationsJobsGetRequest',
-              response_type_name=u'Job',
-              supports_download=False,
-          ),
-          'GetMetrics': base_api.ApiMethodInfo(
-              http_method=u'GET',
-              method_id=u'dataflow.projects.locations.jobs.getMetrics',
-              ordered_params=[u'projectId', u'location', u'jobId'],
-              path_params=[u'jobId', u'location', u'projectId'],
-              query_params=[u'startTime'],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/metrics',
-              request_field='',
-              request_type_name=u'DataflowProjectsLocationsJobsGetMetricsRequest',
-              response_type_name=u'JobMetrics',
-              supports_download=False,
-          ),
-          'List': base_api.ApiMethodInfo(
-              http_method=u'GET',
-              method_id=u'dataflow.projects.locations.jobs.list',
-              ordered_params=[u'projectId', u'location'],
-              path_params=[u'location', u'projectId'],
-              query_params=[u'filter', u'pageSize', u'pageToken', u'view'],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs',
-              request_field='',
-              request_type_name=u'DataflowProjectsLocationsJobsListRequest',
-              response_type_name=u'ListJobsResponse',
-              supports_download=False,
-          ),
-          'Update': base_api.ApiMethodInfo(
-              http_method=u'PUT',
-              method_id=u'dataflow.projects.locations.jobs.update',
-              ordered_params=[u'projectId', u'location', u'jobId'],
-              path_params=[u'jobId', u'location', u'projectId'],
-              query_params=[],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}',
-              request_field=u'job',
-              request_type_name=u'DataflowProjectsLocationsJobsUpdateRequest',
-              response_type_name=u'Job',
-              supports_download=False,
-          ),
-          }
-
       self._upload_configs = {
           }
 
     def Create(self, request, global_params=None):
-      """Creates a Cloud Dataflow job.
+      r"""Creates a Cloud Dataflow job.
 
       Args:
         request: (DataflowProjectsLocationsJobsCreateRequest) input message
@@ -645,8 +578,21 @@ def Create(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Create.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.locations.jobs.create',
+        ordered_params=[u'projectId', u'location'],
+        path_params=[u'location', u'projectId'],
+        query_params=[u'replaceJobId', u'view'],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs',
+        request_field=u'job',
+        request_type_name=u'DataflowProjectsLocationsJobsCreateRequest',
+        response_type_name=u'Job',
+        supports_download=False,
+    )
+
     def Get(self, request, global_params=None):
-      """Gets the state of the specified Cloud Dataflow job.
+      r"""Gets the state of the specified Cloud Dataflow job.
 
       Args:
         request: (DataflowProjectsLocationsJobsGetRequest) input message
@@ -658,8 +604,21 @@ def Get(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dataflow.projects.locations.jobs.get',
+        ordered_params=[u'projectId', u'location', u'jobId'],
+        path_params=[u'jobId', u'location', u'projectId'],
+        query_params=[u'view'],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}',
+        request_field='',
+        request_type_name=u'DataflowProjectsLocationsJobsGetRequest',
+        response_type_name=u'Job',
+        supports_download=False,
+    )
+
     def GetMetrics(self, request, global_params=None):
-      """Request the job status.
+      r"""Request the job status.
 
       Args:
         request: (DataflowProjectsLocationsJobsGetMetricsRequest) input message
@@ -671,8 +630,21 @@ def GetMetrics(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    GetMetrics.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dataflow.projects.locations.jobs.getMetrics',
+        ordered_params=[u'projectId', u'location', u'jobId'],
+        path_params=[u'jobId', u'location', u'projectId'],
+        query_params=[u'startTime'],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/metrics',
+        request_field='',
+        request_type_name=u'DataflowProjectsLocationsJobsGetMetricsRequest',
+        response_type_name=u'JobMetrics',
+        supports_download=False,
+    )
+
     def List(self, request, global_params=None):
-      """List the jobs of a project in a given region.
+      r"""List the jobs of a project in a given region.
 
       Args:
         request: (DataflowProjectsLocationsJobsListRequest) input message
@@ -684,8 +656,21 @@ def List(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    List.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dataflow.projects.locations.jobs.list',
+        ordered_params=[u'projectId', u'location'],
+        path_params=[u'location', u'projectId'],
+        query_params=[u'filter', u'pageSize', u'pageToken', u'view'],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs',
+        request_field='',
+        request_type_name=u'DataflowProjectsLocationsJobsListRequest',
+        response_type_name=u'ListJobsResponse',
+        supports_download=False,
+    )
+
     def Update(self, request, global_params=None):
-      """Updates the state of an existing Cloud Dataflow job.
+      r"""Updates the state of an existing Cloud Dataflow job.
 
       Args:
         request: (DataflowProjectsLocationsJobsUpdateRequest) input message
@@ -697,6 +682,19 @@ def Update(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Update.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'PUT',
+        method_id=u'dataflow.projects.locations.jobs.update',
+        ordered_params=[u'projectId', u'location', u'jobId'],
+        path_params=[u'jobId', u'location', u'projectId'],
+        query_params=[],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}',
+        request_field=u'job',
+        request_type_name=u'DataflowProjectsLocationsJobsUpdateRequest',
+        response_type_name=u'Job',
+        supports_download=False,
+    )
+
   class ProjectsLocationsTemplatesService(base_api.BaseApiService):
     """Service class for the projects_locations_templates resource."""
 
@@ -704,50 +702,11 @@ class ProjectsLocationsTemplatesService(base_api.BaseApiService):
 
     def __init__(self, client):
       super(DataflowV1b3.ProjectsLocationsTemplatesService, self).__init__(client)
-      self._method_configs = {
-          'Create': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.locations.templates.create',
-              ordered_params=[u'projectId', u'location'],
-              path_params=[u'location', u'projectId'],
-              query_params=[],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/templates',
-              request_field=u'createJobFromTemplateRequest',
-              request_type_name=u'DataflowProjectsLocationsTemplatesCreateRequest',
-              response_type_name=u'Job',
-              supports_download=False,
-          ),
-          'Get': base_api.ApiMethodInfo(
-              http_method=u'GET',
-              method_id=u'dataflow.projects.locations.templates.get',
-              ordered_params=[u'projectId', u'location'],
-              path_params=[u'location', u'projectId'],
-              query_params=[u'gcsPath', u'view'],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/templates:get',
-              request_field='',
-              request_type_name=u'DataflowProjectsLocationsTemplatesGetRequest',
-              response_type_name=u'GetTemplateResponse',
-              supports_download=False,
-          ),
-          'Launch': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.locations.templates.launch',
-              ordered_params=[u'projectId', u'location'],
-              path_params=[u'location', u'projectId'],
-              query_params=[u'gcsPath', u'validateOnly'],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/templates:launch',
-              request_field=u'launchTemplateParameters',
-              request_type_name=u'DataflowProjectsLocationsTemplatesLaunchRequest',
-              response_type_name=u'LaunchTemplateResponse',
-              supports_download=False,
-          ),
-          }
-
       self._upload_configs = {
           }
 
     def Create(self, request, global_params=None):
-      """Creates a Cloud Dataflow job from a template.
+      r"""Creates a Cloud Dataflow job from a template.
 
       Args:
         request: (DataflowProjectsLocationsTemplatesCreateRequest) input message
@@ -759,8 +718,21 @@ def Create(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Create.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.locations.templates.create',
+        ordered_params=[u'projectId', u'location'],
+        path_params=[u'location', u'projectId'],
+        query_params=[],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/templates',
+        request_field=u'create_job_from_template_request',
+        request_type_name=u'DataflowProjectsLocationsTemplatesCreateRequest',
+        response_type_name=u'Job',
+        supports_download=False,
+    )
+
     def Get(self, request, global_params=None):
-      """Get the template associated with a template.
+      r"""Get the template associated with a template.
 
       Args:
         request: (DataflowProjectsLocationsTemplatesGetRequest) input message
@@ -772,8 +744,21 @@ def Get(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dataflow.projects.locations.templates.get',
+        ordered_params=[u'projectId', u'location'],
+        path_params=[u'location', u'projectId'],
+        query_params=[u'gcsPath', u'view'],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/templates:get',
+        request_field='',
+        request_type_name=u'DataflowProjectsLocationsTemplatesGetRequest',
+        response_type_name=u'GetTemplateResponse',
+        supports_download=False,
+    )
+
     def Launch(self, request, global_params=None):
-      """Launch a template.
+      r"""Launch a template.
 
       Args:
         request: (DataflowProjectsLocationsTemplatesLaunchRequest) input message
@@ -785,6 +770,19 @@ def Launch(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Launch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.locations.templates.launch',
+        ordered_params=[u'projectId', u'location'],
+        path_params=[u'location', u'projectId'],
+        query_params=[u'gcsPath', u'validateOnly'],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/templates:launch',
+        request_field=u'launch_template_parameters',
+        request_type_name=u'DataflowProjectsLocationsTemplatesLaunchRequest',
+        response_type_name=u'LaunchTemplateResponse',
+        supports_download=False,
+    )
+
   class ProjectsLocationsService(base_api.BaseApiService):
     """Service class for the projects_locations resource."""
 
@@ -792,26 +790,11 @@ class ProjectsLocationsService(base_api.BaseApiService):
 
     def __init__(self, client):
       super(DataflowV1b3.ProjectsLocationsService, self).__init__(client)
-      self._method_configs = {
-          'WorkerMessages': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.locations.workerMessages',
-              ordered_params=[u'projectId', u'location'],
-              path_params=[u'location', u'projectId'],
-              query_params=[],
-              relative_path=u'v1b3/projects/{projectId}/locations/{location}/WorkerMessages',
-              request_field=u'sendWorkerMessagesRequest',
-              request_type_name=u'DataflowProjectsLocationsWorkerMessagesRequest',
-              response_type_name=u'SendWorkerMessagesResponse',
-              supports_download=False,
-          ),
-          }
-
       self._upload_configs = {
           }
 
     def WorkerMessages(self, request, global_params=None):
-      """Send a worker_message to the service.
+      r"""Send a worker_message to the service.
 
       Args:
         request: (DataflowProjectsLocationsWorkerMessagesRequest) input message
@@ -823,6 +806,19 @@ def WorkerMessages(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    WorkerMessages.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.locations.workerMessages',
+        ordered_params=[u'projectId', u'location'],
+        path_params=[u'location', u'projectId'],
+        query_params=[],
+        relative_path=u'v1b3/projects/{projectId}/locations/{location}/WorkerMessages',
+        request_field=u'send_worker_messages_request',
+        request_type_name=u'DataflowProjectsLocationsWorkerMessagesRequest',
+        response_type_name=u'SendWorkerMessagesResponse',
+        supports_download=False,
+    )
+
   class ProjectsTemplatesService(base_api.BaseApiService):
     """Service class for the projects_templates resource."""
 
@@ -830,50 +826,11 @@ class ProjectsTemplatesService(base_api.BaseApiService):
 
     def __init__(self, client):
       super(DataflowV1b3.ProjectsTemplatesService, self).__init__(client)
-      self._method_configs = {
-          'Create': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.templates.create',
-              ordered_params=[u'projectId'],
-              path_params=[u'projectId'],
-              query_params=[],
-              relative_path=u'v1b3/projects/{projectId}/templates',
-              request_field=u'createJobFromTemplateRequest',
-              request_type_name=u'DataflowProjectsTemplatesCreateRequest',
-              response_type_name=u'Job',
-              supports_download=False,
-          ),
-          'Get': base_api.ApiMethodInfo(
-              http_method=u'GET',
-              method_id=u'dataflow.projects.templates.get',
-              ordered_params=[u'projectId'],
-              path_params=[u'projectId'],
-              query_params=[u'gcsPath', u'location', u'view'],
-              relative_path=u'v1b3/projects/{projectId}/templates:get',
-              request_field='',
-              request_type_name=u'DataflowProjectsTemplatesGetRequest',
-              response_type_name=u'GetTemplateResponse',
-              supports_download=False,
-          ),
-          'Launch': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.templates.launch',
-              ordered_params=[u'projectId'],
-              path_params=[u'projectId'],
-              query_params=[u'gcsPath', u'location', u'validateOnly'],
-              relative_path=u'v1b3/projects/{projectId}/templates:launch',
-              request_field=u'launchTemplateParameters',
-              request_type_name=u'DataflowProjectsTemplatesLaunchRequest',
-              response_type_name=u'LaunchTemplateResponse',
-              supports_download=False,
-          ),
-          }
-
       self._upload_configs = {
           }
 
     def Create(self, request, global_params=None):
-      """Creates a Cloud Dataflow job from a template.
+      r"""Creates a Cloud Dataflow job from a template.
 
       Args:
         request: (DataflowProjectsTemplatesCreateRequest) input message
@@ -885,8 +842,21 @@ def Create(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Create.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.templates.create',
+        ordered_params=[u'projectId'],
+        path_params=[u'projectId'],
+        query_params=[],
+        relative_path=u'v1b3/projects/{projectId}/templates',
+        request_field=u'create_job_from_template_request',
+        request_type_name=u'DataflowProjectsTemplatesCreateRequest',
+        response_type_name=u'Job',
+        supports_download=False,
+    )
+
     def Get(self, request, global_params=None):
-      """Get the template associated with a template.
+      r"""Get the template associated with a template.
 
       Args:
         request: (DataflowProjectsTemplatesGetRequest) input message
@@ -898,8 +868,21 @@ def Get(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Get.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'GET',
+        method_id=u'dataflow.projects.templates.get',
+        ordered_params=[u'projectId'],
+        path_params=[u'projectId'],
+        query_params=[u'gcsPath', u'location', u'view'],
+        relative_path=u'v1b3/projects/{projectId}/templates:get',
+        request_field='',
+        request_type_name=u'DataflowProjectsTemplatesGetRequest',
+        response_type_name=u'GetTemplateResponse',
+        supports_download=False,
+    )
+
     def Launch(self, request, global_params=None):
-      """Launch a template.
+      r"""Launch a template.
 
       Args:
         request: (DataflowProjectsTemplatesLaunchRequest) input message
@@ -911,6 +894,19 @@ def Launch(self, request, global_params=None):
       return self._RunMethod(
           config, request, global_params=global_params)
 
+    Launch.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.templates.launch',
+        ordered_params=[u'projectId'],
+        path_params=[u'projectId'],
+        query_params=[u'gcsPath', u'location', u'validateOnly'],
+        relative_path=u'v1b3/projects/{projectId}/templates:launch',
+        request_field=u'launch_template_parameters',
+        request_type_name=u'DataflowProjectsTemplatesLaunchRequest',
+        response_type_name=u'LaunchTemplateResponse',
+        supports_download=False,
+    )
+
   class ProjectsService(base_api.BaseApiService):
     """Service class for the projects resource."""
 
@@ -918,26 +914,11 @@ class ProjectsService(base_api.BaseApiService):
 
     def __init__(self, client):
       super(DataflowV1b3.ProjectsService, self).__init__(client)
-      self._method_configs = {
-          'WorkerMessages': base_api.ApiMethodInfo(
-              http_method=u'POST',
-              method_id=u'dataflow.projects.workerMessages',
-              ordered_params=[u'projectId'],
-              path_params=[u'projectId'],
-              query_params=[],
-              relative_path=u'v1b3/projects/{projectId}/WorkerMessages',
-              request_field=u'sendWorkerMessagesRequest',
-              request_type_name=u'DataflowProjectsWorkerMessagesRequest',
-              response_type_name=u'SendWorkerMessagesResponse',
-              supports_download=False,
-          ),
-          }
-
       self._upload_configs = {
           }
 
     def WorkerMessages(self, request, global_params=None):
-      """Send a worker_message to the service.
+      r"""Send a worker_message to the service.
 
       Args:
         request: (DataflowProjectsWorkerMessagesRequest) input message
@@ -948,3 +929,16 @@ def WorkerMessages(self, request, global_params=None):
       config = self.GetMethodConfig('WorkerMessages')
       return self._RunMethod(
           config, request, global_params=global_params)
+
+    WorkerMessages.method_config = lambda: base_api.ApiMethodInfo(
+        http_method=u'POST',
+        method_id=u'dataflow.projects.workerMessages',
+        ordered_params=[u'projectId'],
+        path_params=[u'projectId'],
+        query_params=[],
+        relative_path=u'v1b3/projects/{projectId}/WorkerMessages',
+        request_field=u'send_worker_messages_request',
+        request_type_name=u'DataflowProjectsWorkerMessagesRequest',
+        response_type_name=u'SendWorkerMessagesResponse',
+        supports_download=False,
+    )
diff --git a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py
index bdb5c6d2662..ba50416f386 100644
--- a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py
+++ b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py
@@ -17,13 +17,10 @@
 
 """Generated message classes for dataflow version v1b3.
 
-Develops and executes data processing patterns like ETL, batch computation,
-and continuous computation.
+Manages Google Cloud Dataflow projects on Google Cloud Platform.
 """
 # NOTE: This file is autogenerated and should not be edited by hand.
 
-from __future__ import absolute_import
-
 from apitools.base.protorpclite import messages as _messages
 from apitools.base.py import encoding
 from apitools.base.py import extra_types
@@ -33,7 +30,7 @@
 
 
 class ApproximateProgress(_messages.Message):
-  """Obsolete in favor of ApproximateReportedProgress and
+  r"""Obsolete in favor of ApproximateReportedProgress and
   ApproximateSplitRequest.
 
   Fields:
@@ -48,7 +45,7 @@ class ApproximateProgress(_messages.Message):
 
 
 class ApproximateReportedProgress(_messages.Message):
-  """A progress measurement of a WorkItem by a worker.
+  r"""A progress measurement of a WorkItem by a worker.
 
   Fields:
     consumedParallelism: Total amount of parallelism in the portion of input
@@ -90,7 +87,7 @@ class ApproximateReportedProgress(_messages.Message):
 
 
 class ApproximateSplitRequest(_messages.Message):
-  """A suggestion by the service to the worker to dynamically split the
+  r"""A suggestion by the service to the worker to dynamically split the
   WorkItem.
 
   Fields:
@@ -104,7 +101,7 @@ class ApproximateSplitRequest(_messages.Message):
 
 
 class AutoscalingEvent(_messages.Message):
-  """A structured message reporting an autoscaling decision made by the
+  r"""A structured message reporting an autoscaling decision made by the
   Dataflow service.
 
   Enums:
@@ -120,10 +117,13 @@ class AutoscalingEvent(_messages.Message):
       resize to use.
     time: The time this event was emitted to indicate a new target or current
       num_workers value.
+    workerPool: A short and friendly name for the worker pool this event
+      refers to, populated from the value of
+      PoolStageRelation::user_pool_name.
   """
 
   class EventTypeValueValuesEnum(_messages.Enum):
-    """The type of autoscaling event to report.
+    r"""The type of autoscaling event to report.
 
     Values:
       TYPE_UNKNOWN: Default type for the enum.  Value should never be
@@ -155,10 +155,11 @@ class EventTypeValueValuesEnum(_messages.Enum):
   eventType = _messages.EnumField('EventTypeValueValuesEnum', 3)
   targetNumWorkers = _messages.IntegerField(4)
   time = _messages.StringField(5)
+  workerPool = _messages.StringField(6)
 
 
 class AutoscalingSettings(_messages.Message):
-  """Settings for WorkerPool autoscaling.
+  r"""Settings for WorkerPool autoscaling.
 
   Enums:
     AlgorithmValueValuesEnum: The algorithm to use for autoscaling.
@@ -169,7 +170,7 @@ class AutoscalingSettings(_messages.Message):
   """
 
   class AlgorithmValueValuesEnum(_messages.Enum):
-    """The algorithm to use for autoscaling.
+    r"""The algorithm to use for autoscaling.
 
     Values:
       AUTOSCALING_ALGORITHM_UNKNOWN: The algorithm is unknown, or unspecified.
@@ -185,8 +186,38 @@ class AlgorithmValueValuesEnum(_messages.Enum):
   maxNumWorkers = _messages.IntegerField(2, variant=_messages.Variant.INT32)
 
 
+class BigQueryIODetails(_messages.Message):
+  r"""Metadata for a BigQuery connector used by the job.
+
+  Fields:
+    dataset: Dataset accessed in the connection.
+    projectId: Project accessed in the connection.
+    query: Query used to access data in the connection.
+    table: Table accessed in the connection.
+  """
+
+  dataset = _messages.StringField(1)
+  projectId = _messages.StringField(2)
+  query = _messages.StringField(3)
+  table = _messages.StringField(4)
+
+
+class BigTableIODetails(_messages.Message):
+  r"""Metadata for a BigTable connector used by the job.
+
+  Fields:
+    instanceId: InstanceId accessed in the connection.
+    projectId: ProjectId accessed in the connection.
+    tableId: TableId accessed in the connection.
+  """
+
+  instanceId = _messages.StringField(1)
+  projectId = _messages.StringField(2)
+  tableId = _messages.StringField(3)
+
+
 class CPUTime(_messages.Message):
-  """Modeled after information exposed by /proc/stat.
+  r"""Modeled after information exposed by /proc/stat.
 
   Fields:
     rate: Average CPU utilization rate (% non-idle cpu / second) since
@@ -202,7 +233,7 @@ class CPUTime(_messages.Message):
 
 
 class ComponentSource(_messages.Message):
-  """Description of an interstitial value between transforms in an execution
+  r"""Description of an interstitial value between transforms in an execution
   stage.
 
   Fields:
@@ -219,7 +250,7 @@ class ComponentSource(_messages.Message):
 
 
 class ComponentTransform(_messages.Message):
-  """Description of a transform executed as part of an execution stage.
+  r"""Description of a transform executed as part of an execution stage.
 
   Fields:
     name: Dataflow service generated name for this source.
@@ -235,7 +266,7 @@ class ComponentTransform(_messages.Message):
 
 
 class ComputationTopology(_messages.Message):
-  """All configuration data for a particular Computation.
+  r"""All configuration data for a particular Computation.
 
   Fields:
     computationId: The ID of the computation.
@@ -255,8 +286,8 @@ class ComputationTopology(_messages.Message):
 
 
 class ConcatPosition(_messages.Message):
-  """A position that encapsulates an inner position and an index for the inner
-  position. A ConcatPosition can be used by a reader of a source that
+  r"""A position that encapsulates an inner position and an index for the
+  inner position. A ConcatPosition can be used by a reader of a source that
   encapsulates a set of other sources.
 
   Fields:
@@ -269,7 +300,7 @@ class ConcatPosition(_messages.Message):
 
 
 class CounterMetadata(_messages.Message):
-  """CounterMetadata includes all static non-name non-value counter
+  r"""CounterMetadata includes all static non-name non-value counter
   attributes.
 
   Enums:
@@ -284,7 +315,7 @@ class CounterMetadata(_messages.Message):
   """
 
   class KindValueValuesEnum(_messages.Enum):
-    """Counter aggregation kind.
+    r"""Counter aggregation kind.
 
     Values:
       INVALID: Counter aggregation kind was not set.
@@ -312,7 +343,7 @@ class KindValueValuesEnum(_messages.Enum):
     LATEST_VALUE = 9
 
   class StandardUnitsValueValuesEnum(_messages.Enum):
-    """System defined Units, see above enum.
+    r"""System defined Units, see above enum.
 
     Values:
       BYTES: Counter returns a value in bytes.
@@ -340,7 +371,7 @@ class StandardUnitsValueValuesEnum(_messages.Enum):
 
 
 class CounterStructuredName(_messages.Message):
-  """Identifies a counter within a per-job namespace. Counters whose
+  r"""Identifies a counter within a per-job namespace. Counters whose
   structured names are the same get merged into a single value for the job.
 
   Enums:
@@ -372,7 +403,7 @@ class CounterStructuredName(_messages.Message):
   """
 
   class OriginValueValuesEnum(_messages.Enum):
-    """One of the standard Origins defined above.
+    r"""One of the standard Origins defined above.
 
     Values:
       SYSTEM: Counter was created by the Dataflow system.
@@ -382,7 +413,7 @@ class OriginValueValuesEnum(_messages.Enum):
     USER = 1
 
   class PortionValueValuesEnum(_messages.Enum):
-    """Portion of this counter, either key or value.
+    r"""Portion of this counter, either key or value.
 
     Values:
       ALL: Counter portion has not been set.
@@ -406,7 +437,7 @@ class PortionValueValuesEnum(_messages.Enum):
 
 
 class CounterStructuredNameAndMetadata(_messages.Message):
-  """A single message which encapsulates structured name and metadata for a
+  r"""A single message which encapsulates structured name and metadata for a
   given counter.
 
   Fields:
@@ -419,7 +450,7 @@ class CounterStructuredNameAndMetadata(_messages.Message):
 
 
 class CounterUpdate(_messages.Message):
-  """An update to a Counter sent from a worker.
+  r"""An update to a Counter sent from a worker.
 
   Fields:
     boolean: Boolean value for And, Or.
@@ -463,7 +494,7 @@ class CounterUpdate(_messages.Message):
 
 
 class CreateJobFromTemplateRequest(_messages.Message):
-  """A request to create a Cloud Dataflow job from a template.
+  r"""A request to create a Cloud Dataflow job from a template.
 
   Messages:
     ParametersValue: The runtime parameters to pass to the job.
@@ -480,7 +511,7 @@ class CreateJobFromTemplateRequest(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class ParametersValue(_messages.Message):
-    """The runtime parameters to pass to the job.
+    r"""The runtime parameters to pass to the job.
 
     Messages:
       AdditionalProperty: An additional property for a ParametersValue object.
@@ -490,7 +521,7 @@ class ParametersValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a ParametersValue object.
+      r"""An additional property for a ParametersValue object.
 
       Fields:
         key: Name of the additional property.
@@ -510,7 +541,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class CustomSourceLocation(_messages.Message):
-  """Identifies the location of a custom souce.
+  r"""Identifies the location of a custom souce.
 
   Fields:
     stateful: Whether this source is stateful.
@@ -520,7 +551,7 @@ class CustomSourceLocation(_messages.Message):
 
 
 class DataDiskAssignment(_messages.Message):
-  """Data disk assignment for a given VM instance.
+  r"""Data disk assignment for a given VM instance.
 
   Fields:
     dataDisks: Mounted data disks. The order is important a data disk's
@@ -537,7 +568,7 @@ class DataDiskAssignment(_messages.Message):
 
 
 class DataflowProjectsJobsAggregatedRequest(_messages.Message):
-  """A DataflowProjectsJobsAggregatedRequest object.
+  r"""A DataflowProjectsJobsAggregatedRequest object.
 
   Enums:
     FilterValueValuesEnum: The kind of filter to use.
@@ -558,7 +589,7 @@ class DataflowProjectsJobsAggregatedRequest(_messages.Message):
   """
 
   class FilterValueValuesEnum(_messages.Enum):
-    """The kind of filter to use.
+    r"""The kind of filter to use.
 
     Values:
       UNKNOWN: <no description>
@@ -572,7 +603,7 @@ class FilterValueValuesEnum(_messages.Enum):
     ACTIVE = 3
 
   class ViewValueValuesEnum(_messages.Enum):
-    """Level of information requested in response. Default is
+    r"""Level of information requested in response. Default is
     `JOB_VIEW_SUMMARY`.
 
     Values:
@@ -595,7 +626,7 @@ class ViewValueValuesEnum(_messages.Enum):
 
 
 class DataflowProjectsJobsCreateRequest(_messages.Message):
-  """A DataflowProjectsJobsCreateRequest object.
+  r"""A DataflowProjectsJobsCreateRequest object.
 
   Enums:
     ViewValueValuesEnum: The level of information requested in response.
@@ -609,7 +640,7 @@ class DataflowProjectsJobsCreateRequest(_messages.Message):
   """
 
   class ViewValueValuesEnum(_messages.Enum):
-    """The level of information requested in response.
+    r"""The level of information requested in response.
 
     Values:
       JOB_VIEW_UNKNOWN: <no description>
@@ -630,37 +661,37 @@ class ViewValueValuesEnum(_messages.Enum):
 
 
 class DataflowProjectsJobsDebugGetConfigRequest(_messages.Message):
-  """A DataflowProjectsJobsDebugGetConfigRequest object.
+  r"""A DataflowProjectsJobsDebugGetConfigRequest object.
 
   Fields:
-    getDebugConfigRequest: A GetDebugConfigRequest resource to be passed as
+    get_debug_config_request: A GetDebugConfigRequest resource to be passed as
       the request body.
     jobId: The job id.
     projectId: The project id.
   """
 
-  getDebugConfigRequest = _messages.MessageField('GetDebugConfigRequest', 1)
+  get_debug_config_request = _messages.MessageField('GetDebugConfigRequest', 1)
   jobId = _messages.StringField(2, required=True)
   projectId = _messages.StringField(3, required=True)
 
 
 class DataflowProjectsJobsDebugSendCaptureRequest(_messages.Message):
-  """A DataflowProjectsJobsDebugSendCaptureRequest object.
+  r"""A DataflowProjectsJobsDebugSendCaptureRequest object.
 
   Fields:
     jobId: The job id.
     projectId: The project id.
-    sendDebugCaptureRequest: A SendDebugCaptureRequest resource to be passed
-      as the request body.
+    send_debug_capture_request: A SendDebugCaptureRequest resource to be
+      passed as the request body.
   """
 
   jobId = _messages.StringField(1, required=True)
   projectId = _messages.StringField(2, required=True)
-  sendDebugCaptureRequest = _messages.MessageField('SendDebugCaptureRequest', 3)
+  send_debug_capture_request = _messages.MessageField('SendDebugCaptureRequest', 3)
 
 
 class DataflowProjectsJobsGetMetricsRequest(_messages.Message):
-  """A DataflowProjectsJobsGetMetricsRequest object.
+  r"""A DataflowProjectsJobsGetMetricsRequest object.
 
   Fields:
     jobId: The job to get messages for.
@@ -677,7 +708,7 @@ class DataflowProjectsJobsGetMetricsRequest(_messages.Message):
 
 
 class DataflowProjectsJobsGetRequest(_messages.Message):
-  """A DataflowProjectsJobsGetRequest object.
+  r"""A DataflowProjectsJobsGetRequest object.
 
   Enums:
     ViewValueValuesEnum: The level of information requested in response.
@@ -690,7 +721,7 @@ class DataflowProjectsJobsGetRequest(_messages.Message):
   """
 
   class ViewValueValuesEnum(_messages.Enum):
-    """The level of information requested in response.
+    r"""The level of information requested in response.
 
     Values:
       JOB_VIEW_UNKNOWN: <no description>
@@ -710,7 +741,7 @@ class ViewValueValuesEnum(_messages.Enum):
 
 
 class DataflowProjectsJobsListRequest(_messages.Message):
-  """A DataflowProjectsJobsListRequest object.
+  r"""A DataflowProjectsJobsListRequest object.
 
   Enums:
     FilterValueValuesEnum: The kind of filter to use.
@@ -731,7 +762,7 @@ class DataflowProjectsJobsListRequest(_messages.Message):
   """
 
   class FilterValueValuesEnum(_messages.Enum):
-    """The kind of filter to use.
+    r"""The kind of filter to use.
 
     Values:
       UNKNOWN: <no description>
@@ -745,7 +776,7 @@ class FilterValueValuesEnum(_messages.Enum):
     ACTIVE = 3
 
   class ViewValueValuesEnum(_messages.Enum):
-    """Level of information requested in response. Default is
+    r"""Level of information requested in response. Default is
     `JOB_VIEW_SUMMARY`.
 
     Values:
@@ -768,7 +799,7 @@ class ViewValueValuesEnum(_messages.Enum):
 
 
 class DataflowProjectsJobsMessagesListRequest(_messages.Message):
-  """A DataflowProjectsJobsMessagesListRequest object.
+  r"""A DataflowProjectsJobsMessagesListRequest object.
 
   Enums:
     MinimumImportanceValueValuesEnum: Filter to only get messages with
@@ -793,7 +824,7 @@ class DataflowProjectsJobsMessagesListRequest(_messages.Message):
   """
 
   class MinimumImportanceValueValuesEnum(_messages.Enum):
-    """Filter to only get messages with importance >= level
+    r"""Filter to only get messages with importance >= level
 
     Values:
       JOB_MESSAGE_IMPORTANCE_UNKNOWN: <no description>
@@ -821,7 +852,7 @@ class MinimumImportanceValueValuesEnum(_messages.Enum):
 
 
 class DataflowProjectsJobsUpdateRequest(_messages.Message):
-  """A DataflowProjectsJobsUpdateRequest object.
+  r"""A DataflowProjectsJobsUpdateRequest object.
 
   Fields:
     job: A Job resource to be passed as the request body.
@@ -837,37 +868,37 @@ class DataflowProjectsJobsUpdateRequest(_messages.Message):
 
 
 class DataflowProjectsJobsWorkItemsLeaseRequest(_messages.Message):
-  """A DataflowProjectsJobsWorkItemsLeaseRequest object.
+  r"""A DataflowProjectsJobsWorkItemsLeaseRequest object.
 
   Fields:
     jobId: Identifies the workflow job this worker belongs to.
-    leaseWorkItemRequest: A LeaseWorkItemRequest resource to be passed as the
-      request body.
+    lease_work_item_request: A LeaseWorkItemRequest resource to be passed as
+      the request body.
     projectId: Identifies the project this worker belongs to.
   """
 
   jobId = _messages.StringField(1, required=True)
-  leaseWorkItemRequest = _messages.MessageField('LeaseWorkItemRequest', 2)
+  lease_work_item_request = _messages.MessageField('LeaseWorkItemRequest', 2)
   projectId = _messages.StringField(3, required=True)
 
 
 class DataflowProjectsJobsWorkItemsReportStatusRequest(_messages.Message):
-  """A DataflowProjectsJobsWorkItemsReportStatusRequest object.
+  r"""A DataflowProjectsJobsWorkItemsReportStatusRequest object.
 
   Fields:
     jobId: The job which the WorkItem is part of.
     projectId: The project which owns the WorkItem's job.
-    reportWorkItemStatusRequest: A ReportWorkItemStatusRequest resource to be
-      passed as the request body.
+    report_work_item_status_request: A ReportWorkItemStatusRequest resource to
+      be passed as the request body.
   """
 
   jobId = _messages.StringField(1, required=True)
   projectId = _messages.StringField(2, required=True)
-  reportWorkItemStatusRequest = _messages.MessageField('ReportWorkItemStatusRequest', 3)
+  report_work_item_status_request = _messages.MessageField('ReportWorkItemStatusRequest', 3)
 
 
 class DataflowProjectsLocationsJobsCreateRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsCreateRequest object.
+  r"""A DataflowProjectsLocationsJobsCreateRequest object.
 
   Enums:
     ViewValueValuesEnum: The level of information requested in response.
@@ -881,7 +912,7 @@ class DataflowProjectsLocationsJobsCreateRequest(_messages.Message):
   """
 
   class ViewValueValuesEnum(_messages.Enum):
-    """The level of information requested in response.
+    r"""The level of information requested in response.
 
     Values:
       JOB_VIEW_UNKNOWN: <no description>
@@ -902,41 +933,41 @@ class ViewValueValuesEnum(_messages.Enum):
 
 
 class DataflowProjectsLocationsJobsDebugGetConfigRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsDebugGetConfigRequest object.
+  r"""A DataflowProjectsLocationsJobsDebugGetConfigRequest object.
 
   Fields:
-    getDebugConfigRequest: A GetDebugConfigRequest resource to be passed as
+    get_debug_config_request: A GetDebugConfigRequest resource to be passed as
       the request body.
     jobId: The job id.
     location: The location which contains the job specified by job_id.
     projectId: The project id.
   """
 
-  getDebugConfigRequest = _messages.MessageField('GetDebugConfigRequest', 1)
+  get_debug_config_request = _messages.MessageField('GetDebugConfigRequest', 1)
   jobId = _messages.StringField(2, required=True)
   location = _messages.StringField(3, required=True)
   projectId = _messages.StringField(4, required=True)
 
 
 class DataflowProjectsLocationsJobsDebugSendCaptureRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsDebugSendCaptureRequest object.
+  r"""A DataflowProjectsLocationsJobsDebugSendCaptureRequest object.
 
   Fields:
     jobId: The job id.
     location: The location which contains the job specified by job_id.
     projectId: The project id.
-    sendDebugCaptureRequest: A SendDebugCaptureRequest resource to be passed
-      as the request body.
+    send_debug_capture_request: A SendDebugCaptureRequest resource to be
+      passed as the request body.
   """
 
   jobId = _messages.StringField(1, required=True)
   location = _messages.StringField(2, required=True)
   projectId = _messages.StringField(3, required=True)
-  sendDebugCaptureRequest = _messages.MessageField('SendDebugCaptureRequest', 4)
+  send_debug_capture_request = _messages.MessageField('SendDebugCaptureRequest', 4)
 
 
 class DataflowProjectsLocationsJobsGetMetricsRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsGetMetricsRequest object.
+  r"""A DataflowProjectsLocationsJobsGetMetricsRequest object.
 
   Fields:
     jobId: The job to get messages for.
@@ -953,7 +984,7 @@ class DataflowProjectsLocationsJobsGetMetricsRequest(_messages.Message):
 
 
 class DataflowProjectsLocationsJobsGetRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsGetRequest object.
+  r"""A DataflowProjectsLocationsJobsGetRequest object.
 
   Enums:
     ViewValueValuesEnum: The level of information requested in response.
@@ -966,7 +997,7 @@ class DataflowProjectsLocationsJobsGetRequest(_messages.Message):
   """
 
   class ViewValueValuesEnum(_messages.Enum):
-    """The level of information requested in response.
+    r"""The level of information requested in response.
 
     Values:
       JOB_VIEW_UNKNOWN: <no description>
@@ -986,7 +1017,7 @@ class ViewValueValuesEnum(_messages.Enum):
 
 
 class DataflowProjectsLocationsJobsListRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsListRequest object.
+  r"""A DataflowProjectsLocationsJobsListRequest object.
 
   Enums:
     FilterValueValuesEnum: The kind of filter to use.
@@ -1007,7 +1038,7 @@ class DataflowProjectsLocationsJobsListRequest(_messages.Message):
   """
 
   class FilterValueValuesEnum(_messages.Enum):
-    """The kind of filter to use.
+    r"""The kind of filter to use.
 
     Values:
       UNKNOWN: <no description>
@@ -1021,7 +1052,7 @@ class FilterValueValuesEnum(_messages.Enum):
     ACTIVE = 3
 
   class ViewValueValuesEnum(_messages.Enum):
-    """Level of information requested in response. Default is
+    r"""Level of information requested in response. Default is
     `JOB_VIEW_SUMMARY`.
 
     Values:
@@ -1044,7 +1075,7 @@ class ViewValueValuesEnum(_messages.Enum):
 
 
 class DataflowProjectsLocationsJobsMessagesListRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsMessagesListRequest object.
+  r"""A DataflowProjectsLocationsJobsMessagesListRequest object.
 
   Enums:
     MinimumImportanceValueValuesEnum: Filter to only get messages with
@@ -1069,7 +1100,7 @@ class DataflowProjectsLocationsJobsMessagesListRequest(_messages.Message):
   """
 
   class MinimumImportanceValueValuesEnum(_messages.Enum):
-    """Filter to only get messages with importance >= level
+    r"""Filter to only get messages with importance >= level
 
     Values:
       JOB_MESSAGE_IMPORTANCE_UNKNOWN: <no description>
@@ -1097,7 +1128,7 @@ class MinimumImportanceValueValuesEnum(_messages.Enum):
 
 
 class DataflowProjectsLocationsJobsUpdateRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsUpdateRequest object.
+  r"""A DataflowProjectsLocationsJobsUpdateRequest object.
 
   Fields:
     job: A Job resource to be passed as the request body.
@@ -1113,57 +1144,57 @@ class DataflowProjectsLocationsJobsUpdateRequest(_messages.Message):
 
 
 class DataflowProjectsLocationsJobsWorkItemsLeaseRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsWorkItemsLeaseRequest object.
+  r"""A DataflowProjectsLocationsJobsWorkItemsLeaseRequest object.
 
   Fields:
     jobId: Identifies the workflow job this worker belongs to.
-    leaseWorkItemRequest: A LeaseWorkItemRequest resource to be passed as the
-      request body.
+    lease_work_item_request: A LeaseWorkItemRequest resource to be passed as
+      the request body.
     location: The location which contains the WorkItem's job.
     projectId: Identifies the project this worker belongs to.
   """
 
   jobId = _messages.StringField(1, required=True)
-  leaseWorkItemRequest = _messages.MessageField('LeaseWorkItemRequest', 2)
+  lease_work_item_request = _messages.MessageField('LeaseWorkItemRequest', 2)
   location = _messages.StringField(3, required=True)
   projectId = _messages.StringField(4, required=True)
 
 
 class DataflowProjectsLocationsJobsWorkItemsReportStatusRequest(_messages.Message):
-  """A DataflowProjectsLocationsJobsWorkItemsReportStatusRequest object.
+  r"""A DataflowProjectsLocationsJobsWorkItemsReportStatusRequest object.
 
   Fields:
     jobId: The job which the WorkItem is part of.
     location: The location which contains the WorkItem's job.
     projectId: The project which owns the WorkItem's job.
-    reportWorkItemStatusRequest: A ReportWorkItemStatusRequest resource to be
-      passed as the request body.
+    report_work_item_status_request: A ReportWorkItemStatusRequest resource to
+      be passed as the request body.
   """
 
   jobId = _messages.StringField(1, required=True)
   location = _messages.StringField(2, required=True)
   projectId = _messages.StringField(3, required=True)
-  reportWorkItemStatusRequest = _messages.MessageField('ReportWorkItemStatusRequest', 4)
+  report_work_item_status_request = _messages.MessageField('ReportWorkItemStatusRequest', 4)
 
 
 class DataflowProjectsLocationsTemplatesCreateRequest(_messages.Message):
-  """A DataflowProjectsLocationsTemplatesCreateRequest object.
+  r"""A DataflowProjectsLocationsTemplatesCreateRequest object.
 
   Fields:
-    createJobFromTemplateRequest: A CreateJobFromTemplateRequest resource to
-      be passed as the request body.
+    create_job_from_template_request: A CreateJobFromTemplateRequest resource
+      to be passed as the request body.
     location: The location to which to direct the request.
     projectId: Required. The ID of the Cloud Platform project that the job
       belongs to.
   """
 
-  createJobFromTemplateRequest = _messages.MessageField('CreateJobFromTemplateRequest', 1)
+  create_job_from_template_request = _messages.MessageField('CreateJobFromTemplateRequest', 1)
   location = _messages.StringField(2, required=True)
   projectId = _messages.StringField(3, required=True)
 
 
 class DataflowProjectsLocationsTemplatesGetRequest(_messages.Message):
-  """A DataflowProjectsLocationsTemplatesGetRequest object.
+  r"""A DataflowProjectsLocationsTemplatesGetRequest object.
 
   Enums:
     ViewValueValuesEnum: The view to retrieve. Defaults to METADATA_ONLY.
@@ -1179,7 +1210,7 @@ class DataflowProjectsLocationsTemplatesGetRequest(_messages.Message):
   """
 
   class ViewValueValuesEnum(_messages.Enum):
-    """The view to retrieve. Defaults to METADATA_ONLY.
+    r"""The view to retrieve. Defaults to METADATA_ONLY.
 
     Values:
       METADATA_ONLY: <no description>
@@ -1193,13 +1224,13 @@ class ViewValueValuesEnum(_messages.Enum):
 
 
 class DataflowProjectsLocationsTemplatesLaunchRequest(_messages.Message):
-  """A DataflowProjectsLocationsTemplatesLaunchRequest object.
+  r"""A DataflowProjectsLocationsTemplatesLaunchRequest object.
 
   Fields:
     gcsPath: Required. A Cloud Storage path to the template from which to
       create the job. Must be valid Cloud Storage URL, beginning with 'gs://'.
-    launchTemplateParameters: A LaunchTemplateParameters resource to be passed
-      as the request body.
+    launch_template_parameters: A LaunchTemplateParameters resource to be
+      passed as the request body.
     location: The location to which to direct the request.
     projectId: Required. The ID of the Cloud Platform project that the job
       belongs to.
@@ -1208,43 +1239,43 @@ class DataflowProjectsLocationsTemplatesLaunchRequest(_messages.Message):
   """
 
   gcsPath = _messages.StringField(1)
-  launchTemplateParameters = _messages.MessageField('LaunchTemplateParameters', 2)
+  launch_template_parameters = _messages.MessageField('LaunchTemplateParameters', 2)
   location = _messages.StringField(3, required=True)
   projectId = _messages.StringField(4, required=True)
   validateOnly = _messages.BooleanField(5)
 
 
 class DataflowProjectsLocationsWorkerMessagesRequest(_messages.Message):
-  """A DataflowProjectsLocationsWorkerMessagesRequest object.
+  r"""A DataflowProjectsLocationsWorkerMessagesRequest object.
 
   Fields:
     location: The location which contains the job
     projectId: The project to send the WorkerMessages to.
-    sendWorkerMessagesRequest: A SendWorkerMessagesRequest resource to be
+    send_worker_messages_request: A SendWorkerMessagesRequest resource to be
       passed as the request body.
   """
 
   location = _messages.StringField(1, required=True)
   projectId = _messages.StringField(2, required=True)
-  sendWorkerMessagesRequest = _messages.MessageField('SendWorkerMessagesRequest', 3)
+  send_worker_messages_request = _messages.MessageField('SendWorkerMessagesRequest', 3)
 
 
 class DataflowProjectsTemplatesCreateRequest(_messages.Message):
-  """A DataflowProjectsTemplatesCreateRequest object.
+  r"""A DataflowProjectsTemplatesCreateRequest object.
 
   Fields:
-    createJobFromTemplateRequest: A CreateJobFromTemplateRequest resource to
-      be passed as the request body.
+    create_job_from_template_request: A CreateJobFromTemplateRequest resource
+      to be passed as the request body.
     projectId: Required. The ID of the Cloud Platform project that the job
       belongs to.
   """
 
-  createJobFromTemplateRequest = _messages.MessageField('CreateJobFromTemplateRequest', 1)
+  create_job_from_template_request = _messages.MessageField('CreateJobFromTemplateRequest', 1)
   projectId = _messages.StringField(2, required=True)
 
 
 class DataflowProjectsTemplatesGetRequest(_messages.Message):
-  """A DataflowProjectsTemplatesGetRequest object.
+  r"""A DataflowProjectsTemplatesGetRequest object.
 
   Enums:
     ViewValueValuesEnum: The view to retrieve. Defaults to METADATA_ONLY.
@@ -1260,7 +1291,7 @@ class DataflowProjectsTemplatesGetRequest(_messages.Message):
   """
 
   class ViewValueValuesEnum(_messages.Enum):
-    """The view to retrieve. Defaults to METADATA_ONLY.
+    r"""The view to retrieve. Defaults to METADATA_ONLY.
 
     Values:
       METADATA_ONLY: <no description>
@@ -1274,13 +1305,13 @@ class ViewValueValuesEnum(_messages.Enum):
 
 
 class DataflowProjectsTemplatesLaunchRequest(_messages.Message):
-  """A DataflowProjectsTemplatesLaunchRequest object.
+  r"""A DataflowProjectsTemplatesLaunchRequest object.
 
   Fields:
     gcsPath: Required. A Cloud Storage path to the template from which to
       create the job. Must be valid Cloud Storage URL, beginning with 'gs://'.
-    launchTemplateParameters: A LaunchTemplateParameters resource to be passed
-      as the request body.
+    launch_template_parameters: A LaunchTemplateParameters resource to be
+      passed as the request body.
     location: The location to which to direct the request.
     projectId: Required. The ID of the Cloud Platform project that the job
       belongs to.
@@ -1289,27 +1320,39 @@ class DataflowProjectsTemplatesLaunchRequest(_messages.Message):
   """
 
   gcsPath = _messages.StringField(1)
-  launchTemplateParameters = _messages.MessageField('LaunchTemplateParameters', 2)
+  launch_template_parameters = _messages.MessageField('LaunchTemplateParameters', 2)
   location = _messages.StringField(3)
   projectId = _messages.StringField(4, required=True)
   validateOnly = _messages.BooleanField(5)
 
 
 class DataflowProjectsWorkerMessagesRequest(_messages.Message):
-  """A DataflowProjectsWorkerMessagesRequest object.
+  r"""A DataflowProjectsWorkerMessagesRequest object.
 
   Fields:
     projectId: The project to send the WorkerMessages to.
-    sendWorkerMessagesRequest: A SendWorkerMessagesRequest resource to be
+    send_worker_messages_request: A SendWorkerMessagesRequest resource to be
       passed as the request body.
   """
 
   projectId = _messages.StringField(1, required=True)
-  sendWorkerMessagesRequest = _messages.MessageField('SendWorkerMessagesRequest', 2)
+  send_worker_messages_request = _messages.MessageField('SendWorkerMessagesRequest', 2)
+
+
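+
For downstream callers, the most visible effect of the regeneration in this file is that request-body fields on the wrapper request messages move from camelCase to snake_case, as in the hunks above. A minimal sketch of the adjusted construction, with placeholder values:

    from apache_beam.runners.dataflow.internal.clients.dataflow import (
        dataflow_v1b3_messages as messages)

    # Request-body fields are snake_case after this regeneration; the old
    # camelCase keyword (sendWorkerMessagesRequest=...) is no longer
    # recognized. 'my-project' is a placeholder.
    req = messages.DataflowProjectsWorkerMessagesRequest(
        projectId='my-project',
        send_worker_messages_request=messages.SendWorkerMessagesRequest())
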
+class DatastoreIODetails(_messages.Message):
+  r"""Metadata for a Datastore connector used by the job.
+
+  Fields:
+    namespace: Namespace used in the connection.
+    projectId: ProjectId accessed in the connection.
+  """
+
+  namespace = _messages.StringField(1)
+  projectId = _messages.StringField(2)
 
 
 class DerivedSource(_messages.Message):
-  """Specification of one of the bundles produced as a result of splitting a
+  r"""Specification of one of the bundles produced as a result of splitting a
   Source (e.g. when executing a SourceSplitRequest, or when splitting an
   active task using WorkItemStatus.dynamic_source_split), relative to the
   source being split.
@@ -1324,7 +1367,7 @@ class DerivedSource(_messages.Message):
   """
 
   class DerivationModeValueValuesEnum(_messages.Enum):
-    """What source to base the produced source on (if any).
+    r"""What source to base the produced source on (if any).
 
     Values:
       SOURCE_DERIVATION_MODE_UNKNOWN: The source derivation is unknown, or
@@ -1346,7 +1389,7 @@ class DerivationModeValueValuesEnum(_messages.Enum):
 
 
 class Disk(_messages.Message):
-  """Describes the data disk used by a workflow job.
+  r"""Describes the data disk used by a workflow job.
 
   Fields:
     diskType: Disk storage type, as defined by Google Compute Engine.  This
@@ -1374,7 +1417,7 @@ class Disk(_messages.Message):
 
 
 class DisplayData(_messages.Message):
-  """Data provided with a pipeline or transform to provide descriptive info.
+  r"""Data provided with a pipeline or transform to provide descriptive info.
 
   Fields:
     boolValue: Contains value if the data is of a boolean type.
@@ -1414,7 +1457,7 @@ class DisplayData(_messages.Message):
 
 
 class DistributionUpdate(_messages.Message):
-  """A metric value representing a distribution.
+  r"""A metric value representing a distribution.
 
   Fields:
     count: The count of the number of elements present in the distribution.
@@ -1436,7 +1479,7 @@ class DistributionUpdate(_messages.Message):
 
 
 class DynamicSourceSplit(_messages.Message):
-  """When a task splits using WorkItemStatus.dynamic_source_split, this
+  r"""When a task splits using WorkItemStatus.dynamic_source_split, this
   message describes the two parts of the split relative to the description of
   the current task's input.
 
@@ -1452,7 +1495,7 @@ class DynamicSourceSplit(_messages.Message):
 
 
 class Environment(_messages.Message):
-  """Describes the environment in which a Dataflow Job runs.
+  r"""Describes the environment in which a Dataflow Job runs.
 
   Messages:
     InternalExperimentsValue: Experimental settings.
@@ -1498,7 +1541,7 @@ class Environment(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class InternalExperimentsValue(_messages.Message):
-    """Experimental settings.
+    r"""Experimental settings.
 
     Messages:
       AdditionalProperty: An additional property for a
@@ -1510,7 +1553,7 @@ class InternalExperimentsValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a InternalExperimentsValue object.
+      r"""An additional property for a InternalExperimentsValue object.
 
       Fields:
         key: Name of the additional property.
@@ -1524,7 +1567,7 @@ class AdditionalProperty(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class SdkPipelineOptionsValue(_messages.Message):
-    """The Cloud Dataflow SDK pipeline options specified by the user. These
+    r"""The Cloud Dataflow SDK pipeline options specified by the user. These
     options are passed through the service and are used to recreate the SDK
     pipeline options on the worker in a language agnostic and platform
     independent way.
@@ -1538,7 +1581,7 @@ class SdkPipelineOptionsValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a SdkPipelineOptionsValue object.
+      r"""An additional property for a SdkPipelineOptionsValue object.
 
       Fields:
         key: Name of the additional property.
@@ -1552,7 +1595,7 @@ class AdditionalProperty(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class UserAgentValue(_messages.Message):
-    """A description of the process that generated the request.
+    r"""A description of the process that generated the request.
 
     Messages:
       AdditionalProperty: An additional property for a UserAgentValue object.
@@ -1562,7 +1605,7 @@ class UserAgentValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a UserAgentValue object.
+      r"""An additional property for a UserAgentValue object.
 
       Fields:
         key: Name of the additional property.
@@ -1576,7 +1619,7 @@ class AdditionalProperty(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class VersionValue(_messages.Message):
-    """A structure describing which components and their versions of the
+    r"""A structure describing which components and their versions of the
     service are required in order to run the job.
 
     Messages:
@@ -1587,7 +1630,7 @@ class VersionValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a VersionValue object.
+      r"""An additional property for a VersionValue object.
 
       Fields:
         key: Name of the additional property.
@@ -1612,7 +1655,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class ExecutionStageState(_messages.Message):
-  """A message describing the state of a particular execution stage.
+  r"""A message describing the state of a particular execution stage.
 
   Enums:
     ExecutionStageStateValueValuesEnum: Executions stage states allow the same
@@ -1626,7 +1669,7 @@ class ExecutionStageState(_messages.Message):
   """
 
   class ExecutionStageStateValueValuesEnum(_messages.Enum):
-    """Executions stage states allow the same set of values as JobState.
+    r"""Executions stage states allow the same set of values as JobState.
 
     Values:
       JOB_STATE_UNKNOWN: The job's run state isn't specified.
@@ -1664,13 +1707,16 @@ class ExecutionStageStateValueValuesEnum(_messages.Enum):
         was requested. This state is a terminal state, may only be set by the
         Cloud Dataflow service, and only as a transition from
         `JOB_STATE_DRAINING`.
-      JOB_STATE_PENDING: 'JOB_STATE_PENDING' indicates that the job has been
+      JOB_STATE_PENDING: `JOB_STATE_PENDING` indicates that the job has been
         created but is not yet running.  Jobs that are pending may only
         transition to `JOB_STATE_RUNNING`, or `JOB_STATE_FAILED`.
-      JOB_STATE_CANCELLING: 'JOB_STATE_CANCELLING' indicates that the job has
+      JOB_STATE_CANCELLING: `JOB_STATE_CANCELLING` indicates that the job has
         been explicitly cancelled and is in the process of stopping.  Jobs
-        that are cancelling may only transition to 'JOB_STATE_CANCELLED' or
-        'JOB_STATE_FAILED'.
+        that are cancelling may only transition to `JOB_STATE_CANCELLED` or
+        `JOB_STATE_FAILED`.
+      JOB_STATE_QUEUED: `JOB_STATE_QUEUED` indicates that the job has been
+        created but is being delayed until launch. Jobs that are queued may
+        only transition to `JOB_STATE_PENDING` or `JOB_STATE_CANCELLED`.
     """
     JOB_STATE_UNKNOWN = 0
     JOB_STATE_STOPPED = 1
@@ -1683,6 +1729,7 @@ class ExecutionStageStateValueValuesEnum(_messages.Enum):
     JOB_STATE_DRAINED = 8
     JOB_STATE_PENDING = 9
     JOB_STATE_CANCELLING = 10
+    JOB_STATE_QUEUED = 11
 
   currentStateTime = _messages.StringField(1)
   executionStageName = _messages.StringField(2)
@@ -1690,7 +1737,7 @@ class ExecutionStageStateValueValuesEnum(_messages.Enum):
 
 
 class ExecutionStageSummary(_messages.Message):
-  """Description of the composing transforms, names/ids, and input/outputs of
+  r"""Description of the composing transforms, names/ids, and input/outputs of
   a stage of execution.  Some composing transforms and sources may have been
   generated by the Dataflow service during execution planning.
 
@@ -1709,7 +1756,7 @@ class ExecutionStageSummary(_messages.Message):
   """
 
   class KindValueValuesEnum(_messages.Enum):
-    """Type of tranform this stage is executing.
+    r"""Type of tranform this stage is executing.
 
     Values:
       UNKNOWN_KIND: Unrecognized transform type.
@@ -1743,7 +1790,7 @@ class KindValueValuesEnum(_messages.Enum):
 
 
 class FailedLocation(_messages.Message):
-  """Indicates which location failed to respond to a request for data.
+  r"""Indicates which location failed to respond to a request for data.
 
   Fields:
     name: The name of the failed location.
@@ -1752,8 +1799,18 @@ class FailedLocation(_messages.Message):
   name = _messages.StringField(1)
 
 
+class FileIODetails(_messages.Message):
+  r"""Metadata for a File connector used by the job.
+
+  Fields:
+    filePattern: File Pattern used to access files by the connector.
+  """
+
+  filePattern = _messages.StringField(1)
+
+
 class FlattenInstruction(_messages.Message):
-  """An instruction that copies its inputs (zero or more) to its (single)
+  r"""An instruction that copies its inputs (zero or more) to its (single)
   output.
 
   Fields:
@@ -1764,7 +1821,7 @@ class FlattenInstruction(_messages.Message):
 
 
 class FloatingPointList(_messages.Message):
-  """A metric value representing a list of floating point numbers.
+  r"""A metric value representing a list of floating point numbers.
 
   Fields:
     elements: Elements of the list.
@@ -1774,7 +1831,7 @@ class FloatingPointList(_messages.Message):
 
 
 class FloatingPointMean(_messages.Message):
-  """A representation of a floating point mean metric contribution.
+  r"""A representation of a floating point mean metric contribution.
 
   Fields:
     count: The number of values being aggregated.
@@ -1786,7 +1843,7 @@ class FloatingPointMean(_messages.Message):
 
 
 class GetDebugConfigRequest(_messages.Message):
-  """Request to get updated debug configuration for component.
+  r"""Request to get updated debug configuration for component.
 
   Fields:
     componentId: The internal component id for which debug configuration is
@@ -1801,7 +1858,7 @@ class GetDebugConfigRequest(_messages.Message):
 
 
 class GetDebugConfigResponse(_messages.Message):
-  """Response to a get debug configuration request.
+  r"""Response to a get debug configuration request.
 
   Fields:
     config: The encoded debug configuration for the requested component.
@@ -1811,7 +1868,7 @@ class GetDebugConfigResponse(_messages.Message):
 
 
 class GetTemplateResponse(_messages.Message):
-  """The response to a GetTemplate request.
+  r"""The response to a GetTemplate request.
 
   Fields:
     metadata: The template metadata describing the template name, available
@@ -1825,7 +1882,7 @@ class GetTemplateResponse(_messages.Message):
 
 
 class Histogram(_messages.Message):
-  """Histogram of value counts for a distribution.  Buckets have an inclusive
+  r"""Histogram of value counts for a distribution.  Buckets have an inclusive
   lower bound and exclusive upper bound and use "1,2,5 bucketing": The first
   bucket range is from [0,1) and all subsequent bucket boundaries are powers
   of ten multiplied by 1, 2, or 5. Thus, bucket boundaries are 0, 1, 2, 5, 10,
@@ -1846,7 +1903,7 @@ class Histogram(_messages.Message):
 
 
 class InstructionInput(_messages.Message):
-  """An input of an instruction, as a reference to an output of a producer
+  r"""An input of an instruction, as a reference to an output of a producer
   instruction.
 
   Fields:
@@ -1862,7 +1919,7 @@ class InstructionInput(_messages.Message):
 
 
 class InstructionOutput(_messages.Message):
-  """An output of an instruction.
+  r"""An output of an instruction.
 
   Messages:
     CodecValue: The codec to use to encode data being written via this output.
@@ -1883,7 +1940,7 @@ class InstructionOutput(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class CodecValue(_messages.Message):
-    """The codec to use to encode data being written via this output.
+    r"""The codec to use to encode data being written via this output.
 
     Messages:
       AdditionalProperty: An additional property for a CodecValue object.
@@ -1893,7 +1950,7 @@ class CodecValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a CodecValue object.
+      r"""An additional property for a CodecValue object.
 
       Fields:
         key: Name of the additional property.
@@ -1914,7 +1971,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class IntegerGauge(_messages.Message):
-  """A metric value representing temporal values of a variable.
+  r"""A metric value representing temporal values of a variable.
 
   Fields:
     timestamp: The time at which this value was measured. Measured as msecs
@@ -1927,7 +1984,7 @@ class IntegerGauge(_messages.Message):
 
 
 class IntegerList(_messages.Message):
-  """A metric value representing a list of integers.
+  r"""A metric value representing a list of integers.
 
   Fields:
     elements: Elements of the list.
@@ -1937,7 +1994,7 @@ class IntegerList(_messages.Message):
 
 
 class IntegerMean(_messages.Message):
-  """A representation of an integer mean metric contribution.
+  r"""A representation of an integer mean metric contribution.
 
   Fields:
     count: The number of values being aggregated.
@@ -1949,7 +2006,7 @@ class IntegerMean(_messages.Message):
 
 
 class Job(_messages.Message):
-  """Defines a job to be run by the Cloud Dataflow service.
+  r"""Defines a job to be run by the Cloud Dataflow service.
 
   Enums:
     CurrentStateValueValuesEnum: The current state of the job.  Jobs are
@@ -1998,6 +2055,9 @@ class Job(_messages.Message):
     id: The unique ID of this job.  This field is set by the Cloud Dataflow
       service when the Job is created, and is immutable for the life of the
       job.
+    jobMetadata: This field is populated by the Dataflow service to support
+      filtering jobs by the metadata values provided here. Populated for
+      ListJobs and all GetJob views SUMMARY and higher.
     labels: User-defined labels for this job.  The labels map can contain no
       more than 64 entries.  Entries of the labels map are UTF8 strings that
       comply with the following restrictions:  * Keys must conform to regexp:
@@ -2042,7 +2102,7 @@ class Job(_messages.Message):
   """
 
   class CurrentStateValueValuesEnum(_messages.Enum):
-    """The current state of the job.  Jobs are created in the
+    r"""The current state of the job.  Jobs are created in the
     `JOB_STATE_STOPPED` state unless otherwise specified.  A job in the
     `JOB_STATE_RUNNING` state may asynchronously enter a terminal state. After
     a job has reached a terminal state, no further state updates may be made.
@@ -2085,13 +2145,16 @@ class CurrentStateValueValuesEnum(_messages.Enum):
         was requested. This state is a terminal state, may only be set by the
         Cloud Dataflow service, and only as a transition from
         `JOB_STATE_DRAINING`.
-      JOB_STATE_PENDING: 'JOB_STATE_PENDING' indicates that the job has been
+      JOB_STATE_PENDING: `JOB_STATE_PENDING` indicates that the job has been
         created but is not yet running.  Jobs that are pending may only
         transition to `JOB_STATE_RUNNING`, or `JOB_STATE_FAILED`.
-      JOB_STATE_CANCELLING: 'JOB_STATE_CANCELLING' indicates that the job has
+      JOB_STATE_CANCELLING: `JOB_STATE_CANCELLING` indicates that the job has
         been explicitly cancelled and is in the process of stopping.  Jobs
-        that are cancelling may only transition to 'JOB_STATE_CANCELLED' or
-        'JOB_STATE_FAILED'.
+        that are cancelling may only transition to `JOB_STATE_CANCELLED` or
+        `JOB_STATE_FAILED`.
+      JOB_STATE_QUEUED: `JOB_STATE_QUEUED` indicates that the job has been
+        created but is being delayed until launch. Jobs that are queued may
+        only transition to `JOB_STATE_PENDING` or `JOB_STATE_CANCELLED`.
     """
     JOB_STATE_UNKNOWN = 0
     JOB_STATE_STOPPED = 1
@@ -2104,9 +2167,10 @@ class CurrentStateValueValuesEnum(_messages.Enum):
     JOB_STATE_DRAINED = 8
     JOB_STATE_PENDING = 9
     JOB_STATE_CANCELLING = 10
+    JOB_STATE_QUEUED = 11
 
   class RequestedStateValueValuesEnum(_messages.Enum):
-    """The job's requested state.  `UpdateJob` may be used to switch between
+    r"""The job's requested state.  `UpdateJob` may be used to switch between
     the `JOB_STATE_STOPPED` and `JOB_STATE_RUNNING` states, by setting
     requested_state.  `UpdateJob` may also be used to directly set a job's
     requested state to `JOB_STATE_CANCELLED` or `JOB_STATE_DONE`, irrevocably
@@ -2148,13 +2212,16 @@ class RequestedStateValueValuesEnum(_messages.Enum):
         was requested. This state is a terminal state, may only be set by the
         Cloud Dataflow service, and only as a transition from
         `JOB_STATE_DRAINING`.
-      JOB_STATE_PENDING: 'JOB_STATE_PENDING' indicates that the job has been
+      JOB_STATE_PENDING: `JOB_STATE_PENDING` indicates that the job has been
         created but is not yet running.  Jobs that are pending may only
         transition to `JOB_STATE_RUNNING`, or `JOB_STATE_FAILED`.
-      JOB_STATE_CANCELLING: 'JOB_STATE_CANCELLING' indicates that the job has
+      JOB_STATE_CANCELLING: `JOB_STATE_CANCELLING` indicates that the job has
         been explicitly cancelled and is in the process of stopping.  Jobs
-        that are cancelling may only transition to 'JOB_STATE_CANCELLED' or
-        'JOB_STATE_FAILED'.
+        that are cancelling may only transition to `JOB_STATE_CANCELLED` or
+        `JOB_STATE_FAILED`.
+      JOB_STATE_QUEUED: `JOB_STATE_QUEUED` indicates that the job has been
+        created but is being delayed until launch. Jobs that are queued may
+        only transition to `JOB_STATE_PENDING` or `JOB_STATE_CANCELLED`.
     """
     JOB_STATE_UNKNOWN = 0
     JOB_STATE_STOPPED = 1
@@ -2167,9 +2234,10 @@ class RequestedStateValueValuesEnum(_messages.Enum):
     JOB_STATE_DRAINED = 8
     JOB_STATE_PENDING = 9
     JOB_STATE_CANCELLING = 10
+    JOB_STATE_QUEUED = 11
 
   class TypeValueValuesEnum(_messages.Enum):
-    """The type of Cloud Dataflow job.
+    r"""The type of Cloud Dataflow job.
 
     Values:
       JOB_TYPE_UNKNOWN: The type of the job is unspecified, or unknown.
@@ -2184,7 +2252,7 @@ class TypeValueValuesEnum(_messages.Enum):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class LabelsValue(_messages.Message):
-    """User-defined labels for this job.  The labels map can contain no more
+    r"""User-defined labels for this job.  The labels map can contain no more
     than 64 entries.  Entries of the labels map are UTF8 strings that comply
     with the following restrictions:  * Keys must conform to regexp:
     \p{Ll}\p{Lo}{0,62} * Values must conform to regexp:
@@ -2199,7 +2267,7 @@ class LabelsValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a LabelsValue object.
+      r"""An additional property for a LabelsValue object.
 
       Fields:
         key: Name of the additional property.
@@ -2213,7 +2281,7 @@ class AdditionalProperty(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class TransformNameMappingValue(_messages.Message):
-    """The map of transform name prefixes of the job to be replaced to the
+    r"""The map of transform name prefixes of the job to be replaced to the
     corresponding name prefixes of the new job.
 
     Messages:
@@ -2226,7 +2294,7 @@ class TransformNameMappingValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a TransformNameMappingValue object.
+      r"""An additional property for a TransformNameMappingValue object.
 
       Fields:
         key: Name of the additional property.
@@ -2245,23 +2313,24 @@ class AdditionalProperty(_messages.Message):
   environment = _messages.MessageField('Environment', 5)
   executionInfo = _messages.MessageField('JobExecutionInfo', 6)
   id = _messages.StringField(7)
-  labels = _messages.MessageField('LabelsValue', 8)
-  location = _messages.StringField(9)
-  name = _messages.StringField(10)
-  pipelineDescription = _messages.MessageField('PipelineDescription', 11)
-  projectId = _messages.StringField(12)
-  replaceJobId = _messages.StringField(13)
-  replacedByJobId = _messages.StringField(14)
-  requestedState = _messages.EnumField('RequestedStateValueValuesEnum', 15)
-  stageStates = _messages.MessageField('ExecutionStageState', 16, repeated=True)
-  steps = _messages.MessageField('Step', 17, repeated=True)
-  tempFiles = _messages.StringField(18, repeated=True)
-  transformNameMapping = _messages.MessageField('TransformNameMappingValue', 19)
-  type = _messages.EnumField('TypeValueValuesEnum', 20)
+  jobMetadata = _messages.MessageField('JobMetadata', 8)
+  labels = _messages.MessageField('LabelsValue', 9)
+  location = _messages.StringField(10)
+  name = _messages.StringField(11)
+  pipelineDescription = _messages.MessageField('PipelineDescription', 12)
+  projectId = _messages.StringField(13)
+  replaceJobId = _messages.StringField(14)
+  replacedByJobId = _messages.StringField(15)
+  requestedState = _messages.EnumField('RequestedStateValueValuesEnum', 16)
+  stageStates = _messages.MessageField('ExecutionStageState', 17, repeated=True)
+  steps = _messages.MessageField('Step', 18, repeated=True)
+  tempFiles = _messages.StringField(19, repeated=True)
+  transformNameMapping = _messages.MessageField('TransformNameMappingValue', 20)
+  type = _messages.EnumField('TypeValueValuesEnum', 21)
 
 
 class JobExecutionInfo(_messages.Message):
-  """Additional information about how a Cloud Dataflow job will be executed
+  r"""Additional information about how a Cloud Dataflow job will be executed
   that isn't contained in the submitted job.
 
   Messages:
@@ -2274,7 +2343,7 @@ class JobExecutionInfo(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class StagesValue(_messages.Message):
-    """A mapping from each stage to the information about that stage.
+    r"""A mapping from each stage to the information about that stage.
 
     Messages:
       AdditionalProperty: An additional property for a StagesValue object.
@@ -2284,7 +2353,7 @@ class StagesValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a StagesValue object.
+      r"""An additional property for a StagesValue object.
 
       Fields:
         key: Name of the additional property.
@@ -2300,7 +2369,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class JobExecutionStageInfo(_messages.Message):
-  """Contains information about how a particular google.dataflow.v1beta3.Step
+  r"""Contains information about how a particular google.dataflow.v1beta3.Step
   will be executed.
 
   Fields:
@@ -2313,7 +2382,7 @@ class JobExecutionStageInfo(_messages.Message):
 
 
 class JobMessage(_messages.Message):
-  """A particular message pertaining to a Dataflow job.
+  r"""A particular message pertaining to a Dataflow job.
 
   Enums:
     MessageImportanceValueValuesEnum: Importance level of the message.
@@ -2326,7 +2395,7 @@ class JobMessage(_messages.Message):
   """
 
   class MessageImportanceValueValuesEnum(_messages.Enum):
-    """Importance level of the message.
+    r"""Importance level of the message.
 
     Values:
       JOB_MESSAGE_IMPORTANCE_UNKNOWN: The message importance isn't specified,
@@ -2367,8 +2436,35 @@ class MessageImportanceValueValuesEnum(_messages.Enum):
   time = _messages.StringField(4)
 
 
+class JobMetadata(_messages.Message):
+  r"""Metadata available primarily for filtering jobs. Will be included in the
+  ListJob response and Job SUMMARY view+.
+
+  Fields:
+    bigTableDetails: Identification of a BigTable source used in the Dataflow
+      job.
+    bigqueryDetails: Identification of a BigQuery source used in the Dataflow
+      job.
+    datastoreDetails: Identification of a Datastore source used in the
+      Dataflow job.
+    fileDetails: Identification of a File source used in the Dataflow job.
+    pubsubDetails: Identification of a PubSub source used in the Dataflow job.
+    sdkVersion: The SDK version used to run the job.
+    spannerDetails: Identification of a Spanner source used in the Dataflow
+      job.
+  """
+
+  bigTableDetails = _messages.MessageField('BigTableIODetails', 1, repeated=True)
+  bigqueryDetails = _messages.MessageField('BigQueryIODetails', 2, repeated=True)
+  datastoreDetails = _messages.MessageField('DatastoreIODetails', 3, repeated=True)
+  fileDetails = _messages.MessageField('FileIODetails', 4, repeated=True)
+  pubsubDetails = _messages.MessageField('PubSubIODetails', 5, repeated=True)
+  sdkVersion = _messages.MessageField('SdkVersion', 6)
+  spannerDetails = _messages.MessageField('SpannerIODetails', 7, repeated=True)
+
+
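+
The new JobMetadata message and the per-connector *IODetails messages introduced in this file fit together as sketched below. Per the Job.jobMetadata description earlier in the diff, the Dataflow service populates this metadata itself, so the construction here is purely illustrative and every value is a placeholder:

    from apache_beam.runners.dataflow.internal.clients.dataflow import (
        dataflow_v1b3_messages as messages)

    # Illustrative only: the service fills jobMetadata; values are placeholders.
    metadata = messages.JobMetadata(
        bigqueryDetails=[messages.BigQueryIODetails(
            projectId='my-project', dataset='my_dataset', table='my_table')],
        fileDetails=[messages.FileIODetails(filePattern='gs://bucket/input-*')])
    job = messages.Job(name='example-job', jobMetadata=metadata)
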
 class JobMetrics(_messages.Message):
-  """JobMetrics contains a collection of metrics descibing the detailed
+  r"""JobMetrics contains a collection of metrics descibing the detailed
   progress of a Dataflow job. Metrics correspond to user-defined and system-
   defined metrics in the job.  This resource captures only the most recent
   values of each metric; time-series data can be queried for them (under the
@@ -2384,7 +2480,7 @@ class JobMetrics(_messages.Message):
 
 
 class KeyRangeDataDiskAssignment(_messages.Message):
-  """Data disk assignment information for a specific key-range of a sharded
+  r"""Data disk assignment information for a specific key-range of a sharded
   computation. Currently we only support UTF-8 character splits to simplify
   encoding into JSON.
 
@@ -2403,7 +2499,7 @@ class KeyRangeDataDiskAssignment(_messages.Message):
 
 
 class KeyRangeLocation(_messages.Message):
-  """Location information for a specific key-range of a sharded computation.
+  r"""Location information for a specific key-range of a sharded computation.
   Currently we only support UTF-8 character splits to simplify encoding into
   JSON.
 
@@ -2429,7 +2525,7 @@ class KeyRangeLocation(_messages.Message):
 
 
 class LaunchTemplateParameters(_messages.Message):
-  """Parameters to provide to the template being launched.
+  r"""Parameters to provide to the template being launched.
 
   Messages:
     ParametersValue: The runtime parameters to pass to the job.
@@ -2442,7 +2538,7 @@ class LaunchTemplateParameters(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class ParametersValue(_messages.Message):
-    """The runtime parameters to pass to the job.
+    r"""The runtime parameters to pass to the job.
 
     Messages:
       AdditionalProperty: An additional property for a ParametersValue object.
@@ -2452,7 +2548,7 @@ class ParametersValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a ParametersValue object.
+      r"""An additional property for a ParametersValue object.
 
       Fields:
         key: Name of the additional property.
@@ -2470,7 +2566,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class LaunchTemplateResponse(_messages.Message):
-  """Response to the request to launch a template.
+  r"""Response to the request to launch a template.
 
   Fields:
     job: The job that was launched, if the request was not a dry run and the
@@ -2481,7 +2577,7 @@ class LaunchTemplateResponse(_messages.Message):
 
 
 class LeaseWorkItemRequest(_messages.Message):
-  """Request to lease WorkItems.
+  r"""Request to lease WorkItems.
 
   Fields:
     currentWorkerTime: The current timestamp at the worker.
@@ -2503,7 +2599,7 @@ class LeaseWorkItemRequest(_messages.Message):
 
 
 class LeaseWorkItemResponse(_messages.Message):
-  """Response to a request to lease WorkItems.
+  r"""Response to a request to lease WorkItems.
 
   Fields:
     workItems: A list of the leased WorkItems.
@@ -2513,7 +2609,7 @@ class LeaseWorkItemResponse(_messages.Message):
 
 
 class ListJobMessagesResponse(_messages.Message):
-  """Response to a request to list job messages.
+  r"""Response to a request to list job messages.
 
   Fields:
     autoscalingEvents: Autoscaling events in ascending timestamp order.
@@ -2528,8 +2624,8 @@ class ListJobMessagesResponse(_messages.Message):
 
 
 class ListJobsResponse(_messages.Message):
-  """Response to a request to list Cloud Dataflow jobs.  This may be a partial
-  response, depending on the page size in the ListJobsRequest.
+  r"""Response to a request to list Cloud Dataflow jobs.  This may be a
+  partial response, depending on the page size in the ListJobsRequest.
 
   Fields:
     failedLocation: Zero or more messages describing locations that failed to
@@ -2544,7 +2640,7 @@ class ListJobsResponse(_messages.Message):
 
 
 class MapTask(_messages.Message):
-  """MapTask consists of an ordered set of instructions, each of which
+  r"""MapTask consists of an ordered set of instructions, each of which
   describes one particular low-level operation for the worker to perform in
   order to accomplish the MapTask's WorkItem.  Each instruction must appear in
   the list before any instructions which depends on its output.
@@ -2563,7 +2659,7 @@ class MapTask(_messages.Message):
 
 
 class MetricShortId(_messages.Message):
-  """The metric short id is returned to the user alongside an offset into
+  r"""The metric short id is returned to the user alongside an offset into
   ReportWorkItemStatusRequest
 
   Fields:
@@ -2577,7 +2673,8 @@ class MetricShortId(_messages.Message):
 
 
 class MetricStructuredName(_messages.Message):
-  """Identifies a metric, by describing the source which generated the metric.
+  r"""Identifies a metric, by describing the source which generated the
+  metric.
 
   Messages:
     ContextValue: Zero or more labeled fields which identify the part of the
@@ -2602,7 +2699,7 @@ class MetricStructuredName(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class ContextValue(_messages.Message):
-    """Zero or more labeled fields which identify the part of the job this
+    r"""Zero or more labeled fields which identify the part of the job this
     metric is associated with, such as the name of a step or collection.  For
     example, built-in counters associated with steps will have context['step']
     = <step-name>. Counters associated with PCollections in the SDK will have
@@ -2616,7 +2713,7 @@ class ContextValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a ContextValue object.
+      r"""An additional property for a ContextValue object.
 
       Fields:
         key: Name of the additional property.
@@ -2634,7 +2731,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class MetricUpdate(_messages.Message):
-  """Describes the state of a metric.
+  r"""Describes the state of a metric.
 
   Fields:
     cumulative: True if this metric is reported as the total cumulative
@@ -2643,6 +2740,9 @@ class MetricUpdate(_messages.Message):
       reported as a delta that is not associated with any WorkItem.
     distribution: A struct value describing properties of a distribution of
       numeric values.
+    gauge: A struct value describing properties of a Gauge. Metrics of gauge
+      type show the value of a metric across time, and are aggregated based on
+      the newest value.
     internal: Worker-computed aggregate value for internal use by the Dataflow
       service.
     kind: Metric aggregation kind.  The possible metric aggregation kinds are
@@ -2672,18 +2772,19 @@ class MetricUpdate(_messages.Message):
 
   cumulative = _messages.BooleanField(1)
   distribution = _messages.MessageField('extra_types.JsonValue', 2)
-  internal = _messages.MessageField('extra_types.JsonValue', 3)
-  kind = _messages.StringField(4)
-  meanCount = _messages.MessageField('extra_types.JsonValue', 5)
-  meanSum = _messages.MessageField('extra_types.JsonValue', 6)
-  name = _messages.MessageField('MetricStructuredName', 7)
-  scalar = _messages.MessageField('extra_types.JsonValue', 8)
-  set = _messages.MessageField('extra_types.JsonValue', 9)
-  updateTime = _messages.StringField(10)
+  gauge = _messages.MessageField('extra_types.JsonValue', 3)
+  internal = _messages.MessageField('extra_types.JsonValue', 4)
+  kind = _messages.StringField(5)
+  meanCount = _messages.MessageField('extra_types.JsonValue', 6)
+  meanSum = _messages.MessageField('extra_types.JsonValue', 7)
+  name = _messages.MessageField('MetricStructuredName', 8)
+  scalar = _messages.MessageField('extra_types.JsonValue', 9)
+  set = _messages.MessageField('extra_types.JsonValue', 10)
+  updateTime = _messages.StringField(11)
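
With the new gauge field in place (and the later MetricUpdate fields renumbered as shown above), a worker-side update for a gauge-style metric could look roughly like the sketch below. The module alias, the metric name, and the shape of the JsonValue payload are illustrative assumptions; the API only defines the field as a free-form struct.

    from apitools.base.py import extra_types
    from apache_beam.runners.dataflow.internal.clients.dataflow import (
        dataflow_v1b3_messages as dataflow)

    # A gauge reports the newest observed value; the JSON payload here is an
    # arbitrary example object, not a schema defined by this change.
    gauge_payload = extra_types.JsonValue(object_value=extra_types.JsonObject(
        properties=[extra_types.JsonObject.Property(
            key='value',
            value=extra_types.JsonValue(integer_value=42))]))
    update = dataflow.MetricUpdate(
        name=dataflow.MetricStructuredName(name='active_work_items'),
        gauge=gauge_payload)
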
 
 
 class MountedDataDisk(_messages.Message):
-  """Describes mounted data disk.
+  r"""Describes mounted data disk.
 
   Fields:
     dataDisk: The name of the data disk. This name is local to the Google
@@ -2695,7 +2796,7 @@ class MountedDataDisk(_messages.Message):
 
 
 class MultiOutputInfo(_messages.Message):
-  """Information about an output of a multi-output DoFn.
+  r"""Information about an output of a multi-output DoFn.
 
   Fields:
     tag: The id of the tag the user code will emit to this output by; this
@@ -2706,7 +2807,7 @@ class MultiOutputInfo(_messages.Message):
 
 
 class NameAndKind(_messages.Message):
-  """Basic metadata about a counter.
+  r"""Basic metadata about a counter.
 
   Enums:
     KindValueValuesEnum: Counter aggregation kind.
@@ -2717,7 +2818,7 @@ class NameAndKind(_messages.Message):
   """
 
   class KindValueValuesEnum(_messages.Enum):
-    """Counter aggregation kind.
+    r"""Counter aggregation kind.
 
     Values:
       INVALID: Counter aggregation kind was not set.
@@ -2749,7 +2850,7 @@ class KindValueValuesEnum(_messages.Enum):
 
 
 class Package(_messages.Message):
-  """The packages that must be installed in order for a worker to run the
+  r"""The packages that must be installed in order for a worker to run the
   steps of the Cloud Dataflow job that will be assigned to its worker pool.
   This is the mechanism by which the Cloud Dataflow SDK causes code to be
   loaded onto the workers. For example, the Cloud Dataflow Java SDK might use
@@ -2769,8 +2870,8 @@ class Package(_messages.Message):
 
 
 class ParDoInstruction(_messages.Message):
-  """An instruction that does a ParDo operation. Takes one main input and zero
-  or more side inputs, and produces zero or more outputs. Runs user code.
+  r"""An instruction that does a ParDo operation. Takes one main input and
+  zero or more side inputs, and produces zero or more outputs. Runs user code.
 
   Messages:
     UserFnValue: The user function to invoke.
@@ -2786,7 +2887,7 @@ class ParDoInstruction(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class UserFnValue(_messages.Message):
-    """The user function to invoke.
+    r"""The user function to invoke.
 
     Messages:
       AdditionalProperty: An additional property for a UserFnValue object.
@@ -2796,7 +2897,7 @@ class UserFnValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a UserFnValue object.
+      r"""An additional property for a UserFnValue object.
 
       Fields:
         key: Name of the additional property.
@@ -2816,7 +2917,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class ParallelInstruction(_messages.Message):
-  """Describes a particular operation comprising a MapTask.
+  r"""Describes a particular operation comprising a MapTask.
 
   Fields:
     flatten: Additional information for Flatten instructions.
@@ -2845,7 +2946,7 @@ class ParallelInstruction(_messages.Message):
 
 
 class Parameter(_messages.Message):
-  """Structured data associated with this message.
+  r"""Structured data associated with this message.
 
   Fields:
     key: Key or name for this parameter.
@@ -2857,7 +2958,7 @@ class Parameter(_messages.Message):
 
 
 class ParameterMetadata(_messages.Message):
-  """Metadata for a specific parameter.
+  r"""Metadata for a specific parameter.
 
   Fields:
     helpText: Required. The help text to display for the parameter.
@@ -2876,7 +2977,7 @@ class ParameterMetadata(_messages.Message):
 
 
 class PartialGroupByKeyInstruction(_messages.Message):
-  """An instruction that does a partial group-by-key. One input and one
+  r"""An instruction that does a partial group-by-key. One input and one
   output.
 
   Messages:
@@ -2900,7 +3001,7 @@ class PartialGroupByKeyInstruction(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class InputElementCodecValue(_messages.Message):
-    """The codec to use for interpreting an element in the input PTable.
+    r"""The codec to use for interpreting an element in the input PTable.
 
     Messages:
       AdditionalProperty: An additional property for a InputElementCodecValue
@@ -2911,7 +3012,7 @@ class InputElementCodecValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a InputElementCodecValue object.
+      r"""An additional property for a InputElementCodecValue object.
 
       Fields:
         key: Name of the additional property.
@@ -2925,7 +3026,7 @@ class AdditionalProperty(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class ValueCombiningFnValue(_messages.Message):
-    """The value combining function to invoke.
+    r"""The value combining function to invoke.
 
     Messages:
       AdditionalProperty: An additional property for a ValueCombiningFnValue
@@ -2936,7 +3037,7 @@ class ValueCombiningFnValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a ValueCombiningFnValue object.
+      r"""An additional property for a ValueCombiningFnValue object.
 
       Fields:
         key: Name of the additional property.
@@ -2957,7 +3058,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class PipelineDescription(_messages.Message):
-  """A descriptive representation of submitted pipeline as well as the
+  r"""A descriptive representation of submitted pipeline as well as the
   executed form.  This data is provided by the Dataflow service for ease of
   visualizing the pipeline and interpretting Dataflow provided metrics.
 
@@ -2975,7 +3076,7 @@ class PipelineDescription(_messages.Message):
 
 
 class Position(_messages.Message):
-  """Position defines a position within a collection of data.  The value can
+  r"""Position defines a position within a collection of data.  The value can
   be either the end position, a key (used with ordered collections), a byte
   offset, or a record index.
 
@@ -2998,8 +3099,20 @@ class Position(_messages.Message):
   shufflePosition = _messages.StringField(6)
 
 
+class PubSubIODetails(_messages.Message):
+  r"""Metadata for a PubSub connector used by the job.
+
+  Fields:
+    subscription: Subscription used in the connection.
+    topic: Topic accessed in the connection.
+  """
+
+  subscription = _messages.StringField(1)
+  topic = _messages.StringField(2)
+
+
 class PubsubLocation(_messages.Message):
-  """Identifies a pubsub location to use for transferring data into or out of
+  r"""Identifies a pubsub location to use for transferring data into or out of
   a streaming Dataflow job.
 
   Fields:
@@ -3029,7 +3142,7 @@ class PubsubLocation(_messages.Message):
 
 
 class ReadInstruction(_messages.Message):
-  """An instruction that reads records. Takes no inputs, produces one output.
+  r"""An instruction that reads records. Takes no inputs, produces one output.
 
   Fields:
     source: The source to read from.
@@ -3039,7 +3152,7 @@ class ReadInstruction(_messages.Message):
 
 
 class ReportWorkItemStatusRequest(_messages.Message):
-  """Request to report the status of WorkItems.
+  r"""Request to report the status of WorkItems.
 
   Fields:
     currentWorkerTime: The current timestamp at the worker.
@@ -3060,7 +3173,7 @@ class ReportWorkItemStatusRequest(_messages.Message):
 
 
 class ReportWorkItemStatusResponse(_messages.Message):
-  """Response from a request to report the status of WorkItems.
+  r"""Response from a request to report the status of WorkItems.
 
   Fields:
     workItemServiceStates: A set of messages indicating the service-side state
@@ -3073,7 +3186,7 @@ class ReportWorkItemStatusResponse(_messages.Message):
 
 
 class ReportedParallelism(_messages.Message):
-  """Represents the level of parallelism in a WorkItem's input, reported by
+  r"""Represents the level of parallelism in a WorkItem's input, reported by
   the worker.
 
   Fields:
@@ -3090,7 +3203,7 @@ class ReportedParallelism(_messages.Message):
 
 
 class ResourceUtilizationReport(_messages.Message):
-  """Worker metrics exported from workers. This contains resource utilization
+  r"""Worker metrics exported from workers. This contains resource utilization
   metrics accumulated from a variety of sources. For more information, see go
   /df-resource-signals.
 
@@ -3102,13 +3215,13 @@ class ResourceUtilizationReport(_messages.Message):
 
 
 class ResourceUtilizationReportResponse(_messages.Message):
-  """Service-side response to WorkerMessage reporting resource utilization.
+  r"""Service-side response to WorkerMessage reporting resource utilization.
   """
 
 
 
 class RuntimeEnvironment(_messages.Message):
-  """The environment values to set at runtime.
+  r"""The environment values to set at runtime.
 
   Fields:
     additionalExperiments: Additional experiment flags for the job.
@@ -3142,8 +3255,44 @@ class RuntimeEnvironment(_messages.Message):
   zone = _messages.StringField(9)
 
 
+class SdkVersion(_messages.Message):
+  r"""The version of the SDK used to run the jobl
+
+  Enums:
+    SdkSupportStatusValueValuesEnum: The support status for this SDK version.
+
+  Fields:
+    sdkSupportStatus: The support status for this SDK version.
+    version: The version of the SDK used to run the job.
+    versionDisplayName: A readable string describing the version of the sdk.
+  """
+
+  class SdkSupportStatusValueValuesEnum(_messages.Enum):
+    r"""The support status for this SDK version.
+
+    Values:
+      UNKNOWN: Cloud Dataflow is unaware of this version.
+      SUPPORTED: This is a known version of an SDK, and is supported.
+      STALE: A newer version of the SDK family exists, and an update is
+        recommended.
+      DEPRECATED: This version of the SDK is deprecated and will eventually be
+        no longer supported.
+      UNSUPPORTED: Support for this SDK version has ended and it should no
+        longer be used.
+    """
+    UNKNOWN = 0
+    SUPPORTED = 1
+    STALE = 2
+    DEPRECATED = 3
+    UNSUPPORTED = 4
+
+  sdkSupportStatus = _messages.EnumField('SdkSupportStatusValueValuesEnum', 1)
+  version = _messages.StringField(2)
+  versionDisplayName = _messages.StringField(3)
+
+
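
A plausible consumer of the new SdkVersion message is a check that flags jobs running on an aging SDK. The helper below is only a sketch under the same module-path assumption as the earlier example; the function name and the choice of statuses to flag are made up for illustration.

    from apache_beam.runners.dataflow.internal.clients.dataflow import (
        dataflow_v1b3_messages as dataflow)

    SupportStatus = dataflow.SdkVersion.SdkSupportStatusValueValuesEnum

    def needs_upgrade(sdk_version):
      # sdk_version is a dataflow.SdkVersion message, for example taken from
      # a job's metadata; an unset status is treated as UNKNOWN.
      status = sdk_version.sdkSupportStatus or SupportStatus.UNKNOWN
      return status in (SupportStatus.STALE, SupportStatus.DEPRECATED,
                        SupportStatus.UNSUPPORTED)

    print(needs_upgrade(dataflow.SdkVersion(
        version='2.7.0', sdkSupportStatus=SupportStatus.SUPPORTED)))  # False
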
 class SendDebugCaptureRequest(_messages.Message):
-  """Request to send encoded debug information.
+  r"""Request to send encoded debug information.
 
   Fields:
     componentId: The internal component id for which debug information is
@@ -3160,12 +3309,12 @@ class SendDebugCaptureRequest(_messages.Message):
 
 
 class SendDebugCaptureResponse(_messages.Message):
-  """Response to a send capture request.
+  r"""Response to a send capture request.
 nothing"""
 
 
 class SendWorkerMessagesRequest(_messages.Message):
-  """A request for sending worker messages to the service.
+  r"""A request for sending worker messages to the service.
 
   Fields:
     location: The location which contains the job
@@ -3177,7 +3326,7 @@ class SendWorkerMessagesRequest(_messages.Message):
 
 
 class SendWorkerMessagesResponse(_messages.Message):
-  """The response to the worker messages.
+  r"""The response to the worker messages.
 
   Fields:
     workerMessageResponses: The servers response to the worker messages.
@@ -3187,7 +3336,7 @@ class SendWorkerMessagesResponse(_messages.Message):
 
 
 class SeqMapTask(_messages.Message):
-  """Describes a particular function to invoke.
+  r"""Describes a particular function to invoke.
 
   Messages:
     UserFnValue: The user function to invoke.
@@ -3205,7 +3354,7 @@ class SeqMapTask(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class UserFnValue(_messages.Message):
-    """The user function to invoke.
+    r"""The user function to invoke.
 
     Messages:
       AdditionalProperty: An additional property for a UserFnValue object.
@@ -3215,7 +3364,7 @@ class UserFnValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a UserFnValue object.
+      r"""An additional property for a UserFnValue object.
 
       Fields:
         key: Name of the additional property.
@@ -3236,7 +3385,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class SeqMapTaskOutputInfo(_messages.Message):
-  """Information about an output of a SeqMapTask.
+  r"""Information about an output of a SeqMapTask.
 
   Fields:
     sink: The sink to write the output value to.
@@ -3248,7 +3397,7 @@ class SeqMapTaskOutputInfo(_messages.Message):
 
 
 class ShellTask(_messages.Message):
-  """A task which consists of a shell command for the worker to execute.
+  r"""A task which consists of a shell command for the worker to execute.
 
   Fields:
     command: The shell command to run.
@@ -3260,7 +3409,7 @@ class ShellTask(_messages.Message):
 
 
 class SideInputInfo(_messages.Message):
-  """Information about a side input of a DoFn or an input of a SeqDoFn.
+  r"""Information about a side input of a DoFn or an input of a SeqDoFn.
 
   Messages:
     KindValue: How to interpret the source element(s) as a side input value.
@@ -3277,7 +3426,7 @@ class SideInputInfo(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class KindValue(_messages.Message):
-    """How to interpret the source element(s) as a side input value.
+    r"""How to interpret the source element(s) as a side input value.
 
     Messages:
       AdditionalProperty: An additional property for a KindValue object.
@@ -3287,7 +3436,7 @@ class KindValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a KindValue object.
+      r"""An additional property for a KindValue object.
 
       Fields:
         key: Name of the additional property.
@@ -3305,7 +3454,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class Sink(_messages.Message):
-  """A sink that records can be encoded and written to.
+  r"""A sink that records can be encoded and written to.
 
   Messages:
     CodecValue: The codec to use to encode data written to the sink.
@@ -3318,7 +3467,7 @@ class Sink(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class CodecValue(_messages.Message):
-    """The codec to use to encode data written to the sink.
+    r"""The codec to use to encode data written to the sink.
 
     Messages:
       AdditionalProperty: An additional property for a CodecValue object.
@@ -3328,7 +3477,7 @@ class CodecValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a CodecValue object.
+      r"""An additional property for a CodecValue object.
 
       Fields:
         key: Name of the additional property.
@@ -3342,7 +3491,7 @@ class AdditionalProperty(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class SpecValue(_messages.Message):
-    """The sink to write to, plus its parameters.
+    r"""The sink to write to, plus its parameters.
 
     Messages:
       AdditionalProperty: An additional property for a SpecValue object.
@@ -3352,7 +3501,7 @@ class SpecValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a SpecValue object.
+      r"""An additional property for a SpecValue object.
 
       Fields:
         key: Name of the additional property.
@@ -3369,7 +3518,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class Source(_messages.Message):
-  """A source that records can be read and decoded from.
+  r"""A source that records can be read and decoded from.
 
   Messages:
     BaseSpecsValueListEntry: A BaseSpecsValueListEntry object.
@@ -3408,7 +3557,7 @@ class Source(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class BaseSpecsValueListEntry(_messages.Message):
-    """A BaseSpecsValueListEntry object.
+    r"""A BaseSpecsValueListEntry object.
 
     Messages:
       AdditionalProperty: An additional property for a BaseSpecsValueListEntry
@@ -3419,7 +3568,7 @@ class BaseSpecsValueListEntry(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a BaseSpecsValueListEntry object.
+      r"""An additional property for a BaseSpecsValueListEntry object.
 
       Fields:
         key: Name of the additional property.
@@ -3433,7 +3582,7 @@ class AdditionalProperty(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class CodecValue(_messages.Message):
-    """The codec to use to decode data read from the source.
+    r"""The codec to use to decode data read from the source.
 
     Messages:
       AdditionalProperty: An additional property for a CodecValue object.
@@ -3443,7 +3592,7 @@ class CodecValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a CodecValue object.
+      r"""An additional property for a CodecValue object.
 
       Fields:
         key: Name of the additional property.
@@ -3457,7 +3606,7 @@ class AdditionalProperty(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class SpecValue(_messages.Message):
-    """The source to read from, plus its parameters.
+    r"""The source to read from, plus its parameters.
 
     Messages:
       AdditionalProperty: An additional property for a SpecValue object.
@@ -3467,7 +3616,7 @@ class SpecValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a SpecValue object.
+      r"""An additional property for a SpecValue object.
 
       Fields:
         key: Name of the additional property.
@@ -3487,7 +3636,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class SourceFork(_messages.Message):
-  """DEPRECATED in favor of DynamicSourceSplit.
+  r"""DEPRECATED in favor of DynamicSourceSplit.
 
   Fields:
     primary: DEPRECATED
@@ -3503,7 +3652,7 @@ class SourceFork(_messages.Message):
 
 
 class SourceGetMetadataRequest(_messages.Message):
-  """A request to compute the SourceMetadata of a Source.
+  r"""A request to compute the SourceMetadata of a Source.
 
   Fields:
     source: Specification of the source whose metadata should be computed.
@@ -3513,7 +3662,7 @@ class SourceGetMetadataRequest(_messages.Message):
 
 
 class SourceGetMetadataResponse(_messages.Message):
-  """The result of a SourceGetMetadataOperation.
+  r"""The result of a SourceGetMetadataOperation.
 
   Fields:
     metadata: The computed metadata.
@@ -3523,7 +3672,7 @@ class SourceGetMetadataResponse(_messages.Message):
 
 
 class SourceMetadata(_messages.Message):
-  """Metadata about a Source useful for automatically optimizing and tuning
+  r"""Metadata about a Source useful for automatically optimizing and tuning
   the pipeline, etc.
 
   Fields:
@@ -3543,7 +3692,7 @@ class SourceMetadata(_messages.Message):
 
 
 class SourceOperationRequest(_messages.Message):
-  """A work item that represents the different operations that can be
+  r"""A work item that represents the different operations that can be
   performed on a user-defined Source specification.
 
   Fields:
@@ -3567,7 +3716,7 @@ class SourceOperationRequest(_messages.Message):
 
 
 class SourceOperationResponse(_messages.Message):
-  """The result of a SourceOperationRequest, specified in
+  r"""The result of a SourceOperationRequest, specified in
   ReportWorkItemStatusRequest.source_operation when the work item is
   completed.
 
@@ -3581,8 +3730,8 @@ class SourceOperationResponse(_messages.Message):
 
 
 class SourceSplitOptions(_messages.Message):
-  """Hints for splitting a Source into bundles (parts for parallel processing)
-  using SourceSplitRequest.
+  r"""Hints for splitting a Source into bundles (parts for parallel
+  processing) using SourceSplitRequest.
 
   Fields:
     desiredBundleSizeBytes: The source should be split into a set of bundles
@@ -3595,7 +3744,7 @@ class SourceSplitOptions(_messages.Message):
 
 
 class SourceSplitRequest(_messages.Message):
-  """Represents the operation to split a high-level Source specification into
+  r"""Represents the operation to split a high-level Source specification into
   bundles (parts for parallel processing).  At a high level, splitting of a
   source into bundles happens as follows: SourceSplitRequest is applied to the
   source. If it returns SOURCE_SPLIT_OUTCOME_USE_CURRENT, no further splitting
@@ -3616,7 +3765,7 @@ class SourceSplitRequest(_messages.Message):
 
 
 class SourceSplitResponse(_messages.Message):
-  """The response to a SourceSplitRequest.
+  r"""The response to a SourceSplitRequest.
 
   Enums:
     OutcomeValueValuesEnum: Indicates whether splitting happened and produced
@@ -3638,11 +3787,11 @@ class SourceSplitResponse(_messages.Message):
   """
 
   class OutcomeValueValuesEnum(_messages.Enum):
-    """Indicates whether splitting happened and produced a list of bundles. If
-    this is USE_CURRENT_SOURCE_AS_IS, the current source should be processed
-    "as is" without splitting. "bundles" is ignored in this case. If this is
-    SPLITTING_HAPPENED, then "bundles" contains a list of bundles into which
-    the source was split.
+    r"""Indicates whether splitting happened and produced a list of bundles.
+    If this is USE_CURRENT_SOURCE_AS_IS, the current source should be
+    processed "as is" without splitting. "bundles" is ignored in this case. If
+    this is SPLITTING_HAPPENED, then "bundles" contains a list of bundles into
+    which the source was split.
 
     Values:
       SOURCE_SPLIT_OUTCOME_UNKNOWN: The source split outcome is unknown, or
@@ -3662,7 +3811,7 @@ class OutcomeValueValuesEnum(_messages.Enum):
 
 
 class SourceSplitShard(_messages.Message):
-  """DEPRECATED in favor of DerivedSource.
+  r"""DEPRECATED in favor of DerivedSource.
 
   Enums:
     DerivationModeValueValuesEnum: DEPRECATED
@@ -3673,7 +3822,7 @@ class SourceSplitShard(_messages.Message):
   """
 
   class DerivationModeValueValuesEnum(_messages.Enum):
-    """DEPRECATED
+    r"""DEPRECATED
 
     Values:
       SOURCE_DERIVATION_MODE_UNKNOWN: The source derivation is unknown, or
@@ -3694,8 +3843,22 @@ class DerivationModeValueValuesEnum(_messages.Enum):
   source = _messages.MessageField('Source', 2)
 
 
+class SpannerIODetails(_messages.Message):
+  r"""Metadata for a Spanner connector used by the job.
+
+  Fields:
+    databaseId: DatabaseId accessed in the connection.
+    instanceId: InstanceId accessed in the connection.
+    projectId: ProjectId accessed in the connection.
+  """
+
+  databaseId = _messages.StringField(1)
+  instanceId = _messages.StringField(2)
+  projectId = _messages.StringField(3)
+
+
 class SplitInt64(_messages.Message):
-  """A representation of an int64, n, that is immune to precision loss when
+  r"""A representation of an int64, n, that is immune to precision loss when
   encoded in JSON.
 
   Fields:
@@ -3708,7 +3871,7 @@ class SplitInt64(_messages.Message):
 
 
 class StageSource(_messages.Message):
-  """Description of an input or output of an execution stage.
+  r"""Description of an input or output of an execution stage.
 
   Fields:
     name: Dataflow service generated name for this source.
@@ -3726,7 +3889,7 @@ class StageSource(_messages.Message):
 
 
 class StandardQueryParameters(_messages.Message):
-  """Query parameters accepted by all methods.
+  r"""Query parameters accepted by all methods.
 
   Enums:
     FXgafvValueValuesEnum: V1 error format.
@@ -3736,14 +3899,12 @@ class StandardQueryParameters(_messages.Message):
     f__xgafv: V1 error format.
     access_token: OAuth access token.
     alt: Data format for response.
-    bearer_token: OAuth bearer token.
     callback: JSONP
     fields: Selector specifying which fields to include in a partial response.
     key: API key. Your API key identifies your project and provides you with
       API access, quota, and reports. Required unless you provide an OAuth 2.0
       token.
     oauth_token: OAuth 2.0 token for the current user.
-    pp: Pretty-print response.
     prettyPrint: Returns response with indentations and line breaks.
     quotaUser: Available to use for quota purposes for server-side
       applications. Can be any arbitrary string assigned to a user, but should
@@ -3755,7 +3916,7 @@ class StandardQueryParameters(_messages.Message):
   """
 
   class AltValueValuesEnum(_messages.Enum):
-    """Data format for response.
+    r"""Data format for response.
 
     Values:
       json: Responses with Content-Type of application/json
@@ -3767,7 +3928,7 @@ class AltValueValuesEnum(_messages.Enum):
     proto = 2
 
   class FXgafvValueValuesEnum(_messages.Enum):
-    """V1 error format.
+    r"""V1 error format.
 
     Values:
       _1: v1 error format
@@ -3779,21 +3940,19 @@ class FXgafvValueValuesEnum(_messages.Enum):
   f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
   access_token = _messages.StringField(2)
   alt = _messages.EnumField('AltValueValuesEnum', 3, default=u'json')
-  bearer_token = _messages.StringField(4)
-  callback = _messages.StringField(5)
-  fields = _messages.StringField(6)
-  key = _messages.StringField(7)
-  oauth_token = _messages.StringField(8)
-  pp = _messages.BooleanField(9, default=True)
-  prettyPrint = _messages.BooleanField(10, default=True)
-  quotaUser = _messages.StringField(11)
-  trace = _messages.StringField(12)
-  uploadType = _messages.StringField(13)
-  upload_protocol = _messages.StringField(14)
+  callback = _messages.StringField(4)
+  fields = _messages.StringField(5)
+  key = _messages.StringField(6)
+  oauth_token = _messages.StringField(7)
+  prettyPrint = _messages.BooleanField(8, default=True)
+  quotaUser = _messages.StringField(9)
+  trace = _messages.StringField(10)
+  uploadType = _messages.StringField(11)
+  upload_protocol = _messages.StringField(12)
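
Because bearer_token and pp are removed and the remaining StandardQueryParameters fields are renumbered, callers that constructed this message with those keywords will no longer work against the regenerated client. A minimal sketch using only the surviving fields (the values are placeholders, and the import path is assumed as in the earlier examples):

    from apache_beam.runners.dataflow.internal.clients.dataflow import (
        dataflow_v1b3_messages as dataflow)

    # bearer_token=... or pp=... would now be rejected as unknown fields.
    params = dataflow.StandardQueryParameters(
        prettyPrint=False,
        fields='jobs(id,name,currentState)')
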
 
 
 class StateFamilyConfig(_messages.Message):
-  """State family configuration.
+  r"""State family configuration.
 
   Fields:
     isRead: If true, this family corresponds to a read operation.
@@ -3805,7 +3964,7 @@ class StateFamilyConfig(_messages.Message):
 
 
 class Status(_messages.Message):
-  """The `Status` type defines a logical error model that is suitable for
+  r"""The `Status` type defines a logical error model that is suitable for
   different programming environments, including REST APIs and RPC APIs. It is
   used by [gRPC](https://github.com/grpc). The error model is designed to be:
   - Simple to use and understand for most users - Flexible enough to meet
@@ -3853,7 +4012,7 @@ class Status(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class DetailsValueListEntry(_messages.Message):
-    """A DetailsValueListEntry object.
+    r"""A DetailsValueListEntry object.
 
     Messages:
       AdditionalProperty: An additional property for a DetailsValueListEntry
@@ -3865,7 +4024,7 @@ class DetailsValueListEntry(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a DetailsValueListEntry object.
+      r"""An additional property for a DetailsValueListEntry object.
 
       Fields:
         key: Name of the additional property.
@@ -3883,8 +4042,8 @@ class AdditionalProperty(_messages.Message):
 
 
 class Step(_messages.Message):
-  """Defines a particular step within a Cloud Dataflow job.  A job consists of
-  multiple steps, each of which performs some specific operation as part of
+  r"""Defines a particular step within a Cloud Dataflow job.  A job consists
+  of multiple steps, each of which performs some specific operation as part of
   the overall job.  Data is typically passed from one step to another as part
   of the job.  Here's an example of a sequence of steps which together
   implement a Map-Reduce job:    * Read a collection of data from some source,
@@ -3912,9 +4071,9 @@ class Step(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class PropertiesValue(_messages.Message):
-    """Named properties associated with the step. Each kind of predefined step
-    has its own required set of properties. Must be provided on Create.  Only
-    retrieved with JOB_VIEW_ALL.
+    r"""Named properties associated with the step. Each kind of predefined
+    step has its own required set of properties. Must be provided on Create.
+    Only retrieved with JOB_VIEW_ALL.
 
     Messages:
       AdditionalProperty: An additional property for a PropertiesValue object.
@@ -3924,7 +4083,7 @@ class PropertiesValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a PropertiesValue object.
+      r"""An additional property for a PropertiesValue object.
 
       Fields:
         key: Name of the additional property.
@@ -3942,7 +4101,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class StreamLocation(_messages.Message):
-  """Describes a stream of data, either as input to be processed or as output
+  r"""Describes a stream of data, either as input to be processed or as output
   of a streaming Dataflow job.
 
   Fields:
@@ -3960,7 +4119,7 @@ class StreamLocation(_messages.Message):
 
 
 class StreamingComputationConfig(_messages.Message):
-  """Configuration information for a single streaming computation.
+  r"""Configuration information for a single streaming computation.
 
   Fields:
     computationId: Unique identifier for this computation.
@@ -3976,7 +4135,7 @@ class StreamingComputationConfig(_messages.Message):
 
 
 class StreamingComputationRanges(_messages.Message):
-  """Describes full or partial data disk assignment information of the
+  r"""Describes full or partial data disk assignment information of the
   computation ranges.
 
   Fields:
@@ -3989,7 +4148,7 @@ class StreamingComputationRanges(_messages.Message):
 
 
 class StreamingComputationTask(_messages.Message):
-  """A task which describes what action should be performed for the specified
+  r"""A task which describes what action should be performed for the specified
   streaming computation ranges.
 
   Enums:
@@ -4003,7 +4162,7 @@ class StreamingComputationTask(_messages.Message):
   """
 
   class TaskTypeValueValuesEnum(_messages.Enum):
-    """A type of streaming computation task.
+    r"""A type of streaming computation task.
 
     Values:
       STREAMING_COMPUTATION_TASK_UNKNOWN: The streaming computation task is
@@ -4023,7 +4182,8 @@ class TaskTypeValueValuesEnum(_messages.Enum):
 
 
 class StreamingConfigTask(_messages.Message):
-  """A task that carries configuration information for streaming computations.
+  r"""A task that carries configuration information for streaming
+  computations.
 
   Messages:
     UserStepToStateFamilyNameMapValue: Map from user step names to state
@@ -4042,7 +4202,7 @@ class StreamingConfigTask(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class UserStepToStateFamilyNameMapValue(_messages.Message):
-    """Map from user step names to state families.
+    r"""Map from user step names to state families.
 
     Messages:
       AdditionalProperty: An additional property for a
@@ -4054,7 +4214,7 @@ class UserStepToStateFamilyNameMapValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a UserStepToStateFamilyNameMapValue
+      r"""An additional property for a UserStepToStateFamilyNameMapValue
       object.
 
       Fields:
@@ -4074,7 +4234,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class StreamingSetupTask(_messages.Message):
-  """A task which initializes part of a streaming Dataflow job.
+  r"""A task which initializes part of a streaming Dataflow job.
 
   Fields:
     drain: The user has requested drain.
@@ -4093,7 +4253,7 @@ class StreamingSetupTask(_messages.Message):
 
 
 class StreamingSideInputLocation(_messages.Message):
-  """Identifies the location of a streaming side input.
+  r"""Identifies the location of a streaming side input.
 
   Fields:
     stateFamily: Identifies the state family where this side input is stored.
@@ -4106,7 +4266,7 @@ class StreamingSideInputLocation(_messages.Message):
 
 
 class StreamingStageLocation(_messages.Message):
-  """Identifies the location of a streaming computation stage, for stage-to-
+  r"""Identifies the location of a streaming computation stage, for stage-to-
   stage communication.
 
   Fields:
@@ -4118,7 +4278,7 @@ class StreamingStageLocation(_messages.Message):
 
 
 class StringList(_messages.Message):
-  """A metric value representing a list of strings.
+  r"""A metric value representing a list of strings.
 
   Fields:
     elements: Elements of the list.
@@ -4128,7 +4288,7 @@ class StringList(_messages.Message):
 
 
 class StructuredMessage(_messages.Message):
-  """A rich message format, including a human readable string, a key for
+  r"""A rich message format, including a human readable string, a key for
   identifying the message, and structured data associated with the message for
   programmatic consumption.
 
@@ -4145,7 +4305,7 @@ class StructuredMessage(_messages.Message):
 
 
 class TaskRunnerSettings(_messages.Message):
-  """Taskrunner configuration settings.
+  r"""Taskrunner configuration settings.
 
   Fields:
     alsologtostderr: Whether to also send taskrunner log info to stderr.
@@ -4208,7 +4368,7 @@ class TaskRunnerSettings(_messages.Message):
 
 
 class TemplateMetadata(_messages.Message):
-  """Metadata describing a template.
+  r"""Metadata describing a template.
 
   Fields:
     description: Optional. A description of the template.
@@ -4222,8 +4382,8 @@ class TemplateMetadata(_messages.Message):
 
 
 class TopologyConfig(_messages.Message):
-  """Global topology of the streaming Dataflow job, including all computations
-  and their sharded locations.
+  r"""Global topology of the streaming Dataflow job, including all
+  computations and their sharded locations.
 
   Messages:
     UserStageToComputationNameMapValue: Maps user stage names to stable
@@ -4241,7 +4401,7 @@ class TopologyConfig(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class UserStageToComputationNameMapValue(_messages.Message):
-    """Maps user stage names to stable computation names.
+    r"""Maps user stage names to stable computation names.
 
     Messages:
       AdditionalProperty: An additional property for a
@@ -4253,7 +4413,7 @@ class UserStageToComputationNameMapValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a UserStageToComputationNameMapValue
+      r"""An additional property for a UserStageToComputationNameMapValue
       object.
 
       Fields:
@@ -4274,7 +4434,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class TransformSummary(_messages.Message):
-  """Description of the type, names/ids, and input/outputs for a transform.
+  r"""Description of the type, names/ids, and input/outputs for a transform.
 
   Enums:
     KindValueValuesEnum: Type of transform.
@@ -4291,7 +4451,7 @@ class TransformSummary(_messages.Message):
   """
 
   class KindValueValuesEnum(_messages.Enum):
-    """Type of transform.
+    r"""Type of transform.
 
     Values:
       UNKNOWN_KIND: Unrecognized transform type.
@@ -4324,7 +4484,7 @@ class KindValueValuesEnum(_messages.Enum):
 
 
 class WorkItem(_messages.Message):
-  """WorkItem represents basic information about a WorkItem to be executed in
+  r"""WorkItem represents basic information about a WorkItem to be executed in
   the cloud.
 
   Fields:
@@ -4369,7 +4529,7 @@ class WorkItem(_messages.Message):
 
 
 class WorkItemServiceState(_messages.Message):
-  """The Dataflow service's idea of the current state of a WorkItem being
+  r"""The Dataflow service's idea of the current state of a WorkItem being
   processed by a worker.
 
   Messages:
@@ -4397,7 +4557,7 @@ class WorkItemServiceState(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class HarnessDataValue(_messages.Message):
-    """Other data returned by the service, specific to the particular worker
+    r"""Other data returned by the service, specific to the particular worker
     harness.
 
     Messages:
@@ -4409,7 +4569,7 @@ class HarnessDataValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a HarnessDataValue object.
+      r"""An additional property for a HarnessDataValue object.
 
       Fields:
         key: Name of the additional property.
@@ -4432,7 +4592,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class WorkItemStatus(_messages.Message):
-  """Conveys a worker's progress through the work described by a WorkItem.
+  r"""Conveys a worker's progress through the work described by a WorkItem.
 
   Fields:
     completed: True if the WorkItem was completed (successfully or
@@ -4506,7 +4666,7 @@ class WorkItemStatus(_messages.Message):
 
 
 class WorkerHealthReport(_messages.Message):
-  """WorkerHealthReport contains information about the health of a worker.
+  r"""WorkerHealthReport contains information about the health of a worker.
   The VM should be identified by the labels attached to the WorkerMessage that
   this health ping belongs to.
 
@@ -4527,7 +4687,7 @@ class WorkerHealthReport(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class PodsValueListEntry(_messages.Message):
-    """A PodsValueListEntry object.
+    r"""A PodsValueListEntry object.
 
     Messages:
       AdditionalProperty: An additional property for a PodsValueListEntry
@@ -4538,7 +4698,7 @@ class PodsValueListEntry(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a PodsValueListEntry object.
+      r"""An additional property for a PodsValueListEntry object.
 
       Fields:
         key: Name of the additional property.
@@ -4557,8 +4717,8 @@ class AdditionalProperty(_messages.Message):
 
 
 class WorkerHealthReportResponse(_messages.Message):
-  """WorkerHealthReportResponse contains information returned to the worker in
-  response to a health ping.
+  r"""WorkerHealthReportResponse contains information returned to the worker
+  in response to a health ping.
 
   Fields:
     reportInterval: A positive value indicates the worker should change its
@@ -4570,7 +4730,7 @@ class WorkerHealthReportResponse(_messages.Message):
 
 
 class WorkerLifecycleEvent(_messages.Message):
-  """A report of an event in a worker's lifecycle. The proto contains one
+  r"""A report of an event in a worker's lifecycle. The proto contains one
   event, because the worker is expected to asynchronously send each message
   immediately after the event. Due to this asynchrony, messages may arrive out
   of order (or missing), and it is up to the consumer to interpret. The
@@ -4593,7 +4753,7 @@ class WorkerLifecycleEvent(_messages.Message):
   """
 
   class EventValueValuesEnum(_messages.Enum):
-    """The event being reported.
+    r"""The event being reported.
 
     Values:
       UNKNOWN_EVENT: Invalid event.
@@ -4618,7 +4778,7 @@ class EventValueValuesEnum(_messages.Enum):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class MetadataValue(_messages.Message):
-    """Other stats that can accompany an event. E.g. { "downloaded_bytes" :
+    r"""Other stats that can accompany an event. E.g. { "downloaded_bytes" :
     "123456" }
 
     Messages:
@@ -4629,7 +4789,7 @@ class MetadataValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a MetadataValue object.
+      r"""An additional property for a MetadataValue object.
 
       Fields:
         key: Name of the additional property.
@@ -4647,12 +4807,12 @@ class AdditionalProperty(_messages.Message):
 
 
 class WorkerMessage(_messages.Message):
-  """WorkerMessage provides information to the backend about a worker.
+  r"""WorkerMessage provides information to the backend about a worker.
 
   Messages:
     LabelsValue: Labels are used to group WorkerMessages. For example, a
       worker_message about a particular container might have the labels: {
-      "JOB_ID": "2015-04-22",   "WORKER_ID": "wordcount-vm-2015\u2026"
+      "JOB_ID": "2015-04-22",   "WORKER_ID": "wordcount-vm-2015..."
       "CONTAINER_TYPE": "worker",   "CONTAINER_ID": "ac1234def"} Label tags
       typically correspond to Label enum values. However, for ease of
       development other strings can be used as tags. LABEL_UNSPECIFIED should
@@ -4661,7 +4821,7 @@ class WorkerMessage(_messages.Message):
   Fields:
     labels: Labels are used to group WorkerMessages. For example, a
       worker_message about a particular container might have the labels: {
-      "JOB_ID": "2015-04-22",   "WORKER_ID": "wordcount-vm-2015\u2026"
+      "JOB_ID": "2015-04-22",   "WORKER_ID": "wordcount-vm-2015..."
       "CONTAINER_TYPE": "worker",   "CONTAINER_ID": "ac1234def"} Label tags
       typically correspond to Label enum values. However, for ease of
       development other strings can be used as tags. LABEL_UNSPECIFIED should
@@ -4676,9 +4836,9 @@ class WorkerMessage(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class LabelsValue(_messages.Message):
-    """Labels are used to group WorkerMessages. For example, a worker_message
+    r"""Labels are used to group WorkerMessages. For example, a worker_message
     about a particular container might have the labels: { "JOB_ID":
-    "2015-04-22",   "WORKER_ID": "wordcount-vm-2015\u2026"   "CONTAINER_TYPE":
+    "2015-04-22",   "WORKER_ID": "wordcount-vm-2015..."   "CONTAINER_TYPE":
     "worker",   "CONTAINER_ID": "ac1234def"} Label tags typically correspond
     to Label enum values. However, for ease of development other strings can
     be used as tags. LABEL_UNSPECIFIED should not be used here.
@@ -4691,7 +4851,7 @@ class LabelsValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a LabelsValue object.
+      r"""An additional property for a LabelsValue object.
 
       Fields:
         key: Name of the additional property.
@@ -4713,7 +4873,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class WorkerMessageCode(_messages.Message):
-  """A message code is used to report status and error messages to the
+  r"""A message code is used to report status and error messages to the
   service. The message codes are intended to be machine readable. The service
   will take care of translating these into user understandable messages if
   necessary.  Example use cases:   1. Worker processes reporting successful
@@ -4760,7 +4920,7 @@ class WorkerMessageCode(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class ParametersValue(_messages.Message):
-    """Parameters contains specific information about the code.  This is a
+    r"""Parameters contains specific information about the code.  This is a
     struct to allow parameters of different types.  Examples:  1. For a
     "HARNESS_STARTED" message parameters might provide the name     of the
     worker and additional data like timing information.  2. For a
@@ -4781,7 +4941,7 @@ class ParametersValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a ParametersValue object.
+      r"""An additional property for a ParametersValue object.
 
       Fields:
         key: Name of the additional property.
@@ -4798,7 +4958,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class WorkerMessageResponse(_messages.Message):
-  """A worker_message response allows the server to pass information to the
+  r"""A worker_message response allows the server to pass information to the
   sender.
 
   Fields:
@@ -4816,7 +4976,7 @@ class WorkerMessageResponse(_messages.Message):
 
 
 class WorkerPool(_messages.Message):
-  """Describes one particular pool of Cloud Dataflow workers to be
+  r"""Describes one particular pool of Cloud Dataflow workers to be
   instantiated by the Cloud Dataflow service in order to perform the
   computations required by a job.  Note that a workflow job may use multiple
   pools, in order to match the various computational requirements of the
@@ -4898,7 +5058,7 @@ class WorkerPool(_messages.Message):
   """
 
   class DefaultPackageSetValueValuesEnum(_messages.Enum):
-    """The default package set to install.  This allows the service to select
+    r"""The default package set to install.  This allows the service to select
     a default set of packages which are useful to worker harnesses written in
     a particular language.
 
@@ -4918,7 +5078,7 @@ class DefaultPackageSetValueValuesEnum(_messages.Enum):
     DEFAULT_PACKAGE_SET_PYTHON = 3
 
   class IpConfigurationValueValuesEnum(_messages.Enum):
-    """Configuration for VM IPs.
+    r"""Configuration for VM IPs.
 
     Values:
       WORKER_IP_UNSPECIFIED: The configuration is unknown, or unspecified.
@@ -4930,7 +5090,7 @@ class IpConfigurationValueValuesEnum(_messages.Enum):
     WORKER_IP_PRIVATE = 2
 
   class TeardownPolicyValueValuesEnum(_messages.Enum):
-    """Sets the policy for determining when to turndown worker pool. Allowed
+    r"""Sets the policy for determining when to turndown worker pool. Allowed
     values are: `TEARDOWN_ALWAYS`, `TEARDOWN_ON_SUCCESS`, and
     `TEARDOWN_NEVER`. `TEARDOWN_ALWAYS` means workers are always torn down
     regardless of whether the job succeeds. `TEARDOWN_ON_SUCCESS` means
@@ -4958,7 +5118,7 @@ class TeardownPolicyValueValuesEnum(_messages.Enum):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class MetadataValue(_messages.Message):
-    """Metadata to set on the Google Compute Engine VMs.
+    r"""Metadata to set on the Google Compute Engine VMs.
 
     Messages:
       AdditionalProperty: An additional property for a MetadataValue object.
@@ -4968,7 +5128,7 @@ class MetadataValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a MetadataValue object.
+      r"""An additional property for a MetadataValue object.
 
       Fields:
         key: Name of the additional property.
@@ -4982,7 +5142,7 @@ class AdditionalProperty(_messages.Message):
 
   @encoding.MapUnrecognizedFields('additionalProperties')
   class PoolArgsValue(_messages.Message):
-    """Extra arguments for this worker pool.
+    r"""Extra arguments for this worker pool.
 
     Messages:
       AdditionalProperty: An additional property for a PoolArgsValue object.
@@ -4993,7 +5153,7 @@ class PoolArgsValue(_messages.Message):
     """
 
     class AdditionalProperty(_messages.Message):
-      """An additional property for a PoolArgsValue object.
+      r"""An additional property for a PoolArgsValue object.
 
       Fields:
         key: Name of the additional property.
@@ -5029,7 +5189,7 @@ class AdditionalProperty(_messages.Message):
 
 
 class WorkerSettings(_messages.Message):
-  """Provides data to pass through to the worker harness.
+  r"""Provides data to pass through to the worker harness.
 
   Fields:
     baseUrl: The base URL for accessing Google Cloud APIs.  When workers
@@ -5059,7 +5219,7 @@ class WorkerSettings(_messages.Message):
 
 
 class WorkerShutdownNotice(_messages.Message):
-  """Shutdown notification from workers. This is to be sent by the shutdown
+  r"""Shutdown notification from workers. This is to be sent by the shutdown
   script of the worker VM so that the backend knows that the VM is being shut
   down.
 
@@ -5073,11 +5233,12 @@ class WorkerShutdownNotice(_messages.Message):
 
 
 class WorkerShutdownNoticeResponse(_messages.Message):
-  """Service-side response to WorkerMessage issuing shutdown notice."""
+  r"""Service-side response to WorkerMessage issuing shutdown notice."""
 
 
 class WriteInstruction(_messages.Message):
-  """An instruction that writes records. Takes one input, produces no outputs.
+  r"""An instruction that writes records. Takes one input, produces no
+  outputs.
 
   Fields:
     input: The input.


 

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 149451)
    Time Spent: 1h 20m  (was: 1h 10m)

> Update Dataflow Python API client
> ---------------------------------
>
>                 Key: BEAM-5460
>                 URL: https://issues.apache.org/jira/browse/BEAM-5460
>             Project: Beam
>          Issue Type: Improvement
>          Components: sdk-py-core
>    Affects Versions: 2.7.0
>            Reporter: Charles Chen
>            Assignee: Charles Chen
>            Priority: Major
>          Time Spent: 1h 20m
>  Remaining Estimate: 0h
>
> We should update the Dataflow Python API client to reflect the most current Dataflow API.



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
