diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml
new file mode 100644
index 0000000..e69de29
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index ee8d16a..b35d666 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -30,7 +30,7 @@ env_vars: {
 
 env_vars: {
     key: "V2_STAGING_BUCKET"
-    value: "docs-staging-v2-staging"
+    value: "docs-staging-v2"
 }
 
 # It will upload the docker image after successful builds.
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
new file mode 100755
index 0000000..f525142
--- /dev/null
+++ b/.kokoro/populate-secrets.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Copyright 2020 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function msg { println "$*" >&2 ;}
+function println { printf '%s\n' "$(now) $*" ;}
+
+
+# Populates requested secrets set in SECRET_MANAGER_KEYS from service account:
+# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
+SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
+msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
+mkdir -p "${SECRET_LOCATION}"
+for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
+do
+  msg "Retrieving secret ${key}"
+  if docker run --entrypoint=gcloud \
+    --volume="${KOKORO_GFILE_DIR}":"${KOKORO_GFILE_DIR}" \
+    gcr.io/google.com/cloudsdktool/cloud-sdk \
+    secrets versions access latest \
+    --project cloud-devrel-kokoro-resources \
+    --secret "${key}" \
+    > "${SECRET_LOCATION}/${key}"
+  then
+    msg "Secret written to ${SECRET_LOCATION}/${key}"
+  else
+    msg "Error retrieving secret ${key}"
+  fi
+done
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
index 1a537a6..9178930 100644
--- a/.kokoro/release/common.cfg
+++ b/.kokoro/release/common.cfg
@@ -23,42 +23,18 @@ env_vars: {
     value: "github/python-video-transcoder/.kokoro/release.sh"
 }
 
-# Fetch the token needed for reporting release status to GitHub
-before_action {
-  fetch_keystore {
-    keystore_resource {
-      keystore_config_id: 73713
-      keyname: "yoshi-automation-github-key"
-    }
-  }
-}
-
-# Fetch PyPI password
-before_action {
-  fetch_keystore {
-    keystore_resource {
-      keystore_config_id: 73713
-      keyname: "google_cloud_pypi_password"
-    }
-  }
-}
-
-# Fetch magictoken to use with Magic Github Proxy 
-before_action {
-  fetch_keystore {
-    keystore_resource {
-      keystore_config_id: 73713
-      keyname: "releasetool-magictoken"
-    }
-  }
+# Fetch PyPI password
+before_action {
+  fetch_keystore {
+    keystore_resource {
+      keystore_config_id: 73713
+      keyname: "google_cloud_pypi_password"
+    }
+  }
 }
 
-# Fetch api key to use with Magic Github Proxy 
-before_action {
-  fetch_keystore {
-    keystore_resource {
-      keystore_config_id: 73713
-      keyname: "magic-github-proxy-api-key"
-    }
-  }
-}
+# Tokens needed to report release status back to GitHub
+env_vars: {
+  key: "SECRET_MANAGER_KEYS"
+  value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
index a6fb80e..53c2039 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.6"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py36"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-video-transcoder/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index 52af18f..33e7d9c 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.7"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py37"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-video-transcoder/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index fbb0302..c94d8bf 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.8"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py38"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-video-transcoder/.kokoro/test-samples.sh"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 3ed28ed..9611b36 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -28,6 +28,12 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
     git checkout $LATEST_RELEASE
 fi
 
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+  echo "No tests run. './samples' not found"
+  exit 0
+fi
+
 # Disable buffering, so that the logs stream through.
 export PYTHONUNBUFFERED=1
 
@@ -101,4 +107,4 @@ cd "$ROOT"
 # Workaround for Kokoro permissions issue: delete secrets
 rm testing/{test-env.sh,client-secrets.json,service-account.json}
 
-exit "$RTN"
\ No newline at end of file
+exit "$RTN"
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index e8c4251..f39236e 100755
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -15,9 +15,14 @@
 
 set -eo pipefail
 
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"  || ret_code=$?
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+    chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+    ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+    echo "cleanup";
+}
+trap cleanup EXIT
 
-chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
-${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
-
-exit ${ret_code}
+"$(dirname "$0")"/populate-secrets.sh # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a4c6c9a..74d13ff 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
 # Changelog
 
+## [0.2.0](https://www.github.com/googleapis/python-video-transcoder/compare/v0.1.0...v0.2.0) (2020-11-14)
+
+
+### Features
+
+* add create_time, start_time, and end_time to jobs ([#10](https://www.github.com/googleapis/python-video-transcoder/issues/10)) ([a5a210e](https://www.github.com/googleapis/python-video-transcoder/commit/a5a210e16420e3450200a346aaa2cd18a7270cf3))
+
 ## 0.1.0 (2020-08-24)
 
 
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index b3d1f60..039f436 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,44 +1,95 @@
 <!-- # Generated by synthtool. DO NOT EDIT! !-->
-# Contributor Code of Conduct
+# Code of Conduct
 
-As contributors and maintainers of this project,
-and in the interest of fostering an open and welcoming community,
-we pledge to respect all people who contribute through reporting issues,
-posting feature requests, updating documentation,
-submitting pull requests or patches, and other activities.
+## Our Pledge
 
-We are committed to making participation in this project
-a harassment-free experience for everyone,
-regardless of level of experience, gender, gender identity and expression,
-sexual orientation, disability, personal appearance,
-body size, race, ethnicity, age, religion, or nationality.
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of
+experience, education, socio-economic status, nationality, personal appearance,
+race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+*   Using welcoming and inclusive language
+*   Being respectful of differing viewpoints and experiences
+*   Gracefully accepting constructive criticism
+*   Focusing on what is best for the community
+*   Showing empathy towards other community members
 
 Examples of unacceptable behavior by participants include:
 
-* The use of sexualized language or imagery
-* Personal attacks
-* Trolling or insulting/derogatory comments
-* Public or private harassment
-* Publishing other's private information,
-such as physical or electronic
-addresses, without explicit permission
-* Other unethical or unprofessional conduct.
+*   The use of sexualized language or imagery and unwelcome sexual attention or
+    advances
+*   Trolling, insulting/derogatory comments, and personal or political attacks
+*   Public or private harassment
+*   Publishing others' private information, such as a physical or electronic
+    address, without explicit permission
+*   Other conduct which could reasonably be considered inappropriate in a
+    professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
 
 Project maintainers have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct.
-By adopting this Code of Conduct,
-project maintainers commit themselves to fairly and consistently
-applying these principles to every aspect of managing this project.
-Project maintainers who do not follow or enforce the Code of Conduct
-may be permanently removed from the project team.
-
-This code of conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community.
-
-Instances of abusive, harassing, or otherwise unacceptable behavior
-may be reported by opening an issue
-or contacting one or more of the project maintainers.
-
-This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0,
-available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, or to ban temporarily or permanently any
+contributor for other behaviors that they deem inappropriate, threatening,
+offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+This Code of Conduct also applies outside the project spaces when the Project
+Steward has a reasonable belief that an individual's behavior may have a
+negative impact on the project or its community.
+
+## Conflict Resolution
+
+We do not believe that all conflict is bad; healthy debate and disagreement
+often yield positive results. However, it is never okay to be disrespectful or
+to engage in behavior that violates the project’s code of conduct.
+
+If you see someone violating the code of conduct, you are encouraged to address
+the behavior directly with those involved. Many issues can be resolved quickly
+and easily, and this gives people more control over the outcome of their
+dispute. If you are unable to resolve the matter for any reason, or if the
+behavior is threatening or harassing, report it. We are dedicated to providing
+an environment where participants feel welcome and safe.
+
+
+Reports should be directed to *googleapis-stewards@google.com*, the
+Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to
+receive and address reported violations of the code of conduct. They will then
+work with a committee consisting of representatives from the Open Source
+Programs Office and the Google Open Source Strategy team. If for any reason you
+are uncomfortable reaching out to the Project Steward, please email
+opensource@google.com.
+
+We will investigate every complaint, but you may not receive a direct response.
+We will use our discretion in determining when and how to follow up on reported
+incidents, which may range from not taking action to permanent expulsion from
+the project and project-sponsored spaces. We will notify the accused of the
+report and provide them an opportunity to discuss it before any action is taken.
+The identity of the reporter will be omitted from the details of the report
+supplied to the accused. In potentially harmful situations, such as ongoing
+harassment or threats to anyone's safety, we may take action without notice.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
+available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
\ No newline at end of file
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 0329f26..b9090f3 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -80,25 +80,6 @@ We use `nox <https://nox.readthedocs.io/en/latest/>`__ to instrument our tests.
 
 .. nox: https://pypi.org/project/nox/
 
-Note on Editable Installs / Develop Mode
-========================================
-
-- As mentioned previously, using ``setuptools`` in `develop mode`_
-  or a ``pip`` `editable install`_ is not possible with this
-  library. This is because this library uses `namespace packages`_.
-  For context see `Issue #2316`_ and the relevant `PyPA issue`_.
-
-  Since ``editable`` / ``develop`` mode can't be used, packages
-  need to be installed directly. Hence your changes to the source
-  tree don't get incorporated into the **already installed**
-  package.
-
-.. _namespace packages: https://www.python.org/dev/peps/pep-0420/
-.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316
-.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12
-.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode
-.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs
-
 *****************************************
 I'm getting weird errors... Can you help?
 *****************************************
diff --git a/docs/conf.py b/docs/conf.py
index b66933a..1d6b6aa 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
 # -- General configuration ------------------------------------------------
 
 # If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.6.3"
+needs_sphinx = "1.5.5"
 
 # Add any Sphinx extension module names here, as strings. They can be
 # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -39,6 +39,7 @@
     "sphinx.ext.autosummary",
     "sphinx.ext.intersphinx",
     "sphinx.ext.coverage",
+    "sphinx.ext.doctest",
     "sphinx.ext.napoleon",
     "sphinx.ext.todo",
     "sphinx.ext.viewcode",
@@ -348,6 +349,7 @@
     "google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
     "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
     "grpc": ("https://grpc.io/grpc/python/", None),
+    "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
 }
 
 
diff --git a/docs/transcoder_v1beta1/types.rst b/docs/transcoder_v1beta1/types.rst
index 641cefb..a3f02bd 100644
--- a/docs/transcoder_v1beta1/types.rst
+++ b/docs/transcoder_v1beta1/types.rst
@@ -3,3 +3,4 @@ Types for Google Cloud Video Transcoder v1beta1 API
 
 .. automodule:: google.cloud.video.transcoder_v1beta1.types
     :members:
+    :show-inheritance:
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py
index 88525e5..727193f 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py
@@ -31,6 +31,7 @@
 from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers
 from google.cloud.video.transcoder_v1beta1.types import resources
 from google.cloud.video.transcoder_v1beta1.types import services
+from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
 
 from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO
 from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport
@@ -53,12 +54,53 @@ class TranscoderServiceAsyncClient:
     DEFAULT_MTLS_ENDPOINT = TranscoderServiceClient.DEFAULT_MTLS_ENDPOINT
 
     job_path = staticmethod(TranscoderServiceClient.job_path)
-
+    parse_job_path = staticmethod(TranscoderServiceClient.parse_job_path)
     job_template_path = staticmethod(TranscoderServiceClient.job_template_path)
+    parse_job_template_path = staticmethod(
+        TranscoderServiceClient.parse_job_template_path
+    )
+
+    common_billing_account_path = staticmethod(
+        TranscoderServiceClient.common_billing_account_path
+    )
+    parse_common_billing_account_path = staticmethod(
+        TranscoderServiceClient.parse_common_billing_account_path
+    )
+
+    common_folder_path = staticmethod(TranscoderServiceClient.common_folder_path)
+    parse_common_folder_path = staticmethod(
+        TranscoderServiceClient.parse_common_folder_path
+    )
+
+    common_organization_path = staticmethod(
+        TranscoderServiceClient.common_organization_path
+    )
+    parse_common_organization_path = staticmethod(
+        TranscoderServiceClient.parse_common_organization_path
+    )
+
+    common_project_path = staticmethod(TranscoderServiceClient.common_project_path)
+    parse_common_project_path = staticmethod(
+        TranscoderServiceClient.parse_common_project_path
+    )
+
+    common_location_path = staticmethod(TranscoderServiceClient.common_location_path)
+    parse_common_location_path = staticmethod(
+        TranscoderServiceClient.parse_common_location_path
+    )
 
     from_service_account_file = TranscoderServiceClient.from_service_account_file
     from_service_account_json = from_service_account_file
 
+    @property
+    def transport(self) -> TranscoderServiceTransport:
+        """Return the transport used by the client instance.
+
+        Returns:
+            TranscoderServiceTransport: The transport used by the client instance.
+        """
+        return self._client.transport
+
     get_transport_class = functools.partial(
         type(TranscoderServiceClient).get_transport_class, type(TranscoderServiceClient)
     )
@@ -85,16 +127,19 @@ def __init__(
             client_options (ClientOptions): Custom options for the client. It
                 won't take effect if a ``transport`` instance is provided.
                 (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                 environment variable can also be used to override the endpoint:
                 "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint, this is the default value for
-                the environment variable) and "auto" (auto switch to the default
-                mTLS endpoint if client SSL credentials is present). However,
-                the ``api_endpoint`` property takes precedence if provided.
-                (2) The ``client_cert_source`` property is used to provide client
-                SSL credentials for mutual TLS transport. If not provided, the
-                default SSL credentials will be used if present.
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
 
         Raises:
             google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -150,7 +195,8 @@ async def create_job(
         # Create or coerce a protobuf request object.
         # Sanity check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
-        if request is not None and any([parent, job]):
+        has_flattened_params = any([parent, job])
+        if request is not None and has_flattened_params:
             raise ValueError(
                 "If the `request` argument is set, then none of "
                 "the individual field arguments should be set."
@@ -226,7 +272,8 @@ async def list_jobs(
         # Create or coerce a protobuf request object.
         # Sanity check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
-        if request is not None and any([parent]):
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
             raise ValueError(
                 "If the `request` argument is set, then none of "
                 "the individual field arguments should be set."
@@ -301,7 +348,8 @@ async def get_job(
         # Create or coerce a protobuf request object.
         # Sanity check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
-        if request is not None and any([name]):
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
             raise ValueError(
                 "If the `request` argument is set, then none of "
                 "the individual field arguments should be set."
@@ -366,7 +414,8 @@ async def delete_job(
         # Create or coerce a protobuf request object.
         # Sanity check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
-        if request is not None and any([name]):
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
             raise ValueError(
                 "If the `request` argument is set, then none of "
                 "the individual field arguments should be set."
@@ -435,7 +484,8 @@ async def create_job_template(
                 resource name.
 
                 This value should be 4-63 characters, and valid
-                characters are ``/[a-zA-Z0-9_-_]/``.
+                characters must match the regular expression
+                ``[a-zA-Z][a-zA-Z0-9_-]*``.
                 This corresponds to the ``job_template_id`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
@@ -453,7 +503,8 @@ async def create_job_template(
         # Create or coerce a protobuf request object.
         # Sanity check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
-        if request is not None and any([parent, job_template, job_template_id]):
+        has_flattened_params = any([parent, job_template, job_template_id])
+        if request is not None and has_flattened_params:
             raise ValueError(
                 "If the `request` argument is set, then none of "
                 "the individual field arguments should be set."
@@ -532,7 +583,8 @@ async def list_job_templates(
         # Create or coerce a protobuf request object.
         # Sanity check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
-        if request is not None and any([parent]):
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
             raise ValueError(
                 "If the `request` argument is set, then none of "
                 "the individual field arguments should be set."
@@ -608,7 +660,8 @@ async def get_job_template(
         # Create or coerce a protobuf request object.
         # Sanity check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
-        if request is not None and any([name]):
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
             raise ValueError(
                 "If the `request` argument is set, then none of "
                 "the individual field arguments should be set."
@@ -673,7 +726,8 @@ async def delete_job_template(
         # Create or coerce a protobuf request object.
         # Sanity check: If we got a request object, we should *not* have
         # gotten any keyword arguments that map to the request.
-        if request is not None and any([name]):
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
             raise ValueError(
                 "If the `request` argument is set, then none of "
                 "the individual field arguments should be set."
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py
index bcba1b9..f91a9eb 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py
@@ -16,23 +16,26 @@
 #
 
 from collections import OrderedDict
+from distutils import util
 import os
 import re
-from typing import Callable, Dict, Sequence, Tuple, Type, Union
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
 import pkg_resources
 
-import google.api_core.client_options as ClientOptions  # type: ignore
+from google.api_core import client_options as client_options_lib  # type: ignore
 from google.api_core import exceptions  # type: ignore
 from google.api_core import gapic_v1  # type: ignore
 from google.api_core import retry as retries  # type: ignore
 from google.auth import credentials  # type: ignore
 from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
 from google.auth.exceptions import MutualTLSChannelError  # type: ignore
 from google.oauth2 import service_account  # type: ignore
 
 from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers
 from google.cloud.video.transcoder_v1beta1.types import resources
 from google.cloud.video.transcoder_v1beta1.types import services
+from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
 
 from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO
 from .transports.grpc import TranscoderServiceGrpcTransport
@@ -138,6 +141,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
 
     from_service_account_json = from_service_account_file
 
+    @property
+    def transport(self) -> TranscoderServiceTransport:
+        """Return the transport used by the client instance.
+
+        Returns:
+            TranscoderServiceTransport: The transport used by the client instance.
+        """
+        return self._transport
+
     @staticmethod
     def job_path(project: str, location: str, job: str,) -> str:
         """Return a fully-qualified job string."""
@@ -170,12 +182,71 @@ def parse_job_template_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}
 
+    @staticmethod
+    def common_billing_account_path(billing_account: str,) -> str:
+        """Return a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str,) -> str:
+        """Return a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder,)
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str,) -> str:
+        """Return a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization,)
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str,) -> str:
+        """Return a fully-qualified project string."""
+        return "projects/{project}".format(project=project,)
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str,) -> str:
+        """Return a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project, location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
     def __init__(
         self,
         *,
-        credentials: credentials.Credentials = None,
-        transport: Union[str, TranscoderServiceTransport] = None,
-        client_options: ClientOptions = None,
+        credentials: Optional[credentials.Credentials] = None,
+        transport: Union[str, TranscoderServiceTransport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
         client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
     ) -> None:
         """Instantiate the transcoder service client.
@@ -189,23 +260,26 @@ def __init__(
             transport (Union[str, ~.TranscoderServiceTransport]): The
                 transport to use. If set to None, a transport is chosen
                 automatically.
-            client_options (ClientOptions): Custom options for the client. It
-                won't take effect if a ``transport`` instance is provided.
+            client_options (client_options_lib.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
                 (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                 environment variable can also be used to override the endpoint:
                 "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint, this is the default value for
-                the environment variable) and "auto" (auto switch to the default
-                mTLS endpoint if client SSL credentials is present). However,
-                the ``api_endpoint`` property takes precedence if provided.
-                (2) The ``client_cert_source`` property is used to provide client
-                SSL credentials for mutual TLS transport. If not provided, the
-                default SSL credentials will be used if present.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):	
-                The client info used to send a user-agent string along with	
-                API requests. If ``None``, then default info will be used.	
-                Generally, you only need to set this if you're developing	
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
                 your own client library.
 
         Raises:
@@ -213,29 +287,47 @@ def __init__(
                 creation failed for any reason.
         """
         if isinstance(client_options, dict):
-            client_options = ClientOptions.from_dict(client_options)
+            client_options = client_options_lib.from_dict(client_options)
         if client_options is None:
-            client_options = ClientOptions.ClientOptions()
+            client_options = client_options_lib.ClientOptions()
+
+        # Create SSL credentials for mutual TLS if needed.
+        use_client_cert = bool(
+            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+        )
 
-        if client_options.api_endpoint is None:
-            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never")
+        ssl_credentials = None
+        is_mtls = False
+        if use_client_cert:
+            if client_options.client_cert_source:
+                import grpc  # type: ignore
+
+                cert, key = client_options.client_cert_source()
+                ssl_credentials = grpc.ssl_channel_credentials(
+                    certificate_chain=cert, private_key=key
+                )
+                is_mtls = True
+            else:
+                creds = SslCredentials()
+                is_mtls = creds.is_mtls
+                ssl_credentials = creds.ssl_credentials if is_mtls else None
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        else:
+            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
             if use_mtls_env == "never":
-                client_options.api_endpoint = self.DEFAULT_ENDPOINT
+                api_endpoint = self.DEFAULT_ENDPOINT
             elif use_mtls_env == "always":
-                client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+                api_endpoint = self.DEFAULT_MTLS_ENDPOINT
             elif use_mtls_env == "auto":
-                has_client_cert_source = (
-                    client_options.client_cert_source is not None
-                    or mtls.has_default_client_cert_source()
-                )
-                client_options.api_endpoint = (
-                    self.DEFAULT_MTLS_ENDPOINT
-                    if has_client_cert_source
-                    else self.DEFAULT_ENDPOINT
+                api_endpoint = (
+                    self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
                 )
             else:
                 raise MutualTLSChannelError(
-                    "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always"
+                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
                 )
 
         # Save or instantiate the transport.
@@ -259,10 +351,9 @@ def __init__(
             self._transport = Transport(
                 credentials=credentials,
                 credentials_file=client_options.credentials_file,
-                host=client_options.api_endpoint,
+                host=api_endpoint,
                 scopes=client_options.scopes,
-                api_mtls_endpoint=client_options.api_endpoint,
-                client_cert_source=client_options.client_cert_source,
+                ssl_channel_credentials=ssl_credentials,
                 quota_project_id=client_options.quota_project_id,
                 client_info=client_info,
             )
@@ -602,7 +693,8 @@ def create_job_template(
                 resource name.
 
                 This value should be 4-63 characters, and valid
-                characters are ``/[a-zA-Z0-9_-_]/``.
+                characters must match the regular expression
+                ``[a-zA-Z][a-zA-Z0-9_-]*``.
                 This corresponds to the ``job_template_id`` field
                 on the ``request`` instance; if ``request`` is provided, this
                 should not be set.
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py
index f088718..dbb7824 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py
@@ -19,7 +19,7 @@
 import typing
 import pkg_resources
 
-from google import auth
+from google import auth  # type: ignore
 from google.api_core import exceptions  # type: ignore
 from google.api_core import gapic_v1  # type: ignore
 from google.api_core import retry as retries  # type: ignore
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py
index 1ba71d9..41c7569 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py
@@ -15,6 +15,7 @@
 # limitations under the License.
 #
 
+import warnings
 from typing import Callable, Dict, Optional, Sequence, Tuple
 
 from google.api_core import grpc_helpers  # type: ignore
@@ -23,7 +24,6 @@
 from google.auth import credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 
-
 import grpc  # type: ignore
 
 from google.cloud.video.transcoder_v1beta1.types import resources
@@ -64,6 +64,7 @@ def __init__(
         channel: grpc.Channel = None,
         api_mtls_endpoint: str = None,
         client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
         quota_project_id: Optional[str] = None,
         client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
     ) -> None:
@@ -84,20 +85,22 @@ def __init__(
                 ignored if ``channel`` is provided.
             channel (Optional[grpc.Channel]): A ``Channel`` instance through
                 which to make calls.
-            api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
-                provided, it overrides the ``host`` argument and tries to create
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
                 a mutual TLS channel with client SSL credentials from
                 ``client_cert_source`` or applicatin default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
-                callback to provide client SSL certificate bytes and private key
-                bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
-                is None.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
             quota_project_id (Optional[str]): An optional project to use for billing
                 and quota.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):	
-                The client info used to send a user-agent string along with	
-                API requests. If ``None``, then default info will be used.	
-                Generally, you only need to set this if you're developing	
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
                 your own client library.
 
         Raises:
@@ -106,6 +109,8 @@ def __init__(
           google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
               and ``credentials_file`` are passed.
         """
+        self._ssl_channel_credentials = ssl_channel_credentials
+
         if channel:
             # Sanity check: Ensure that channel and credentials are not both
             # provided.
@@ -113,7 +118,13 @@ def __init__(
 
             # If a channel was explicitly provided, set it.
             self._grpc_channel = channel
+            self._ssl_channel_credentials = None
         elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
             host = (
                 api_mtls_endpoint
                 if ":" in api_mtls_endpoint
@@ -144,6 +155,24 @@ def __init__(
                 scopes=scopes or self.AUTH_SCOPES,
                 quota_project_id=quota_project_id,
             )
+            self._ssl_channel_credentials = ssl_credentials
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_channel_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
 
         self._stubs = {}  # type: Dict[str, Callable]
 
@@ -204,19 +233,8 @@ def create_channel(
 
     @property
     def grpc_channel(self) -> grpc.Channel:
-        """Create the channel designed to connect to this service.
-
-        This property caches on the instance; repeated calls return
-        the same channel.
+        """Return the channel designed to connect to this service.
         """
-        # Sanity check: Only create a new channel if we do not already
-        # have one.
-        if not hasattr(self, "_grpc_channel"):
-            self._grpc_channel = self.create_channel(
-                self._host, credentials=self._credentials,
-            )
-
-        # Return the channel from cache.
         return self._grpc_channel
 
     @property
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py
index eb97bc0..23a7485 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py
@@ -15,10 +15,12 @@
 # limitations under the License.
 #
 
+import warnings
 from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
 
 from google.api_core import gapic_v1  # type: ignore
 from google.api_core import grpc_helpers_async  # type: ignore
+from google import auth  # type: ignore
 from google.auth import credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
 
@@ -106,6 +108,7 @@ def __init__(
         channel: aio.Channel = None,
         api_mtls_endpoint: str = None,
         client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
         quota_project_id=None,
         client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
     ) -> None:
@@ -127,14 +130,16 @@ def __init__(
                 are passed to :func:`google.auth.default`.
             channel (Optional[aio.Channel]): A ``Channel`` instance through
                 which to make calls.
-            api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
-                provided, it overrides the ``host`` argument and tries to create
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
                 a mutual TLS channel with client SSL credentials from
                 ``client_cert_source`` or applicatin default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
-                callback to provide client SSL certificate bytes and private key
-                bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
-                is None.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
             quota_project_id (Optional[str]): An optional project to use for billing
                 and quota.
             client_info (google.api_core.gapic_v1.client_info.ClientInfo):	
@@ -149,6 +154,8 @@ def __init__(
           google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
               and ``credentials_file`` are passed.
         """
+        self._ssl_channel_credentials = ssl_channel_credentials
+
         if channel:
             # Sanity check: Ensure that channel and credentials are not both
             # provided.
@@ -156,13 +163,24 @@ def __init__(
 
             # If a channel was explicitly provided, set it.
             self._grpc_channel = channel
+            self._ssl_channel_credentials = None
         elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
             host = (
                 api_mtls_endpoint
                 if ":" in api_mtls_endpoint
                 else api_mtls_endpoint + ":443"
             )
 
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
             # Create SSL credentials with client_cert_source or application
             # default SSL credentials.
             if client_cert_source:
@@ -182,6 +200,24 @@ def __init__(
                 scopes=scopes or self.AUTH_SCOPES,
                 quota_project_id=quota_project_id,
             )
+            self._ssl_channel_credentials = ssl_credentials
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # create a new channel. The provided one is ignored.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_channel_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
 
         # Run the base constructor.
         super().__init__(
@@ -202,13 +238,6 @@ def grpc_channel(self) -> aio.Channel:
         This property caches on the instance; repeated calls return
         the same channel.
         """
-        # Sanity check: Only create a new channel if we do not already
-        # have one.
-        if not hasattr(self, "_grpc_channel"):
-            self._grpc_channel = self.create_channel(
-                self._host, credentials=self._credentials,
-            )
-
         # Return the channel from cache.
         return self._grpc_channel
 
diff --git a/google/cloud/video/transcoder_v1beta1/types/resources.py b/google/cloud/video/transcoder_v1beta1/types/resources.py
index 0fd3180..886df3b 100644
--- a/google/cloud/video/transcoder_v1beta1/types/resources.py
+++ b/google/cloud/video/transcoder_v1beta1/types/resources.py
@@ -19,6 +19,7 @@
 
 
 from google.protobuf import duration_pb2 as duration  # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
 
 
 __protobuf__ = proto.module(
@@ -99,6 +100,14 @@ class Job(proto.Message):
             Output only. List of failure details. This property may
             contain additional information about the failure when
             ``failure_reason`` is present.
+        create_time (~.timestamp.Timestamp):
+            Output only. The time the job was created.
+        start_time (~.timestamp.Timestamp):
+            Output only. The time the transcoding
+            started.
+        end_time (~.timestamp.Timestamp):
+            Output only. The time the transcoding
+            finished.
     """
 
     class ProcessingState(proto.Enum):
@@ -152,6 +161,12 @@ class OriginUri(proto.Message):
         proto.MESSAGE, number=11, message="FailureDetail",
     )
 
+    create_time = proto.Field(proto.MESSAGE, number=12, message=timestamp.Timestamp,)
+
+    start_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,)
+
+    end_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,)
+
 
 class JobTemplate(proto.Message):
     r"""Transcoding job template resource.
@@ -387,10 +402,10 @@ class Manifest(proto.Message):
 
     Attributes:
         file_name (str):
-            The name of the generated file. The default is ``"master"``
-            with the extension suffix corresponding to the
-            ``Manifest.type``.
-        type (~.resources.Manifest.ManifestType):
+            The name of the generated file. The default is
+            ``"manifest"`` with the extension suffix corresponding to
+            the ``Manifest.type``.
+        type_ (~.resources.Manifest.ManifestType):
             Required. Type of the manifest, can be "HLS"
             or "DASH".
         mux_streams (Sequence[str]):
@@ -410,7 +425,7 @@ class ManifestType(proto.Enum):
 
     file_name = proto.Field(proto.STRING, number=1)
 
-    type = proto.Field(proto.ENUM, number=2, enum=ManifestType,)
+    type_ = proto.Field(proto.ENUM, number=2, enum=ManifestType,)
 
     mux_streams = proto.RepeatedField(proto.STRING, number=3)
 
@@ -432,7 +447,7 @@ class SpriteSheet(proto.Message):
     r"""Sprite sheet configuration.
 
     Attributes:
-        format (str):
+        format_ (str):
             Format type. The default is ``"jpeg"``.
 
             Supported formats:
@@ -476,7 +491,7 @@ class SpriteSheet(proto.Message):
             Specify the interval value in seconds.
     """
 
-    format = proto.Field(proto.STRING, number=1)
+    format_ = proto.Field(proto.STRING, number=1)
 
     file_prefix = proto.Field(proto.STRING, number=2)
 
@@ -811,11 +826,17 @@ class VideoStream(proto.Message):
             Enforce specified codec preset. The default is
             ``"veryfast"``.
         height_pixels (int):
-            Required. The height of video in pixels. Must
-            be an even integer.
+            The height of the video in pixels. Must be an
+            even integer. When not specified, the height is
+            adjusted to match the specified width and input
+            aspect ratio. If both are omitted, the input
+            height is used.
         width_pixels (int):
-            Required. The width of video in pixels. Must
-            be an even integer.
+            The width of the video in pixels. Must be an
+            even integer. When not specified, the width is
+            adjusted to match the specified height and input
+            aspect ratio. If both are omitted, the input
+            width is used.
         pixel_format (str):
             Pixel format to use. The default is ``"yuv420p"``.
 
@@ -880,10 +901,34 @@ class VideoStream(proto.Message):
             equal to zero. Must be less than
             ``VideoStream.gop_frame_count`` if set. The default is 0.
         frame_rate (float):
-            Required. The video frame rate in frames per
-            second. Must be less than or equal to 120. Will
-            default to the input frame rate if larger than
-            the input frame rate.
+            Required. The target video frame rate in frames per second
+            (FPS). Must be less than or equal to 120. Will default to
+            the input frame rate if larger than the input frame rate.
+            The API will generate an output FPS that is divisible by the
+            input FPS, and smaller or equal to the target FPS.
+
+            The following table shows the computed video FPS given the
+            target FPS (in parenthesis) and input FPS (in the first
+            column):
+
+            ::
+
+               |        | (30)   | (60)   | (25) | (50) |
+               |--------|--------|--------|------|------|
+               | 240    | Fail   | Fail   | Fail | Fail |
+               | 120    | 30     | 60     | 20   | 30   |
+               | 100    | 25     | 50     | 20   | 30   |
+               | 50     | 25     | 50     | 20   | 30   |
+               | 60     | 30     | 60     | 20   | 30   |
+               | 59.94  | 29.97  | 59.94  | 20   | 30   |
+               | 48     | 24     | 48     | 20   | 30   |
+               | 30     | 30     | 30     | 20   | 30   |
+               | 25     | 25     | 25     | 20   | 30   |
+               | 24     | 24     | 24     | 20   | 30   |
+               | 23.976 | 23.976 | 23.976 | 20   | 30   |
+               | 15     | 15     | 15     | 20   | 30   |
+               | 12     | 12     | 12     | 20   | 30   |
+               | 10     | 10     | 10     | 20   | 30   |
         aq_strength (float):
             Specify the intensity of the adaptive
             quantizer (AQ). Must be between 0 and 1, where 0
diff --git a/google/cloud/video/transcoder_v1beta1/types/services.py b/google/cloud/video/transcoder_v1beta1/types/services.py
index 5de258d..7e27235 100644
--- a/google/cloud/video/transcoder_v1beta1/types/services.py
+++ b/google/cloud/video/transcoder_v1beta1/types/services.py
@@ -136,7 +136,8 @@ class CreateJobTemplateRequest(proto.Message):
             name.
 
             This value should be 4-63 characters, and valid characters
-            are ``/[a-zA-Z0-9_-_]/``.
+            must match the regular expression
+            ``[a-zA-Z][a-zA-Z0-9_-]*``.
     """
 
     parent = proto.Field(proto.STRING, number=1)
diff --git a/noxfile.py b/noxfile.py
index dfdc268..153daa5 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -72,7 +72,9 @@ def default(session):
     # Install all test dependencies, then install this package in-place.
     session.install("asyncmock", "pytest-asyncio")
 
-    session.install("mock", "pytest", "pytest-cov")
+    session.install(
+        "mock", "pytest", "pytest-cov",
+    )
     session.install("-e", ".")
 
     # Run py.test against the unit tests.
@@ -173,7 +175,9 @@ def docfx(session):
     """Build the docfx yaml files for this library."""
 
     session.install("-e", ".")
-    session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+    # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
+    # https://github.com/docascode/sphinx-docfx-yaml/issues/97
+    session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
 
     shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
     session.run(
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
index ff599eb..21f6d2a 100755
--- a/scripts/decrypt-secrets.sh
+++ b/scripts/decrypt-secrets.sh
@@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" )
 # Work from the project root.
 cd $ROOT
 
+# Prevent it from overriding files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+       [[ -f "testing/service-account.json" ]] || \
+       [[ -f "testing/client-secrets.json" ]]; then
+    echo "One or more target files exist, aborting."
+    exit 1
+fi
+
 # Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
 PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
 
 gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+       --project="${PROJECT_ID}" \
        > testing/test-env.sh
 gcloud secrets versions access latest \
        --secret="python-docs-samples-service-account" \
+       --project="${PROJECT_ID}" \
        > testing/service-account.json
 gcloud secrets versions access latest \
        --secret="python-docs-samples-client-secrets" \
-       > testing/client-secrets.json
\ No newline at end of file
+       --project="${PROJECT_ID}" \
+       > testing/client-secrets.json
diff --git a/setup.py b/setup.py
index 1599096..0318706 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@
 import os
 import setuptools  # type: ignore
 
-version = "0.1.0"
+version = "0.2.0"
 
 package_root = os.path.abspath(os.path.dirname(__file__))
 
diff --git a/synth.metadata b/synth.metadata
index 45009e7..76cbf07 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -3,30 +3,22 @@
     {
       "git": {
         "name": ".",
-        "remote": "sso://devrel/cloud/libraries/python/python-video-transcoder",
-        "sha": "d83d277c4513cf6c1caaf56ca3a6564cd5910c86"
-      }
-    },
-    {
-      "git": {
-        "name": "googleapis",
-        "remote": "https://github.com/googleapis/googleapis.git",
-        "sha": "72eb54c45231d84266ca059473bc1793c394fcb2",
-        "internalRef": "328059685"
+        "remote": "git@github.com:googleapis/python-video-transcoder",
+        "sha": "5a006f4972af69e2244ee71617d75c79212c9b97"
       }
     },
     {
       "git": {
         "name": "synthtool",
         "remote": "https://github.com/googleapis/synthtool.git",
-        "sha": "05de3e1e14a0b07eab8b474e669164dbd31f81fb"
+        "sha": "d5fc0bcf9ea9789c5b0e3154a9e3b29e5cea6116"
       }
     },
     {
       "git": {
         "name": "synthtool",
         "remote": "https://github.com/googleapis/synthtool.git",
-        "sha": "05de3e1e14a0b07eab8b474e669164dbd31f81fb"
+        "sha": "d5fc0bcf9ea9789c5b0e3154a9e3b29e5cea6116"
       }
     }
   ],
diff --git a/synth.py b/synth.py
index 76c55bf..ffa41ff 100644
--- a/synth.py
+++ b/synth.py
@@ -25,21 +25,23 @@
 # ----------------------------------------------------------------------------
 # Generate transcoder GAPIC layer
 # ----------------------------------------------------------------------------
-library = gapic.py_library(
-    service="transcoder",
-    version="v1beta1",
-    bazel_target="//google/cloud/video/transcoder/v1beta1:video-transcoder-v1beta1-py",
-)
-
-s.move(
-    library,
-    excludes=[
-        "setup.py",
-        "docs/index.rst",
-        "noxfile.py",
-        "scripts/fixup_transcoder_v1beta1_keywords.py",
-    ],
-)
+versions = ["v1beta1"]
+for version in versions:
+    library = gapic.py_library(
+        service="transcoder",
+        version=version,
+        bazel_target=f"//google/cloud/video/transcoder/{version}:video-transcoder-{version}-py",
+    )
+
+    s.move(
+        library,
+        excludes=[
+            "setup.py",
+            "docs/index.rst",
+            "noxfile.py",
+            f"scripts/fixup_transcoder_{version}_keywords.py",
+        ],
+    )
 
 # ----------------------------------------------------------------------------
 # Add templated files
diff --git a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py
index dc47558..33af7c6 100644
--- a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py
+++ b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py
@@ -44,6 +44,7 @@
 from google.cloud.video.transcoder_v1beta1.types import services
 from google.oauth2 import service_account
 from google.protobuf import duration_pb2 as duration  # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
 
 
 def client_cert_source_callback():
@@ -101,12 +102,12 @@ def test_transcoder_service_client_from_service_account_file(client_class):
     ) as factory:
         factory.return_value = creds
         client = client_class.from_service_account_file("dummy/file/path.json")
-        assert client._transport._credentials == creds
+        assert client.transport._credentials == creds
 
         client = client_class.from_service_account_json("dummy/file/path.json")
-        assert client._transport._credentials == creds
+        assert client.transport._credentials == creds
 
-        assert client._transport._host == "transcoder.googleapis.com:443"
+        assert client.transport._host == "transcoder.googleapis.com:443"
 
 
 def test_transcoder_service_client_get_transport_class():
@@ -162,15 +163,14 @@ def test_transcoder_service_client_client_options(
             credentials_file=None,
             host="squid.clam.whelk",
             scopes=None,
-            api_mtls_endpoint="squid.clam.whelk",
-            client_cert_source=None,
+            ssl_channel_credentials=None,
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
         )
 
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
     # "never".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
         with mock.patch.object(transport_class, "__init__") as patched:
             patched.return_value = None
             client = client_class()
@@ -179,15 +179,14 @@ def test_transcoder_service_client_client_options(
                 credentials_file=None,
                 host=client.DEFAULT_ENDPOINT,
                 scopes=None,
-                api_mtls_endpoint=client.DEFAULT_ENDPOINT,
-                client_cert_source=None,
+                ssl_channel_credentials=None,
                 quota_project_id=None,
                 client_info=transports.base.DEFAULT_CLIENT_INFO,
             )
 
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
     # "always".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}):
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
         with mock.patch.object(transport_class, "__init__") as patched:
             patched.return_value = None
             client = client_class()
@@ -196,95 +195,185 @@ def test_transcoder_service_client_client_options(
                 credentials_file=None,
                 host=client.DEFAULT_MTLS_ENDPOINT,
                 scopes=None,
-                api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
-                client_cert_source=None,
+                ssl_channel_credentials=None,
                 quota_project_id=None,
                 client_info=transports.base.DEFAULT_CLIENT_INFO,
             )
 
-    # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
-    # "auto", and client_cert_source is provided.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class()
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+    ):
+        with pytest.raises(ValueError):
+            client = client_class()
+
+    # Check the case quota_project_id is provided
+    options = client_options.ClientOptions(quota_project_id="octopus")
+    with mock.patch.object(transport_class, "__init__") as patched:
+        patched.return_value = None
+        client = client_class(client_options=options)
+        patched.assert_called_once_with(
+            credentials=None,
+            credentials_file=None,
+            host=client.DEFAULT_ENDPOINT,
+            scopes=None,
+            ssl_channel_credentials=None,
+            quota_project_id="octopus",
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
+        )
+
+
+@pytest.mark.parametrize(
+    "client_class,transport_class,transport_name,use_client_cert_env",
+    [
+        (
+            TranscoderServiceClient,
+            transports.TranscoderServiceGrpcTransport,
+            "grpc",
+            "true",
+        ),
+        (
+            TranscoderServiceAsyncClient,
+            transports.TranscoderServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "true",
+        ),
+        (
+            TranscoderServiceClient,
+            transports.TranscoderServiceGrpcTransport,
+            "grpc",
+            "false",
+        ),
+        (
+            TranscoderServiceAsyncClient,
+            transports.TranscoderServiceGrpcAsyncIOTransport,
+            "grpc_asyncio",
+            "false",
+        ),
+    ],
+)
+@mock.patch.object(
+    TranscoderServiceClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(TranscoderServiceClient),
+)
+@mock.patch.object(
+    TranscoderServiceAsyncClient,
+    "DEFAULT_ENDPOINT",
+    modify_default_endpoint(TranscoderServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_transcoder_service_client_mtls_env_auto(
+    client_class, transport_class, transport_name, use_client_cert_env
+):
+    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+    # Check the case client_cert_source is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
         options = client_options.ClientOptions(
             client_cert_source=client_cert_source_callback
         )
         with mock.patch.object(transport_class, "__init__") as patched:
-            patched.return_value = None
-            client = client_class(client_options=options)
-            patched.assert_called_once_with(
-                credentials=None,
-                credentials_file=None,
-                host=client.DEFAULT_MTLS_ENDPOINT,
-                scopes=None,
-                api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
-                client_cert_source=client_cert_source_callback,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-            )
-
-    # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
-    # "auto", and default_client_cert_source is provided.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
-        with mock.patch.object(transport_class, "__init__") as patched:
+            ssl_channel_creds = mock.Mock()
             with mock.patch(
-                "google.auth.transport.mtls.has_default_client_cert_source",
-                return_value=True,
+                "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
             ):
                 patched.return_value = None
-                client = client_class()
+                client = client_class(client_options=options)
+
+                if use_client_cert_env == "false":
+                    expected_ssl_channel_creds = None
+                    expected_host = client.DEFAULT_ENDPOINT
+                else:
+                    expected_ssl_channel_creds = ssl_channel_creds
+                    expected_host = client.DEFAULT_MTLS_ENDPOINT
+
                 patched.assert_called_once_with(
                     credentials=None,
                     credentials_file=None,
-                    host=client.DEFAULT_MTLS_ENDPOINT,
+                    host=expected_host,
                     scopes=None,
-                    api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
-                    client_cert_source=None,
+                    ssl_channel_credentials=expected_ssl_channel_creds,
                     quota_project_id=None,
                     client_info=transports.base.DEFAULT_CLIENT_INFO,
                 )
 
-    # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
-    # "auto", but client_cert_source and default_client_cert_source are None.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+    # Check the case ADC client cert is provided. Whether client cert is used depends on
+    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
         with mock.patch.object(transport_class, "__init__") as patched:
             with mock.patch(
-                "google.auth.transport.mtls.has_default_client_cert_source",
-                return_value=False,
+                "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
             ):
-                patched.return_value = None
-                client = client_class()
-                patched.assert_called_once_with(
-                    credentials=None,
-                    credentials_file=None,
-                    host=client.DEFAULT_ENDPOINT,
-                    scopes=None,
-                    api_mtls_endpoint=client.DEFAULT_ENDPOINT,
-                    client_cert_source=None,
-                    quota_project_id=None,
-                    client_info=transports.base.DEFAULT_CLIENT_INFO,
-                )
-
-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has
-    # unsupported value.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError):
-            client = client_class()
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, "__init__") as patched:
-        patched.return_value = None
-        client = client_class(client_options=options)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
-            scopes=None,
-            api_mtls_endpoint=client.DEFAULT_ENDPOINT,
-            client_cert_source=None,
-            quota_project_id="octopus",
-            client_info=transports.base.DEFAULT_CLIENT_INFO,
-        )
+                with mock.patch(
+                    "google.auth.transport.grpc.SslCredentials.is_mtls",
+                    new_callable=mock.PropertyMock,
+                ) as is_mtls_mock:
+                    with mock.patch(
+                        "google.auth.transport.grpc.SslCredentials.ssl_credentials",
+                        new_callable=mock.PropertyMock,
+                    ) as ssl_credentials_mock:
+                        if use_client_cert_env == "false":
+                            is_mtls_mock.return_value = False
+                            ssl_credentials_mock.return_value = None
+                            expected_host = client.DEFAULT_ENDPOINT
+                            expected_ssl_channel_creds = None
+                        else:
+                            is_mtls_mock.return_value = True
+                            ssl_credentials_mock.return_value = mock.Mock()
+                            expected_host = client.DEFAULT_MTLS_ENDPOINT
+                            expected_ssl_channel_creds = (
+                                ssl_credentials_mock.return_value
+                            )
+
+                        patched.return_value = None
+                        client = client_class()
+                        patched.assert_called_once_with(
+                            credentials=None,
+                            credentials_file=None,
+                            host=expected_host,
+                            scopes=None,
+                            ssl_channel_credentials=expected_ssl_channel_creds,
+                            quota_project_id=None,
+                            client_info=transports.base.DEFAULT_CLIENT_INFO,
+                        )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            with mock.patch(
+                "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+            ):
+                with mock.patch(
+                    "google.auth.transport.grpc.SslCredentials.is_mtls",
+                    new_callable=mock.PropertyMock,
+                ) as is_mtls_mock:
+                    is_mtls_mock.return_value = False
+                    patched.return_value = None
+                    client = client_class()
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=client.DEFAULT_ENDPOINT,
+                        scopes=None,
+                        ssl_channel_credentials=None,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                    )
 
 
 @pytest.mark.parametrize(
@@ -311,8 +400,7 @@ def test_transcoder_service_client_client_options_scopes(
             credentials_file=None,
             host=client.DEFAULT_ENDPOINT,
             scopes=["1", "2"],
-            api_mtls_endpoint=client.DEFAULT_ENDPOINT,
-            client_cert_source=None,
+            ssl_channel_credentials=None,
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
         )
@@ -342,8 +430,7 @@ def test_transcoder_service_client_client_options_credentials_file(
             credentials_file="credentials.json",
             host=client.DEFAULT_ENDPOINT,
             scopes=None,
-            api_mtls_endpoint=client.DEFAULT_ENDPOINT,
-            client_cert_source=None,
+            ssl_channel_credentials=None,
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
         )
@@ -362,8 +449,7 @@ def test_transcoder_service_client_client_options_from_dict():
             credentials_file=None,
             host="squid.clam.whelk",
             scopes=None,
-            api_mtls_endpoint="squid.clam.whelk",
-            client_cert_source=None,
+            ssl_channel_credentials=None,
             quota_project_id=None,
             client_info=transports.base.DEFAULT_CLIENT_INFO,
         )
@@ -379,7 +465,7 @@ def test_create_job(transport: str = "grpc", request_type=services.CreateJobRequ
     request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.create_job), "__call__") as call:
+    with mock.patch.object(type(client.transport.create_job), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = resources.Job(
             name="name_value",
@@ -400,6 +486,7 @@ def test_create_job(transport: str = "grpc", request_type=services.CreateJobRequ
         assert args[0] == services.CreateJobRequest()
 
     # Establish that the response is the type that we expect.
+
     assert isinstance(response, resources.Job)
 
     assert response.name == "name_value"
@@ -420,19 +507,19 @@ def test_create_job_from_dict():
 
 
 @pytest.mark.asyncio
-async def test_create_job_async(transport: str = "grpc_asyncio"):
+async def test_create_job_async(
+    transport: str = "grpc_asyncio", request_type=services.CreateJobRequest
+):
     client = TranscoderServiceAsyncClient(
         credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
-    request = services.CreateJobRequest()
+    request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.create_job), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.create_job), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             resources.Job(
@@ -451,7 +538,7 @@ async def test_create_job_async(transport: str = "grpc_asyncio"):
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
 
-        assert args[0] == request
+        assert args[0] == services.CreateJobRequest()
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, resources.Job)
@@ -469,6 +556,11 @@ async def test_create_job_async(transport: str = "grpc_asyncio"):
     assert response.failure_reason == "failure_reason_value"
 
 
+@pytest.mark.asyncio
+async def test_create_job_async_from_dict():
+    await test_create_job_async(request_type=dict)
+
+
 def test_create_job_field_headers():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
 
@@ -478,7 +570,7 @@ def test_create_job_field_headers():
     request.parent = "parent/value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.create_job), "__call__") as call:
+    with mock.patch.object(type(client.transport.create_job), "__call__") as call:
         call.return_value = resources.Job()
 
         client.create_job(request)
@@ -505,9 +597,7 @@ async def test_create_job_field_headers_async():
     request.parent = "parent/value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.create_job), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.create_job), "__call__") as call:
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job())
 
         await client.create_job(request)
@@ -526,7 +616,7 @@ def test_create_job_flattened():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.create_job), "__call__") as call:
+    with mock.patch.object(type(client.transport.create_job), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = resources.Job()
 
@@ -566,9 +656,7 @@ async def test_create_job_flattened_async():
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.create_job), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.create_job), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = resources.Job()
 
@@ -615,7 +703,7 @@ def test_list_jobs(transport: str = "grpc", request_type=services.ListJobsReques
     request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.list_jobs), "__call__") as call:
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = services.ListJobsResponse(
             next_page_token="next_page_token_value",
@@ -630,6 +718,7 @@ def test_list_jobs(transport: str = "grpc", request_type=services.ListJobsReques
         assert args[0] == services.ListJobsRequest()
 
     # Establish that the response is the type that we expect.
+
     assert isinstance(response, pagers.ListJobsPager)
 
     assert response.next_page_token == "next_page_token_value"
@@ -640,19 +729,19 @@ def test_list_jobs_from_dict():
 
 
 @pytest.mark.asyncio
-async def test_list_jobs_async(transport: str = "grpc_asyncio"):
+async def test_list_jobs_async(
+    transport: str = "grpc_asyncio", request_type=services.ListJobsRequest
+):
     client = TranscoderServiceAsyncClient(
         credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
-    request = services.ListJobsRequest()
+    request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.list_jobs), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             services.ListJobsResponse(next_page_token="next_page_token_value",)
@@ -664,7 +753,7 @@ async def test_list_jobs_async(transport: str = "grpc_asyncio"):
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
 
-        assert args[0] == request
+        assert args[0] == services.ListJobsRequest()
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListJobsAsyncPager)
@@ -672,6 +761,11 @@ async def test_list_jobs_async(transport: str = "grpc_asyncio"):
     assert response.next_page_token == "next_page_token_value"
 
 
+@pytest.mark.asyncio
+async def test_list_jobs_async_from_dict():
+    await test_list_jobs_async(request_type=dict)
+
+
 def test_list_jobs_field_headers():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
 
@@ -681,7 +775,7 @@ def test_list_jobs_field_headers():
     request.parent = "parent/value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.list_jobs), "__call__") as call:
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
         call.return_value = services.ListJobsResponse()
 
         client.list_jobs(request)
@@ -708,9 +802,7 @@ async def test_list_jobs_field_headers_async():
     request.parent = "parent/value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.list_jobs), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             services.ListJobsResponse()
         )
@@ -731,7 +823,7 @@ def test_list_jobs_flattened():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.list_jobs), "__call__") as call:
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = services.ListJobsResponse()
 
@@ -765,9 +857,7 @@ async def test_list_jobs_flattened_async():
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.list_jobs), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = services.ListJobsResponse()
 
@@ -804,7 +894,7 @@ def test_list_jobs_pager():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials,)
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.list_jobs), "__call__") as call:
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
         # Set the response to a series of pages.
         call.side_effect = (
             services.ListJobsResponse(
@@ -834,7 +924,7 @@ def test_list_jobs_pages():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials,)
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.list_jobs), "__call__") as call:
+    with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
         # Set the response to a series of pages.
         call.side_effect = (
             services.ListJobsResponse(
@@ -847,8 +937,8 @@ def test_list_jobs_pages():
             RuntimeError,
         )
         pages = list(client.list_jobs(request={}).pages)
-        for page, token in zip(pages, ["abc", "def", "ghi", ""]):
-            assert page.raw_page.next_page_token == token
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
 
 
 @pytest.mark.asyncio
@@ -857,9 +947,7 @@ async def test_list_jobs_async_pager():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.list_jobs),
-        "__call__",
-        new_callable=mock.AsyncMock,
+        type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock
     ) as call:
         # Set the response to a series of pages.
         call.side_effect = (
@@ -888,9 +976,7 @@ async def test_list_jobs_async_pages():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.list_jobs),
-        "__call__",
-        new_callable=mock.AsyncMock,
+        type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock
     ) as call:
         # Set the response to a series of pages.
         call.side_effect = (
@@ -904,10 +990,10 @@ async def test_list_jobs_async_pages():
             RuntimeError,
         )
         pages = []
-        async for page in (await client.list_jobs(request={})).pages:
-            pages.append(page)
-        for page, token in zip(pages, ["abc", "def", "ghi", ""]):
-            assert page.raw_page.next_page_token == token
+        async for page_ in (await client.list_jobs(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
 
 
 def test_get_job(transport: str = "grpc", request_type=services.GetJobRequest):
@@ -920,7 +1006,7 @@ def test_get_job(transport: str = "grpc", request_type=services.GetJobRequest):
     request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.get_job), "__call__") as call:
+    with mock.patch.object(type(client.transport.get_job), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = resources.Job(
             name="name_value",
@@ -941,6 +1027,7 @@ def test_get_job(transport: str = "grpc", request_type=services.GetJobRequest):
         assert args[0] == services.GetJobRequest()
 
     # Establish that the response is the type that we expect.
+
     assert isinstance(response, resources.Job)
 
     assert response.name == "name_value"
@@ -961,17 +1048,19 @@ def test_get_job_from_dict():
 
 
 @pytest.mark.asyncio
-async def test_get_job_async(transport: str = "grpc_asyncio"):
+async def test_get_job_async(
+    transport: str = "grpc_asyncio", request_type=services.GetJobRequest
+):
     client = TranscoderServiceAsyncClient(
         credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
-    request = services.GetJobRequest()
+    request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._client._transport.get_job), "__call__") as call:
+    with mock.patch.object(type(client.transport.get_job), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             resources.Job(
@@ -990,7 +1079,7 @@ async def test_get_job_async(transport: str = "grpc_asyncio"):
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
 
-        assert args[0] == request
+        assert args[0] == services.GetJobRequest()
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, resources.Job)
@@ -1008,6 +1097,11 @@ async def test_get_job_async(transport: str = "grpc_asyncio"):
     assert response.failure_reason == "failure_reason_value"
 
 
+@pytest.mark.asyncio
+async def test_get_job_async_from_dict():
+    await test_get_job_async(request_type=dict)
+
+
 def test_get_job_field_headers():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
 
@@ -1017,7 +1111,7 @@ def test_get_job_field_headers():
     request.name = "name/value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.get_job), "__call__") as call:
+    with mock.patch.object(type(client.transport.get_job), "__call__") as call:
         call.return_value = resources.Job()
 
         client.get_job(request)
@@ -1044,7 +1138,7 @@ async def test_get_job_field_headers_async():
     request.name = "name/value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._client._transport.get_job), "__call__") as call:
+    with mock.patch.object(type(client.transport.get_job), "__call__") as call:
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job())
 
         await client.get_job(request)
@@ -1063,7 +1157,7 @@ def test_get_job_flattened():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.get_job), "__call__") as call:
+    with mock.patch.object(type(client.transport.get_job), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = resources.Job()
 
@@ -1097,7 +1191,7 @@ async def test_get_job_flattened_async():
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._client._transport.get_job), "__call__") as call:
+    with mock.patch.object(type(client.transport.get_job), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = resources.Job()
 
@@ -1138,7 +1232,7 @@ def test_delete_job(transport: str = "grpc", request_type=services.DeleteJobRequ
     request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.delete_job), "__call__") as call:
+    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = None
 
@@ -1159,19 +1253,19 @@ def test_delete_job_from_dict():
 
 
 @pytest.mark.asyncio
-async def test_delete_job_async(transport: str = "grpc_asyncio"):
+async def test_delete_job_async(
+    transport: str = "grpc_asyncio", request_type=services.DeleteJobRequest
+):
     client = TranscoderServiceAsyncClient(
         credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
-    request = services.DeleteJobRequest()
+    request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.delete_job), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
 
@@ -1181,12 +1275,17 @@ async def test_delete_job_async(transport: str = "grpc_asyncio"):
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
 
-        assert args[0] == request
+        assert args[0] == services.DeleteJobRequest()
 
     # Establish that the response is the type that we expect.
     assert response is None
 
 
+@pytest.mark.asyncio
+async def test_delete_job_async_from_dict():
+    await test_delete_job_async(request_type=dict)
+
+
 def test_delete_job_field_headers():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
 
@@ -1196,7 +1295,7 @@ def test_delete_job_field_headers():
     request.name = "name/value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.delete_job), "__call__") as call:
+    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
         call.return_value = None
 
         client.delete_job(request)
@@ -1223,9 +1322,7 @@ async def test_delete_job_field_headers_async():
     request.name = "name/value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.delete_job), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
 
         await client.delete_job(request)
@@ -1244,7 +1341,7 @@ def test_delete_job_flattened():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.delete_job), "__call__") as call:
+    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = None
 
@@ -1278,9 +1375,7 @@ async def test_delete_job_flattened_async():
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.delete_job), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = None
 
@@ -1324,7 +1419,7 @@ def test_create_job_template(
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.create_job_template), "__call__"
+        type(client.transport.create_job_template), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = resources.JobTemplate(name="name_value",)
@@ -1338,6 +1433,7 @@ def test_create_job_template(
         assert args[0] == services.CreateJobTemplateRequest()
 
     # Establish that the response is the type that we expect.
+
     assert isinstance(response, resources.JobTemplate)
 
     assert response.name == "name_value"
@@ -1348,18 +1444,20 @@ def test_create_job_template_from_dict():
 
 
 @pytest.mark.asyncio
-async def test_create_job_template_async(transport: str = "grpc_asyncio"):
+async def test_create_job_template_async(
+    transport: str = "grpc_asyncio", request_type=services.CreateJobTemplateRequest
+):
     client = TranscoderServiceAsyncClient(
         credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
-    request = services.CreateJobTemplateRequest()
+    request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.create_job_template), "__call__"
+        type(client.transport.create_job_template), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1372,7 +1470,7 @@ async def test_create_job_template_async(transport: str = "grpc_asyncio"):
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
 
-        assert args[0] == request
+        assert args[0] == services.CreateJobTemplateRequest()
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, resources.JobTemplate)
@@ -1380,6 +1478,11 @@ async def test_create_job_template_async(transport: str = "grpc_asyncio"):
     assert response.name == "name_value"
 
 
+@pytest.mark.asyncio
+async def test_create_job_template_async_from_dict():
+    await test_create_job_template_async(request_type=dict)
+
+
 def test_create_job_template_field_headers():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
 
@@ -1390,7 +1493,7 @@ def test_create_job_template_field_headers():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.create_job_template), "__call__"
+        type(client.transport.create_job_template), "__call__"
     ) as call:
         call.return_value = resources.JobTemplate()
 
@@ -1419,7 +1522,7 @@ async def test_create_job_template_field_headers_async():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.create_job_template), "__call__"
+        type(client.transport.create_job_template), "__call__"
     ) as call:
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             resources.JobTemplate()
@@ -1442,7 +1545,7 @@ def test_create_job_template_flattened():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.create_job_template), "__call__"
+        type(client.transport.create_job_template), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = resources.JobTemplate()
@@ -1489,7 +1592,7 @@ async def test_create_job_template_flattened_async():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.create_job_template), "__call__"
+        type(client.transport.create_job_template), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = resources.JobTemplate()
@@ -1547,7 +1650,7 @@ def test_list_job_templates(
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.list_job_templates), "__call__"
+        type(client.transport.list_job_templates), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = services.ListJobTemplatesResponse(
@@ -1563,6 +1666,7 @@ def test_list_job_templates(
         assert args[0] == services.ListJobTemplatesRequest()
 
     # Establish that the response is the type that we expect.
+
     assert isinstance(response, pagers.ListJobTemplatesPager)
 
     assert response.next_page_token == "next_page_token_value"
@@ -1573,18 +1677,20 @@ def test_list_job_templates_from_dict():
 
 
 @pytest.mark.asyncio
-async def test_list_job_templates_async(transport: str = "grpc_asyncio"):
+async def test_list_job_templates_async(
+    transport: str = "grpc_asyncio", request_type=services.ListJobTemplatesRequest
+):
     client = TranscoderServiceAsyncClient(
         credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
-    request = services.ListJobTemplatesRequest()
+    request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.list_job_templates), "__call__"
+        type(client.transport.list_job_templates), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1597,7 +1703,7 @@ async def test_list_job_templates_async(transport: str = "grpc_asyncio"):
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
 
-        assert args[0] == request
+        assert args[0] == services.ListJobTemplatesRequest()
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, pagers.ListJobTemplatesAsyncPager)
@@ -1605,6 +1711,11 @@ async def test_list_job_templates_async(transport: str = "grpc_asyncio"):
     assert response.next_page_token == "next_page_token_value"
 
 
+@pytest.mark.asyncio
+async def test_list_job_templates_async_from_dict():
+    await test_list_job_templates_async(request_type=dict)
+
+
 def test_list_job_templates_field_headers():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
 
@@ -1615,7 +1726,7 @@ def test_list_job_templates_field_headers():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.list_job_templates), "__call__"
+        type(client.transport.list_job_templates), "__call__"
     ) as call:
         call.return_value = services.ListJobTemplatesResponse()
 
@@ -1644,7 +1755,7 @@ async def test_list_job_templates_field_headers_async():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.list_job_templates), "__call__"
+        type(client.transport.list_job_templates), "__call__"
     ) as call:
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             services.ListJobTemplatesResponse()
@@ -1667,7 +1778,7 @@ def test_list_job_templates_flattened():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.list_job_templates), "__call__"
+        type(client.transport.list_job_templates), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = services.ListJobTemplatesResponse()
@@ -1703,7 +1814,7 @@ async def test_list_job_templates_flattened_async():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.list_job_templates), "__call__"
+        type(client.transport.list_job_templates), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = services.ListJobTemplatesResponse()
@@ -1742,7 +1853,7 @@ def test_list_job_templates_pager():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.list_job_templates), "__call__"
+        type(client.transport.list_job_templates), "__call__"
     ) as call:
         # Set the response to a series of pages.
         call.side_effect = (
@@ -1782,7 +1893,7 @@ def test_list_job_templates_pages():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.list_job_templates), "__call__"
+        type(client.transport.list_job_templates), "__call__"
     ) as call:
         # Set the response to a series of pages.
         call.side_effect = (
@@ -1804,8 +1915,8 @@ def test_list_job_templates_pages():
             RuntimeError,
         )
         pages = list(client.list_job_templates(request={}).pages)
-        for page, token in zip(pages, ["abc", "def", "ghi", ""]):
-            assert page.raw_page.next_page_token == token
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
 
 
 @pytest.mark.asyncio
@@ -1814,7 +1925,7 @@ async def test_list_job_templates_async_pager():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.list_job_templates),
+        type(client.transport.list_job_templates),
         "__call__",
         new_callable=mock.AsyncMock,
     ) as call:
@@ -1853,7 +1964,7 @@ async def test_list_job_templates_async_pages():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.list_job_templates),
+        type(client.transport.list_job_templates),
         "__call__",
         new_callable=mock.AsyncMock,
     ) as call:
@@ -1877,10 +1988,10 @@ async def test_list_job_templates_async_pages():
             RuntimeError,
         )
         pages = []
-        async for page in (await client.list_job_templates(request={})).pages:
-            pages.append(page)
-        for page, token in zip(pages, ["abc", "def", "ghi", ""]):
-            assert page.raw_page.next_page_token == token
+        async for page_ in (await client.list_job_templates(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+            assert page_.raw_page.next_page_token == token
 
 
 def test_get_job_template(
@@ -1895,9 +2006,7 @@ def test_get_job_template(
     request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._transport.get_job_template), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = resources.JobTemplate(name="name_value",)
 
@@ -1910,6 +2019,7 @@ def test_get_job_template(
         assert args[0] == services.GetJobTemplateRequest()
 
     # Establish that the response is the type that we expect.
+
     assert isinstance(response, resources.JobTemplate)
 
     assert response.name == "name_value"
@@ -1920,19 +2030,19 @@ def test_get_job_template_from_dict():
 
 
 @pytest.mark.asyncio
-async def test_get_job_template_async(transport: str = "grpc_asyncio"):
+async def test_get_job_template_async(
+    transport: str = "grpc_asyncio", request_type=services.GetJobTemplateRequest
+):
     client = TranscoderServiceAsyncClient(
         credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
-    request = services.GetJobTemplateRequest()
+    request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.get_job_template), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             resources.JobTemplate(name="name_value",)
@@ -1944,7 +2054,7 @@ async def test_get_job_template_async(transport: str = "grpc_asyncio"):
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
 
-        assert args[0] == request
+        assert args[0] == services.GetJobTemplateRequest()
 
     # Establish that the response is the type that we expect.
     assert isinstance(response, resources.JobTemplate)
@@ -1952,6 +2062,11 @@ async def test_get_job_template_async(transport: str = "grpc_asyncio"):
     assert response.name == "name_value"
 
 
+@pytest.mark.asyncio
+async def test_get_job_template_async_from_dict():
+    await test_get_job_template_async(request_type=dict)
+
+
 def test_get_job_template_field_headers():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
 
@@ -1961,9 +2076,7 @@ def test_get_job_template_field_headers():
     request.name = "name/value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._transport.get_job_template), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
         call.return_value = resources.JobTemplate()
 
         client.get_job_template(request)
@@ -1990,9 +2103,7 @@ async def test_get_job_template_field_headers_async():
     request.name = "name/value"
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.get_job_template), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             resources.JobTemplate()
         )
@@ -2013,9 +2124,7 @@ def test_get_job_template_flattened():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._transport.get_job_template), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = resources.JobTemplate()
 
@@ -2049,9 +2158,7 @@ async def test_get_job_template_flattened_async():
     )
 
     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.get_job_template), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = resources.JobTemplate()
 
@@ -2097,7 +2204,7 @@ def test_delete_job_template(
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.delete_job_template), "__call__"
+        type(client.transport.delete_job_template), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = None
@@ -2119,18 +2226,20 @@ def test_delete_job_template_from_dict():
 
 
 @pytest.mark.asyncio
-async def test_delete_job_template_async(transport: str = "grpc_asyncio"):
+async def test_delete_job_template_async(
+    transport: str = "grpc_asyncio", request_type=services.DeleteJobTemplateRequest
+):
     client = TranscoderServiceAsyncClient(
         credentials=credentials.AnonymousCredentials(), transport=transport,
     )
 
     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
-    request = services.DeleteJobTemplateRequest()
+    request = request_type()
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.delete_job_template), "__call__"
+        type(client.transport.delete_job_template), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -2141,12 +2250,17 @@ async def test_delete_job_template_async(transport: str = "grpc_asyncio"):
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]
 
-        assert args[0] == request
+        assert args[0] == services.DeleteJobTemplateRequest()
 
     # Establish that the response is the type that we expect.
     assert response is None
 
 
+@pytest.mark.asyncio
+async def test_delete_job_template_async_from_dict():
+    await test_delete_job_template_async(request_type=dict)
+
+
 def test_delete_job_template_field_headers():
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
 
@@ -2157,7 +2271,7 @@ def test_delete_job_template_field_headers():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.delete_job_template), "__call__"
+        type(client.transport.delete_job_template), "__call__"
     ) as call:
         call.return_value = None
 
@@ -2186,7 +2300,7 @@ async def test_delete_job_template_field_headers_async():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.delete_job_template), "__call__"
+        type(client.transport.delete_job_template), "__call__"
     ) as call:
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
 
@@ -2207,7 +2321,7 @@ def test_delete_job_template_flattened():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._transport.delete_job_template), "__call__"
+        type(client.transport.delete_job_template), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = None
@@ -2243,7 +2357,7 @@ async def test_delete_job_template_flattened_async():
 
     # Mock the actual call within the gRPC stub, and fake the request.
     with mock.patch.object(
-        type(client._client._transport.delete_job_template), "__call__"
+        type(client.transport.delete_job_template), "__call__"
     ) as call:
         # Designate an appropriate return value for the call.
         call.return_value = None
@@ -2311,7 +2425,7 @@ def test_transport_instance():
         credentials=credentials.AnonymousCredentials(),
     )
     client = TranscoderServiceClient(transport=transport)
-    assert client._transport is transport
+    assert client.transport is transport
 
 
 def test_transport_get_channel():
@@ -2329,10 +2443,25 @@ def test_transport_get_channel():
     assert channel
 
 
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.TranscoderServiceGrpcTransport,
+        transports.TranscoderServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_transport_adc(transport_class):
+    # Test default credentials are used if not provided.
+    with mock.patch.object(auth, "default") as adc:
+        adc.return_value = (credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+
 def test_transport_grpc_default():
     # A client should use the gRPC transport by default.
     client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
-    assert isinstance(client._transport, transports.TranscoderServiceGrpcTransport,)
+    assert isinstance(client.transport, transports.TranscoderServiceGrpcTransport,)
 
 
 def test_transcoder_service_base_transport_error():
@@ -2390,6 +2519,17 @@ def test_transcoder_service_base_transport_with_credentials_file():
         )
 
 
+def test_transcoder_service_base_transport_with_adc():
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(auth, "default") as adc, mock.patch(
+        "google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        adc.return_value = (credentials.AnonymousCredentials(), None)
+        transport = transports.TranscoderServiceTransport()
+        adc.assert_called_once()
+
+
 def test_transcoder_service_auth_adc():
     # If no credentials are provided, we should use ADC credentials.
     with mock.patch.object(auth, "default") as adc:
@@ -2422,7 +2562,7 @@ def test_transcoder_service_host_no_port():
             api_endpoint="transcoder.googleapis.com"
         ),
     )
-    assert client._transport._host == "transcoder.googleapis.com:443"
+    assert client.transport._host == "transcoder.googleapis.com:443"
 
 
 def test_transcoder_service_host_with_port():
@@ -2432,185 +2572,119 @@ def test_transcoder_service_host_with_port():
             api_endpoint="transcoder.googleapis.com:8000"
         ),
     )
-    assert client._transport._host == "transcoder.googleapis.com:8000"
+    assert client.transport._host == "transcoder.googleapis.com:8000"
 
 
 def test_transcoder_service_grpc_transport_channel():
     channel = grpc.insecure_channel("http://localhost/")
 
-    # Check that if channel is provided, mtls endpoint and client_cert_source
-    # won't be used.
-    callback = mock.MagicMock()
+    # Check that channel is used if provided.
     transport = transports.TranscoderServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-        api_mtls_endpoint="mtls.squid.clam.whelk",
-        client_cert_source=callback,
+        host="squid.clam.whelk", channel=channel,
     )
     assert transport.grpc_channel == channel
     assert transport._host == "squid.clam.whelk:443"
-    assert not callback.called
+    assert transport._ssl_channel_credentials == None
 
 
 def test_transcoder_service_grpc_asyncio_transport_channel():
     channel = aio.insecure_channel("http://localhost/")
 
-    # Check that if channel is provided, mtls endpoint and client_cert_source
-    # won't be used.
-    callback = mock.MagicMock()
+    # Check that channel is used if provided.
     transport = transports.TranscoderServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-        api_mtls_endpoint="mtls.squid.clam.whelk",
-        client_cert_source=callback,
+        host="squid.clam.whelk", channel=channel,
     )
     assert transport.grpc_channel == channel
     assert transport._host == "squid.clam.whelk:443"
-    assert not callback.called
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_transcoder_service_grpc_transport_channel_mtls_with_client_cert_source(
-    grpc_create_channel, grpc_ssl_channel_cred
-):
-    # Check that if channel is None, but api_mtls_endpoint and client_cert_source
-    # are provided, then a mTLS channel will be created.
-    mock_cred = mock.Mock()
-
-    mock_ssl_cred = mock.Mock()
-    grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-    mock_grpc_channel = mock.Mock()
-    grpc_create_channel.return_value = mock_grpc_channel
-
-    transport = transports.TranscoderServiceGrpcTransport(
-        host="squid.clam.whelk",
-        credentials=mock_cred,
-        api_mtls_endpoint="mtls.squid.clam.whelk",
-        client_cert_source=client_cert_source_callback,
-    )
-    grpc_ssl_channel_cred.assert_called_once_with(
-        certificate_chain=b"cert bytes", private_key=b"key bytes"
-    )
-    grpc_create_channel.assert_called_once_with(
-        "mtls.squid.clam.whelk:443",
-        credentials=mock_cred,
-        credentials_file=None,
-        scopes=("https://www.googleapis.com/auth/cloud-platform",),
-        ssl_credentials=mock_ssl_cred,
-        quota_project_id=None,
-    )
-    assert transport.grpc_channel == mock_grpc_channel
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_transcoder_service_grpc_asyncio_transport_channel_mtls_with_client_cert_source(
-    grpc_create_channel, grpc_ssl_channel_cred
-):
-    # Check that if channel is None, but api_mtls_endpoint and client_cert_source
-    # are provided, then a mTLS channel will be created.
-    mock_cred = mock.Mock()
-
-    mock_ssl_cred = mock.Mock()
-    grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
-    mock_grpc_channel = mock.Mock()
-    grpc_create_channel.return_value = mock_grpc_channel
-
-    transport = transports.TranscoderServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        credentials=mock_cred,
-        api_mtls_endpoint="mtls.squid.clam.whelk",
-        client_cert_source=client_cert_source_callback,
-    )
-    grpc_ssl_channel_cred.assert_called_once_with(
-        certificate_chain=b"cert bytes", private_key=b"key bytes"
-    )
-    grpc_create_channel.assert_called_once_with(
-        "mtls.squid.clam.whelk:443",
-        credentials=mock_cred,
-        credentials_file=None,
-        scopes=("https://www.googleapis.com/auth/cloud-platform",),
-        ssl_credentials=mock_ssl_cred,
-        quota_project_id=None,
-    )
-    assert transport.grpc_channel == mock_grpc_channel
+    assert transport._ssl_channel_credentials == None
 
 
 @pytest.mark.parametrize(
-    "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+    "transport_class",
+    [
+        transports.TranscoderServiceGrpcTransport,
+        transports.TranscoderServiceGrpcAsyncIOTransport,
+    ],
 )
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_transcoder_service_grpc_transport_channel_mtls_with_adc(
-    grpc_create_channel, api_mtls_endpoint
+def test_transcoder_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
 ):
-    # Check that if channel and client_cert_source are None, but api_mtls_endpoint
-    # is provided, then a mTLS channel will be created with SSL ADC.
-    mock_grpc_channel = mock.Mock()
-    grpc_create_channel.return_value = mock_grpc_channel
-
-    # Mock google.auth.transport.grpc.SslCredentials class.
-    mock_ssl_cred = mock.Mock()
-    with mock.patch.multiple(
-        "google.auth.transport.grpc.SslCredentials",
-        __init__=mock.Mock(return_value=None),
-        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
-    ):
-        mock_cred = mock.Mock()
-        transport = transports.TranscoderServiceGrpcTransport(
-            host="squid.clam.whelk",
-            credentials=mock_cred,
-            api_mtls_endpoint=api_mtls_endpoint,
-            client_cert_source=None,
-        )
-        grpc_create_channel.assert_called_once_with(
-            "mtls.squid.clam.whelk:443",
-            credentials=mock_cred,
-            credentials_file=None,
-            scopes=("https://www.googleapis.com/auth/cloud-platform",),
-            ssl_credentials=mock_ssl_cred,
-            quota_project_id=None,
-        )
-        assert transport.grpc_channel == mock_grpc_channel
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel", autospec=True
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = credentials.AnonymousCredentials()
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=("https://www.googleapis.com/auth/cloud-platform",),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
 
 
 @pytest.mark.parametrize(
-    "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+    "transport_class",
+    [
+        transports.TranscoderServiceGrpcTransport,
+        transports.TranscoderServiceGrpcAsyncIOTransport,
+    ],
 )
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_transcoder_service_grpc_asyncio_transport_channel_mtls_with_adc(
-    grpc_create_channel, api_mtls_endpoint
-):
-    # Check that if channel and client_cert_source are None, but api_mtls_endpoint
-    # is provided, then a mTLS channel will be created with SSL ADC.
-    mock_grpc_channel = mock.Mock()
-    grpc_create_channel.return_value = mock_grpc_channel
-
-    # Mock google.auth.transport.grpc.SslCredentials class.
+def test_transcoder_service_transport_channel_mtls_with_adc(transport_class):
     mock_ssl_cred = mock.Mock()
     with mock.patch.multiple(
         "google.auth.transport.grpc.SslCredentials",
         __init__=mock.Mock(return_value=None),
         ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
     ):
-        mock_cred = mock.Mock()
-        transport = transports.TranscoderServiceGrpcAsyncIOTransport(
-            host="squid.clam.whelk",
-            credentials=mock_cred,
-            api_mtls_endpoint=api_mtls_endpoint,
-            client_cert_source=None,
-        )
-        grpc_create_channel.assert_called_once_with(
-            "mtls.squid.clam.whelk:443",
-            credentials=mock_cred,
-            credentials_file=None,
-            scopes=("https://www.googleapis.com/auth/cloud-platform",),
-            ssl_credentials=mock_ssl_cred,
-            quota_project_id=None,
-        )
-        assert transport.grpc_channel == mock_grpc_channel
+        with mock.patch.object(
+            transport_class, "create_channel", autospec=True
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=("https://www.googleapis.com/auth/cloud-platform",),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+            )
+            assert transport.grpc_channel == mock_grpc_channel
 
 
 def test_job_path():
@@ -2639,9 +2713,9 @@ def test_parse_job_path():
 
 
 def test_job_template_path():
-    project = "squid"
-    location = "clam"
-    job_template = "whelk"
+    project = "cuttlefish"
+    location = "mussel"
+    job_template = "winkle"
 
     expected = "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(
         project=project, location=location, job_template=job_template,
@@ -2652,9 +2726,9 @@ def test_job_template_path():
 
 def test_parse_job_template_path():
     expected = {
-        "project": "octopus",
-        "location": "oyster",
-        "job_template": "nudibranch",
+        "project": "nautilus",
+        "location": "scallop",
+        "job_template": "abalone",
     }
     path = TranscoderServiceClient.job_template_path(**expected)
 
@@ -2663,6 +2737,107 @@ def test_parse_job_template_path():
     assert expected == actual
 
 
+def test_common_billing_account_path():
+    billing_account = "squid"
+
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
+    )
+    actual = TranscoderServiceClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "clam",
+    }
+    path = TranscoderServiceClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = TranscoderServiceClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "whelk"
+
+    expected = "folders/{folder}".format(folder=folder,)
+    actual = TranscoderServiceClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "octopus",
+    }
+    path = TranscoderServiceClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = TranscoderServiceClient.parse_common_folder_path(path)
+    assert expected == actual
+
+
+def test_common_organization_path():
+    organization = "oyster"
+
+    expected = "organizations/{organization}".format(organization=organization,)
+    actual = TranscoderServiceClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "nudibranch",
+    }
+    path = TranscoderServiceClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = TranscoderServiceClient.parse_common_organization_path(path)
+    assert expected == actual
+
+
+def test_common_project_path():
+    project = "cuttlefish"
+
+    expected = "projects/{project}".format(project=project,)
+    actual = TranscoderServiceClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "mussel",
+    }
+    path = TranscoderServiceClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = TranscoderServiceClient.parse_common_project_path(path)
+    assert expected == actual
+
+
+def test_common_location_path():
+    project = "winkle"
+    location = "nautilus"
+
+    expected = "projects/{project}/locations/{location}".format(
+        project=project, location=location,
+    )
+    actual = TranscoderServiceClient.common_location_path(project, location)
+    assert expected == actual
+
+
+def test_parse_common_location_path():
+    expected = {
+        "project": "scallop",
+        "location": "abalone",
+    }
+    path = TranscoderServiceClient.common_location_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = TranscoderServiceClient.parse_common_location_path(path)
+    assert expected == actual
+
+
 def test_client_withDEFAULT_CLIENT_INFO():
     client_info = gapic_v1.client_info.ClientInfo()