GoogleCloudPlatform / cloud-sql-python-connector

A Python library for connecting securely to your Cloud SQL instances.
Apache License 2.0

system.test_connector_object: test_multiple_connectors failed #726

Closed. flaky-bot[bot] closed this issue 1 year ago.

flaky-bot[bot] commented 1 year ago

This test failed!

To configure my behavior, see the Flaky Bot documentation.

If I'm commenting on this issue too often, add the flakybot: quiet label and I will stop commenting.


commit: 953b8a52d0776fccf8b74628abd7a26ba253a0af
buildURL: https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/actions/runs/4941100314
status: failed
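
For context, the failing test builds its engines with `init_connection_engine`. Its body is not shown in this log, but the sketch below approximates it, based on the `getconn` creator visible later in the traceback (tests/system/test_connector_object.py) and the connector's documented SQLAlchemy usage; treat it as an illustration rather than the test's exact source.

```python
# Minimal sketch of what init_connection_engine likely does (assumption: its
# actual body is not in this log). The getconn creator mirrors the one shown
# in the traceback below.
import os

import pymysql
import sqlalchemy

from google.cloud.sql.connector import Connector


def init_connection_engine(custom_connector: Connector) -> sqlalchemy.engine.Engine:
    def getconn() -> pymysql.connections.Connection:
        # The connector opens a TLS connection to the Cloud SQL instance and
        # returns a ready-to-use DB-API connection.
        return custom_connector.connect(
            os.environ["MYSQL_CONNECTION_NAME"],
            "pymysql",
            user=os.environ["MYSQL_USER"],
            password=os.environ["MYSQL_PASS"],
            db=os.environ["MYSQL_DB"],
        )

    # The URL carries no host/port; every pooled connection is produced by
    # the creator callable instead.
    return sqlalchemy.create_engine("mysql+pymysql://", creator=getconn)
```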

Test output
def test_multiple_connectors() -> None:
        """Test that same Cloud SQL instance can connect with two Connector objects."""
        first_connector = Connector()
        second_connector = Connector()
        try:
            pool = init_connection_engine(first_connector)
            pool2 = init_connection_engine(second_connector)

>           with pool.connect() as conn:

tests/system/test_connector_object.py:79: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = Engine(mysql+pymysql://)

    def connect(self) -> Connection:
        """Return a new :class:`_engine.Connection` object.

        The :class:`_engine.Connection` acts as a Python context manager, so
        the typical use of this method looks like::

            with engine.connect() as connection:
                connection.execute(text("insert into table values ('foo')"))
                connection.commit()

        Where above, after the block is completed, the connection is "closed"
        and its underlying DBAPI resources are returned to the connection pool.
        This also has the effect of rolling back any transaction that
        was explicitly begun or was begun via autobegin, and will
        emit the :meth:`_events.ConnectionEvents.rollback` event if one was
        started and is still in progress.

        .. seealso::

            :meth:`_engine.Engine.begin`

        """

>       return self._connection_cls(self)

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/engine/base.py:3264: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
engine = Engine(mysql+pymysql://), connection = None, _has_events = None
_allow_revalidate = True, _allow_autobegin = True

    def __init__(
        self,
        engine: Engine,
        connection: Optional[PoolProxiedConnection] = None,
        _has_events: Optional[bool] = None,
        _allow_revalidate: bool = True,
        _allow_autobegin: bool = True,
    ):
        """Construct a new Connection."""
        self.engine = engine
        self.dialect = dialect = engine.dialect

        if connection is None:
            try:
>               self._dbapi_connection = engine.raw_connection()

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/engine/base.py:145: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = Engine(mysql+pymysql://)

    def raw_connection(self) -> PoolProxiedConnection:
        """Return a "raw" DBAPI connection from the connection pool.

        The returned object is a proxied version of the DBAPI
        connection object used by the underlying driver in use.
        The object will have all the same behavior as the real DBAPI
        connection, except that its ``close()`` method will result in the
        connection being returned to the pool, rather than being closed
        for real.

        This method provides direct DBAPI connection access for
        special situations when the API provided by
        :class:`_engine.Connection`
        is not needed.   When a :class:`_engine.Connection` object is already
        present, the DBAPI connection is available using
        the :attr:`_engine.Connection.connection` accessor.

        .. seealso::

            :ref:`dbapi_connections`

        """
>       return self.pool.connect()

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/engine/base.py:3288: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 

    def connect(self) -> PoolProxiedConnection:
        """Return a DBAPI connection from the pool.

        The connection is instrumented such that when its
        ``close()`` method is called, the connection will be returned to
        the pool.

        """
>       return _ConnectionFairy._checkout(self)

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/pool/base.py:452: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

cls = 
pool = 
threadconns = None, fairy = None

    @classmethod
    def _checkout(
        cls,
        pool: Pool,
        threadconns: Optional[threading.local] = None,
        fairy: Optional[_ConnectionFairy] = None,
    ) -> _ConnectionFairy:

        if not fairy:
>           fairy = _ConnectionRecord.checkout(pool)

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/pool/base.py:1268: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

cls = 
pool = 

    @classmethod
    def checkout(cls, pool: Pool) -> _ConnectionFairy:
        if TYPE_CHECKING:
            rec = cast(_ConnectionRecord, pool._do_get())
        else:
>           rec = pool._do_get()

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/pool/base.py:716: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 

    def _do_get(self) -> ConnectionPoolEntry:
        use_overflow = self._max_overflow > -1

        wait = use_overflow and self._overflow >= self._max_overflow
        try:
            return self._pool.get(wait, self._timeout)
        except sqla_queue.Empty:
            # don't do things inside of "except Empty", because when we say
            # we timed out or can't connect and raise, Python 3 tells
            # people the real error is queue.Empty which it isn't.
            pass
        if use_overflow and self._overflow >= self._max_overflow:
            if not wait:
                return self._do_get()
            else:
                raise exc.TimeoutError(
                    "QueuePool limit of size %d overflow %d reached, "
                    "connection timed out, timeout %0.2f"
                    % (self.size(), self.overflow(), self._timeout),
                    code="3o7r",
                )

        if self._inc_overflow():
            try:
                return self._create_connection()
            except:
>               with util.safe_reraise():

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/pool/impl.py:168: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
type_ = None, value = None, traceback = None

    def __exit__(
        self,
        type_: Optional[Type[BaseException]],
        value: Optional[BaseException],
        traceback: Optional[types.TracebackType],
    ) -> NoReturn:
        assert self._exc_info is not None
        # see #2703 for notes
        if type_ is None:
            exc_type, exc_value, exc_tb = self._exc_info
            assert exc_value is not None
            self._exc_info = None  # remove potential circular references
>           raise exc_value.with_traceback(exc_tb)

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/util/langhelpers.py:147: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 

    def _do_get(self) -> ConnectionPoolEntry:
        use_overflow = self._max_overflow > -1

        wait = use_overflow and self._overflow >= self._max_overflow
        try:
            return self._pool.get(wait, self._timeout)
        except sqla_queue.Empty:
            # don't do things inside of "except Empty", because when we say
            # we timed out or can't connect and raise, Python 3 tells
            # people the real error is queue.Empty which it isn't.
            pass
        if use_overflow and self._overflow >= self._max_overflow:
            if not wait:
                return self._do_get()
            else:
                raise exc.TimeoutError(
                    "QueuePool limit of size %d overflow %d reached, "
                    "connection timed out, timeout %0.2f"
                    % (self.size(), self.overflow(), self._timeout),
                    code="3o7r",
                )

        if self._inc_overflow():
            try:
>               return self._create_connection()

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/pool/impl.py:166: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 

    def _create_connection(self) -> ConnectionPoolEntry:
        """Called by subclasses to create a new ConnectionRecord."""

>       return _ConnectionRecord(self)

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/pool/base.py:393: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
pool = , connect = True

    def __init__(self, pool: Pool, connect: bool = True):
        self.fresh = False
        self.fairy_ref = None
        self.starttime = 0
        self.dbapi_connection = None

        self.__pool = pool
        if connect:
>           self.__connect()

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/pool/base.py:678: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 

    def __connect(self) -> None:
        pool = self.__pool

        # ensure any existing connection is removed, so that if
        # creator fails, this attribute stays None
        self.dbapi_connection = None
        try:
            self.starttime = time.time()
            self.dbapi_connection = connection = pool._invoke_creator(self)
            pool.logger.debug("Created new connection %r", connection)
            self.fresh = True
        except BaseException as e:
>           with util.safe_reraise():

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/pool/base.py:902: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
type_ = None, value = None, traceback = None

    def __exit__(
        self,
        type_: Optional[Type[BaseException]],
        value: Optional[BaseException],
        traceback: Optional[types.TracebackType],
    ) -> NoReturn:
        assert self._exc_info is not None
        # see #2703 for notes
        if type_ is None:
            exc_type, exc_value, exc_tb = self._exc_info
            assert exc_value is not None
            self._exc_info = None  # remove potential circular references
>           raise exc_value.with_traceback(exc_tb)

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/util/langhelpers.py:147: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 

    def __connect(self) -> None:
        pool = self.__pool

        # ensure any existing connection is removed, so that if
        # creator fails, this attribute stays None
        self.dbapi_connection = None
        try:
            self.starttime = time.time()
>           self.dbapi_connection = connection = pool._invoke_creator(self)

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/pool/base.py:898: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

rec = 

>   return lambda rec: creator_fn()

.nox/system-3-11/lib/python3.11/site-packages/sqlalchemy/pool/base.py:365: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

    def getconn() -> pymysql.connections.Connection:
>       conn = custom_connector.connect(
            os.environ["MYSQL_CONNECTION_NAME"],
            "pymysql",
            user=os.environ["MYSQL_USER"],
            password=os.environ["MYSQL_PASS"],
            db=os.environ["MYSQL_DB"],
        )

tests/system/test_connector_object.py:36: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
instance_connection_string = 'cloud-sql-connector-testing:us-central1:mysql-proxy-testing'
driver = 'pymysql'
kwargs = {'db': 'proxy_testing', 'password': 'nbfJwk9XENJauafSCBFSe2Ys', 'user': 'mysql-proxy-testing'}
connect_task = 

    def connect(
        self, instance_connection_string: str, driver: str, **kwargs: Any
    ) -> Any:
        """Prepares and returns a database connection object and starts a
        background task to refresh the certificates and metadata.

        :type instance_connection_string: str
        :param instance_connection_string:
            A string containing the GCP project name, region name, and instance
            name separated by colons.

            Example: example-proj:example-region-us6:example-instance

        :type driver: str
        :param: driver:
            A string representing the driver to connect with. Supported drivers are
            pymysql, pg8000, and pytds.

        :param kwargs:
            Pass in any driver-specific arguments needed to connect to the Cloud
            SQL instance.

        :rtype: Connection
        :returns:
            A DB-API connection to the specified Cloud SQL instance.
        """
        try:
            # check if event loop is running in current thread
            if self._loop == asyncio.get_running_loop():
                raise ConnectorLoopError(
                    "Connector event loop is running in current thread!"
                    "Event loop must be attached to a different thread to prevent blocking code!"
                )
        # asyncio.get_running_loop will throw RunTimeError if no running loop is present
        except RuntimeError:
            pass

        # if event loop is not in current thread, proceed with connection
        connect_task = asyncio.run_coroutine_threadsafe(
            self.connect_async(instance_connection_string, driver, **kwargs), self._loop
        )
>       return connect_task.result()

google/cloud/sql/connector/connector.py:154: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = None, timeout = None

    def result(self, timeout=None):
        """Return the result of the call that the future represents.

        Args:
            timeout: The number of seconds to wait for the result if the future
                isn't done. If None, then there is no limit on the wait time.

        Returns:
            The result of the call that the future represents.

        Raises:
            CancelledError: If the future was cancelled.
            TimeoutError: If the future didn't finish executing before the given
                timeout.
            Exception: If the call raised then that exception will be raised.
        """
        try:
            with self._condition:
                if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
                    raise CancelledError()
                elif self._state == FINISHED:
                    return self.__get_result()

                self._condition.wait(timeout)

                if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]:
                    raise CancelledError()
                elif self._state == FINISHED:
>                   return self.__get_result()

/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/concurrent/futures/_base.py:456: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = None

    def __get_result(self):
        if self._exception:
            try:
>               raise self._exception

/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/concurrent/futures/_base.py:401: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
instance_connection_string = 'cloud-sql-connector-testing:us-central1:mysql-proxy-testing'
driver = 'pymysql'
kwargs = {'db': 'proxy_testing', 'password': 'nbfJwk9XENJauafSCBFSe2Ys', 'user': 'mysql-proxy-testing'}

    async def connect_async(
        self, instance_connection_string: str, driver: str, **kwargs: Any
    ) -> Any:
        """Prepares and returns a database connection object and starts a
        background task to refresh the certificates and metadata.

        :type instance_connection_string: str
        :param instance_connection_string:
            A string containing the GCP project name, region name, and instance
            name separated by colons.

            Example: example-proj:example-region-us6:example-instance

        :type driver: str
        :param: driver:
            A string representing the driver to connect with. Supported drivers are
            pymysql, pg8000, asyncpg, and pytds.

        :param kwargs:
            Pass in any driver-specific arguments needed to connect to the Cloud
            SQL instance.

        :rtype: Connection
        :returns:
            A DB-API connection to the specified Cloud SQL instance.
        """
        # Create an Instance object from the connection string.
        # The Instance should verify arguments.
        #
        # Use the Instance to establish an SSL Connection.
        #
        # Return a DBAPI connection
        enable_iam_auth = kwargs.pop("enable_iam_auth", self._enable_iam_auth)
        if instance_connection_string in self._instances:
            instance = self._instances[instance_connection_string]
            if enable_iam_auth != instance._enable_iam_auth:
                raise ValueError(
                    f"connect() called with `enable_iam_auth={enable_iam_auth}`, "
                    f"but previously used enable_iam_auth={instance._enable_iam_auth}`. "
                    "If you require both for your use case, please use a new "
                    "connector.Connector object."
                )
        else:
>           instance = Instance(
                instance_connection_string,
                driver,
                self._keys,
                self._loop,
                self._credentials,
                enable_iam_auth,
                self._quota_project,
                self._sqladmin_api_endpoint,
            )

google/cloud/sql/connector/connector.py:199: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
instance_connection_string = 'cloud-sql-connector-testing:us-central1:mysql-proxy-testing'
driver_name = 'pymysql'
keys = 
loop = <_UnixSelectorEventLoop running=True closed=False debug=False>
credentials = None, enable_iam_auth = False, quota_project = None
sqladmin_api_endpoint = 'https://sqladmin.googleapis.com'

    def __init__(
        self,
        instance_connection_string: str,
        driver_name: str,
        keys: asyncio.Future,
        loop: asyncio.AbstractEventLoop,
        credentials: Optional[Credentials] = None,
        enable_iam_auth: bool = False,
        quota_project: str = None,
        sqladmin_api_endpoint: str = "https://sqladmin.googleapis.com",
    ) -> None:
        # Validate connection string
        connection_string_split = instance_connection_string.split(":")

        if len(connection_string_split) == 3:
            self._instance_connection_string = instance_connection_string
            self._project = connection_string_split[0]
            self._region = connection_string_split[1]
            self._instance = connection_string_split[2]
        else:
            raise ValueError(
                "Arg `instance_connection_string` must have "
                "format: PROJECT:REGION:INSTANCE, "
                f"got {instance_connection_string}."
            )

        self._enable_iam_auth = enable_iam_auth

        self._user_agent_string = f"{APPLICATION_NAME}/{version}+{driver_name}"
        self._quota_project = quota_project
        self._sqladmin_api_endpoint = sqladmin_api_endpoint
        self._loop = loop
        self._keys = keys
        # validate credentials type
        if not isinstance(credentials, Credentials) and credentials is not None:
            raise CredentialsTypeError(
                "Arg credentials must be type 'google.auth.credentials.Credentials' "
                "or None (to use Application Default Credentials)"
            )
>       self._credentials = _auth_init(credentials)

google/cloud/sql/connector/instance.py:250: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

credentials = None

    def _auth_init(credentials: Optional[Credentials]) -> Credentials:
        """Creates google.auth credentials object with scopes required to make
        calls to the Cloud SQL Admin APIs.

        :type credentials: google.auth.credentials.Credentials
        :param credentials
            Credentials object used to authenticate connections to Cloud SQL server.
            If not specified, Application Default Credentials are used.
        """
        scopes = ["https://www.googleapis.com/auth/sqlservice.admin"]
        # if Credentials object is passed in, use for authentication
        if isinstance(credentials, Credentials):
            credentials = with_scopes_if_required(credentials, scopes=scopes)
        # otherwise use application default credentials
        else:
>           credentials, _ = default(scopes=scopes)

google/cloud/sql/connector/utils.py:124: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

scopes = ['https://www.googleapis.com/auth/sqlservice.admin']
request = 
quota_project_id = None, default_scopes = None

    def default(scopes=None, request=None, quota_project_id=None, default_scopes=None):
        """Gets the default credentials for the current environment.

        `Application Default Credentials`_ provides an easy way to obtain
        credentials to call Google APIs for server-to-server or local applications.
        This function acquires credentials from the environment in the following
        order:

        1. If the environment variable ``GOOGLE_APPLICATION_CREDENTIALS`` is set
           to the path of a valid service account JSON private key file, then it is
           loaded and returned. The project ID returned is the project ID defined
           in the service account file if available (some older files do not
           contain project ID information).

           If the environment variable is set to the path of a valid external
           account JSON configuration file (workload identity federation), then the
           configuration file is used to determine and retrieve the external
           credentials from the current environment (AWS, Azure, etc).
           These will then be exchanged for Google access tokens via the Google STS
           endpoint.
           The project ID returned in this case is the one corresponding to the
           underlying workload identity pool resource if determinable.

           If the environment variable is set to the path of a valid GDCH service
           account JSON file (`Google Distributed Cloud Hosted`_), then a GDCH
           credential will be returned. The project ID returned is the project
           specified in the JSON file.
        2. If the `Google Cloud SDK`_ is installed and has application default
           credentials set they are loaded and returned.

           To enable application default credentials with the Cloud SDK run::

                gcloud auth application-default login

           If the Cloud SDK has an active project, the project ID is returned. The
           active project can be set using::

                gcloud config set project

        3. If the application is running in the `App Engine standard environment`_
           (first generation) then the credentials and project ID from the
           `App Identity Service`_ are used.
        4. If the application is running in `Compute Engine`_ or `Cloud Run`_ or
           the `App Engine flexible environment`_ or the `App Engine standard
           environment`_ (second generation) then the credentials and project ID
           are obtained from the `Metadata Service`_.
        5. If no credentials are found,
           :class:`~google.auth.exceptions.DefaultCredentialsError` will be raised.

        .. _Application Default Credentials: https://developers.google.com\
                /identity/protocols/application-default-credentials
        .. _Google Cloud SDK: https://cloud.google.com/sdk
        .. _App Engine standard environment: https://cloud.google.com/appengine
        .. _App Identity Service: https://cloud.google.com/appengine/docs/python\
                /appidentity/
        .. _Compute Engine: https://cloud.google.com/compute
        .. _App Engine flexible environment: https://cloud.google.com\
                /appengine/flexible
        .. _Metadata Service: https://cloud.google.com/compute/docs\
                /storing-retrieving-metadata
        .. _Cloud Run: https://cloud.google.com/run
        .. _Google Distributed Cloud Hosted: https://cloud.google.com/blog/topics\
                /hybrid-cloud/announcing-google-distributed-cloud-edge-and-hosted

        Example::

            import google.auth

            credentials, project_id = google.auth.default()

        Args:
            scopes (Sequence[str]): The list of scopes for the credentials. If
                specified, the credentials will automatically be scoped if
                necessary.
            request (Optional[google.auth.transport.Request]): An object used to make
                HTTP requests. This is used to either detect whether the application
                is running on Compute Engine or to determine the associated project
                ID for a workload identity pool resource (external account
                credentials). If not specified, then it will either use the standard
                library http client to make requests for Compute Engine credentials
                or a google.auth.transport.requests.Request client for external
                account credentials.
            quota_project_id (Optional[str]): The project ID used for
                quota and billing.
            default_scopes (Optional[Sequence[str]]): Default scopes passed by a
                Google client library. Use 'scopes' for user-defined scopes.
        Returns:
            Tuple[~google.auth.credentials.Credentials, Optional[str]]:
                the current environment's credentials and project ID. Project ID
                may be None, which indicates that the Project ID could not be
                ascertained from the environment.

        Raises:
            ~google.auth.exceptions.DefaultCredentialsError:
                If no credentials were found, or if the credentials found were
                invalid.
        """
        from google.auth.credentials import with_scopes_if_required
        from google.auth.credentials import CredentialsWithQuotaProject

        explicit_project_id = os.environ.get(
            environment_vars.PROJECT, os.environ.get(environment_vars.LEGACY_PROJECT)
        )

        checkers = (
            # Avoid passing scopes here to prevent passing scopes to user credentials.
            # with_scopes_if_required() below will ensure scopes/default scopes are
            # safely set on the returned credentials since requires_scopes will
            # guard against setting scopes on user credentials.
            lambda: _get_explicit_environ_credentials(quota_project_id=quota_project_id),
            lambda: _get_gcloud_sdk_credentials(quota_project_id=quota_project_id),
            _get_gae_credentials,
            lambda: _get_gce_credentials(request, quota_project_id=quota_project_id),
        )

        for checker in checkers:
            credentials, project_id = checker()
            if credentials is not None:
                credentials = with_scopes_if_required(
                    credentials, scopes, default_scopes=default_scopes
                )

                # For external account credentials, scopes are required to determine
                # the project ID. Try to get the project ID again if not yet
                # determined.
                if not project_id and callable(
                    getattr(credentials, "get_project_id", None)
                ):
                    if request is None:
                        import google.auth.transport.requests

                        request = google.auth.transport.requests.Request()
>                   project_id = credentials.get_project_id(request=request)

.nox/system-3-11/lib/python3.11/site-packages/google/auth/_default.py:631: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
request = 

    def get_project_id(self, request):
        """Retrieves the project ID corresponding to the workload identity or workforce pool.
        For workforce pool credentials, it returns the project ID corresponding to
        the workforce_pool_user_project.

        When not determinable, None is returned.

        This is introduced to support the current pattern of using the Auth library:

            credentials, project_id = google.auth.default()

        The resource may not have permission (resourcemanager.projects.get) to
        call this API or the required scopes may not be selected:
        https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes

        Args:
            request (google.auth.transport.Request): A callable used to make
                HTTP requests.
        Returns:
            Optional[str]: The project ID corresponding to the workload identity pool
                or workforce pool if determinable.
        """
        if self._project_id:
            # If already retrieved, return the cached project ID value.
            return self._project_id
        scopes = self._scopes if self._scopes is not None else self._default_scopes
        # Scopes are required in order to retrieve a valid access token.
        project_number = self.project_number or self._workforce_pool_user_project
        if project_number and scopes:
            headers = {}
            url = _CLOUD_RESOURCE_MANAGER + project_number
>           self.before_request(request, "GET", url, headers)

.nox/system-3-11/lib/python3.11/site-packages/google/auth/external_account.py:327: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
request = 
method = 'GET'
url = 'https://cloudresourcemanager.googleapis.com/v1/projects/1021588826382'
headers = {}

    def before_request(self, request, method, url, headers):
        """Performs credential-specific before request logic.

        Refreshes the credentials if necessary, then calls :meth:`apply` to
        apply the token to the authentication header.

        Args:
            request (google.auth.transport.Request): The object used to make
                HTTP requests.
            method (str): The request's HTTP method or the RPC method being
                invoked.
            url (str): The request's URI or the RPC service's URI.
            headers (Mapping): The request's headers.
        """
        # pylint: disable=unused-argument
        # (Subclasses may use these arguments to ascertain information about
        # the http request.)
        if not self.valid:
>           self.refresh(request)

.nox/system-3-11/lib/python3.11/site-packages/google/auth/credentials.py:135: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
request = 

    @_helpers.copy_docstring(credentials.Credentials)
    def refresh(self, request):
        scopes = self._scopes if self._scopes is not None else self._default_scopes
        if self._impersonated_credentials:
>           self._impersonated_credentials.refresh(request)

.nox/system-3-11/lib/python3.11/site-packages/google/auth/external_account.py:348: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
request = 

    @_helpers.copy_docstring(credentials.Credentials)
    def refresh(self, request):
>       self._update_token(request)

.nox/system-3-11/lib/python3.11/site-packages/google/auth/impersonated_credentials.py:243: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
request = 

    def _update_token(self, request):
        """Updates credentials with a new access_token representing
        the impersonated account.

        Args:
            request (google.auth.transport.requests.Request): Request object
                to use for refreshing credentials.
        """

        # Refresh our source credentials if it is not valid.
        if not self._source_credentials.valid:
>           self._source_credentials.refresh(request)

.nox/system-3-11/lib/python3.11/site-packages/google/auth/impersonated_credentials.py:256: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
request = 

    @_helpers.copy_docstring(credentials.Credentials)
    def refresh(self, request):
        scopes = self._scopes if self._scopes is not None else self._default_scopes
        if self._impersonated_credentials:
            self._impersonated_credentials.refresh(request)
            self.token = self._impersonated_credentials.token
            self.expiry = self._impersonated_credentials.expiry
        else:
            now = _helpers.utcnow()
            additional_options = None
            # Do not pass workforce_pool_user_project when client authentication
            # is used. The client ID is sufficient for determining the user project.
            if self._workforce_pool_user_project and not self._client_id:
                additional_options = {"userProject": self._workforce_pool_user_project}
            response_data = self._sts_client.exchange_token(
                request=request,
                grant_type=_STS_GRANT_TYPE,
>               subject_token=self.retrieve_subject_token(request),
                subject_token_type=self._subject_token_type,
                audience=self._audience,
                scopes=scopes,
                requested_token_type=_STS_REQUESTED_TOKEN_TYPE,
                additional_options=additional_options,
            )

.nox/system-3-11/lib/python3.11/site-packages/google/auth/external_account.py:361: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
request = 

    @_helpers.copy_docstring(external_account.Credentials)
    def retrieve_subject_token(self, request):
        return self._parse_token_data(
>           self._get_token_data(request),
            self._credential_source_format_type,
            self._credential_source_field_name,
        )

.nox/system-3-11/lib/python3.11/site-packages/google/auth/identity_pool.py:158: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
request = 

    def _get_token_data(self, request):
        if self._credential_source_file:
            return self._get_file_data(self._credential_source_file)
        else:
>           return self._get_url_data(
                request, self._credential_source_url, self._credential_source_headers
            )

.nox/system-3-11/lib/python3.11/site-packages/google/auth/identity_pool.py:167: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 
request = 
url = 'https://pipelines.actions.githubusercontent.com/umAmnh0OhcfbtGEt7J16Yga6HsgM8dYIhPxbPiOYFLVwMnfbKz/00000000-0000-0000...cations%2Fglobal%2FworkloadIdentityPools%2Fgh-13a715-cloud-sql-pyt-dd1c5f%2Fproviders%2Fgh-13a715-cloud-sql-pyt-dd1c5f'
headers = {'Authorization': 'Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6ImVCWl9jbjNzWFlBZDBjaDRUSEJLSElnT3dPRSJ9.eyJuYW1...lWLyNUCZ5IwAPSnMcxXiI6sX2mC_tjqva9KvqELcJhU4jQttd8js_ZQ6jzpOfferWVo4U4DqA_C4WIy7a5b7-WMlVsEXOqA9ydsrDbOjb_rzCHkOpBX6Q'}

    def _get_url_data(self, request, url, headers):
        response = request(url=url, method="GET", headers=headers)

        # support both string and bytes type response.data
        response_body = (
            response.data.decode("utf-8")
            if hasattr(response.data, "decode")
            else response.data
        )

        if response.status != 200:
>           raise exceptions.RefreshError(
                "Unable to retrieve Identity Pool subject token", response_body
            )
E           google.auth.exceptions.RefreshError: ('Unable to retrieve Identity Pool subject token', '{ "message": "GitHub Actions is temporarily unavailable. Please visit https://www.githubstatus.com/ for the status of our services.", "ref": "Ref A: ECFFD5AD689345EC9DAA8881D4FC7D22 Ref B: BY3EDGE0309 Ref C: 2023-05-10T20:36:43Z" }')

.nox/system-3-11/lib/python3.11/site-packages/google/auth/identity_pool.py:189: RefreshError

During handling of the above exception, another exception occurred:

    def test_multiple_connectors() -> None:
        """Test that same Cloud SQL instance can connect with two Connector objects."""
        first_connector = Connector()
        second_connector = Connector()
        try:
            pool = init_connection_engine(first_connector)
            pool2 = init_connection_engine(second_connector)

            with pool.connect() as conn:
                conn.execute(sqlalchemy.text("SELECT 1"))

            with pool2.connect() as conn:
                conn.execute(sqlalchemy.text("SELECT 1"))

            instance_connection_string = os.environ["MYSQL_CONNECTION_NAME"]
            assert instance_connection_string in first_connector._instances
            assert instance_connection_string in second_connector._instances
            assert (
                first_connector._instances[instance_connection_string]
                != second_connector._instances[instance_connection_string]
            )
        except Exception as e:
>           logging.exception("Failed to connect with multiple Connector objects!", e)

tests/system/test_connector_object.py:93: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 
/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/logging/__init__.py:2123: in exception
    error(msg, *args, exc_info=exc_info, **kwargs)
/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/logging/__init__.py:2115: in error
    root.error(msg, *args, **kwargs)
/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/logging/__init__.py:1518: in error
    self._log(ERROR, msg, args, **kwargs)
/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/logging/__init__.py:1634: in _log
    self.handle(record)
/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/logging/__init__.py:1644: in handle
    self.callHandlers(record)
/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/logging/__init__.py:1706: in callHandlers
    hdlr.handle(record)
/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/logging/__init__.py:978: in handle
    self.emit(record)
.nox/system-3-11/lib/python3.11/site-packages/_pytest/logging.py:350: in emit
    super().emit(record)
/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/logging/__init__.py:1118: in emit
    self.handleError(record)
/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/logging/__init__.py:1110: in emit
    msg = self.format(record)
/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/logging/__init__.py:953: in format
    return fmt.format(record)
.nox/system-3-11/lib/python3.11/site-packages/_pytest/logging.py:114: in format
    return super().format(record)
/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/logging/__init__.py:687: in format
    record.message = record.getMessage()
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

self = 

    def getMessage(self):
        """
        Return the message for this LogRecord.

        Return the message for this LogRecord after merging any user-supplied
        arguments with the message.
        """
        msg = str(self.msg)
        if self.args:
>           msg = msg % self.args
E           TypeError: not all arguments converted during string formatting

/opt/hostedtoolcache/Python/3.11.3/x64/lib/python3.11/logging/__init__.py:377: TypeError
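
Note that the traceback above contains two failures layered on top of each other. The underlying failure is the `RefreshError` raised while fetching the workload identity subject token. The trailing `TypeError` is a secondary bug in the test itself: `logging.exception()` formats its message with `msg % args`, so passing the caught exception as a positional argument raises "not all arguments converted during string formatting" and masks the real error. A minimal sketch of the corrected logging call follows (a general fix for this pattern, not necessarily the repository's actual patch):

```python
# Minimal sketch: logging.exception() already records the active exception
# (exc_info=True), so the exception object must not be passed as a bare
# positional argument unless the message contains a matching %s placeholder.
import logging

try:
    raise RuntimeError("connection failed")  # stand-in for the pool.connect() failure
except Exception:
    # Correct: traceback is captured automatically, no extra args needed.
    logging.exception("Failed to connect with multiple Connector objects!")
```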
flaky-bot[bot] commented 1 year ago

Looks like this issue is flaky. :worried:

I'm going to leave this open and stop commenting.

A human should fix and close this.


When run at the same commit (953b8a52d0776fccf8b74628abd7a26ba253a0af), this test passed in one build (https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/actions/runs/4941100314) and failed in another build (https://github.com/GoogleCloudPlatform/cloud-sql-python-connector/actions/runs/4941100314).

jackwotherspoon commented 1 year ago

Failed due to a GitHub Actions token issue.
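
For reference, the `RefreshError` earlier in the log comes from the workload identity federation flow used by the CI credentials: google.auth fetches a GitHub Actions OIDC subject token from the Actions token endpoint, and any non-200 response (such as the temporary GitHub Actions outage quoted in the error body) surfaces as a `RefreshError`. A minimal sketch of that failure mode, using a plain `requests` session in place of the google.auth transport request object (an assumption for illustration only):

```python
# Minimal sketch of the subject-token fetch that failed in this run; mirrors
# the check in google.auth.identity_pool._get_url_data shown in the traceback.
import requests
from google.auth import exceptions


def fetch_subject_token(url: str, headers: dict) -> str:
    """Fetch the OIDC subject token from the credential source URL.

    Any non-200 response (for example, a transient GitHub Actions outage)
    is raised as a RefreshError, which is what aborted this test run.
    """
    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        raise exceptions.RefreshError(
            "Unable to retrieve Identity Pool subject token", response.text
        )
    return response.text
```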