From 6ccac21154294be8cf08d6b8bab52aba127a840f Mon Sep 17 00:00:00 2001
From: Miles Yucht 
Date: Wed, 11 Jun 2025 14:34:29 +0000
Subject: [PATCH 1/3] No default logging

---
 NEXT_CHANGELOG.md                      |  2 ++
 databricks/sdk/_widgets/__init__.py    |  9 +++++----
 databricks/sdk/credentials_provider.py |  4 +++-
 databricks/sdk/errors/customizer.py    |  5 +++--
 databricks/sdk/errors/deserializer.py  | 12 +++++++-----
 databricks/sdk/errors/parser.py        |  4 +++-
 databricks/sdk/runtime/__init__.py     | 16 ++++++++--------
 7 files changed, 31 insertions(+), 21 deletions(-)

diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index ac1055a9c..4b246a011 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -6,6 +6,8 @@
 
 ### Bug Fixes
 
+* Always create a new logger instance, rather than using Python's default global logger instance.
+
 ### Documentation
 
 ### Internal Changes
diff --git a/databricks/sdk/_widgets/__init__.py b/databricks/sdk/_widgets/__init__.py
index 3f9c4eefc..5e421abb9 100644
--- a/databricks/sdk/_widgets/__init__.py
+++ b/databricks/sdk/_widgets/__init__.py
@@ -3,6 +3,7 @@
 import warnings
 from abc import ABC, abstractmethod
 
+_LOG = logging.getLogger(__name__)
 
 class WidgetUtils(ABC):
 
@@ -54,7 +55,7 @@ def _remove_all(self):
             )
             == 0
         ):
-            logging.debug("Not in an interactive notebook. Skipping ipywidgets implementation for dbutils.")
+            _LOG.debug("Not in an interactive notebook. Skipping ipywidgets implementation for dbutils.")
             raise EnvironmentError("Not in an interactive notebook.")
 
     # For import errors in IPyWidgetUtil, we provide a warning message, prompting users to install the
@@ -63,7 +64,7 @@ def _remove_all(self):
         from .ipywidgets_utils import IPyWidgetUtil
 
         widget_impl = IPyWidgetUtil
-        logging.debug("Using ipywidgets implementation for dbutils.")
+        _LOG.debug("Using ipywidgets implementation for dbutils.")
 
     except ImportError as e:
         # Since we are certain that we are in an interactive notebook, we can make assumptions about
@@ -73,11 +74,11 @@ def _remove_all(self):
             "\tpip install 'databricks-sdk[notebook]'\n"
             "Falling back to default_value_only implementation for databricks widgets."
         )
-        logging.debug(f"{e.msg}. Skipping ipywidgets implementation for dbutils.")
+        _LOG.debug(f"{e.msg}. Skipping ipywidgets implementation for dbutils.")
         raise e
 
 except:
     from .default_widgets_utils import DefaultValueOnlyWidgetUtils
 
     widget_impl = DefaultValueOnlyWidgetUtils
-    logging.debug("Using default_value_only implementation for dbutils.")
+    _LOG.debug("Using default_value_only implementation for dbutils.")
diff --git a/databricks/sdk/credentials_provider.py b/databricks/sdk/credentials_provider.py
index 86bd5c4d2..96deb9aca 100644
--- a/databricks/sdk/credentials_provider.py
+++ b/databricks/sdk/credentials_provider.py
@@ -25,6 +25,8 @@
                     TokenCache, TokenSource)
 from .oidc_token_supplier import GitHubOIDCTokenSupplier
 
+_LOG = logging.getLogger(__name__)
+
 CredentialsProvider = Callable[[], Dict[str, str]]
 
 logger = logging.getLogger("databricks.sdk")
@@ -558,7 +560,7 @@ def _run_subprocess(
         kwargs["shell"] = sys.platform.startswith("win")
         # windows requires shell=True to be able to execute 'az login' or other commands
         # cannot use shell=True all the time, as it breaks macOS
-        logging.debug(f'Running command: {" ".join(popenargs)}')
+        _LOG.debug(f'Running command: {" ".join(popenargs)}')
         return subprocess.run(
             popenargs,
             input=input,
diff --git a/databricks/sdk/errors/customizer.py b/databricks/sdk/errors/customizer.py
index 6a760b626..cffb16cdd 100644
--- a/databricks/sdk/errors/customizer.py
+++ b/databricks/sdk/errors/customizer.py
@@ -3,6 +3,7 @@
 
 import requests
 
+_LOG = logging.getLogger(__name__)
 
 class _ErrorCustomizer(abc.ABC):
     """A customizer for errors from the Databricks REST API."""
@@ -23,7 +24,7 @@ class _RetryAfterCustomizer(_ErrorCustomizer):
     def _parse_retry_after(cls, response: requests.Response) -> int:
         retry_after = response.headers.get("Retry-After")
         if retry_after is None:
-            logging.debug(
+            _LOG.debug(
                 f"No Retry-After header received in response with status code 429 or 503. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}"
             )
             # 429 requests should include a `Retry-After` header, but if it's missing,
@@ -39,7 +40,7 @@ def _parse_retry_after(cls, response: requests.Response) -> int:
         try:
             return int(retry_after)
         except ValueError:
-            logging.debug(
+            _LOG.debug(
                 f"Invalid Retry-After header received: {retry_after}. Defaulting to {cls._DEFAULT_RETRY_AFTER_SECONDS}"
             )
             # defaulting to 1 sleep second to make self._is_retryable() simpler
diff --git a/databricks/sdk/errors/deserializer.py b/databricks/sdk/errors/deserializer.py
index 5a6e0da09..80272c589 100644
--- a/databricks/sdk/errors/deserializer.py
+++ b/databricks/sdk/errors/deserializer.py
@@ -6,6 +6,8 @@
 
 import requests
 
+_LOG = logging.getLogger(__name__)
+
 
 class _ErrorDeserializer(abc.ABC):
     """A parser for errors from the Databricks REST API."""
@@ -34,19 +36,19 @@ def deserialize_error(self, response: requests.Response, response_body: bytes) -
             payload_str = response_body.decode("utf-8")
             resp = json.loads(payload_str)
         except UnicodeDecodeError as e:
-            logging.debug(
+            _LOG.debug(
                 "_StandardErrorParser: unable to decode response using utf-8",
                 exc_info=e,
             )
             return None
         except json.JSONDecodeError as e:
-            logging.debug(
+            _LOG.debug(
                 "_StandardErrorParser: unable to deserialize response as json",
                 exc_info=e,
             )
             return None
 
         if not isinstance(resp, dict):
-            logging.debug("_StandardErrorParser: response is valid JSON but not a dictionary")
+            _LOG.debug("_StandardErrorParser: response is valid JSON but not a dictionary")
             return None
 
         error_args = {
@@ -84,7 +86,7 @@ def deserialize_error(self, response: requests.Response, response_body: bytes) -
         payload_str = response_body.decode("utf-8")
         match = self.__STRING_ERROR_REGEX.match(payload_str)
         if not match:
-            logging.debug("_StringErrorParser: unable to parse response as string")
+            _LOG.debug("_StringErrorParser: unable to parse response as string")
             return None
         error_code, message = match.groups()
         return {
@@ -115,5 +117,5 @@ def deserialize_error(self, response: requests.Response, response_body: bytes) -
                 "message": message,
                 "error_code": response.reason.upper().replace(" ", "_"),
             }
-        logging.debug("_HtmlErrorParser: no <pre> tag found in error response")
+        _LOG.debug("_HtmlErrorParser: no <pre> tag found in error response")
         return None
diff --git a/databricks/sdk/errors/parser.py b/databricks/sdk/errors/parser.py
index 64d83de05..42c555b40 100644
--- a/databricks/sdk/errors/parser.py
+++ b/databricks/sdk/errors/parser.py
@@ -13,6 +13,8 @@
 from .private_link import (_get_private_link_validation_error,
                            _is_private_link_redirect)
 
+_LOG = logging.getLogger(__name__)
+
 # A list of _ErrorDeserializers that are tried in order to parse an API error from a response body. Most errors should
 # be parsable by the _StandardErrorDeserializer, but additional parsers can be added here for specific error formats.
 # The order of the parsers is not important, as the set of errors that can be parsed by each parser should be disjoint.
@@ -78,7 +80,7 @@ def get_api_error(self, response: requests.Response) -> Optional[DatabricksError
                             customizer.customize_error(response, error_args)
                         return _error_mapper(response, error_args)
                 except Exception as e:
-                    logging.debug(
+                    _LOG.debug(
                         f"Error parsing response with {parser}, continuing",
                         exc_info=e,
                     )
diff --git a/databricks/sdk/runtime/__init__.py b/databricks/sdk/runtime/__init__.py
index adf26c707..298bc5a78 100644
--- a/databricks/sdk/runtime/__init__.py
+++ b/databricks/sdk/runtime/__init__.py
@@ -3,7 +3,7 @@
 import logging
 from typing import Dict, Optional, Union, cast
 
-logger = logging.getLogger("databricks.sdk")
+_LOG = logging.getLogger(__name__)
 is_local_implementation = True
 
 # All objects that are injected into the Notebook's user namespace should also be made
@@ -28,7 +28,7 @@
     # a workaround here for exposing required information in notebook environment
     from dbruntime.sdk_credential_provider import init_runtime_native_auth
 
-    logger.debug("runtime SDK credential provider available")
+    _LOG.debug("runtime SDK credential provider available")
     dbruntime_objects.append("init_runtime_native_auth")
 except ImportError:
     init_runtime_native_auth = None
@@ -42,10 +42,10 @@ def init_runtime_repl_auth():
 
         ctx = get_context()
         if ctx is None:
-            logger.debug("Empty REPL context returned, skipping runtime auth")
+            _LOG.debug("Empty REPL context returned, skipping runtime auth")
             return None, None
         if ctx.workspaceUrl is None:
-            logger.debug("Workspace URL is not available, skipping runtime auth")
+            _LOG.debug("Workspace URL is not available, skipping runtime auth")
             return None, None
         host = f"https://{ctx.workspaceUrl}"
 
@@ -113,12 +113,12 @@ def inner() -> Dict[str, str]:
         sqlContext: SQLContext = None  # type: ignore
         table = sqlContext.table
     except Exception as e:
-        logging.debug(f"Failed to initialize globals 'sqlContext' and 'table', continuing. Cause: {e}")
+        _LOG.debug(f"Failed to initialize globals 'sqlContext' and 'table', continuing. Cause: {e}")
 
     try:
         from pyspark.sql.functions import udf  # type: ignore
     except ImportError as e:
-        logging.debug(f"Failed to initialise udf global: {e}")
+        _LOG.debug(f"Failed to initialise udf global: {e}")
 
     try:
         from databricks.connect import DatabricksSession  # type: ignore
@@ -128,13 +128,13 @@ def inner() -> Dict[str, str]:
     except Exception as e:
         # We are ignoring all failures here because user might want to initialize
         # spark session themselves and we don't want to interfere with that
-        logging.debug(f"Failed to initialize globals 'spark' and 'sql', continuing. Cause: {e}")
+        _LOG.debug(f"Failed to initialize globals 'spark' and 'sql', continuing. Cause: {e}")
 
     try:
         # We expect this to fail locally since dbconnect does not support sparkcontext. This is just for typing
         sc = spark.sparkContext  # type: ignore
     except Exception as e:
-        logging.debug(f"Failed to initialize global 'sc', continuing. Cause: {e}")
+        _LOG.debug(f"Failed to initialize global 'sc', continuing. Cause: {e}")
 
     def display(input=None, *args, **kwargs) -> None:  # type: ignore
         """

From 77dd4e3b7553aa0556a0495a61172d3bb3acaf18 Mon Sep 17 00:00:00 2001
From: Miles Yucht 
Date: Wed, 11 Jun 2025 14:42:59 +0000
Subject: [PATCH 2/3] fix

---
 NEXT_CHANGELOG.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index 6e58b8893..3d1a02e39 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -6,7 +6,7 @@
 
 ### Bug Fixes
 
-* Always create a new logger instance, rather than using Python's default global logger instance.
+* Always create a new logger instance, rather than using Python's default global logger instance ([#988](https://github.com/databricks/databricks-sdk-py/pull/988)).
 
 ### Documentation
 

From 56487bbd0c808cf046dad7fc96f31556e8c04dae Mon Sep 17 00:00:00 2001
From: Miles Yucht 
Date: Wed, 11 Jun 2025 14:43:46 +0000
Subject: [PATCH 3/3] fix

---
 databricks/sdk/_widgets/__init__.py | 1 +
 databricks/sdk/errors/customizer.py | 1 +
 2 files changed, 2 insertions(+)

diff --git a/databricks/sdk/_widgets/__init__.py b/databricks/sdk/_widgets/__init__.py
index 5e421abb9..c55ae9ff6 100644
--- a/databricks/sdk/_widgets/__init__.py
+++ b/databricks/sdk/_widgets/__init__.py
@@ -5,6 +5,7 @@
 
 _LOG = logging.getLogger(__name__)
 
+
 class WidgetUtils(ABC):
 
     def get(self, name: str):
diff --git a/databricks/sdk/errors/customizer.py b/databricks/sdk/errors/customizer.py
index cffb16cdd..0893ed9dd 100644
--- a/databricks/sdk/errors/customizer.py
+++ b/databricks/sdk/errors/customizer.py
@@ -5,6 +5,7 @@
 
 _LOG = logging.getLogger(__name__)
 
+
 class _ErrorCustomizer(abc.ABC):
     """A customizer for errors from the Databricks REST API."""