20613: Update unconfigured logger check to accommodate NullHandler
diff --git a/sdk/python/arvados/__init__.py b/sdk/python/arvados/__init__.py
index 45370777362a8baaa705401974f29299f59d801f..39fdb110031e12a76b6dc4cbcfcf0c6f1fe21df7 100644
--- a/sdk/python/arvados/__init__.py
+++ b/sdk/python/arvados/__init__.py
@@ -1,44 +1,53 @@
+# Copyright (C) The Arvados Authors. All rights reserved.
+#
+# SPDX-License-Identifier: Apache-2.0
+
 from __future__ import print_function
 from __future__ import absolute_import
-import gflags
-import httplib
+from future import standard_library
+standard_library.install_aliases()
+from builtins import object
+import bz2
+import fcntl
+import hashlib
+import http.client
 import httplib2
-import logging
+import json
+import logging as stdliblog
 import os
 import pprint
-import sys
-import types
-import subprocess
-import json
-import UserDict
 import re
-import hashlib
 import string
-import bz2
-import zlib
-import fcntl
+import sys
 import time
-import threading
+import types
+import zlib
 
-from .api import api, http_cache
+if sys.version_info >= (3, 0):
+    from collections import UserDict
+else:
+    from UserDict import UserDict
+
+from .api import api, api_from_config, http_cache
 from .collection import CollectionReader, CollectionWriter, ResumableCollectionWriter
-from .keep import *
-from .stream import *
+from arvados.keep import *
+from arvados.stream import *
 from .arvfile import StreamFileReader
+from .logging import log_format, log_date_format, log_handler
 from .retry import RetryLoop
-from . import errors
-from . import util
+import arvados.errors as errors
+import arvados.util as util
+
+# Override logging module pulled in via `from ... import *`
+# so users can `import arvados.logging`.
+logging = sys.modules['arvados.logging']
 
 # Set up Arvados logging based on the user's configuration.
 # All Arvados code should log under the arvados hierarchy.
-log_handler = logging.StreamHandler()
-log_handler.setFormatter(logging.Formatter(
-        '%(asctime)s %(name)s[%(process)d] %(levelname)s: %(message)s',
-        '%Y-%m-%d %H:%M:%S'))
-logger = logging.getLogger('arvados')
+logger = stdliblog.getLogger('arvados')
 logger.addHandler(log_handler)
-logger.setLevel(logging.DEBUG if config.get('ARVADOS_DEBUG')
-                else logging.WARNING)
+logger.setLevel(stdliblog.DEBUG if config.get('ARVADOS_DEBUG')
+                else stdliblog.WARNING)
 
 def task_set_output(self, s, num_retries=5):
     for tries_left in RetryLoop(num_retries=num_retries, backoff_start=0):
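
The hunk above routes everything logged under the `arvados` hierarchy through the handler exported by `arvados.logging`, and rebinds the package-level name `logging` so that `arvados.logging` still refers to the SDK's own logging module rather than the stdlib module pulled in by the star imports. A minimal sketch of how an application might build on those exports, assuming `log_format` and `log_date_format` are the same format strings the removed inline Formatter used (the `myapp` logger name is illustrative):

    import logging
    import arvados

    # Raise SDK verbosity without setting ARVADOS_DEBUG in the environment.
    logging.getLogger('arvados').setLevel(logging.INFO)

    # Reuse the SDK's format strings so application output matches the
    # SDK's output on stderr.
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(arvados.logging.log_format,
                                           arvados.logging.log_date_format))
    logging.getLogger('myapp').addHandler(handler)
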
@@ -65,7 +74,7 @@ def current_task(num_retries=5):
     for tries_left in RetryLoop(num_retries=num_retries, backoff_start=2):
         try:
             task = api('v1').job_tasks().get(uuid=os.environ['TASK_UUID']).execute()
-            task = UserDict.UserDict(task)
+            task = UserDict(task)
             task.set_output = types.MethodType(task_set_output, task)
             task.tmpdir = os.environ['TASK_WORK']
             _current_task = task
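
The change from `UserDict.UserDict(task)` to `UserDict(task)` relies on the conditional import added in the first hunk: `collections.UserDict` on Python 3, the old `UserDict` module's class on Python 2. The surrounding pattern of binding `task_set_output` onto the wrapped dict with `types.MethodType` works the same on both versions; a small self-contained sketch of that pattern, with an illustrative `set_output` that only records its argument locally:

    import sys
    import types

    if sys.version_info >= (3, 0):
        from collections import UserDict
    else:
        from UserDict import UserDict

    def set_output(self, value):
        # Stand-in for task_set_output: just store the value on the dict.
        self['output'] = value

    task = UserDict({'sequence': 0})
    # Bind set_output as a method of this particular UserDict instance.
    task.set_output = types.MethodType(set_output, task)
    task.set_output('results.txt')
    assert task['output'] == 'results.txt'
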
@@ -85,7 +94,7 @@ def current_job(num_retries=5):
     for tries_left in RetryLoop(num_retries=num_retries, backoff_start=2):
         try:
             job = api('v1').jobs().get(uuid=os.environ['JOB_UUID']).execute()
-            job = UserDict.UserDict(job)
+            job = UserDict(job)
             job.tmpdir = os.environ['JOB_WORK']
             _current_job = job
             return job
@@ -108,7 +117,7 @@ class JobTask(object):
     def __init__(self, parameters=dict(), runtime_constraints=dict()):
         print("init jobtask %s %s" % (parameters, runtime_constraints))
 
-class job_setup:
+class job_setup(object):
     @staticmethod
     def one_task_per_input_file(if_sequence=0, and_end_task=True, input_as_path=False, api_client=None):
         if if_sequence != current_task()['sequence']:
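
The commit title mentions updating an unconfigured-logger check to accommodate NullHandler, but that check is not part of the hunks shown above. As a sketch of the general idea only: a logger whose handlers are all `logging.NullHandler` instances behaves, for output purposes, like a logger with no handlers at all, since libraries add `NullHandler` merely to suppress the stdlib's "no handlers could be found" warning. The helper name below is hypothetical, not the SDK's API:

    import logging

    def logger_is_unconfigured(logger):
        # True when the logger has no handlers, or only NullHandlers that
        # will never emit any output.
        return all(isinstance(handler, logging.NullHandler)
                   for handler in logger.handlers)
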