
Commit 8f9e81c

Merge pull request #1031 from jverswijver/add_logger
Add centralized logger.
2 parents 91a43ed + 5d4d625 commit 8f9e81c

20 files changed: +119 -50 lines

CHANGELOG.md (+5 -1)
@@ -1,10 +1,14 @@
 ## Release notes
 
+### 0.13.6 -- Jun 13, 2022
+* Add - unified package level logger for package (#667) PR #1031
+* Update - swap various datajoint messages, warnings, etc. to use the new logger. (#667) PR #1031
+
 ### 0.13.5 -- May 19, 2022
 * Update - Import ABC from collections.abc for Python 3.10 compatibility
 * Bugfix - Fix multiprocessing value error (#1013) PR #1026
 
-### 0.13.4 -- March, 28 2022
+### 0.13.4 -- Mar, 28 2022
 * Add - Allow reading blobs produced by legacy 32-bit compiled mYm library for matlab. PR #995
 * Bugfix - Add missing `jobs` argument for multiprocessing PR #997
 * Add - Test for multiprocessing PR #1008
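
The logger added in this PR is exposed at the package root (datajoint/__init__.py re-exports it, see below) and is named after the package, so downstream code can tune verbosity without touching library internals. A minimal usage sketch, assuming nothing beyond what this commit adds; the level choices are illustrative:

import logging
import datajoint as dj

# dj.logger is the shared package-level logger created in datajoint/logging.py
assert dj.logger is logging.getLogger("datajoint")

# quiet routine messages (connection banner, plugin notices, ...)
dj.logger.setLevel(logging.WARNING)

# or surface the per-key populate/transaction messages that this PR moves to DEBUG
dj.logger.setLevel(logging.DEBUG)

The same effect is available before import through the DJ_LOG_LEVEL environment variable read in datajoint/logging.py (e.g. DJ_LOG_LEVEL=debug).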

LNX-docker-compose.yml (+1 -1)
@@ -32,7 +32,7 @@ services:
       interval: 1s
   fakeservices.datajoint.io:
     <<: *net
-    image: datajoint/nginx:v0.1.1
+    image: datajoint/nginx:v0.2.1
     environment:
       - ADD_db_TYPE=DATABASE
       - ADD_db_ENDPOINT=db:3306

datajoint/__init__.py (+1)
@@ -52,6 +52,7 @@
     "key_hash",
 ]
 
+from .logging import logger
 from .version import __version__
 from .settings import config
 from .connection import conn, Connection

datajoint/autopopulate.py (+10 -4)
@@ -13,7 +13,7 @@
 
 # noinspection PyExceptionInherit,PyCallingNonCallable
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger(__name__.split(".")[0])
 
 
 # --- helper functions for multiprocessing --
@@ -159,7 +159,7 @@ def populate(
         max_calls=None,
         display_progress=False,
         processes=1,
-        make_kwargs=None
+        make_kwargs=None,
     ):
         """
         ``table.populate()`` calls ``table.make(key)`` for every primary key in
@@ -207,7 +207,7 @@ def handler(signum, frame):
         elif order == "random":
             random.shuffle(keys)
 
-        logger.info("Found %d keys to populate" % len(keys))
+        logger.debug("Found %d keys to populate" % len(keys))
 
         keys = keys[:max_calls]
         nkeys = len(keys)
@@ -275,7 +275,7 @@ def _populate1(
             if jobs is not None:
                 jobs.complete(self.target.table_name, self._job_key(key))
         else:
-            logger.info("Populating: " + str(key))
+            logger.debug(f"Making {key} -> {self.target.full_table_name}")
             self.__class__._allow_insert = True
             try:
                 make(dict(key), **(make_kwargs or {}))
@@ -288,6 +288,9 @@ def _populate1(
                     exception=error.__class__.__name__,
                     msg=": " + str(error) if str(error) else "",
                 )
+                logger.debug(
+                    f"Error making {key} -> {self.target.full_table_name} - {error_message}"
+                )
                 if jobs is not None:
                     # show error name and error message (if any)
                     jobs.error(
@@ -303,6 +306,9 @@ def _populate1(
                 return key, error if return_exception_objects else error_message
             else:
                 self.connection.commit_transaction()
+                logger.debug(
+                    f"Success making {key} -> {self.target.full_table_name}"
+                )
                 if jobs is not None:
                     jobs.complete(self.target.table_name, self._job_key(key))
         finally:
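
The change repeated in this file and those below swaps each module's private logger for one package-level logger: for any module inside the package, __name__.split(".")[0] evaluates to "datajoint", and logging.getLogger returns the same object for the same name, so every module now shares one logger and one set of handlers. A small sketch of that name resolution (module names spelled out only for illustration):

import logging

# inside datajoint/autopopulate.py, __name__ == "datajoint.autopopulate"
autopopulate_logger = logging.getLogger("datajoint.autopopulate".split(".")[0])
# inside datajoint/connection.py, __name__ == "datajoint.connection"
connection_logger = logging.getLogger("datajoint.connection".split(".")[0])

assert autopopulate_logger is connection_logger  # both are the "datajoint" logger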

datajoint/connection.py (+6 -6)
@@ -17,7 +17,7 @@
 from .hash import uuid_from_buffer
 from .plugin import connection_plugins
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger(__name__.split(".")[0])
 query_log_max_length = 300
 
 
@@ -187,7 +187,7 @@ def __init__(self, host, user, password, port=None, init_fun=None, use_tls=None)
         self.conn_info["ssl_input"] = use_tls
         self.conn_info["host_input"] = host_input
         self.init_fun = init_fun
-        print("Connecting {user}@{host}:{port}".format(**self.conn_info))
+        logger.info("Connecting {user}@{host}:{port}".format(**self.conn_info))
         self._conn = None
         self._query_cache = None
         connect_host_hook(self)
@@ -341,7 +341,7 @@ def query(
         except errors.LostConnectionError:
             if not reconnect:
                 raise
-            warnings.warn("MySQL server has gone away. Reconnecting to the server.")
+            logger.warning("MySQL server has gone away. Reconnecting to the server.")
             connect_host_hook(self)
             if self._in_transaction:
                 self.cancel_transaction()
@@ -382,15 +382,15 @@ def start_transaction(self):
             raise errors.DataJointError("Nested connections are not supported.")
         self.query("START TRANSACTION WITH CONSISTENT SNAPSHOT")
         self._in_transaction = True
-        logger.info("Transaction started")
+        logger.debug("Transaction started")
 
     def cancel_transaction(self):
         """
         Cancels the current transaction and rolls back all changes made during the transaction.
         """
         self.query("ROLLBACK")
         self._in_transaction = False
-        logger.info("Transaction cancelled. Rolling back ...")
+        logger.debug("Transaction cancelled. Rolling back ...")
 
     def commit_transaction(self):
         """
@@ -399,7 +399,7 @@ def commit_transaction(self):
         """
         self.query("COMMIT")
         self._in_transaction = False
-        logger.info("Transaction committed and closed.")
+        logger.debug("Transaction committed and closed.")
 
     # -------- context manager for transactions
     @property

datajoint/declare.py (+2 -2)
@@ -75,7 +75,7 @@ def match_type(attribute_type):
     )
 
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger(__name__.split(".")[0])
 
 
 def build_foreign_key_parser_old():
@@ -207,7 +207,7 @@ def compile_foreign_key(
         )
 
     if obsolete:
-        warnings.warn(
+        logger.warning(
             'Line "{line}" uses obsolete syntax that will no longer be supported in datajoint 0.14. '
             "For details, see issue #780 https://github.com/datajoint/datajoint-python/issues/780".format(
                 line=line

datajoint/diagram.py (+7 -6)
@@ -2,10 +2,14 @@
 import re
 import functools
 import io
-import warnings
+import logging
 import inspect
 from .table import Table
 from .dependencies import unite_master_parts
+from .user_tables import Manual, Imported, Computed, Lookup, Part
+from .errors import DataJointError
+from .table import lookup_class_name
+
 
 try:
     from matplotlib import pyplot as plt
@@ -21,11 +25,8 @@
 except:
     diagram_active = False
 
-from .user_tables import Manual, Imported, Computed, Lookup, Part
-from .errors import DataJointError
-from .table import lookup_class_name
-
 
+logger = logging.getLogger(__name__.split(".")[0])
 user_table_classes = (Manual, Lookup, Computed, Imported, Part)
 
 
@@ -63,7 +64,7 @@ class Diagram:
     """
 
     def __init__(self, *args, **kwargs):
-        warnings.warn(
+        logger.warning(
             "Please install matplotlib and pygraphviz libraries to enable the Diagram feature."
         )
 
datajoint/expression.py (+1 -1)
@@ -17,7 +17,7 @@
 )
 from .declare import CONSTANT_LITERALS
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger(__name__.split(".")[0])
 
 
 class QueryExpression:

datajoint/fetch.py (+4 -2)
@@ -1,6 +1,6 @@
 from functools import partial
 from pathlib import Path
-import warnings
+import logging
 import pandas
 import itertools
 import re
@@ -12,6 +12,8 @@
 from .settings import config
 from .utils import safe_write
 
+logger = logging.getLogger(__name__.split(".")[0])
+
 
 class key:
     """
@@ -209,7 +211,7 @@ def __call__(
             )
 
         if limit is None and offset is not None:
-            warnings.warn(
+            logger.warning(
                 "Offset set, but no limit. Setting limit to a large number. "
                 "Consider setting a limit explicitly."
             )

datajoint/heading.py (+1 -1)
@@ -14,7 +14,7 @@
 from .attribute_adapter import get_adapter, AttributeAdapter
 
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger(__name__.split(".")[0])
 
 default_attribute_properties = (
     dict( # these default values are set in computed attributes

datajoint/logging.py (new file, +32)
@@ -0,0 +1,32 @@
+import logging
+import os
+import sys
+import io
+
+logger = logging.getLogger(__name__.split(".")[0])
+
+log_level = os.getenv("DJ_LOG_LEVEL", "info").upper()
+
+log_format = logging.Formatter("[%(asctime)s][%(levelname)s]: %(message)s")
+
+stream_handler = logging.StreamHandler() # default handler
+stream_handler.setFormatter(log_format)
+
+logger.setLevel(level=log_level)
+logger.handlers = [stream_handler]
+
+
+def excepthook(exc_type, exc_value, exc_traceback):
+    if issubclass(exc_type, KeyboardInterrupt):
+        sys.__excepthook__(exc_type, exc_value, exc_traceback)
+        return
+
+    if logger.getEffectiveLevel() == 10:
+        logger.debug(
+            "Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback)
+        )
+    else:
+        logger.error(f"Uncaught exception: {exc_value}")
+
+
+sys.excepthook = excepthook
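
This new module holds the shared formatter, stream handler, default level (read from DJ_LOG_LEVEL, falling back to INFO), and a sys.excepthook override. A rough sketch of the resulting behavior; the plugin name and timestamp below are made up for illustration:

import logging
import datajoint as dj  # importing the package installs the handler and the excepthook

# records are rendered by the "[asctime][LEVEL]: message" formatter, e.g.
dj.logger.warning("Unverified plugin `my_plugin` detected.")
# [2022-06-13 10:15:00,123][WARNING]: Unverified plugin `my_plugin` detected.

# An uncaught exception now goes through the installed excepthook: at DEBUG
# (numeric level 10) the full traceback is logged via exc_info, otherwise a
# single "Uncaught exception: ..." error line is emitted.
raise ValueError("demo")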

datajoint/plugin.py (+7 -4)
@@ -3,6 +3,9 @@
 from pathlib import Path
 from cryptography.exceptions import InvalidSignature
 from otumat import hash_pkg, verify
+import logging
+
+logger = logging.getLogger(__name__.split(".")[0])
 
 
 def _update_error_stack(plugin_name):
@@ -12,13 +15,13 @@ def _update_error_stack(plugin_name):
         plugin_meta = pkg_resources.get_distribution(plugin_name)
 
         data = hash_pkg(pkgpath=str(Path(plugin_meta.module_path, plugin_name)))
-        signature = plugin_meta.get_metadata("{}.sig".format(plugin_name))
-        pubkey_path = str(Path(base_meta.egg_info, "{}.pub".format(base_name)))
+        signature = plugin_meta.get_metadata(f"{plugin_name}.sig")
+        pubkey_path = str(Path(base_meta.egg_info, f"{base_name}.pub"))
         verify(pubkey_path=pubkey_path, data=data, signature=signature)
-        print("DataJoint verified plugin `{}` detected.".format(plugin_name))
+        logger.info(f"DataJoint verified plugin `{plugin_name}` detected.")
         return True
     except (FileNotFoundError, InvalidSignature):
-        print("Unverified plugin `{}` detected.".format(plugin_name))
+        logger.warning(f"Unverified plugin `{plugin_name}` detected.")
         return False
 
 
datajoint/s3.py

+1-2
Original file line numberDiff line numberDiff line change
@@ -4,13 +4,12 @@
44
from io import BytesIO
55
import minio # https://docs.minio.io/docs/python-client-api-reference
66
import urllib3
7-
import warnings
87
import uuid
98
import logging
109
from pathlib import Path
1110
from . import errors
1211

13-
logger = logging.getLogger(__name__)
12+
logger = logging.getLogger(__name__.split(".")[0])
1413

1514

1615
class Folder:

datajoint/schemas.py (+4 -4)
@@ -16,7 +16,7 @@
 from .table import lookup_class_name, Log, FreeTable
 import types
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger(__name__.split(".")[0])
 
 
 def ordered_dir(class_):
@@ -134,7 +134,7 @@ def activate(
                     )
                 )
             # create database
-            logger.info("Creating schema `{name}`.".format(name=schema_name))
+            logger.debug("Creating schema `{name}`.".format(name=schema_name))
             try:
                 self.connection.query(
                     "CREATE DATABASE `{name}`".format(name=schema_name)
@@ -360,12 +360,12 @@ def drop(self, force=False):
                 )
                 == "yes"
             ):
-                logger.info("Dropping `{database}`.".format(database=self.database))
+                logger.debug("Dropping `{database}`.".format(database=self.database))
                 try:
                     self.connection.query(
                         "DROP DATABASE `{database}`".format(database=self.database)
                     )
-                    logger.info(
+                    logger.debug(
                         "Schema `{database}` was dropped successfully.".format(
                             database=self.database
                         )

datajoint/settings.py (+4 -4)
@@ -49,7 +49,7 @@
     }
 )
 
-logger = logging.getLogger(__name__)
+logger = logging.getLogger(__name__.split(".")[0])
 log_levels = {
     "INFO": logging.INFO,
     "WARNING": logging.WARNING,
@@ -104,7 +104,7 @@ def save(self, filename, verbose=False):
         with open(filename, "w") as fid:
             json.dump(self._conf, fid, indent=4)
         if verbose:
-            print("Saved settings in " + filename)
+            logger.info("Saved settings in " + filename)
 
     def load(self, filename):
         """
@@ -240,8 +240,8 @@ def __getitem__(self, key):
         return self._conf[key]
 
     def __setitem__(self, key, value):
-        logger.log(
-            logging.INFO, "Setting {0:s} to {1:s}".format(str(key), str(value))
+        logger.debug(
+            logging.DEBUG, "Setting {0:s} to {1:s}".format(str(key), str(value))
         )
         if validators[key](value):
             self._conf[key] = value
