Commit ccb09da
use specific package level logger; update metadata (#1732)
njzjz authored Oct 16, 2022
1 parent 050383f commit ccb09da
Showing 7 changed files with 33 additions and 32 deletions.
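The common thread across the diffs below is the change named in the commit title: instead of calling the root `logging` module directly, every module now imports one package-level logger defined in reacnetgenerator/_logging.py. A minimal, self-contained sketch of that pattern, using illustrative names (the package's real module is shown in the _logging.py diff further down):

    import logging

    # one logger for the whole package, created once and imported by every module
    logger = logging.getLogger("reacnetgenerator")
    logging.basicConfig(level=logging.INFO)  # the real package configures coloredlogs instead

    # call sites then go through the package logger rather than the root logger:
    logger.info("Species are:")               # was: logging.info("Species are:")
    logger.warning("SHA256 is not correct.")  # was: logging.warning(...)

Routing everything through one named logger keeps handlers and formatting attached to reacnetgenerator's own logger, so the library no longer reconfigures global logging for applications that import it, which is presumably the motivation here.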
5 changes: 2 additions & 3 deletions reacnetgenerator/__init__.py
@@ -1,4 +1,4 @@
-# Copyright 2018-2019, East China Normal University
+# Copyright 2018-2022, East China Normal University
"""
``reacnetgenerator``
====================
@@ -14,12 +14,11 @@
"""

__date__ = '2018-03-11'
-__update__ = '2019-10-07'
__author__ = 'Jinzhe Zeng'
__email__ = 'jinzhe.zeng@rutgers.edu'
__credits__ = ['Jinzhe Zeng', 'Tong Zhu',
'Liqun Cao', 'Chih-Hao Chin', 'John ZH Zhang']
-__copyright__ = 'Copyright 2018-2019, East China Normal University'
+__copyright__ = 'Copyright 2018-2022, East China Normal University'

import matplotlib as mpl
mpl.use("svg") # noqa
13 changes: 6 additions & 7 deletions reacnetgenerator/_draw.py
@@ -18,7 +18,6 @@
placement. Software: Practice and experince. 1991, 21(11),1129-1164.
"""

-import logging
import math
import traceback
from io import StringIO
@@ -31,6 +30,7 @@
import scour.scour

from .utils import SCOUROPTIONS, SharedRNGData
+from ._logging import logger


class _DrawNetwork(SharedRNGData):
@@ -73,8 +73,8 @@ def _draw(self, timeaxis=None):
fixed=list(self.pos) if self.pos else None,
k=self.k)
if pos:
-logging.info("The position of the species in the network is:")
-logging.info(pos)
+logger.info("The position of the species in the network is:")
+logger.info(pos)
for with_labels in ([True] if not self.nolabel else [True, False]):
nx.draw(
G, pos=pos, width=widths, node_size=self.node_size,
@@ -88,8 +88,7 @@
stringio.getvalue(), SCOUROPTIONS))
plt.close()
except Exception as e:
-logging.error(f"Error: cannot draw images. Details: {e}")
-traceback.print_tb(e.__traceback__)
+logger.exception(f"Error: cannot draw images. Details: {e}")

def _readtable(self, filename):
df = pd.read_csv(filename, sep=' ', index_col=0, header=0)
@@ -106,7 +105,7 @@ def _handlespecies(self, name):
else:
showname = dict([(u, u) for u in species])
if species:
-logging.info("Species are:")
+logger.info("Species are:")
for specname, n in showname.items():
-logging.info("{} {}".format(n, specname))
+logger.info("{} {}".format(n, specname))
return species, showname
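In the error-handling hunk above, the pair logging.error(...) plus traceback.print_tb(...) collapses into a single logger.exception(...) call. A small self-contained sketch (standard library only) showing that logger.exception records the traceback on its own:

    import logging

    logger = logging.getLogger(__name__)

    try:
        1 / 0  # stand-in for the drawing code that can fail
    except Exception as e:
        # logs the message at ERROR level and appends the full traceback,
        # covering what the removed traceback.print_tb(e.__traceback__) call did
        logger.exception(f"Error: cannot draw images. Details: {e}")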
6 changes: 4 additions & 2 deletions reacnetgenerator/_logging.py
@@ -4,6 +4,8 @@
import logging
import coloredlogs
from ._version import __version__

+logger = logging.getLogger(__name__)
coloredlogs.install(
-fmt=f'%(asctime)s - ReacNetGen {__version__} - %(levelname)s: %(message)s',
-level=logging.INFO, milliseconds=True)
+fmt=f'%(asctime)s - ReacNetGenerator {__version__} - %(levelname)s: %(message)s',
+level=logging.INFO, milliseconds=True, logger=logger)
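Pieced together from the hunk above, the new _logging.py looks roughly like this after the commit (a reconstruction, not a verbatim copy; any module docstring above the hunk is omitted). Passing logger=logger to coloredlogs.install attaches the colored handler and format to the package logger rather than the root logger, so the rest of the package can simply do `from ._logging import logger`:

    import logging
    import coloredlogs
    from ._version import __version__

    logger = logging.getLogger(__name__)
    coloredlogs.install(
        fmt=f'%(asctime)s - ReacNetGenerator {__version__} - %(levelname)s: %(message)s',
        level=logging.INFO, milliseconds=True, logger=logger)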
4 changes: 2 additions & 2 deletions reacnetgenerator/_reachtml.py
@@ -11,7 +11,6 @@
"""

import json
-import logging
import re
import os
from collections import defaultdict
@@ -20,6 +19,7 @@
import scour.scour

from .utils import SCOUROPTIONS, SharedRNGData
+from ._logging import logger


class _HTMLResult(SharedRNGData):
@@ -39,7 +39,7 @@ def report(self):
"""Generate a web page to show the result."""
self._readdata()
self._generateresult()
-logging.info(
+logger.info(
f"Report is generated. Please see {self.resultfilename} for more details.")

def _re(self, smi):
8 changes: 4 additions & 4 deletions reacnetgenerator/gui.py
@@ -14,18 +14,18 @@
import webbrowser
import base64
import pkg_resources
-import logging
from multiprocessing import cpu_count

from . import ReacNetGenerator, __version__
+from ._logging import logger


class GUI:
"""GUI class."""

def __init__(self):
"""Init GUI class."""
-logging.info(__doc__)
+logger.info(__doc__)
self._filename = ''

self._top = tk.Tk()
@@ -109,10 +109,10 @@ def _run(self):
webbrowser.open_new(os.path.abspath(
reacnetgenerator.resultfilename))
except Exception as e:
-logging.error(e)
+logger.exception(e)
tkmb.showerror("Error", e)
else:
-logging.error("File not exsit.")
+logger.error("File not exsit.")
tkmb.showerror("Error", "File not exsit.")

def _openfiles(self):
14 changes: 7 additions & 7 deletions reacnetgenerator/reacnetgen.py
@@ -41,7 +41,6 @@


import gc
-import logging
import os
import time
import itertools
@@ -50,7 +49,8 @@

import numpy as np

-from . import __version__, __date__, __update__
+from . import __version__, __date__
+from ._logging import logger
from ._detect import _Detect
from ._draw import _DrawNetwork
from ._mergeiso import _mergeISO
@@ -119,8 +119,8 @@ class ReacNetGenerator:

def __init__(self, **kwargs):
"""Init ReacNetGenerator."""
-logging.info(__doc__)
-logging.info(
+logger.info(__doc__)
+logger.info(
f"Version: {__version__} Creation date: {__date__}")

# process kwargs
@@ -292,7 +292,7 @@ def _process(self, steps):
# garbage collect
gc.collect()
timearray.append(time.perf_counter())
-logging.info(
+logger.info(
f"Step {i}: Done! Time consumed (s): {timearray[-1]-timearray[-2]:.3f} ({runstep})")

# delete tempfile
@@ -304,5 +304,5 @@
except OSError:
pass
# Summary
-logging.info("====== Summary ======")
-logging.info(f"Total time(s): {timearray[-1]-timearray[0]:.3f} s")
+logger.info("====== Summary ======")
+logger.info(f"Total time(s): {timearray[-1]-timearray[0]:.3f} s")
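The last two hunks in reacnetgen.py only swap logging for logger, but they sit inside a per-step timing pattern (append time.perf_counter() after each step, log the delta, then log a final total) that is easy to lift out. A small runnable sketch of that pattern, with placeholder step names rather than ReacNetGenerator's real pipeline steps:

    import logging
    import time

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("reacnetgenerator")

    def run_steps(steps):
        timearray = [time.perf_counter()]
        for i, (runstep, func) in enumerate(steps, 1):
            func()
            timearray.append(time.perf_counter())
            logger.info(
                f"Step {i}: Done! Time consumed (s): {timearray[-1]-timearray[-2]:.3f} ({runstep})")
        logger.info("====== Summary ======")
        logger.info(f"Total time(s): {timearray[-1]-timearray[0]:.3f} s")

    # placeholder steps standing in for detect/draw/etc.
    run_steps([("detect", lambda: time.sleep(0.1)), ("draw", lambda: time.sleep(0.05))])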
15 changes: 8 additions & 7 deletions reacnetgenerator/utils.py
@@ -6,7 +6,6 @@
import os
import shutil
import itertools
-import logging
import pickle
import hashlib
import asyncio
@@ -19,6 +18,8 @@
from requests.adapters import HTTPAdapter
from tqdm import tqdm

+from ._logging import logger
+

class WriteBuffer:
"""Store a buffer for writing files.
@@ -355,11 +356,11 @@ def checksha256(filename, sha256_check):
for n in iter(lambda: f.readinto(mv), 0):
h.update(mv[:n])
sha256 = h.hexdigest()
-logging.info(f"SHA256 of {filename}: {sha256}")
+logger.info(f"SHA256 of {filename}: {sha256}")
if sha256 in must_be_list(sha256_check):
return True
-logging.warning("SHA256 is not correct.")
-logging.warning(open(filename).read())
+logger.warning("SHA256 is not correct.")
+logger.warning(open(filename).read())
return False


@@ -389,13 +390,13 @@ async def download_file(urls, pathfilename, sha256):

# from https://stackoverflow.com/questions/16694907
for url in must_be_list(urls):
-logging.info(f"Try to download {pathfilename} from {url}")
+logger.info(f"Try to download {pathfilename} from {url}")
with s.get(url, stream=True) as r, open(pathfilename, 'wb') as f:
try:
shutil.copyfileobj(r.raw, f)
break
except requests.exceptions.RequestException as e:
-logging.warning(f"Request {pathfilename} Error.", exc_info=e)
+logger.warning(f"Request {pathfilename} Error.", exc_info=e)
else:
raise RuntimeError(f"Cannot download {pathfilename}.")

@@ -441,7 +442,7 @@ def run_mp(nproc, **arg):
yield item
semaphore.release()
except:
-logging.exception("run_mp failed")
+logger.exception("run_mp failed")
pool.terminate()
raise
else:
