diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index d386d4d4d..d7b33901b 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -2,7 +2,7 @@
 name: Bug report
 about: Create a report to help us improve
 title: ''
-labels: 'bug, awaiting-triage'
+labels: 'bug'
 assignees: ''
 
 ---
 
diff --git a/.github/workflows/development.yaml b/.github/workflows/development.yaml
index bc8d16989..cc0e4491a 100644
--- a/.github/workflows/development.yaml
+++ b/.github/workflows/development.yaml
@@ -3,48 +3,69 @@ on:
   push:
     branches:
       - '**' # every branch
+      - '!gh-pages' # exclude gh-pages branch
       - '!stage*' # exclude branches beginning with stage
+    tags:
+      - '\d+\.\d+\.\d+' # only semver tags
   pull_request:
     branches:
       - '**' # every branch
+      - '!gh-pages' # exclude gh-pages branch
       - '!stage*' # exclude branches beginning with stage
 jobs:
-  build-docs:
+  build:
     runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        include:
+          - py_ver: '3.9'
+            distro: debian
+            image: djbase
     env:
+      PY_VER: ${{matrix.py_ver}}
+      DISTRO: ${{matrix.distro}}
+      IMAGE: ${{matrix.image}}
       DOCKER_CLIENT_TIMEOUT: "120"
       COMPOSE_HTTP_TIMEOUT: "120"
     steps:
-      - uses: actions/checkout@v2
-      - name: Compile docs static artifacts
+      - uses: actions/checkout@v3
+      - name: Validate version and release notes
+        run: |
+          DJ_VERSION=$(grep -oP '\d+\.\d+\.\d+' datajoint/version.py)
+          RELEASE_BODY=$(python -c \
+            'print(open("./CHANGELOG.md").read().split("\n\n")[1].split("\n", 1)[1])' \
+          )
+          echo "DJ_VERSION=${DJ_VERSION}" >> $GITHUB_ENV
+          echo "RELEASE_BODY<<EOF" >> $GITHUB_ENV
+          echo "$RELEASE_BODY" >> $GITHUB_ENV
+          echo "EOF" >> $GITHUB_ENV
+      - name: Build pip artifacts
         run: |
           export HOST_UID=$(id -u)
-          docker-compose -f ./docs-api/docker-compose.yaml up --exit-code-from docs-builder --build
-      - name: Add docs static artifacts
-        uses: actions/upload-artifact@v2
+          docker-compose -f docker-compose-build.yaml up --exit-code-from app --build
+          echo "DJ_VERSION=${DJ_VERSION}" >> $GITHUB_ENV
+      - if: matrix.py_ver == '3.9' && matrix.distro == 'debian'
+        name: Add pip artifacts
+        uses: actions/upload-artifact@v3
         with:
-          name: docs-api-static
-          path: docs-api/build/html
+          name: pip-datajoint-${{env.DJ_VERSION}}
+          path: dist
           retention-days: 1
   test:
-    if: github.event_name == 'push' || github.event_name == 'pull_request'
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        # py_ver: ["3.10"]
        py_ver: ["3.9"]
         mysql_ver: ["8.0", "5.7"]
         include:
-          # - py_ver: "3.9"
-          #   mysql_ver: "5.7"
           - py_ver: "3.8"
             mysql_ver: "5.7"
           - py_ver: "3.7"
             mysql_ver: "5.7"
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
       - name: Set up Python ${{matrix.py_ver}}
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
         with:
           python-version: ${{matrix.py_ver}}
       - name: Install dependencies
@@ -55,56 +76,135 @@ jobs:
         run: flake8 datajoint --count --select=E9,F63,F7,F82 --show-source --statistics
       - name: Run primary tests
         env:
-          HOST_UID: "1001"
-          GID: "121"
           PY_VER: ${{matrix.py_ver}}
           MYSQL_VER: ${{matrix.mysql_ver}}
           DISTRO: alpine
           MINIO_VER: RELEASE.2021-09-03T03-56-13Z
+          DOCKER_CLIENT_TIMEOUT: "120"
           COMPOSE_HTTP_TIMEOUT: "120"
-        run: docker-compose -f LNX-docker-compose.yml up --build --exit-code-from app
+        run: |
+          export HOST_UID=$(id -u)
+          docker-compose -f LNX-docker-compose.yml up --build --exit-code-from app
       - name: Run style tests
         run: |
-          flake8 --ignore=E203,E722,F401,W503 datajoint \
+          flake8 --ignore=E203,E722,W503 datajoint \
                  --count --max-complexity=62 --max-line-length=127 --statistics
           black datajoint 
--check -v black tests --check -v publish-docs: if: | github.event_name == 'push' && - ( - github.repository_owner == 'datajoint' || - github.repository_owner == 'datajoint-company' || - github.repository_owner == 'dj-sciops' - ) - needs: build-docs + startsWith(github.ref, 'refs/tags') + needs: test runs-on: ubuntu-latest + env: + DOCKER_CLIENT_TIMEOUT: "120" + COMPOSE_HTTP_TIMEOUT: "120" + steps: + - uses: actions/checkout@v3 + - name: Deploy docs + run: | + export MODE=BUILD + export PACKAGE=datajoint + export UPSTREAM_REPO=https://github.com/${GITHUB_REPOSITORY}.git + export HOST_UID=$(id -u) + docker compose -f docs/docker-compose.yaml up --exit-code-from docs --build + git push origin gh-pages + publish-release: + if: | + github.event_name == 'push' && + startsWith(github.ref, 'refs/tags') + needs: test + runs-on: ubuntu-latest + strategy: + matrix: + include: + - py_ver: '3.9' + distro: debian + image: djbase + env: + PY_VER: ${{matrix.py_ver}} + DISTRO: ${{matrix.distro}} + IMAGE: ${{matrix.image}} + TWINE_USERNAME: ${{secrets.twine_username}} + TWINE_PASSWORD: ${{secrets.twine_password}} + DOCKER_CLIENT_TIMEOUT: "120" + COMPOSE_HTTP_TIMEOUT: "120" + outputs: + release_upload_url: ${{steps.create_gh_release.outputs.upload_url}} steps: - - uses: actions/checkout@v2 - - name: Fetch docs static artifacts - uses: actions/download-artifact@v2 + - uses: actions/checkout@v3 + - name: Set up Python ${{matrix.py_ver}} + uses: actions/setup-python@v4 with: - name: docs-api-static - path: docs-api/build/html - - name: Commit documentation changes + python-version: ${{matrix.py_ver}} + - name: Determine package version run: | - git clone https://github.com/${GITHUB_REPOSITORY}.git \ - --branch gh-pages --single-branch gh-pages - rm -R gh-pages/* - cp -r docs-api/build/html/* gh-pages/ - cp .gitignore gh-pages/ - touch gh-pages/.nojekyll - echo "docs-api.datajoint.org" > gh-pages/CNAME - cd gh-pages - git config --local user.email "action@github.com" - git config --local user.name "GitHub Action" - git add . --all - git commit -m "Update documentation" -a || true - # The above command will fail if no changes were present, so we ignore - # the return code. 
-      - name: Push changes
-        uses: ad-m/github-push-action@master
+          DJ_VERSION=$(grep -oP '\d+\.\d+\.\d+' datajoint/version.py)
+          RELEASE_BODY=$(python -c \
+            'print(open("./CHANGELOG.md").read().split("\n\n")[1].split("\n", 1)[1])' \
+          )
+          echo "DJ_VERSION=${DJ_VERSION}" >> $GITHUB_ENV
+          echo "RELEASE_BODY<<EOF" >> $GITHUB_ENV
+          echo "$RELEASE_BODY" >> $GITHUB_ENV
+          echo "EOF" >> $GITHUB_ENV
+      - name: Create GH release
+        id: create_gh_release
+        uses: actions/create-release@v1
+        env:
+          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
         with:
-          branch: gh-pages
-          directory: gh-pages
-          github_token: ${{secrets.GITHUB_TOKEN}}
\ No newline at end of file
+          tag_name: ${{env.DJ_VERSION}}
+          release_name: Release ${{env.DJ_VERSION}}
+          body: ${{env.RELEASE_BODY}}
+          prerelease: false
+          draft: false
+      - name: Fetch pip artifacts
+        uses: actions/download-artifact@v3
+        with:
+          name: pip-datajoint-${{env.DJ_VERSION}}
+          path: dist
+      - name: Determine pip artifact paths
+        run: |
+          echo "DJ_WHEEL_PATH=$(ls dist/datajoint-*.whl)" >> $GITHUB_ENV
+          echo "DJ_SDIST_PATH=$(ls dist/datajoint-*.tar.gz)" >> $GITHUB_ENV
+      - name: Upload pip wheel asset to release
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
+        with:
+          upload_url: ${{steps.create_gh_release.outputs.upload_url}}
+          asset_path: ${{env.DJ_WHEEL_PATH}}
+          asset_name: pip-datajoint-${{env.DJ_VERSION}}.whl
+          asset_content_type: application/zip
+      - name: Upload pip sdist asset to release
+        uses: actions/upload-release-asset@v1
+        env:
+          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
+        with:
+          upload_url: ${{steps.create_gh_release.outputs.upload_url}}
+          asset_path: ${{env.DJ_SDIST_PATH}}
+          asset_name: pip-datajoint-${{env.DJ_VERSION}}.tar.gz
+          asset_content_type: application/gzip
+      - name: Publish pip release
+        run: |
+          export HOST_UID=$(id -u)
+          docker-compose -f docker-compose-build.yaml run \
+            -e TWINE_USERNAME=${TWINE_USERNAME} -e TWINE_PASSWORD=${TWINE_PASSWORD} app \
+            sh -lc "pip install twine && python -m twine upload dist/*"
+      - name: Login to DockerHub
+        uses: docker/login-action@v1
+        with:
+          username: ${{secrets.docker_username}}
+          password: ${{secrets.docker_password}}
+      - name: Publish image
+        run: |
+          IMAGE=$(docker images --filter "reference=datajoint/datajoint*" --format "{{.Repository}}")
+          TAG=$(docker images --filter "reference=datajoint/datajoint*" --format "{{.Tag}}")
+          docker push "${IMAGE}:${TAG}"
+          docker tag "${IMAGE}:${TAG}" "${IMAGE}:${TAG}-${GITHUB_SHA:0:7}"
+          docker push "${IMAGE}:${TAG}-${GITHUB_SHA:0:7}"
+          [ "$PY_VER" == "3.9" ] && [ "$DISTRO" == "debian" ] \
+            && docker tag "${IMAGE}:${TAG}" "${IMAGE}:latest" \
+            && docker push "${IMAGE}:latest" \
+            || echo "skipping 'latest' tag..."
diff --git a/.gitignore b/.gitignore index 877c5f614..1c60cd8f7 100644 --- a/.gitignore +++ b/.gitignore @@ -24,5 +24,6 @@ notebook .vscode __main__.py jupyter_custom.js -apk_requirements.txt .eggs +*.code-workspace +docs/site diff --git a/CHANGELOG.md b/CHANGELOG.md index af2e47a3d..77a84589c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,32 @@ ## Release notes -### 0.13.5 -- TBA -* Update - Switch testing image from `pydev` to `djtest` PR #1012 - -### 0.13.4 -- March, 28 2022 +### 0.14.0 -- TBA +* Bugfix - Activating a schema requires all tables to exist even if `create_tables=False` PR [#1058](https://github.com/datajoint/datajoint-python/pull/1058) +* Update - Populate call with `reserve_jobs=True` to exclude `error` and `ignore` keys - PR [#1062](https://github.com/datajoint/datajoint-python/pull/1062) +* Add - Support for inserting data with CSV files - PR [#1067](https://github.com/datajoint/datajoint-python/pull/1067) +* Update - Switch testing image from `pydev` to `djtest` PR [#1012](https://github.com/datajoint/datajoint-python/pull/1012) + +### 0.13.8 -- Sep 21, 2022 +* Add - New documentation structure based on markdown PR [#1052](https://github.com/datajoint/datajoint-python/pull/1052) +* Bugfix - Fix queries with backslashes ([#999](https://github.com/datajoint/datajoint-python/issues/999)) PR [#1052](https://github.com/datajoint/datajoint-python/pull/1052) + +### 0.13.7 -- Jul 13, 2022 +* Bugfix - Fix networkx incompatable change by version pinning to 2.6.3 (#1035) PR #1036 +* Add - Support for serializing numpy datetime64 types (#1022) PR #1036 +* Update - Add traceback to default logging PR #1036 + +### 0.13.6 -- Jun 13, 2022 +* Add - Config option to set threshold for when to stop using checksums for filepath stores. PR #1025 +* Add - Unified package level logger for package (#667) PR #1031 +* Update - Swap various datajoint messages, warnings, etc. to use the new logger. (#667) PR #1031 +* Bugfix - Fix query caching deleting non-datajoint files PR #1027 +* Update - Minimum Python version for Datajoint-Python is now 3.7 PR #1027 + +### 0.13.5 -- May 19, 2022 +* Update - Import ABC from collections.abc for Python 3.10 compatibility +* Bugfix - Fix multiprocessing value error (#1013) PR #1026 + +### 0.13.4 -- Mar, 28 2022 * Add - Allow reading blobs produced by legacy 32-bit compiled mYm library for matlab. PR #995 * Bugfix - Add missing `jobs` argument for multiprocessing PR #997 * Add - Test for multiprocessing PR #1008 @@ -253,7 +276,7 @@ Documentation and tutorials available at https://docs.datajoint.io and https://t ### 0.3.4 * Added method the `ERD.add_parts` method, which adds the part tables of all tables currently in the ERD. -* `ERD() + arg` and `ERD() - arg` can now accept relation classes as arg. +* `ERD() + arg` and `ERD() - arg` can now accept table classes as arg. ### 0.3.3 * Suppressed warnings (redirected them to logging). Previoiusly, scipy would throw warnings in ERD, for example. @@ -263,5 +286,5 @@ Documentation and tutorials available at https://docs.datajoint.io and https://t ### 0.3.2. * Fixed issue #223: `insert` can insert relations without fetching. -* ERD() now takes the `context` argument, which specifies in which context to look for classes. The default is taken from the argument (schema or relation). +* ERD() now takes the `context` argument, which specifies in which context to look for classes. The default is taken from the argument (schema or table). 
* ERD.draw() no longer has the `prefix` argument: class names are shown as found in the context. diff --git a/Dockerfile b/Dockerfile index f23896e46..59da930a5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,9 @@ -FROM datajoint/djbase - -COPY --chown=anaconda:anaconda . /tmp/src -RUN pip install --no-cache-dir /tmp/src && \ - rm -rf /tmp/src +ARG IMAGE=djbase +ARG PY_VER=3.9 +ARG DISTRO=debian +FROM datajoint/${IMAGE}:py${PY_VER}-${DISTRO} +COPY --chown=anaconda:anaconda ./setup.py ./datajoint.pub ./requirements.txt /main/ +COPY --chown=anaconda:anaconda ./datajoint /main/datajoint +RUN \ + pip install --no-cache-dir /main && \ + rm -r /main/* diff --git a/LNX-docker-compose.yml b/LNX-docker-compose.yml index a8ee96c1b..bb8736f11 100644 --- a/LNX-docker-compose.yml +++ b/LNX-docker-compose.yml @@ -1,12 +1,12 @@ -# docker-compose -f LNX-docker-compose.yml --env-file LNX.env up --exit-code-from app --build -version: '2.2' +# docker compose -f LNX-docker-compose.yml --env-file LNX.env up --exit-code-from app --build +version: '2.4' x-net: &net networks: - main services: db: <<: *net - image: datajoint/mysql:$MYSQL_VER + image: datajoint/mysql:${MYSQL_VER} environment: - MYSQL_ROOT_PASSWORD=simple # ports: @@ -15,7 +15,7 @@ services: # - ./mysql/data:/var/lib/mysql minio: <<: *net - image: minio/minio:$MINIO_VER + image: minio/minio:${MINIO_VER} environment: - MINIO_ACCESS_KEY=datajoint - MINIO_SECRET_KEY=datajoint @@ -32,7 +32,7 @@ services: interval: 1s fakeservices.datajoint.io: <<: *net - image: datajoint/nginx:v0.2.0 + image: datajoint/nginx:v0.2.4 environment: - ADD_db_TYPE=DATABASE - ADD_db_ENDPOINT=db:3306 @@ -77,16 +77,14 @@ services: set -e pip install --user nose nose-cov pip install -e . - pip freeze | grep datajoint + pip list --format=freeze | grep datajoint nosetests -vsw tests --with-coverage --cover-package=datajoint - # jupyter notebook # ports: # - "8888:8888" user: ${HOST_UID}:anaconda volumes: - .:/src - /tmp/.X11-unix:/tmp/.X11-unix:rw - # - ./apk_requirements.txt:/tmp/apk_requirements.txt # - ./notebooks:/home/dja/notebooks networks: main: diff --git a/Makefile b/Makefile deleted file mode 100644 index 85e421b32..000000000 --- a/Makefile +++ /dev/null @@ -1,28 +0,0 @@ -all: - @echo 'MakeFile for DataJoint packaging ' - @echo ' ' - @echo 'make sdist Creates source distribution ' - @echo 'make wheel Creates Wheel distribution ' - @echo 'make pypi Package and upload to PyPI ' - @echo 'make pypitest Package and upload to PyPI test server' - @echo 'make clean Remove all build related directories ' - - -sdist: - python3 setup.py sdist >/dev/null 2>&1 - -wheel: - python3 setup.py bdist_wheel >/dev/null 2>&1 - -pypi:clean sdist wheel - twine upload dist/* - -pypitest: clean sdist wheel - twine upload -r pypitest dist/* - -clean: - rm -rf dist && rm -rf build && rm -rf *.egg-info - - - - diff --git a/README.md b/README.md index d752766dd..8934d5b53 100644 --- a/README.md +++ b/README.md @@ -112,15 +112,15 @@ important DataJoint schema or records. ### API docs -The API documentation can be built using sphinx by running +The API documentation can be built with mkdocs using the docker compose file in +`docs/` with the following command: ``` bash -pip install sphinx sphinx_rtd_theme -(cd docs-api/sphinx && make html) +MODE="LIVE" PACKAGE=datajoint UPSTREAM_REPO=https://github.com/datajoint/datajoint-python.git HOST_UID=$(id -u) docker compose -f docs/docker-compose.yaml up --build ``` -Generated docs are written to `docs-api/docs/html/index.html`. 
-More details in [docs-api/README.md](docs-api/README.md). +The site will then be available at `http://localhost/`. When finished, be sure to run +the same command as above, but replace `up --build` with `down`. ## Running Tests Locally
@@ -133,7 +133,6 @@ MYSQL_VER=5.7 DISTRO=alpine MINIO_VER=RELEASE.2022-01-03T18-22-58Z HOST_UID=1000 -GID=1000 ``` * `cp local-docker-compose.yml docker-compose.yml` * `docker-compose up -d` (Note configured `JUPYTER_PASSWORD`) @@ -141,11 +140,11 @@ GID=1000 * Add entry in `/etc/hosts` for `127.0.0.1 fakeservices.datajoint.io` * Run desired tests. Some examples are as follows: -| Use Case | Shell Code | -| ---------------------------- | ------------------------------------------------------------------------------ | -| Run all tests | `nosetests -vsw tests --with-coverage --cover-package=datajoint` | -| Run one specific class test | `nosetests -vs --tests=tests.test_fetch:TestFetch.test_getattribute_for_fetch1` | -| Run one specific basic test | `nosetests -vs --tests=tests.test_external_class:test_insert_and_fetch` | +| Use Case | Shell Code | +| ---------------------------- | ------------------------------------------------------------------------------ | +| Run all tests | `nosetests -vsw tests --with-coverage --cover-package=datajoint` | +| Run one specific class test | `nosetests -vs --tests=tests.test_fetch:TestFetch.test_getattribute_for_fetch1` | +| Run one specific basic test | `nosetests -vs --tests=tests.test_external_class:test_insert_and_fetch` | ### Launch Docker Terminal diff --git a/apk_requirements.txt b/apk_requirements.txt deleted file mode 100644 index ac08cdcf9..000000000 --- a/apk_requirements.txt +++ /dev/null @@ -1 +0,0 @@ -make \ No newline at end of file diff --git a/datajoint/__init__.py b/datajoint/__init__.py index 8cbe5be8a..9817d5c30 100644 --- a/datajoint/__init__.py +++ b/datajoint/__init__.py @@ -50,8 +50,11 @@ "DataJointError", "key", "key_hash", + "logger", + "migrate_dj011_external_blob_storage_to_dj012", ] +from .logging import logger from .version import __version__ from .settings import config from .connection import conn, Connection diff --git a/datajoint/autopopulate.py b/datajoint/autopopulate.py index 4a23b9a52..2b40b1e61 100644 --- a/datajoint/autopopulate.py +++ b/datajoint/autopopulate.py @@ -5,14 +5,16 @@ import random import inspect from tqdm import tqdm +from .hash import key_hash from .expression import QueryExpression, AndList from .errors import DataJointError, LostConnectionError import signal import multiprocessing as mp +import contextlib # noinspection PyExceptionInherit,PyCallingNonCallable -logger = logging.getLogger(__name__) +logger = logging.getLogger(__name__.split(".")[0]) # --- helper functions for multiprocessing -- @@ -42,8 +44,8 @@ def _call_populate1(key): class AutoPopulate: """ - AutoPopulate is a mixin class that adds the method populate() to a Relation class. - Auto-populated relations must inherit from both Relation and AutoPopulate, + AutoPopulate is a mixin class that adds the method populate() to a Table class. + Auto-populated tables must inherit from both Table and AutoPopulate, must define the property `key_source`, and must define the callback method `make`. 
""" @@ -116,7 +118,7 @@ def _job_key(self, key): def _jobs_to_do(self, restrictions): """ - :return: the relation containing the keys to be computed (derived from self.key_source) + :return: the query yeilding the keys to be computed (derived from self.key_source) """ if self.restriction: raise DataJointError( @@ -158,7 +160,7 @@ def populate( max_calls=None, display_progress=False, processes=1, - make_kwargs=None + make_kwargs=None, ): """ ``table.populate()`` calls ``table.make(key)`` for every primary key in @@ -173,8 +175,7 @@ def populate( :param limit: if not None, check at most this many keys :param max_calls: if not None, populate at most this many keys :param display_progress: if True, report progress_bar - :param processes: number of processes to use. When set to a large number, then - uses as many as CPU cores + :param processes: number of processes to use. Set to None to use all cores :param make_kwargs: Keyword arguments which do not affect the result of computation to be passed down to each ``make()`` call. Computation arguments should be specified within the pipeline e.g. using a `dj.Lookup` table. @@ -202,18 +203,29 @@ def handler(signum, frame): old_handler = signal.signal(signal.SIGTERM, handler) keys = (self._jobs_to_do(restrictions) - self.target).fetch("KEY", limit=limit) + + # exclude "error" or "ignore" jobs + if reserve_jobs: + exclude_key_hashes = ( + jobs + & {"table_name": self.target.table_name} + & 'status in ("error", "ignore")' + ).fetch("key_hash") + keys = [key for key in keys if key_hash(key) not in exclude_key_hashes] + if order == "reverse": keys.reverse() elif order == "random": random.shuffle(keys) - logger.info("Found %d keys to populate" % len(keys)) + logger.debug("Found %d keys to populate" % len(keys)) keys = keys[:max_calls] nkeys = len(keys) + if not nkeys: + return - if processes > 1: - processes = min(processes, nkeys, mp.cpu_count()) + processes = min(_ for _ in (processes, nkeys, mp.cpu_count()) if _) error_list = [] populate_kwargs = dict( @@ -235,17 +247,16 @@ def handler(signum, frame): del self.connection._conn.ctx # SSLContext is not pickleable with mp.Pool( processes, _initialize_populate, (self, jobs, populate_kwargs) - ) as pool: - if display_progress: - with tqdm(desc="Processes: ", total=nkeys) as pbar: - for error in pool.imap(_call_populate1, keys, chunksize=1): - if error is not None: - error_list.append(error) - pbar.update() - else: - for error in pool.imap(_call_populate1, keys): - if error is not None: - error_list.append(error) + ) as pool, ( + tqdm(desc="Processes: ", total=nkeys) + if display_progress + else contextlib.nullcontext() + ) as progress_bar: + for error in pool.imap(_call_populate1, keys, chunksize=1): + if error is not None: + error_list.append(error) + if display_progress: + progress_bar.update() self.connection.connect() # reconnect parent process to MySQL server # restore original signal handler: @@ -275,7 +286,7 @@ def _populate1( if jobs is not None: jobs.complete(self.target.table_name, self._job_key(key)) else: - logger.info("Populating: " + str(key)) + logger.debug(f"Making {key} -> {self.target.full_table_name}") self.__class__._allow_insert = True try: make(dict(key), **(make_kwargs or {})) @@ -288,6 +299,9 @@ def _populate1( exception=error.__class__.__name__, msg=": " + str(error) if str(error) else "", ) + logger.debug( + f"Error making {key} -> {self.target.full_table_name} - {error_message}" + ) if jobs is not None: # show error name and error message (if any) jobs.error( @@ -303,6 +317,9 @@ 
def _populate1( return key, error if return_exception_objects else error_message else: self.connection.commit_transaction() + logger.debug( + f"Success making {key} -> {self.target.full_table_name}" + ) if jobs is not None: jobs.complete(self.target.table_name, self._job_key(key)) finally: diff --git a/datajoint/blob.py b/datajoint/blob.py index df51e4136..9f4a148ca 100644 --- a/datajoint/blob.py +++ b/datajoint/blob.py @@ -14,32 +14,44 @@ from .settings import config -mxClassID = dict( - ( - # see http://www.mathworks.com/help/techdoc/apiref/mxclassid.html - ("mxUNKNOWN_CLASS", None), - ("mxCELL_CLASS", None), - ("mxSTRUCT_CLASS", None), - ("mxLOGICAL_CLASS", np.dtype("bool")), - ("mxCHAR_CLASS", np.dtype("c")), - ("mxVOID_CLASS", np.dtype("O")), - ("mxDOUBLE_CLASS", np.dtype("float64")), - ("mxSINGLE_CLASS", np.dtype("float32")), - ("mxINT8_CLASS", np.dtype("int8")), - ("mxUINT8_CLASS", np.dtype("uint8")), - ("mxINT16_CLASS", np.dtype("int16")), - ("mxUINT16_CLASS", np.dtype("uint16")), - ("mxINT32_CLASS", np.dtype("int32")), - ("mxUINT32_CLASS", np.dtype("uint32")), - ("mxINT64_CLASS", np.dtype("int64")), - ("mxUINT64_CLASS", np.dtype("uint64")), - ("mxFUNCTION_CLASS", None), - ) -) - -rev_class_id = {dtype: i for i, dtype in enumerate(mxClassID.values())} -dtype_list = list(mxClassID.values()) -type_names = list(mxClassID) +deserialize_lookup = { + 0: {"dtype": None, "scalar_type": "UNKNOWN"}, + 1: {"dtype": None, "scalar_type": "CELL"}, + 2: {"dtype": None, "scalar_type": "STRUCT"}, + 3: {"dtype": np.dtype("bool"), "scalar_type": "LOGICAL"}, + 4: {"dtype": np.dtype("c"), "scalar_type": "CHAR"}, + 5: {"dtype": np.dtype("O"), "scalar_type": "VOID"}, + 6: {"dtype": np.dtype("float64"), "scalar_type": "DOUBLE"}, + 7: {"dtype": np.dtype("float32"), "scalar_type": "SINGLE"}, + 8: {"dtype": np.dtype("int8"), "scalar_type": "INT8"}, + 9: {"dtype": np.dtype("uint8"), "scalar_type": "UINT8"}, + 10: {"dtype": np.dtype("int16"), "scalar_type": "INT16"}, + 11: {"dtype": np.dtype("uint16"), "scalar_type": "UINT16"}, + 12: {"dtype": np.dtype("int32"), "scalar_type": "INT32"}, + 13: {"dtype": np.dtype("uint32"), "scalar_type": "UINT32"}, + 14: {"dtype": np.dtype("int64"), "scalar_type": "INT64"}, + 15: {"dtype": np.dtype("uint64"), "scalar_type": "UINT64"}, + 16: {"dtype": None, "scalar_type": "FUNCTION"}, + 65_536: {"dtype": np.dtype("datetime64[Y]"), "scalar_type": "DATETIME64[Y]"}, + 65_537: {"dtype": np.dtype("datetime64[M]"), "scalar_type": "DATETIME64[M]"}, + 65_538: {"dtype": np.dtype("datetime64[W]"), "scalar_type": "DATETIME64[W]"}, + 65_539: {"dtype": np.dtype("datetime64[D]"), "scalar_type": "DATETIME64[D]"}, + 65_540: {"dtype": np.dtype("datetime64[h]"), "scalar_type": "DATETIME64[h]"}, + 65_541: {"dtype": np.dtype("datetime64[m]"), "scalar_type": "DATETIME64[m]"}, + 65_542: {"dtype": np.dtype("datetime64[s]"), "scalar_type": "DATETIME64[s]"}, + 65_543: {"dtype": np.dtype("datetime64[ms]"), "scalar_type": "DATETIME64[ms]"}, + 65_544: {"dtype": np.dtype("datetime64[us]"), "scalar_type": "DATETIME64[us]"}, + 65_545: {"dtype": np.dtype("datetime64[ns]"), "scalar_type": "DATETIME64[ns]"}, + 65_546: {"dtype": np.dtype("datetime64[ps]"), "scalar_type": "DATETIME64[ps]"}, + 65_547: {"dtype": np.dtype("datetime64[fs]"), "scalar_type": "DATETIME64[fs]"}, + 65_548: {"dtype": np.dtype("datetime64[as]"), "scalar_type": "DATETIME64[as]"}, +} +serialize_lookup = { + v["dtype"]: {"type_id": k, "scalar_type": v["scalar_type"]} + for k, v in deserialize_lookup.items() + if v["dtype"] is not None +} + 
compression = {b"ZL123\0": zlib.decompress} @@ -176,7 +188,7 @@ def pack_blob(self, obj): return self.pack_float(obj) if isinstance(obj, np.ndarray) and obj.dtype.fields: return self.pack_recarray(np.array(obj)) - if isinstance(obj, np.number): + if isinstance(obj, (np.number, np.datetime64)): return self.pack_array(np.array(obj)) if isinstance(obj, (bool, np.bool_)): return self.pack_array(np.array(obj)) @@ -188,17 +200,17 @@ def pack_blob(self, obj): return self.pack_decimal(obj) if isinstance(obj, uuid.UUID): return self.pack_uuid(obj) - if isinstance(obj, collections.Mapping): + if isinstance(obj, collections.abc.Mapping): return self.pack_dict(obj) if isinstance(obj, str): return self.pack_string(obj) - if isinstance(obj, collections.ByteString): + if isinstance(obj, collections.abc.ByteString): return self.pack_bytes(obj) - if isinstance(obj, collections.MutableSequence): + if isinstance(obj, collections.abc.MutableSequence): return self.pack_list(obj) - if isinstance(obj, collections.Sequence): + if isinstance(obj, collections.abc.Sequence): return self.pack_tuple(obj) - if isinstance(obj, collections.Set): + if isinstance(obj, collections.abc.Set): return self.pack_set(obj) if obj is None: return self.pack_none() @@ -211,14 +223,18 @@ def read_array(self): shape = self.read_value(count=n_dims) n_elem = np.prod(shape, dtype=int) dtype_id, is_complex = self.read_value("uint32", 2) - dtype = dtype_list[dtype_id] - if type_names[dtype_id] == "mxVOID_CLASS": + # Get dtype from type id + dtype = deserialize_lookup[dtype_id]["dtype"] + + # Check if name is void + if deserialize_lookup[dtype_id]["scalar_type"] == "VOID": data = np.array( list(self.read_blob(self.read_value()) for _ in range(n_elem)), dtype=np.dtype("O"), ) - elif type_names[dtype_id] == "mxCHAR_CLASS": + # Check if name is char + elif deserialize_lookup[dtype_id]["scalar_type"] == "CHAR": # compensate for MATLAB packing of char arrays data = self.read_value(dtype, count=2 * n_elem) data = data[::2].astype("U1") @@ -240,6 +256,8 @@ def pack_array(self, array): """ Serialize an np.ndarray into bytes. Scalars are encoded with ndim=0. 
""" + if "datetime64" in array.dtype.name: + self.set_dj0() blob = ( b"A" + np.uint64(array.ndim).tobytes() @@ -248,22 +266,26 @@ def pack_array(self, array): is_complex = np.iscomplexobj(array) if is_complex: array, imaginary = np.real(array), np.imag(array) - type_id = ( - rev_class_id[array.dtype] - if array.dtype.char != "U" - else rev_class_id[np.dtype("O")] - ) - if dtype_list[type_id] is None: - raise DataJointError("Type %s is ambiguous or unknown" % array.dtype) + try: + type_id = serialize_lookup[array.dtype]["type_id"] + except KeyError: + # U is for unicode string + if array.dtype.char == "U": + type_id = serialize_lookup[np.dtype("O")]["type_id"] + else: + raise DataJointError(f"Type {array.dtype} is ambiguous or unknown") blob += np.array([type_id, is_complex], dtype=np.uint32).tobytes() - if type_names[type_id] == "mxVOID_CLASS": # array of dtype('O') + if ( + array.dtype.char == "U" + or serialize_lookup[array.dtype]["scalar_type"] == "VOID" + ): blob += b"".join( len_u64(it) + it for it in (self.pack_blob(e) for e in array.flatten(order="F")) ) self.set_dj0() # not supported by original mym - elif type_names[type_id] == "mxCHAR_CLASS": # array of dtype('c') + elif serialize_lookup[array.dtype]["scalar_type"] == "CHAR": blob += ( array.view(np.uint8).astype(np.uint16).tobytes() ) # convert to 16-bit chars for MATLAB diff --git a/datajoint/condition.py b/datajoint/condition.py index 397f68b53..034698925 100644 --- a/datajoint/condition.py +++ b/datajoint/condition.py @@ -109,7 +109,7 @@ def prep_value(k, v): ): return '"%s"' % v if isinstance(v, str): - return '"%s"' % v.replace("%", "%%") + return '"%s"' % v.replace("%", "%%").replace("\\", "\\\\") return "%r" % v negate = False diff --git a/datajoint/connection.py b/datajoint/connection.py index 2a93cbb17..565015bfd 100644 --- a/datajoint/connection.py +++ b/datajoint/connection.py @@ -17,10 +17,13 @@ from .hash import uuid_from_buffer from .plugin import connection_plugins -logger = logging.getLogger(__name__) +logger = logging.getLogger(__name__.split(".")[0]) query_log_max_length = 300 +cache_key = "query_cache" # the key to lookup the query_cache folder in dj.config + + def get_host_hook(host_input): if "://" in host_input: plugin_name = host_input.split("://")[0] @@ -184,7 +187,7 @@ def __init__(self, host, user, password, port=None, init_fun=None, use_tls=None) self.conn_info["ssl_input"] = use_tls self.conn_info["host_input"] = host_input self.init_fun = init_fun - print("Connecting {user}@{host}:{port}".format(**self.conn_info)) + logger.info("Connecting {user}@{host}:{port}".format(**self.conn_info)) self._conn = None self._query_cache = None connect_host_hook(self) @@ -220,7 +223,7 @@ def connect(self): k: v for k, v in self.conn_info.items() if k not in ["ssl_input", "host_input"] - } + }, ) except client.err.InternalError: self._conn = client.connect( @@ -236,7 +239,7 @@ def connect(self): or k == "ssl" and self.conn_info["ssl_input"] is None ) - } + }, ) self._conn.autocommit(True) @@ -254,13 +257,12 @@ def set_query_cache(self, query_cache=None): def purge_query_cache(self): """Purges all query cache.""" if ( - "query_cache" in config - and isinstance(config["query_cache"], str) - and pathlib.Path(config["query_cache"]).is_dir() + isinstance(config.get(cache_key), str) + and pathlib.Path(config[cache_key]).is_dir() ): - path_iter = pathlib.Path(config["query_cache"]).glob("**/*") - for path in path_iter: - path.unlink() + for path in pathlib.Path(config[cache_key]).iterdir(): + if not path.is_dir(): + 
path.unlink() def close(self): self._conn.close() @@ -313,15 +315,15 @@ def query( "Only SELECT queries are allowed when query caching is on." ) if use_query_cache: - if not config["query_cache"]: + if not config[cache_key]: raise errors.DataJointError( - "Provide filepath dj.config['query_cache'] when using query caching." + f"Provide filepath dj.config['{cache_key}'] when using query caching." ) hash_ = uuid_from_buffer( (str(self._query_cache) + re.sub(r"`\$\w+`", "", query)).encode() + pack(args) ) - cache_path = pathlib.Path(config["query_cache"]) / str(hash_) + cache_path = pathlib.Path(config[cache_key]) / str(hash_) try: buffer = cache_path.read_bytes() except FileNotFoundError: @@ -339,7 +341,7 @@ def query( except errors.LostConnectionError: if not reconnect: raise - warnings.warn("MySQL server has gone away. Reconnecting to the server.") + logger.warning("MySQL server has gone away. Reconnecting to the server.") connect_host_hook(self) if self._in_transaction: self.cancel_transaction() @@ -380,7 +382,7 @@ def start_transaction(self): raise errors.DataJointError("Nested connections are not supported.") self.query("START TRANSACTION WITH CONSISTENT SNAPSHOT") self._in_transaction = True - logger.info("Transaction started") + logger.debug("Transaction started") def cancel_transaction(self): """ @@ -388,7 +390,7 @@ def cancel_transaction(self): """ self.query("ROLLBACK") self._in_transaction = False - logger.info("Transaction cancelled. Rolling back ...") + logger.debug("Transaction cancelled. Rolling back ...") def commit_transaction(self): """ @@ -397,7 +399,7 @@ def commit_transaction(self): """ self.query("COMMIT") self._in_transaction = False - logger.info("Transaction committed and closed.") + logger.debug("Transaction committed and closed.") # -------- context manager for transactions @property diff --git a/datajoint/declare.py b/datajoint/declare.py index 0916f7bf3..74673a928 100644 --- a/datajoint/declare.py +++ b/datajoint/declare.py @@ -5,7 +5,6 @@ import re import pyparsing as pp import logging -import warnings from .errors import DataJointError, _support_filepath_types, FILEPATH_FEATURE_SWITCH from .attribute_adapter import get_adapter @@ -75,7 +74,7 @@ def match_type(attribute_type): ) -logger = logging.getLogger(__name__) +logger = logging.getLogger(__name__.split(".")[0]) def build_foreign_key_parser_old(): @@ -207,7 +206,7 @@ def compile_foreign_key( ) if obsolete: - warnings.warn( + logger.warning( 'Line "{line}" uses obsolete syntax that will no longer be supported in datajoint 0.14. 
' "For details, see issue #780 https://github.com/datajoint/datajoint-python/issues/780".format( line=line diff --git a/datajoint/diagram.py b/datajoint/diagram.py index 9fe9c95f9..9ae9b9e90 100644 --- a/datajoint/diagram.py +++ b/datajoint/diagram.py @@ -2,10 +2,14 @@ import re import functools import io -import warnings +import logging import inspect from .table import Table from .dependencies import unite_master_parts +from .user_tables import Manual, Imported, Computed, Lookup, Part +from .errors import DataJointError +from .table import lookup_class_name + try: from matplotlib import pyplot as plt @@ -21,11 +25,8 @@ except: diagram_active = False -from .user_tables import Manual, Imported, Computed, Lookup, Part -from .errors import DataJointError -from .table import lookup_class_name - +logger = logging.getLogger(__name__.split(".")[0]) user_table_classes = (Manual, Lookup, Computed, Imported, Part) @@ -63,7 +64,7 @@ class Diagram: """ def __init__(self, *args, **kwargs): - warnings.warn( + logger.warning( "Please install matplotlib and pygraphviz libraries to enable the Diagram feature." ) @@ -77,7 +78,7 @@ class Diagram(nx.DiGraph): >>> diag = Diagram(source) - source can be a base relation object, a base relation class, a schema, or a module that has a schema. + source can be a base table object, a base table class, a schema, or a module that has a schema. >>> diag.draw() @@ -93,7 +94,6 @@ class Diagram(nx.DiGraph): """ def __init__(self, source, context=None): - if isinstance(source, Diagram): # copy constructor self.nodes_to_show = set(source.nodes_to_show) @@ -301,7 +301,6 @@ def _make_graph(self): return graph def make_dot(self): - graph = self._make_graph() graph.nodes() diff --git a/datajoint/expression.py b/datajoint/expression.py index c3c385b22..ab2d27eec 100644 --- a/datajoint/expression.py +++ b/datajoint/expression.py @@ -17,7 +17,7 @@ ) from .declare import CONSTANT_LITERALS -logger = logging.getLogger(__name__) +logger = logging.getLogger(__name__.split(".")[0]) class QueryExpression: diff --git a/datajoint/external.py b/datajoint/external.py index c04cc40c4..265152cd4 100644 --- a/datajoint/external.py +++ b/datajoint/external.py @@ -1,6 +1,7 @@ from pathlib import Path, PurePosixPath, PureWindowsPath from collections.abc import Mapping from tqdm import tqdm +import logging from .settings import config from .errors import DataJointError, MissingExternalFile from .hash import uuid_from_buffer, uuid_from_file @@ -10,6 +11,8 @@ from . import s3 from .utils import safe_write, safe_copy +logger = logging.getLogger(__name__.split(".")[0]) + CACHE_SUBFOLDING = ( 2, 2, @@ -72,9 +75,7 @@ def definition(self): @property def table_name(self): - return "{external_table_root}_{store}".format( - external_table_root=EXTERNAL_TABLE_ROOT, store=self.store - ) + return f"{EXTERNAL_TABLE_ROOT}_{self.store}" @property def s3(self): @@ -276,9 +277,7 @@ def upload_filepath(self, local_filepath): # the tracking entry exists, check that it's the same file as before if contents_hash != check_hash[0]: raise DataJointError( - "A different version of '{file}' has already been placed.".format( - file=relative_filepath - ) + f"A different version of '{relative_filepath}' has already been placed." 
) else: # upload the file and create its tracking entry @@ -304,27 +303,43 @@ def download_filepath(self, filepath_hash): :param filepath_hash: The hash (UUID) of the relative_path :return: hash (UUID) of the contents of the downloaded file or Nones """ + + def _need_checksum(local_filepath, expected_size): + limit = config.get("filepath_checksum_size_limit") + actual_size = Path(local_filepath).stat().st_size + if expected_size != actual_size: + # this should never happen without outside interference + raise DataJointError( + f"'{local_filepath}' downloaded but size did not match." + ) + return limit is None or actual_size < limit + if filepath_hash is not None: - relative_filepath, contents_hash = (self & {"hash": filepath_hash}).fetch1( - "filepath", "contents_hash" - ) + relative_filepath, contents_hash, size = ( + self & {"hash": filepath_hash} + ).fetch1("filepath", "contents_hash", "size") external_path = self._make_external_filepath(relative_filepath) local_filepath = Path(self.spec["stage"]).absolute() / relative_filepath - file_exists = ( - Path(local_filepath).is_file() - and uuid_from_file(local_filepath) == contents_hash + + file_exists = Path(local_filepath).is_file() and ( + not _need_checksum(local_filepath, size) + or uuid_from_file(local_filepath) == contents_hash ) + if not file_exists: self._download_file(external_path, local_filepath) - checksum = uuid_from_file(local_filepath) if ( - checksum != contents_hash - ): # this should never happen without outside interference + _need_checksum(local_filepath, size) + and uuid_from_file(local_filepath) != contents_hash + ): + # this should never happen without outside interference raise DataJointError( - "'{file}' downloaded but did not pass checksum'".format( - file=local_filepath - ) + f"'{local_filepath}' downloaded but did not pass checksum." ) + if not _need_checksum(local_filepath, size): + logger.warning( + f"Skipped checksum for file with hash: {contents_hash}, and path: {local_filepath}" + ) return str(local_filepath), contents_hash # --- UTILITIES --- @@ -402,7 +417,7 @@ def delete( delete_external_files=None, limit=None, display_progress=True, - errors_as_string=True + errors_as_string=True, ): """ diff --git a/datajoint/fetch.py b/datajoint/fetch.py index 936624400..cb5940e06 100644 --- a/datajoint/fetch.py +++ b/datajoint/fetch.py @@ -1,6 +1,6 @@ from functools import partial from pathlib import Path -import warnings +import logging import pandas import itertools import re @@ -12,6 +12,8 @@ from .settings import config from .utils import safe_write +logger = logging.getLogger(__name__.split(".")[0]) + class key: """ @@ -156,7 +158,7 @@ def __call__( unpacks blob attributes. :param attrs: zero or more attributes to fetch. If not provided, the call will return all attributes of this - relation. If provided, returns tuples with an entry for each attribute. + table. If provided, returns tuples with an entry for each attribute. :param offset: the number of tuples to skip in the returned result :param limit: the maximum number of tuples to return :param order_by: a single attribute or the list of attributes to order the results. No ordering should be assumed @@ -168,7 +170,7 @@ def __call__( True for .fetch('KEY') :param squeeze: if True, remove extra dimensions from arrays :param download_path: for fetches that download data, e.g. 
attachments - :return: the contents of the relation in the form of a structured numpy.array or a dict list + :return: the contents of the table in the form of a structured numpy.array or a dict list """ if order_by is not None: # if 'order_by' passed in a string, make into list @@ -209,7 +211,7 @@ def __call__( ) if limit is None and offset is not None: - warnings.warn( + logger.warning( "Offset set, but no limit. Setting limit to a large number. " "Consider setting a limit explicitly." ) @@ -315,7 +317,7 @@ def __call__(self, *attrs, squeeze=False, download_path="."): If attrs is empty, the return result is a dict :param squeeze: When true, remove extra dimensions from arrays in attributes :param download_path: for fetches that download data, e.g. attachments - :return: the one tuple in the relation in the form of a dict + :return: the one tuple in the table in the form of a dict """ heading = self._expression.heading diff --git a/datajoint/heading.py b/datajoint/heading.py index 027ad77f7..db3b9a3cb 100644 --- a/datajoint/heading.py +++ b/datajoint/heading.py @@ -14,7 +14,7 @@ from .attribute_adapter import get_adapter, AttributeAdapter -logger = logging.getLogger(__name__) +logger = logging.getLogger(__name__.split(".")[0]) default_attribute_properties = ( dict( # these default values are set in computed attributes @@ -84,7 +84,7 @@ def original_name(self): class Heading: """ - Local class for relations' headings. + Local class for table headings. Heading contains the property attributes, which is an dict in which the keys are the attribute names and the values are Attributes. """ @@ -273,7 +273,6 @@ def _init_from_database(self): # additional attribute properties for attr in attributes: - attr.update( in_key=(attr["in_key"] == "PRI"), database=database, diff --git a/datajoint/jobs.py b/datajoint/jobs.py index 8aedded42..cd7f50224 100644 --- a/datajoint/jobs.py +++ b/datajoint/jobs.py @@ -12,7 +12,7 @@ class JobTable(Table): """ - A base relation with no definition. Allows reserving jobs + A base table with no definition. 
Allows reserving jobs """ def __init__(self, conn, database): diff --git a/datajoint/logging.py b/datajoint/logging.py new file mode 100644 index 000000000..b432e1a4b --- /dev/null +++ b/datajoint/logging.py @@ -0,0 +1,26 @@ +import logging +import os +import sys + +logger = logging.getLogger(__name__.split(".")[0]) + +log_level = os.getenv("DJ_LOG_LEVEL", "info").upper() + +log_format = logging.Formatter("[%(asctime)s][%(levelname)s]: %(message)s") + +stream_handler = logging.StreamHandler() # default handler +stream_handler.setFormatter(log_format) + +logger.setLevel(level=log_level) +logger.handlers = [stream_handler] + + +def excepthook(exc_type, exc_value, exc_traceback): + if issubclass(exc_type, KeyboardInterrupt): + sys.__excepthook__(exc_type, exc_value, exc_traceback) + return + + logger.error("Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback)) + + +sys.excepthook = excepthook diff --git a/datajoint/plugin.py b/datajoint/plugin.py index 96f388089..48dce6561 100644 --- a/datajoint/plugin.py +++ b/datajoint/plugin.py @@ -3,6 +3,9 @@ from pathlib import Path from cryptography.exceptions import InvalidSignature from otumat import hash_pkg, verify +import logging + +logger = logging.getLogger(__name__.split(".")[0]) def _update_error_stack(plugin_name): @@ -12,13 +15,13 @@ def _update_error_stack(plugin_name): plugin_meta = pkg_resources.get_distribution(plugin_name) data = hash_pkg(pkgpath=str(Path(plugin_meta.module_path, plugin_name))) - signature = plugin_meta.get_metadata("{}.sig".format(plugin_name)) - pubkey_path = str(Path(base_meta.egg_info, "{}.pub".format(base_name))) + signature = plugin_meta.get_metadata(f"{plugin_name}.sig") + pubkey_path = str(Path(base_meta.egg_info, f"{base_name}.pub")) verify(pubkey_path=pubkey_path, data=data, signature=signature) - print("DataJoint verified plugin `{}` detected.".format(plugin_name)) + logger.info(f"DataJoint verified plugin `{plugin_name}` detected.") return True except (FileNotFoundError, InvalidSignature): - print("Unverified plugin `{}` detected.".format(plugin_name)) + logger.warning(f"Unverified plugin `{plugin_name}` detected.") return False diff --git a/datajoint/preview.py b/datajoint/preview.py index f761cf533..5188cc81f 100644 --- a/datajoint/preview.py +++ b/datajoint/preview.py @@ -56,20 +56,20 @@ def repr_html(query_expression): css = """