diff --git a/.bumpversion.cfg b/.bumpversion.cfg new file mode 100644 index 00000000..6495ee22 --- /dev/null +++ b/.bumpversion.cfg @@ -0,0 +1,9 @@ +[bumpversion] +current_version = 0.0.1 +commit = True +tag = True + +[bumpversion:file:./pyproject.toml] +[bumpversion:file:./README.md] +[bumpversion:file:gplugins/__init__.py] + diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..0aada4dc --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" # Location of package manifests + schedule: + interval: "daily" + + - package-ecosystem: github-actions + directory: / + schedule: + interval: monthly diff --git a/.github/workflows/pages.yml b/.github/workflows/pages.yml new file mode 100644 index 00000000..791f15ab --- /dev/null +++ b/.github/workflows/pages.yml @@ -0,0 +1,46 @@ +name: Sphinx docs to gh-pages + +on: + push: + branches: + - main + workflow_dispatch: + +jobs: + build-docs: + runs-on: ubuntu-latest + name: Sphinx docs to gh-pages + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Installing the library + shell: bash -l {0} + run: | + make dev + - name: make docs + run: | + make docs + - name: Upload artifact + uses: actions/upload-pages-artifact@v2 + with: + path: "./docs/_build/html/" + deploy-docs: + needs: build-docs + permissions: + pages: write + id-token: write + + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + + runs-on: ubuntu-latest + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v2 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 00000000..a0ae80fb --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,29 @@ +name: Release PyPI, installers and docker container + 
+on: + push: + tags: "v*" + +jobs: + release_pypi: + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: 3.x + cache-dependency-path: pyproject.toml + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install setuptools wheel twine + - name: Build and publish + env: + TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} + TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} + run: | + make build + twine upload dist/* + diff --git a/.github/workflows/test_code.yml b/.github/workflows/test_code.yml new file mode 100644 index 00000000..02ed7c35 --- /dev/null +++ b/.github/workflows/test_code.yml @@ -0,0 +1,86 @@ +name: Test pre-commit, code and docs + +on: + pull_request: + push: + branches: + - main + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Test pre-commit hooks + run: | + python -m pip install --upgrade pip + pip install pre-commit + pre-commit run -a + test_code: + runs-on: ${{ matrix.os }} + strategy: + max-parallel: 12 + matrix: + python-version: ["3.10"] + os: [ubuntu-latest, windows-latest, macos-latest] + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Install dependencies + run: | + make dev + - name: Test with pytest + run: pytest + test_code_coverage: + runs-on: ubuntu-latest + needs: [pre-commit] + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + - name: Install dependencies + run: | + make dev + - name: Test with 
pytest + run: | + pytest --cov + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: false + test_docs: + runs-on: ${{ matrix.os }} + strategy: + max-parallel: 12 + matrix: + python-version: ['3.10'] + os: [ubuntu-latest] + + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Install dependencies + run: | + make dev + - name: Test documentation + run: | + make docs diff --git a/.github/workflows/test_code_conda.yml b/.github/workflows/test_code_conda.yml new file mode 100644 index 00000000..cf350426 --- /dev/null +++ b/.github/workflows/test_code_conda.yml @@ -0,0 +1,92 @@ +name: Test pre-commit, code and docs + +on: + pull_request: + push: + branches: + - main + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: "3.10" + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Test pre-commit hooks + run: | + python -m pip install --upgrade pip + pip install pre-commit + pre-commit run -a + test_code: + needs: [pre-commit] + runs-on: ${{ matrix.os }} + strategy: + max-parallel: 12 + matrix: + python-version: ["3.10"] + os: [ubuntu-latest, windows-latest, macos-latest] + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + cache-dependency-path: pyproject.toml + - name: Install dependencies + run: | + make dev + - name: Test with pytest + run: pytest + test_code_coverage: + runs-on: ubuntu-latest + needs: [pre-commit] + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + - name: Install dependencies + run: | + 
make dev + - name: Test with pytest + run: | + pytest --cov=gplugins + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + fail_ci_if_error: false + test_docs: + needs: [pre-commit] + runs-on: ${{ matrix.os }} + strategy: + max-parallel: 12 + matrix: + python-version: ['3.10'] + os: [ubuntu-latest] + + steps: + - uses: actions/checkout@v3 + - uses: conda-incubator/setup-miniconda@v2 + with: + python-version: '3.10' + mamba-version: "*" + channels: conda-forge,defaults + channel-priority: true + activate-environment: anaconda-client-env + - name: Add conda to system path + run: | + echo $CONDA/bin >> $GITHUB_PATH + - name: Install dependencies + run: | + make dev + - name: Test documentation + run: | + make docs diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..3a314478 --- /dev/null +++ b/.gitignore @@ -0,0 +1,146 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so +*.fsp + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +extra/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +*.ipynb +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints +**/*.ipynb_checkpoints/ + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +*.DS_Store +.DS_Store +*Thumbs.db diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 00000000..e72bd2a5 --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,71 @@ +image: python:3.7 + +before_script: + - pip list + - python -V # Print out python version for debugging + - which python # Print out which python for debugging + - pip install -r requirements.txt --upgrade + - python setup.py develop + +stages: + - test + - release + +pre-commit: + stage: test + script: + - pip install pre-commit + - pre-commit install + - pre-commit run -a + +test: + stage: test + script: + - pytest + +python37: + image: python:3.7 + stage: test + script: + - python -m pip install tox + - python -m tox -e py37 + +python38: + image: python:3.8 + stage: test + script: + - python -m pip install tox + - python -m tox -e py38 + +python39: + image: python:3.9 + stage: test + script: + - python -m pip install tox + - python -m tox -e py39 + +doctest: + stage: test + script: + - python -m pip install tox + - python -m tox -e docs + +flake8: + stage: test + script: + - python -m pip install tox + - python -m tox -e flake8 + +mypy: + stage: test + script: + - python -m pip install tox + - python -m tox -e mypy + +docs: + stage: release + script: + - cd docs + - make install clean html upload + only: + - release diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..1b1745dc --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,135 @@ 
+repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: "9260cbc9c84c06022993bfbcc42fdbf0305c5b8e" + hooks: + - id: check-added-large-files + - id: check-case-conflict + - id: check-merge-conflict + - id: check-symlinks + - id: check-yaml + - id: debug-statements + - id: end-of-file-fixer + - id: mixed-line-ending + - id: name-tests-test + args: ["--pytest-test-first"] + - id: trailing-whitespace + + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: "6a0ba1854991b693612486cc84a2254de82d071d" + hooks: + - id: ruff + + - repo: https://github.com/psf/black + rev: "d9b8a6407e2f46304a8d36b18e4a73d8e0613519" + hooks: + - id: black + + - repo: https://github.com/asottile/pyupgrade + rev: ddb39ad37166dbc938d853cc77606526a0b1622a + hooks: + - id: pyupgrade + args: [--py37-plus, --keep-runtime-typing] + + - repo: https://github.com/shellcheck-py/shellcheck-py + rev: 953faa6870f6663ac0121ab4a800f1ce76bca31f + hooks: + - id: shellcheck + + - repo: https://github.com/PyCQA/bandit + rev: fe1361fdcc274850d4099885a802f2c9f28aca08 + hooks: + - id: bandit + args: [--exit-zero] + # ignore all tests, not just tests data + exclude: ^tests/ + + - repo: https://github.com/pre-commit/mirrors-mypy + rev: "v1.0.1" + hooks: + - id: mypy + exclude: ^(docs/|example-plugin/|tests/fixtures) + additional_dependencies: + - "pydantic" + + # - repo: https://github.com/terrencepreilly/darglint + # rev: master + # hooks: + # - id: darglint + # - repo: https://github.com/pycqa/pydocstyle + # rev: "a6fe4222012e990042c86cdaaa904a8d059752ee" + # hooks: + # - id: pydocstyle + # additional_dependencies: ["pydocstyle[toml]"] + # - repo: https://github.com/asottile/reorder_python_imports + # rev: 2b2f0c74acdb3de316e23ceb7dd0d7945c354050 + # hooks: + # - id: reorder-python-imports + # - repo: https://github.com/PyCQA/pylint + # rev: v2.14.1 + # hooks: + # - id: pylint + # args: [--exit-zero] + # - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks + # rev: 
6565d773ca281682d7062d4c0be74538cc474cc9 + # hooks: + # - id: pretty-format-java + # args: [--autofix] + # - id: pretty-format-kotlin + # args: [--autofix] + # - id: pretty-format-yaml + # args: [--autofix, --indent, "2"] + # - repo: https://github.com/adrienverge/yamllint.git + # rev: v1.21.0 # or higher tag + # hooks: + # - id: yamllint + # args: [--format, parsable, --strict] + # - repo: https://github.com/jumanjihouse/pre-commit-hook-yamlfmt + # rev: 0.1.0 # or specific tag + # hooks: + # - id: yamlfmt + # - repo: https://github.com/pre-commit/pygrep-hooks + # rev: "f6fb13e9516d1a9720a30a4049c930235abd642e" + # hooks: + # - id: python-check-blanket-noqa + # - id: python-check-blanket-type-ignore + # - id: python-no-log-warn + # - id: python-no-eval + # - id: python-use-type-annotations + # - id: rst-backticks + # - id: rst-directive-colons + # - id: rst-inline-touching-normal + # - repo: https://github.com/nbQA-dev/nbQA + # rev: 952c895a21aced4601165c43f77188559a5e825c + # hooks: + # - id: nbqa-black + # additional_dependencies: [jupytext, black] # optional, only if you're using Jupytext + # - id: nbqa-pyupgrade + # args: ["--py37-plus"] + # - id: nbqa-flake8 + # - id: nbqa-isort + # args: ["--float-to-top"] + # - repo: https://github.com/hakancelik96/unimport + # rev: 0.14.1 + # hooks: + # - id: unimport + # args: [--remove, --include-star-import] + # - repo: https://github.com/pycqa/isort + # rev: "3a72e069635a865a92b8a0273aa829f630cbcd6f" + # hooks: + # - id: isort + # files: "demo_project/.*" + # args: ["--profile", "black", "--filter-files"] + + # - repo: https://github.com/kynan/nbstripout + # rev: e4c5b4dcbab4afa0b88b7a9243db42b1a8d95dde + # hooks: + # - id: nbstripout + # files: ".ipynb" + # - repo: https://github.com/codespell-project/codespell + # rev: 68ea3cff5eba266e0814794cce6cd19537f58137 + # hooks: + # - id: codespell + # - repo: https://github.com/pre-commit/pygrep-hooks + # rev: 7b4409161486c6956bb3206ce96db5d56731b1b9 # Use the ref you want to 
point at + # hooks: + # - id: python-use-type-annotations diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..2a044f0e --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2023 gdsfactory + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..503399bb --- /dev/null +++ b/Makefile @@ -0,0 +1,57 @@ + +install: + pip install -e .[dev] + pre-commit install + +dev: + pip install -e .[dev,docs] + + +test: + pytest -s + +cov: + pytest --cov=gplugins + +mypy: + mypy . 
--ignore-missing-imports + +pylint: + pylint gplugins + +ruff: + ruff --fix gplugins/*.py + +git-rm-merged: + git branch -D `git branch --merged | grep -v \* | xargs` + +update: + pur + +update-pre: + pre-commit autoupdate --bleeding-edge + +# duplicate git-rm-merged target removed (already defined above) + # this avoids GNU make's 'overriding recipe' warning + +release: + git push + git push origin --tags + +build: + rm -rf dist + pip install build + python -m build + +jupytext: + jupytext docs/**/*.ipynb --to py + +notebooks: + jupytext docs/**/*.py --to ipynb + + +docs: + jb build docs + +.PHONY: drc doc docs + diff --git a/README.md b/README.md new file mode 100644 index 00000000..a98867fa --- /dev/null +++ b/README.md @@ -0,0 +1,10 @@ +# gplugins 0.0.1 + +gdsfactory plugins + +## Usage + +## Installation + +`make install` + diff --git a/docs/_config.yml b/docs/_config.yml new file mode 100755 index 00000000..0873d32f --- /dev/null +++ b/docs/_config.yml @@ -0,0 +1,58 @@ +# Book settings +# Learn more at https://jupyterbook.org/customize/config.html + +title: gplugins +author: gdsfactory + +# Force re-execution of notebooks on each build. 
+# See https://jupyterbook.org/content/execute.html +execute: + execute_notebooks: cache + timeout: -1 + allow_errors: true + # execute_notebooks: force + # execute_notebooks: "off" + # exclude_patterns: + # - '*notebooks/devsim/01_pin_waveguide*' + +latex: + latex_engine: pdflatex # one of 'pdflatex', 'xelatex' (recommended for unicode), 'luatex', 'platex', 'uplatex' + use_jupyterbook_latex: true # use sphinx-jupyterbook-latex for pdf builds as default + +# Add a bibtex file so that we can create citations + +html: + home_page_in_navbar: true + use_edit_page_button: true + use_repository_button: true + use_issues_button: true + baseurl: https://github.com/gdsfactory/gplugins + +# Information about where the book exists on the web +repository: + url: https://github.com/gdsfactory/gplugins + path_to_book: docs # Optional path to your book, relative to the repository root + branch: main # Which branch of the repository should be used when creating links (optional) + +launch_buttons: + notebook_interface: jupyterlab + colab_url: "https://colab.research.google.com" + +sphinx: + extra_extensions: + - "sphinx.ext.autodoc" + - "sphinx.ext.autodoc.typehints" + - "sphinx.ext.autosummary" + - "sphinx.ext.napoleon" + - "sphinx.ext.viewcode" + - "matplotlib.sphinxext.plot_directive" + - "sphinxcontrib.autodoc_pydantic" + config: + #autodoc_typehints: description + autodoc_type_aliases: + "ComponentSpec": "ComponentSpec" + nb_execution_show_tb: True + nb_custom_formats: + .py: + - jupytext.reads + - fmt: py diff --git a/docs/_toc.yml b/docs/_toc.yml new file mode 100644 index 00000000..24f7c624 --- /dev/null +++ b/docs/_toc.yml @@ -0,0 +1,8 @@ +# Table of contents +# Learn more at https://jupyterbook.org/customize/toc.html + +format: jb-book +root: index +chapters: + - file: api + - file: changelog diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 00000000..bc1964e5 --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,17 @@ +gplugins 
+=================================== + +Config +--------------------- + +.. automodule:: gplugins.config + +Config +--------------------- + +.. rubric:: config + +.. autosummary:: + :toctree: _autosummary/ + + gplugins.config diff --git a/docs/changelog.md b/docs/changelog.md new file mode 100644 index 00000000..ba6026f6 --- /dev/null +++ b/docs/changelog.md @@ -0,0 +1 @@ +# [CHANGELOG](https://keepachangelog.com/en/1.0.0/) diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 00000000..451bedae --- /dev/null +++ b/docs/index.md @@ -0,0 +1,2 @@ +```{include} ../README.md +``` diff --git a/docs/tutorial.md b/docs/tutorial.md new file mode 100644 index 00000000..ef1f1f2a --- /dev/null +++ b/docs/tutorial.md @@ -0,0 +1,6 @@ + +```{eval-rst} +.. automodule:: gplugins + :members: + +``` diff --git a/gplugins/__init__.py b/gplugins/__init__.py new file mode 100644 index 00000000..0c71cff3 --- /dev/null +++ b/gplugins/__init__.py @@ -0,0 +1,3 @@ +"""gplugins - gdsfactory plugins""" + +__version__ = '0.0.1' diff --git a/gplugins/config.py b/gplugins/config.py new file mode 100644 index 00000000..8d79c3b0 --- /dev/null +++ b/gplugins/config.py @@ -0,0 +1,27 @@ +"""Store configuration.""" + +__all__ = ["PATH"] + +import pathlib + +home = pathlib.Path.home() +cwd = pathlib.Path.cwd() +cwd_config = cwd / "config.yml" + +home_config = home / ".config" / "gplugins.yml" +config_dir = home / ".config" +config_dir.mkdir(exist_ok=True) +module_path = pathlib.Path(__file__).parent.absolute() +repo_path = module_path.parent + + +class Path: + module = module_path + repo = repo_path + + +PATH = Path() + +if __name__ == "__main__": + print(PATH) + diff --git a/gplugins/dagster/Makefile b/gplugins/dagster/Makefile new file mode 100644 index 00000000..cca491f3 --- /dev/null +++ b/gplugins/dagster/Makefile @@ -0,0 +1,2 @@ +run: + dagit -f workflow.py diff --git a/gplugins/dagster/__init__.py b/gplugins/dagster/__init__.py new file mode 100644 index 00000000..e69de29b diff 
--git a/gplugins/dagster/workflow.py b/gplugins/dagster/workflow.py new file mode 100644 index 00000000..e6df3eff --- /dev/null +++ b/gplugins/dagster/workflow.py @@ -0,0 +1,58 @@ +from typing import Dict + +from dagster import ConfigurableResource, Definitions, asset + + +class ReaderResource(ConfigurableResource): + value: str + + +@asset +def design(reader: ReaderResource) -> Dict: + # read_based_on_config() + return {"design:": reader.value} + + +@asset +def verification(): + return {"verification:": 1} + + +@asset +def manufacturing(): + return {"manufacturing:": 1} + + +@asset +def validation(): + return {"validation:": 1} + + +@asset +def structure_layer( + context, + design, + verification, + manufacturing, + validation, +): + combined = { + **design, + **verification, + **manufacturing, + **validation, + } + context.log.info(f"Combined data {combined}") + return combined + + +defs = Definitions( + assets=[ + design, + verification, + manufacturing, + validation, + structure_layer, + ], + resources={"reader": ReaderResource(value="configured-value")}, +) diff --git a/gplugins/database/README.md b/gplugins/database/README.md new file mode 100644 index 00000000..74b522db --- /dev/null +++ b/gplugins/database/README.md @@ -0,0 +1,25 @@ +# Database + +Database is hosted on PlanetScale. +Blobs (gds files, yaml files) are hosted on S3. + +## Environment variables + +The following environment variables need to be set: + +``` +PS_DATABASE=gdslib +PS_HOST=xxx.xxxxxxx.xxxx.xxxxx +PS_USERNAME= +PS_PASSWORD= +PS_SSL_CERT=/etc/ssl/certs/ca-certificates.crt +AWS_ACCESS_KEY_ID= +AWS_SECRET_ACCESS_KEY= +``` + +## Tables + +We currently have the following tables in our PlanetScale database: + +- `GdsFile` (test table, not used in production) +- `Component`: Links a component through a hash to blobs (GDS, settings as yaml) in S3. 
diff --git a/gplugins/database/__init__.py b/gplugins/database/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/gplugins/database/db_upload.py b/gplugins/database/db_upload.py new file mode 100644 index 00000000..ad97413a --- /dev/null +++ b/gplugins/database/db_upload.py @@ -0,0 +1,186 @@ +""" Upload a component / simulation result to the database """ + +import hashlib +import os +import tempfile +from functools import lru_cache +from typing import List, Optional + +import boto3 +import boto3.session +import numpy as np +import pandas as pd +from sqlmodel import Field +from sqlmodel import Session as _Session +from sqlmodel import SQLModel, create_engine + +import gdsfactory as gf + + +class Session(_Session): + def safe_add(self, model: SQLModel): + """adds a model to the database, but ignores it if it's already in there.""" + return self.execute(model.__class__.__table__.insert().prefix_with("IGNORE").values([model.dict()])) # type: ignore + + def safe_add_all(self, models: List[SQLModel]): + """adds a model to the database, but ignores it if it's already in there.""" + cls = models[0].__class__ + assert all(model.__class__ is cls for model in models) + return self.execute(cls.__table__.insert().prefix_with("IGNORE").values([model.dict() for model in models])) # type: ignore + + +class Component(SQLModel, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + function_name: str = Field(min_length=1, max_length=20) + module: str = Field(min_length=1, max_length=40) + name: str = Field(min_length=1, max_length=40) + hash: str = Field(min_length=32, max_length=32, unique=True) + + +class Simulation(SQLModel, table=True): + id: Optional[int] = Field(default=None, primary_key=True) + function_name: str = Field(min_length=1, max_length=20) + hash: str = Field(min_length=32, max_length=32, unique=True) + component_hash: str = Field(min_length=32, max_length=32, unique=True) + wavelength: float + port_in: str = 
Field(min_length=1, max_length=10) + port_out: str = Field(min_length=1, max_length=10) + abs: float + angle: float + + +@lru_cache(maxsize=None) +def get_database_engine(): + host = os.getenv("PS_HOST", "") + database = os.getenv("PS_DATABASE", "") + username = os.getenv("PS_USERNAME", "") + password = os.getenv("PS_PASSWORD", "") + ssl_ca = os.getenv("PS_SSL_CERT", "") + connection_string = f"mysql+pymysql://{username}:{password}@{host}/{database}" + return create_engine( + connection_string, echo=True, connect_args={"ssl": {"ca": ssl_ca}} + ) + + +@lru_cache(maxsize=None) +def s3_client(): + return boto3.client("s3") + + +def get_component_hash(component: gf.Component) -> str: + with tempfile.NamedTemporaryFile() as file: + path = os.path.abspath(file.name) + component.write_gds(path) + return hashlib.md5(file.read()).hexdigest() + + +def get_s3_key_from_hash(prefix: str, hash: str, ext: str = "gds") -> str: + return os.path.join(f"{prefix}/{ext}/{hash}.{ext}") + + +def convert_to_db_format(sp: dict) -> pd.DataFrame: + df = pd.DataFrame(sp) + wls = df.pop("wavelengths").values + dfs = [] + for c in df.columns: + port_in, port_out = (p.strip() for p in c.split(",")) + cdf = pd.DataFrame( + { + "wavelength": wls, + "port_in": port_in, + "port_out": port_out, + "abs": np.abs(df[c].values), + "angle": np.angle(df[c].values), + } + ) + dfs.append(cdf) + + return pd.concat(dfs, axis=0) + + +if __name__ == "__main__": + import gdsfactory.simulation.gmeep as gm + + component = gf.components.taper(length=100) + component_yaml = component.to_yaml() + + component_model = Component( + function_name=component.metadata["function_name"], + module=component.metadata["module"], + name=component.name, + hash=get_component_hash(component), + ) + engine = get_database_engine() + with Session(engine) as session: + session.safe_add(component_model) + session.commit() + + s3 = s3_client() + with tempfile.TemporaryDirectory() as tempdir: + component_gds_path = os.path.join(tempdir, 
f"{component_model.hash}.gds") + component_yaml_path = os.path.join(tempdir, f"{component_model.hash}.yml") + component.write_gds(component_gds_path) + with open(component_yaml_path, "w") as file: + file.write(component_yaml) + + component_key = get_s3_key_from_hash("component", component_model.hash, "gds") + component_yaml_key = get_s3_key_from_hash( + "component", component_model.hash, "yml" + ) + s3.upload_file(component_gds_path, "gdslib", component_key) + s3.upload_file(component_yaml_path, "gdslib", component_yaml_key) + + # with tempfile.TemporaryDirectory() as tmpdir: + tmpdir = "/tmp" + tmppath = gm.write_sparameters_meep_1x1( + component, is_3d=False, dirpath=tmpdir, only_return_filepath_sim_settings=True + ) # TODO: split simulation yaml file generation and actual simulation... + simulation_hash = str(tmppath)[-36:-4] + sp = gm.write_sparameters_meep_1x1(component, is_3d=False, dirpath=tmpdir) + yaml_filename = next(fn for fn in os.listdir(tmpdir) if fn.endswith(".yml")) + yaml_path = os.path.join(tmpdir, yaml_filename) + df = convert_to_db_format(sp) + df["component_hash"] = component_model.hash + df["hash"] = simulation_hash + df["function_name"] = "write_sparameters_meep_1x1" + df = df[ + [ + "function_name", + "hash", + "component_hash", + "wavelength", + "port_in", + "port_out", + "abs", + "angle", + ] + ] + simulation_models = [] + for ( + function_name, + hash, + component_hash, + wavelength, + port_in, + port_out, + abs, + angle, + ) in df.values: + simulation_model = Simulation( + function_name=function_name, + hash=hash, + component_hash=component_hash, + wavelength=wavelength, + port_in=port_in, + port_out=port_out, + abs=abs, + angle=angle, + ) + simulation_models.append(simulation_model) + with Session(engine) as session: + session.safe_add_all(simulation_models) + session.commit() + + s3 = s3_client() + yaml_key = get_s3_key_from_hash("simulation", simulation_hash, "yml") + s3.upload_file(yaml_path, "gdslib", yaml_key) diff --git 
a/gplugins/database/models.py b/gplugins/database/models.py new file mode 100644 index 00000000..5f5d7688 --- /dev/null +++ b/gplugins/database/models.py @@ -0,0 +1,450 @@ +from sqlalchemy import TIMESTAMP, Column, Float, ForeignKey, Integer, String, text +from sqlalchemy.dialects.mysql import TEXT +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import relationship + +Base = declarative_base() +metadata = Base.metadata + + +class Process(Base): + __tablename__ = "process" + __table_args__ = {"comment": "This table holds all foundry process info."} + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + name = Column(String(200), nullable=False) + process = Column(String(200), nullable=False) + version = Column(String(50), nullable=False) + type = Column(String(50)) + description = Column(String(200)) + + +class Unit(Base): + __tablename__ = "unit" + __table_args__ = { + "comment": "This table holds all units. A unit is here understood as definite magnitude of a quantity." 
+ } + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + name = Column(String(200), nullable=False) + quantity = Column(String(200), nullable=False) + symbol = Column(String(50), nullable=False) + description = Column(String(200)) + + +class Wafer(Base): + __tablename__ = "wafer" + __table_args__ = {"comment": "This table holds the base definition of a wafer."} + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + serial_number = Column(String(200), nullable=False) + name = Column(String(200)) + description = Column(String(200)) + + +class ComputedResult(Base): + __tablename__ = "computed_result" + __table_args__ = { + "comment": "This table holds all results obtained after computation/analysis of the raw results contained in the table result." 
+ } + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + name = Column(String(200), nullable=False) + type = Column(String(50), nullable=False) + unit_id = Column(ForeignKey("unit.id"), index=True) + domain_unit_id = Column(ForeignKey("unit.id"), index=True) + value = Column(TEXT, nullable=False) + domain = Column(TEXT) + description = Column(String(200)) + + domain_unit = relationship( + "Unit", primaryjoin="ComputedResult.domain_unit_id == Unit.id" + ) + unit = relationship("Unit", primaryjoin="ComputedResult.unit_id == Unit.id") + + +class Result(Base): + __tablename__ = "result" + __table_args__ = {"comment": "This table holds all results."} + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + name = Column(String(200), nullable=False) + type = Column(String(50), nullable=False) + unit_id = Column(ForeignKey("unit.id"), index=True) + domain_unit_id = Column(ForeignKey("unit.id"), index=True) + value = Column(TEXT, nullable=False) + domain = Column(TEXT) + description = Column(String(200)) + + domain_unit = relationship("Unit", primaryjoin="Result.domain_unit_id == Unit.id") + unit = relationship("Unit", primaryjoin="Result.unit_id == Unit.id") + + +class Reticle(Base): + __tablename__ = "reticle" + __table_args__ = {"comment": "This table holds the definition of a reticle."} + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + name = Column(String(200), nullable=False) + position = Column( + String(50), comment="Position of the 
reticle on the wafer. (ROW, COLUMN)" + ) + size = Column( + String(50), + comment="The size of the reticle (X,Y) having the convention that -Å· points towards the notch/flat of the wafer.", + ) + wafer_id = Column(ForeignKey("wafer.id"), nullable=False, index=True) + description = Column(String(200)) + + wafer = relationship("Wafer") + + +class ComputedResultSelfRelation(Base): + __tablename__ = "computed_result_self_relation" + __table_args__ = { + "comment": "This table holds all computed results self relation. This is used to link computed results together" + } + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + computed_result1_id = Column( + ForeignKey("computed_result.id"), nullable=False, index=True + ) + computed_result2_id = Column( + ForeignKey("computed_result.id"), nullable=False, index=True + ) + + computed_result1 = relationship( + "ComputedResult", + primaryjoin="ComputedResultSelfRelation.computed_result1_id == ComputedResult.id", + ) + computed_result2 = relationship( + "ComputedResult", + primaryjoin="ComputedResultSelfRelation.computed_result2_id == ComputedResult.id", + ) + + +class Die(Base): + __tablename__ = "die" + __table_args__ = {"comment": "This table holds die definition."} + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + reticle_id = Column(ForeignKey("reticle.id"), nullable=False, index=True) + name = Column(String(200), nullable=False) + position = Column(String(50)) + size = Column(String(50)) + description = Column(String(200)) + + reticle = relationship("Reticle") + + +class ResultComputedResultRelation(Base): + __tablename__ = "result_computed_result_relation" + 
__table_args__ = { + "comment": "This table holds the relations in between the results and the computed results." + } + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + result_id = Column(ForeignKey("result.id"), nullable=False, index=True) + computed_result_id = Column( + ForeignKey("computed_result.id"), nullable=False, index=True + ) + + computed_result = relationship("ComputedResult") + result = relationship("Result") + + +class ResultInfo(Base): + __tablename__ = "result_info" + __table_args__ = { + "comment": "This table holds extra information about specific results." + } + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + name = Column(String(200), nullable=False) + value = Column(String(200), nullable=False) + result_id = Column(ForeignKey("result.id"), nullable=False, index=True) + computed_result_id = Column( + ForeignKey("computed_result.id"), nullable=False, index=True + ) + unit_id = Column(ForeignKey("unit.id"), index=True) + description = Column(String(200)) + + computed_result = relationship("ComputedResult") + result = relationship("Result") + unit = relationship("Unit") + + +class ResultProcessRelation(Base): + __tablename__ = "result_process_relation" + __table_args__ = { + "comment": "This table holds all results and simulation result relation." 
+ } + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + result_id = Column(ForeignKey("result.id"), nullable=False, index=True) + process_id = Column(ForeignKey("process.id"), nullable=False, index=True) + + process = relationship("Process") + result = relationship("Result") + + +class ResultSelfRelation(Base): + __tablename__ = "result_self_relation" + __table_args__ = { + "comment": "This table holds all results self relation. This is used to link results together" + } + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + result1_id = Column(ForeignKey("result.id"), nullable=False, index=True) + result2_id = Column(ForeignKey("result.id"), nullable=False, index=True) + + result1 = relationship( + "Result", primaryjoin="ResultSelfRelation.result1_id == Result.id" + ) + result2 = relationship( + "Result", primaryjoin="ResultSelfRelation.result2_id == Result.id" + ) + + +class Component(Base): + __tablename__ = "component" + __table_args__ = {"comment": "This table holds the definition of components."} + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + die_id = Column(ForeignKey("die.id"), nullable=False, index=True) + name = Column(String(250), nullable=False) + description = Column(String(200)) + + die = relationship("Die") + + +class Port(Base): + __tablename__ = "port" + __table_args__ = {"comment": "This table holds all ports definition."} + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, 
server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + component_id = Column(ForeignKey("component.id"), nullable=False, index=True) + name = Column(String(200), server_default=text("''")) + port_type = Column(String(200)) + position = Column(String(50), nullable=False) + orientation = Column(Float(asdecimal=True), nullable=False) + description = Column(String(200)) + + component = relationship("Component") + + +class ComponentInfo(Base): + __tablename__ = "component_info" + __table_args__ = { + "comment": "This table holds information for the component using name/value pairs with optional description." + } + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + component_id = Column(ForeignKey("component.id"), index=True) + die_id = Column(ForeignKey("die.id"), index=True) + port_id = Column(ForeignKey("port.id"), index=True) + reticle_id = Column(ForeignKey("reticle.id"), index=True) + wafer_id = Column(ForeignKey("wafer.id"), index=True) + name = Column(String(200), nullable=False) + value = Column(String(200), nullable=False) + description = Column(String(200)) + + component = relationship("Component") + die = relationship("Die") + port = relationship("Port") + reticle = relationship("Reticle") + wafer = relationship("Wafer") + + +class ResultComponentRelation(Base): + __tablename__ = "result_component_relation" + __table_args__ = { + "comment": "This table holds the relations in between results and components." 
+ } + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + result_id = Column(ForeignKey("result.id"), nullable=False, index=True) + component_id = Column(ForeignKey("component.id"), index=True) + die_id = Column(ForeignKey("die.id"), index=True) + port_id = Column(ForeignKey("port.id"), index=True) + reticle_id = Column(ForeignKey("reticle.id"), index=True) + wafer_id = Column(ForeignKey("wafer.id"), index=True) + + component = relationship("Component") + die = relationship("Die") + port = relationship("Port") + result = relationship("Result") + reticle = relationship("Reticle") + wafer = relationship("Wafer") + + +class RelationInfo(Base): + __tablename__ = "relation_info" + __table_args__ = { + "comment": "This table holds extra information about specific relation." + } + + id = Column(Integer, primary_key=True) + created = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + updated = Column( + TIMESTAMP, nullable=False, server_default=text("CURRENT_TIMESTAMP") + ) + computed_result_self_relation_id = Column( + ForeignKey("computed_result_self_relation.id"), index=True + ) + result_self_relation_id = Column(ForeignKey("result_self_relation.id"), index=True) + result_process_relation_id = Column( + ForeignKey("result_process_relation.id"), index=True + ) + result_component_relation_id = Column( + ForeignKey("result_component_relation.id"), index=True + ) + result_computed_result_relation_id = Column( + ForeignKey("result_computed_result_relation.id"), index=True + ) + name = Column(String(200), nullable=False) + value = Column(String(200), nullable=False) + description = Column(String(200)) + + computed_result_self_relation = relationship("ComputedResultSelfRelation") + result_component_relation = relationship("ResultComponentRelation") + 
result_computed_result_relation = relationship("ResultComputedResultRelation") + result_process_relation = relationship("ResultProcessRelation") + result_self_relation = relationship("ResultSelfRelation") + + +if __name__ == "__main__": + from sqlalchemy import create_engine + from sqlalchemy.orm import Session + + import gdsfactory as gf + + engine = create_engine("sqlite:///database.db", echo=True, future=True) + metadata.create_all(engine) + + c = gf.components.ring_single(radius=10) + + with Session(engine) as session: + w1 = Wafer(name="12", serial_number="ABC") + r1 = Reticle(name="sky1", wafer_id=w1.id, wafer=w1) + d1 = Die(name="d00", reticle_id=r1.id, reticle=r1) + c1 = Component(name=c.name, die_id=d1.id, die=d1) + + component_settings = [] + + for key, value in c.settings.changed.items(): + s = ComponentInfo(component=c1, component_id=c1.id, name=key, value=value) + component_settings.append(s) + + for port in c.ports.values(): + s = Port( + component=c1, + component_id=c1.id, + port_type=port.port_type, + name=port.name, + orientation=port.orientation, + position=port.center, + ) + component_settings.append(s) + + session.add_all([w1, r1, d1, c1]) + session.add_all(component_settings) + session.commit() diff --git a/gplugins/schematic_editor/__init__.py b/gplugins/schematic_editor/__init__.py new file mode 100644 index 00000000..09710d79 --- /dev/null +++ b/gplugins/schematic_editor/__init__.py @@ -0,0 +1,3 @@ +from gplugins.schematic_editor.schematic_editor import SchematicEditor + +__all__ = ["SchematicEditor"] diff --git a/gplugins/schematic_editor/circuitviz.py b/gplugins/schematic_editor/circuitviz.py new file mode 100644 index 00000000..76edb7df --- /dev/null +++ b/gplugins/schematic_editor/circuitviz.py @@ -0,0 +1,525 @@ +import contextlib +from collections import defaultdict +from functools import partial +from typing import Dict, List, NamedTuple, Union + +import numpy as np +import pandas as pd +import yaml + + +import gdsfactory as gf +from 
gdsfactory.picmodel import PicYamlConfiguration, Placement, SchematicConfiguration + +try: + import bokeh.events as be + from bokeh import io as bio + from bokeh import models as bm + from bokeh import plotting as bp + from natsort import natsorted +except ImportError: + print("No bokeh and natsort found!\n" "pip install bokeh natsort") + +data = { + "srcs": defaultdict(lambda: defaultdict(lambda: [])), + "dss": {}, +} + +COLORS_BY_PORT_TYPE = { + "optical": "#0000ff", + "electrical": "#00ff00", + "placement": "white", + None: "gray", # default +} + + +def save_netlist(netlist, filename): + with open(filename, mode="w") as f: + d = netlist.dict(exclude_none=True) + if "placements" in d: + placements_dict = d["placements"] + elif "schematic_placements" in d: + placements_dict = d["schematic_placements"] + else: + raise ValueError("No placements attribute found in netlist") + pkeys = list(placements_dict.keys()) + p: dict = placements_dict + for pk in pkeys: + pv = p[pk] + if pv: + for kk in ["x", "y"]: + if kk in pv: + with contextlib.suppress(Exception): + pv[kk] = float(pv[kk]) + else: + p.pop(pk) + yaml.dump(d, f, sort_keys=False, default_flow_style=None) + + +class Rect(NamedTuple): + tag: str + x: float + y: float + w: float + h: float + c: str + + +class LayerPolygons(NamedTuple): + tag: str + xs: List[List[List[float]]] + ys: List[List[List[float]]] + c: str + alpha: float + + +class LineSegment(NamedTuple): + tag: str + x0: float + y0: float + x1: float + y1: float + name: str + + +def _enlarge_limits(ax, x, y, w=0.0, h=0.0) -> None: + xlim = ax.get_xlim() + ylim = ax.get_ylim() + x_min, x_max = xlim if xlim != (0.0, 1.0) else (np.inf, -np.inf) + y_min, y_max = ylim if ylim != (0.0, 1.0) else (np.inf, -np.inf) + x_min = min(x_min, x) + x_max = max(x + w, x_max) + y_min = min(y_min, y) + y_max = max(y + h, y_max) + if x_max == x_min: + x_max += 1.0 + if y_max == y_min: + y_max += 1.0 + ax.set_xlim(x_min, x_max) + ax.set_ylim(y_min, y_max) + + +def 
_get_sources(objs): + srcs = defaultdict(lambda: defaultdict(lambda: [])) + for obj in objs: + if isinstance(obj, LineSegment): + src = srcs["MultiLine"] + src["tag"].append(obj.tag) + src["x"].append(np.array([obj.x0, obj.x1])) + src["y"].append(np.array([obj.y0, obj.y1])) + src["line_color"].append("#000000") + src["name"].append(obj.name) + elif isinstance(obj, Rect): + src = srcs["Rect"] + src["tag"].append(obj.tag) + src["x"].append(obj.x + obj.w / 2) + src["y"].append(obj.y + obj.h / 2) + src["width"].append(obj.w) + src["height"].append(obj.h) + src["fill_color"].append(obj.c) + src["fill_alpha"].append(0.1) + elif isinstance(obj, gf.Port): + src = srcs["Port"] + src["tag"].append(obj.tag) + src["x"].append(obj.x) + src["y"].append(obj.y) + # src["width"].append(obj.w) + # src["height"].append(obj.h) + color = COLORS_BY_PORT_TYPE.get(obj.port_type, COLORS_BY_PORT_TYPE[None]) + src["fill_color"].append(color) + src["fill_alpha"].append(0.5) + elif isinstance(obj, LayerPolygons): + src = srcs["Polygons"] + src["tag"].append(obj.tag) + src["xs"].append(obj.xs) + src["ys"].append(obj.ys) + src["fill_color"].append(obj.c) + src["fill_alpha"].append(obj.alpha) + return srcs + + +def _get_column_data_sources(srcs): + _srcs = {} + for k, src in srcs.items(): + ds = bm.ColumnDataSource(dict(src.items())) + _srcs[k] = ds + return _srcs + + +def viz_bk( + netlist: Union[SchematicConfiguration, PicYamlConfiguration], + instances, + netlist_filename, + fig=None, + **kwargs, +): + global data + if fig is None: + fig = bp.figure() + + if isinstance(netlist, (PicYamlConfiguration, SchematicConfiguration)): + objs = viz_netlist(netlist, instances, **kwargs) + else: + objs = netlist + netlist = None + + if not isinstance(objs, list): + raise ValueError("viz_bk can only visualize a list of objects.") + + # data['srcs'] = _viz_bk_srcs(objs) + + srcs = _get_sources(objs) + dss = data["dss"] = _get_column_data_sources(srcs) + netlist = data["netlist"] + + def 
cb_rect_on_change_data(attr, old, new) -> None: + tags = np.array(old["tag"], dtype=object) + xy_old = np.stack([old["x"], old["y"]], 1) + xy_new = np.stack([new["x"], new["y"]], 1) + if xy_old.shape != xy_new.shape: + dss["Rect"].data.__dict__.update(old) + return + dxs, dys = (xy_new - xy_old).T # type: ignore + idxs = np.where(dxs**2 + dys**2 > 1.0)[0] + tags = tags[idxs] + dxs = dxs[idxs] + dys = dys[idxs] + + for tag, dx, dy in zip(tags, dxs, dys): # loop over all displaced rectangles + if netlist is not None: + if tag not in netlist.placements: + netlist.placements[tag] = Placement(x=0, y=0, dx=0, dy=0) + dx_, dy_ = netlist.placements[tag].dx, netlist.placements[tag].dy + dx_ = 0.0 if dx_ is None else dx_ + dy_ = 0.0 if dy_ is None else dy_ + netlist.placements[tag].dx = float(dx_ + dx) + netlist.placements[tag].dy = float(dy_ + dy) + for k, v in dss.items(): + if k == "Rect": + continue + elif k == "MultiLine": + data = dict(v.data) + for i, tag_ in enumerate(data["tag"]): + if "," in tag_: + tag1, tag2 = tag_.split(",") + if tag == tag1: + data["x"][i][0] += dx + data["y"][i][0] += dy + elif tag == tag2: + data["x"][i][-1] += dx + data["y"][i][-1] += dy + else: + continue + else: + if tag_ == tag: + data["x"][i][:] += dx + data["y"][i][:] += dy + else: + continue + v.data = data + elif k == "Polygons": + data = dict(v.data) + for i, tag_ in enumerate(data["tag"]): + if tag_ == tag: + for i_poly in range(len(data["xs"][i])): + for i_boundary in range(len(data["xs"][i][i_poly])): + data["xs"][i][i_poly][i_boundary] += dx + data["ys"][i][i_poly][i_boundary] += dy + v.data = data + elif k == "Port": + data = dict(v.data) + for i, tag_ in enumerate(data["tag"]): + if tag_ == tag: + data["x"][i] += dx + data["y"][i] += dy + else: + continue + v.data = data + save_netlist(netlist, netlist_filename) + + def cb_rect_selected_on_change_indices(attr, old, new) -> None: + if len(new) > 1: + data["dss"]["Rect"].selected.indices = [new[0]] + + def cp_double_tap(event) -> 
None: + # only works on 'hierarchical netlists...' + if netlist is None: + return + df = pd.DataFrame(data["dss"]["Rect"].data) + mask = np.ones_like(df.x, dtype=bool) + mask &= df.x - df.width / 2 < event.x + mask &= event.x < df.x + df.width / 2 + mask &= df.y - df.height / 2 < event.y + mask &= event.y < df.y + df.width / 2 + df = df[mask] + + tags = df.tag.values + if tags.shape[0] != 1: + return + + tag = tags[0] + if tag in netlist.placements: + cur_rotation = netlist.placements[tag].rotation or 0 + netlist.placements[tag].rotation = (cur_rotation + 90) % 360 + else: + return + + update_schematic_plot(schematic=netlist, instances=instances) + + data["dss"]["Rect"].on_change("data", cb_rect_on_change_data) + data["dss"]["Rect"].selected.on_change( + "indices", cb_rect_selected_on_change_indices + ) + fig.on_event(be.DoubleTap, cp_double_tap) + + inst_glyph = fig.add_glyph( + data["dss"]["Rect"], + bm.Rect( + x="x", + y="y", + width="width", + height="height", + fill_color="fill_color", + fill_alpha="fill_alpha", + ), + name="instances", + ) + if "Polygons" in data["dss"]: + fig.add_glyph( + data["dss"]["Polygons"], + bm.MultiPolygons( + xs="xs", ys="ys", fill_color="fill_color", fill_alpha="fill_alpha" + ), + ) + net_glyph = None + if "MultiLine" in data["dss"]: + net_glyph = fig.add_glyph( + data["dss"]["MultiLine"], + bm.MultiLine(xs="x", ys="y"), + name="nets", + ) # , line_color="line_color")) + fig.add_glyph( + data["dss"]["Port"], glyph=bm.Circle(x="x", y="y", fill_color="fill_color") + ) + del fig.tools[:] + draw_tool = bm.PointDrawTool( + renderers=[r for r in fig.renderers if isinstance(r.glyph, bm.Rect)], + empty_value="black", + ) + hover_tool = bm.HoverTool( + renderers=[inst_glyph], + tooltips=[("Instance", "@tag")], + ) + if net_glyph: + hover_tool_nets = bm.HoverTool( + renderers=[net_glyph], + tooltips=[("Net", "@name")], + show_arrow=True, + line_policy="interp", + ) + # pan_tool = bm.PanTool() + tap_tool = bm.TapTool() + zoom = 
bm.WheelZoomTool() + fig.add_tools(draw_tool, hover_tool, hover_tool_nets, tap_tool, zoom) + fig.toolbar.active_scroll = zoom + fig.toolbar.active_tap = tap_tool + fig.toolbar.active_drag = draw_tool + fig.toolbar.logo = None + fig.xaxis.major_label_text_font_size = "0pt" + fig.yaxis.major_label_text_font_size = "0pt" + fig.match_aspect = True + + def bkapp(doc) -> None: + doc.add_root(fig) + data["doc"] = doc + + return bkapp + + +def get_ports(component): + comp = component + return natsorted(comp.ports.keys()) + + +def is_output_port(port): + if "," in port: + return is_output_port(port.split(",")[-1]) + return port.startswith("out") + + +def is_input_port(port) -> bool: + return not is_output_port(port) + + +def get_input_ports(component): + ports = get_ports(component) + return [p for p in ports if is_input_port(p)] + + +def get_output_ports(component): + ports = get_ports(component) + return [p for p in ports if is_output_port(p)] + + +def ports_ys(ports, instance_height): + h = instance_height + if len(ports) < 1: + return [h / 2] + ys = np.linspace(0, h, len(ports) + 1) + dy = ys[1] - ys[0] + return ys[1:] - dy / 2 + + +def viz_instance( + netlist: Union[PicYamlConfiguration, SchematicConfiguration], + instance_name, + component, + instance_size, +): + # inst_spec = netlist.instances[instance_name].dict() + inst_ref = component.named_references[instance_name] + bbox = inst_ref.bbox + w = bbox[1][0] - bbox[0][0] + h = bbox[1][1] - bbox[0][1] + x0 = bbox[0][0] + y0 = bbox[0][1] + # pl = w / 10 + # input_ports = get_input_ports(component) + # output_ports = get_output_ports(component) + # y_inputs = ports_ys(input_ports, h) + # y_outputs = ports_ys(output_ports, h) + # x, y = get_placements(netlist).get(instance_name, (0, 0)) + x, y = x0, y0 + polys_by_layer = inst_ref.get_polygons(by_spec=True, as_array=False) + layer_polys = [] + layer_views = gf.pdk.get_layer_views() + + for layer, polys in polys_by_layer.items(): + if layer not in 
layer_views.get_layer_tuples(): + print(f"layer {layer} not found") + continue + lv = layer_views.get_from_tuple(layer) + if lv: + xs = [[p.points[:, 0]] for p in polys] + ys = [[p.points[:, 1]] for p in polys] + lp = LayerPolygons( + tag=instance_name, + xs=xs, + ys=ys, + c=lv.get_color_dict()["fill_color"], + alpha=lv.get_alpha(), + ) + layer_polys.append(lp) + + ports: List[gf.Port] = inst_ref.ports.values() + ports = [p.copy() for p in ports] + for p in ports: + # p.move((x, y)) + p.tag = instance_name + c = "#000000" + + r = Rect(tag=instance_name, x=x, y=y, w=w, h=h, c=c) + return [r, *ports] + layer_polys + + +def split_port(port, netlist): + if "," not in port: + port = netlist.ports[port] + *instance_name, port = port.split(",") + return ",".join(instance_name), port + + +def viz_connection(netlist, p_in, p_out, instance_size, point1, point2): + x1, y1 = point1 + x2, y2 = point2 + tag = f"{p_in.split(',')[0]},{p_out.split(',')[0]}" + name = f"{p_in} ➔ {p_out}" + line = LineSegment(tag, x1, y1, x2, y2, name=name) + return [line] + + +def viz_netlist(netlist, instances, instance_size=20): + schematic_dict = netlist.dict() + schematic_as_layout = { + "instances": schematic_dict["instances"], + "placements": schematic_dict["schematic_placements"], + } + schematic_component = gf.read.from_yaml(schematic_as_layout, mode="schematic") + + els = [] + port_coords = {} + for instance_name in netlist.instances: + els += viz_instance(netlist, instance_name, schematic_component, instance_size) + for el in els: + if isinstance(el, gf.Port): + port_name = f"{instance_name},{el.name}" + port_coords[port_name] = el.center + + for net in netlist.nets: + p_in, p_out = net + point1 = port_coords[p_in] + point2 = port_coords[p_out] + els += viz_connection(netlist, p_in, p_out, instance_size, point1, point2) + return els + + +def show_netlist( + schematic: SchematicConfiguration, instances: Dict, netlist_filename +) -> None: + global data + data["netlist"] = schematic + fig = 
bp.figure(width=800, height=500) + app = viz_bk( + schematic, + instances=instances, + fig=fig, + instance_size=50, + netlist_filename=netlist_filename, + ) + bio.show(app) + + +def update_schematic_plot( + schematic: SchematicConfiguration, instances: Dict, *args, **kwargs +) -> None: + global data + + if "doc" in data: + doc = data["doc"] + doc.add_next_tick_callback( + partial( + _update_schematic_plot, + schematic=schematic, + instances=instances, + ) + ) + + +def _update_schematic_plot( + schematic: SchematicConfiguration, instances: Dict, *args, **kwargs +) -> None: + srcs = _get_sources(viz_netlist(schematic, instances=instances)) + for k in srcs: + data["dss"][k].data = srcs[k] + + +def add_instance(name: str, component) -> None: + inst_viz = viz_instance( + data["netlist"], instance_name=name, component=component, instance_size=0 + ) + srcs = _get_sources([inst_viz]) + for k, src in srcs.items(): + cds: bm.ColumnDataSource = data["dss"][k] + cds.stream(src) + + +def get_deltas(netlist): + return { + k: {"dx": p.dx or 0, "dy": p.dy or 0} for k, p in netlist.placements.items() + } + + +def apply_deltas(netlist, deltas) -> None: + for k, d in deltas.items(): + netlist.placements[k].dx = d["dx"] + netlist.placements[k].dy = d["dy"] diff --git a/gplugins/schematic_editor/schematic_editor.py b/gplugins/schematic_editor/schematic_editor.py new file mode 100644 index 00000000..55b7dd6b --- /dev/null +++ b/gplugins/schematic_editor/schematic_editor.py @@ -0,0 +1,473 @@ +from pathlib import Path +from typing import Optional, Union + +import bokeh.io +import ipywidgets as widgets +import yaml + +import gdsfactory as gf +from gplugins.schematic_editor import circuitviz +from gdsfactory.picmodel import ( + PicYamlConfiguration, + Route, + RouteSettings, + SchematicConfiguration, +) + + +class SchematicEditor: + def __init__( + self, filename: Union[str, Path], pdk: Optional[gf.Pdk] = None + ) -> None: + """An interactive Schematic editor, meant to be used from a 
Jupyter Notebook. + + Args: + filename: the filename or path to use for the input/output schematic + pdk: the PDK to use (uses the current active PDK if None) + """ + filepath = filename if isinstance(filename, Path) else Path(filename) + self.path = filepath + + self.pdk = pdk or gf.get_active_pdk() + self.component_list = list(gf.get_active_pdk().cells.keys()) + + self.on_instance_added = [] + self.on_instance_removed = [] + self.on_settings_updated = [] + self.on_nets_modified = [] + self._notebook_handle = None + self._inst_boxes = [] + self._connected_ports = {} + + if filepath.is_file(): + self.load_netlist() + else: + self._schematic = SchematicConfiguration( + instances={}, schematic_placements={}, nets=[], ports={} + ) + self._instance_grid = widgets.VBox() + self._net_grid = widgets.VBox() + self._port_grid = widgets.VBox() + + first_inst_box = self._get_instance_selector() + first_inst_box.children[0].observe(self._add_row_when_full, names=["value"]) + first_inst_box.children[1].observe( + self._on_instance_component_modified, names=["value"] + ) + self._instance_grid.children += (first_inst_box,) + + first_net_box = self._get_net_selector() + first_net_box.children[0].observe(self._add_net_row_when_full, names=["value"]) + self._net_grid.children += (first_net_box,) + for row in self._net_grid.children: + for child in row.children: + child.observe(self._on_net_modified, names=["value"]) + + # write netlist whenever the netlist changes, in any way + self.on_instance_added.append(self.write_netlist) + self.on_settings_updated.append(self.write_netlist) + self.on_nets_modified.append(self.write_netlist) + self.on_instance_removed.append(self.write_netlist) + + # events triggered when instances are added + self.on_instance_added.append(self._update_instance_options) + self.on_instance_added.append(self._make_instance_removable) + + def _get_instance_selector(self, inst_name=None, component_name=None): + component_selector = widgets.Combobox( + 
placeholder="Pick a component", + options=self.component_list, + ensure_option=True, + disabled=False, + ) + instance_box = widgets.Text(placeholder="Enter a name", disabled=False) + component_selector._instance_selector = instance_box + can_remove = False + if inst_name: + instance_box.value = inst_name + if component_name: + component_selector.value = component_name + can_remove = True + remove_button = widgets.Button( + description="Remove", + icon="xmark", + disabled=(not can_remove), + tooltip="Remove this instance from the schematic", + button_style="", + ) + remove_button.on_click(self._on_remove_button_clicked) + + row = widgets.Box([instance_box, component_selector, remove_button]) + row._component_selector = component_selector + row._instance_box = instance_box + row._remove_button = remove_button + + remove_button._row = row + instance_box._row = row + component_selector._row = row + return row + + def _get_port_selector( + self, port_name: Optional[str] = None, port: Optional[str] = None + ): + instance_port_selector = widgets.Text( + placeholder="InstanceName:PortName", disabled=False + ) + + port_name_box = widgets.Text(placeholder="Port name", disabled=False) + instance_port_selector._instance_selector = port_name_box + can_remove = False + if port_name: + port_name_box.value = port_name + if port: + instance_port_selector.value = port + # can_remove = True + can_remove = False + remove_button = widgets.Button( + description="Remove", + icon="xmark", + disabled=(not can_remove), + tooltip="Remove this port from the schematic", + button_style="", + ) + remove_button.on_click(self._on_remove_button_clicked) + + row = widgets.Box([port_name_box, instance_port_selector, remove_button]) + row._component_selector = instance_port_selector + row._instance_box = port_name_box + row._remove_button = remove_button + + remove_button._row = row + port_name_box._row = row + instance_port_selector._row = row + return row + + def _update_instance_options(self, 
**kwargs) -> None: + inst_names = self._schematic.instances.keys() + for inst_box in self._inst_boxes: + inst_box.options = list(inst_names) + + def _make_instance_removable(self, instance_name, **kwargs) -> None: + for row in self._instance_grid.children: + if row._instance_box.value == instance_name: + row._remove_button.disabled = False + return + + def _get_net_selector(self, inst1=None, port1=None, inst2=None, port2=None): + inst_names = list(self._schematic.instances.keys()) + inst1_selector = widgets.Combobox( + placeholder="inst1", options=inst_names, ensure_option=True, disabled=False + ) + inst2_selector = widgets.Combobox( + placeholder="inst2", options=inst_names, ensure_option=True, disabled=False + ) + self._inst_boxes.extend([inst1_selector, inst2_selector]) + port1_selector = widgets.Text(placeholder="port1", disabled=False) + port2_selector = widgets.Text(placeholder="port2", disabled=False) + if inst1: + inst1_selector.value = inst1 + if inst2: + inst2_selector.value = inst2 + if port1: + port1_selector.value = port1 + if port2: + port2_selector.value = port2 + return widgets.Box( + [inst1_selector, port1_selector, inst2_selector, port2_selector] + ) + + def _add_row_when_full(self, change) -> None: + if change["old"] == "" and change["new"] != "": + this_box = change["owner"] + last_box = self._instance_grid.children[-1].children[0] + if this_box is last_box: + new_row = self._get_instance_selector() + self._instance_grid.children += (new_row,) + new_row.children[0].observe(self._add_row_when_full, names=["value"]) + new_row.children[1].observe( + self._on_instance_component_modified, names=["value"] + ) + new_row._associated_component = None + + def _add_net_row_when_full(self, change) -> None: + if change["old"] == "" and change["new"] != "": + this_box = change["owner"] + last_box = self._net_grid.children[-1].children[0] + if this_box is last_box: + new_row = self._get_net_selector() + self._net_grid.children += (new_row,) + 
new_row.children[0].observe( + self._add_net_row_when_full, names=["value"] + ) + for child in new_row.children: + child.observe(self._on_net_modified, names=["value"]) + new_row._associated_component = None + + def _update_schematic_plot(self, **kwargs) -> None: + circuitviz.update_schematic_plot( + schematic=self._schematic, + instances=self.symbols, + ) + + def _on_instance_component_modified(self, change) -> None: + this_box = change["owner"] + inst_box = this_box._instance_selector + inst_name = inst_box.value + component_name = this_box.value + + if change["old"] == "": + if change["new"] != "": + self.add_instance(instance_name=inst_name, component=component_name) + elif change["new"] != change["old"]: + self.update_component(instance=inst_name, component=component_name) + + def _on_remove_button_clicked(self, button) -> None: + row = button._row + self.remove_instance(instance_name=row._instance_box.value) + self._instance_grid.children = tuple( + child for child in self._instance_grid.children if child is not row + ) + + def _get_data_from_row(self, row): + inst_name, component_name = (w.value for w in row.children) + return {"instance_name": inst_name, "component_name": component_name} + + def _get_instance_data(self): + inst_data = [ + self._get_data_from_row(row) for row in self._instance_grid.children + ] + inst_data = [d for d in inst_data if d["instance_name"] != ""] + return inst_data + + def _get_net_from_row(self, row): + return [c.value for c in row.children] + + def _get_net_data(self): + net_data = [self._get_net_from_row(row) for row in self._net_grid.children] + net_data = [d for d in net_data if "" not in d] + return net_data + + def _on_net_modified(self, change) -> None: + if change["new"] == change["old"]: + return + net_data = self._get_net_data() + new_nets = [[f"{n[0]},{n[1]}", f"{n[2]},{n[3]}"] for n in net_data] + connected_ports = {} + for n1, n2 in new_nets: + connected_ports[n1] = n2 + connected_ports[n2] = n1 + 
self._connected_ports = connected_ports + old_nets = self._schematic.nets + self._schematic.nets = new_nets + for callback in self.on_nets_modified: + callback(old_nets=old_nets, new_nets=new_nets) + + @property + def instance_widget(self): + return self._instance_grid + + @property + def net_widget(self): + return self._net_grid + + @property + def port_widget(self): + return self._port_grid + + def visualize(self) -> None: + circuitviz.show_netlist(self.schematic, self.symbols, self.path) + + self.on_instance_added.append(self._update_schematic_plot) + self.on_settings_updated.append(self._update_schematic_plot) + self.on_nets_modified.append(self._update_schematic_plot) + self.on_instance_removed.append(self._update_schematic_plot) + + @property + def instances(self): + insts = {} + inst_data = self._schematic.instances + for inst_name, inst in inst_data.items(): + component_spec = inst.dict() + # if component_spec['settings'] is None: + # component_spec['settings'] = {} + # validates the settings + insts[inst_name] = gf.get_component(component_spec) + return insts + + @property + def symbols(self): + insts = {} + inst_data = self._schematic.instances + for inst_name, inst in inst_data.items(): + component_spec = inst.dict() + insts[inst_name] = self.pdk.get_symbol(component_spec) + return insts + + def add_instance( + self, instance_name: str, component: Union[str, gf.Component] + ) -> None: + self._schematic.add_instance(name=instance_name, component=component) + for callback in self.on_instance_added: + callback(instance_name=instance_name) + + def remove_instance(self, instance_name: str) -> None: + self._schematic.instances.pop(instance_name) + if instance_name in self._schematic.placements: + self._schematic.placements.pop(instance_name) + for callback in self.on_instance_removed: + callback(instance_name=instance_name) + + def update_component(self, instance, component) -> None: + self._schematic.instances[instance].component = component + 
self.update_settings(instance=instance, clear_existing=True) + + def update_settings( + self, instance, clear_existing: bool = False, **settings + ) -> None: + old_settings = self._schematic.instances[instance].settings.copy() + if clear_existing: + self._schematic.instances[instance].settings.clear() + if settings: + self._schematic.instances[instance].settings.update(settings) + for callback in self.on_settings_updated: + callback( + instance_name=instance, settings=settings, old_settings=old_settings + ) + + def add_net(self, inst1, port1, inst2, port2): + p1 = f"{inst1},{port1}" + p2 = f"{inst2},{port2}" + if p1 in self._connected_ports: + if self._connected_ports[p1] == p2: + return + current_port = self._connected_ports[p1] + raise ValueError( + f"{p1} is already connected to {current_port}. Can't connect to {p2}" + ) + self._connected_ports[p1] = p2 + self._connected_ports[p2] = p1 + old_nets = self._schematic.nets.copy() + self._schematic.nets.append([p1, p2]) + new_row = self._get_net_selector( + inst1=inst1, inst2=inst2, port1=port1, port2=port2 + ) + existing_rows = self._net_grid.children + new_rows = existing_rows[:-1] + (new_row, existing_rows[-1]) + self._net_grid.children = new_rows + for callback in self.on_nets_modified: + callback(old_nets=old_nets, new_nets=self._schematic.nets) + + def get_netlist(self): + return self._schematic.dict() + + @property + def schematic(self): + return self._schematic + + def write_netlist(self, **kwargs) -> None: + netlist = self.get_netlist() + with open(self.path, mode="w") as f: + yaml.dump(netlist, f, default_flow_style=None, sort_keys=False) + + def load_netlist(self) -> None: + with open(self.path) as f: + netlist = yaml.safe_load(f) + + schematic = SchematicConfiguration.parse_obj(netlist) + self._schematic = schematic + + # process instances + instances = netlist["instances"] + nets = netlist.get("nets", []) + new_rows = [] + for inst_name, inst in instances.items(): + component_name = inst["component"] + 
new_row = self._get_instance_selector( + inst_name=inst_name, component_name=component_name + ) + new_row.children[0].observe(self._add_row_when_full, names=["value"]) + new_row.children[1].observe( + self._on_instance_component_modified, names=["value"] + ) + new_rows.append(new_row) + self._instance_grid = widgets.VBox(new_rows) + + # process nets + unpacked_nets = [] + net_rows = [] + for net in nets: + unpacked_net = [] + for net_entry in net: + inst_name, port_name = net_entry.split(",") + unpacked_net.extend([inst_name, port_name]) + unpacked_nets.append(unpacked_net) + net_rows.append(self._get_net_selector(*unpacked_net)) + self._connected_ports[net[0]] = net[1] + self._connected_ports[net[1]] = net[0] + self._net_grid = widgets.VBox(net_rows) + + # process ports + ports = netlist.get("ports", {}) + schematic.ports = ports + + new_rows = [] + for port_name, port in ports.items(): + new_row = self._get_port_selector(port_name=port_name, port=port) + new_row.children[0].observe(self._add_row_when_full, names=["value"]) + new_row.children[1].observe( + self._on_instance_component_modified, names=["value"] + ) + new_rows.append(new_row) + self._port_grid = widgets.VBox(new_rows) + + def instantiate_layout( + self, + output_filename, + default_router="get_bundle", + default_cross_section="strip", + ): + schematic = self._schematic + routes = {} + for inet, net in enumerate(schematic.nets): + route = Route( + routing_strategy=default_router, + links={net[0]: net[1]}, + settings=RouteSettings(cross_section=default_cross_section), + ) + routes[f"r{inet}"] = route + pic_conf = PicYamlConfiguration( + instances=schematic.instances, + placements=schematic.placements, + routes=routes, + ports=schematic.ports, + ) + pic_conf.to_yaml(output_filename) + return pic_conf + + def save_schematic_html( + self, filename: Union[str, Path], title: Optional[str] = None + ) -> None: + """Saves the schematic visualization to a standalone html file (read-only). 
+ + Args: + filename: the (*.html) filename to write to + title: title for the output page + """ + filename = Path(filename) + if title is None: + title = f"{filename.stem} Schematic" + if "doc" not in circuitviz.data: + self.visualize() + if "doc" in circuitviz.data: + bokeh.io.save(circuitviz.data["doc"], filename=filename, title=title) + else: + raise ValueError( + "Unable to save the schematic to a standalone html file! Has the visualization been loaded yet?" + ) + + +if __name__ == "__main__": + from gdsfactory.config import PATH + + se = SchematicEditor(PATH.notebooks / "test.schem.yml") + print(se.schematic) diff --git a/gplugins/web/Makefile b/gplugins/web/Makefile new file mode 100644 index 00000000..914b8d10 --- /dev/null +++ b/gplugins/web/Makefile @@ -0,0 +1,2 @@ +run: + uvicorn main:app --reload diff --git a/gplugins/web/__init__.py b/gplugins/web/__init__.py new file mode 100644 index 00000000..b2f01558 --- /dev/null +++ b/gplugins/web/__init__.py @@ -0,0 +1 @@ +__version__ = "0.0.11" diff --git a/gplugins/web/gds_files/C.gds b/gplugins/web/gds_files/C.gds new file mode 100644 index 00000000..a16a30ab Binary files /dev/null and b/gplugins/web/gds_files/C.gds differ diff --git a/gplugins/web/gds_files/C_width20.gds b/gplugins/web/gds_files/C_width20.gds new file mode 100644 index 00000000..ffd9e397 Binary files /dev/null and b/gplugins/web/gds_files/C_width20.gds differ diff --git a/gplugins/web/gds_files/C_width5.gds b/gplugins/web/gds_files/C_width5.gds new file mode 100644 index 00000000..6959e224 Binary files /dev/null and b/gplugins/web/gds_files/C_width5.gds differ diff --git a/gplugins/web/gds_files/coh_rx_dual_pol.gds b/gplugins/web/gds_files/coh_rx_dual_pol.gds new file mode 100644 index 00000000..8c75e5d2 Binary files /dev/null and b/gplugins/web/gds_files/coh_rx_dual_pol.gds differ diff --git a/gplugins/web/gds_files/coh_rx_single_pol.gds b/gplugins/web/gds_files/coh_rx_single_pol.gds new file mode 100644 index 00000000..7e8c25a4 Binary 
files /dev/null and b/gplugins/web/gds_files/coh_rx_single_pol.gds differ diff --git a/gplugins/web/gds_files/coh_rx_single_pol_pad_d_19b6c499.gds b/gplugins/web/gds_files/coh_rx_single_pol_pad_d_19b6c499.gds new file mode 100644 index 00000000..e57ed3aa Binary files /dev/null and b/gplugins/web/gds_files/coh_rx_single_pol_pad_d_19b6c499.gds differ diff --git a/gplugins/web/gds_files/coh_rx_single_pol_pad_d_d4526895.gds b/gplugins/web/gds_files/coh_rx_single_pol_pad_d_d4526895.gds new file mode 100644 index 00000000..f33c89df Binary files /dev/null and b/gplugins/web/gds_files/coh_rx_single_pol_pad_d_d4526895.gds differ diff --git a/gplugins/web/gds_files/coh_tx_dual_pol.gds b/gplugins/web/gds_files/coh_tx_dual_pol.gds new file mode 100644 index 00000000..005ee4ef Binary files /dev/null and b/gplugins/web/gds_files/coh_tx_dual_pol.gds differ diff --git a/gplugins/web/gds_files/crossing_arm.gds b/gplugins/web/gds_files/crossing_arm.gds new file mode 100644 index 00000000..3ba4be79 Binary files /dev/null and b/gplugins/web/gds_files/crossing_arm.gds differ diff --git a/gplugins/web/gds_files/dbr_cavity.gds b/gplugins/web/gds_files/dbr_cavity.gds new file mode 100644 index 00000000..a386a12e Binary files /dev/null and b/gplugins/web/gds_files/dbr_cavity.gds differ diff --git a/gplugins/web/gds_files/die_bbox_frame.gds b/gplugins/web/gds_files/die_bbox_frame.gds new file mode 100644 index 00000000..490f6571 Binary files /dev/null and b/gplugins/web/gds_files/die_bbox_frame.gds differ diff --git a/gplugins/web/gds_files/loss_deembedding_ch14_23.gds b/gplugins/web/gds_files/loss_deembedding_ch14_23.gds new file mode 100644 index 00000000..6566a5df Binary files /dev/null and b/gplugins/web/gds_files/loss_deembedding_ch14_23.gds differ diff --git a/gplugins/web/gds_files/pad_array_add_fiducials.gds b/gplugins/web/gds_files/pad_array_add_fiducials.gds new file mode 100644 index 00000000..d6e98f01 Binary files /dev/null and 
b/gplugins/web/gds_files/pad_array_add_fiducials.gds differ diff --git a/gplugins/web/gds_files/spiral_inner_io_add_gra_f2760628.gds b/gplugins/web/gds_files/spiral_inner_io_add_gra_f2760628.gds new file mode 100644 index 00000000..b0b67e46 Binary files /dev/null and b/gplugins/web/gds_files/spiral_inner_io_add_gra_f2760628.gds differ diff --git a/gplugins/web/gds_files/wg.gds b/gplugins/web/gds_files/wg.gds new file mode 100644 index 00000000..16321dab Binary files /dev/null and b/gplugins/web/gds_files/wg.gds differ diff --git a/gplugins/web/main.py b/gplugins/web/main.py new file mode 100644 index 00000000..953c6822 --- /dev/null +++ b/gplugins/web/main.py @@ -0,0 +1,285 @@ +import base64 +import importlib +import os +from pathlib import Path +import pathlib +from typing import Optional + +from glob import glob +import orjson + +from fastapi import Form, HTTPException +from fastapi import FastAPI, Request, status +from fastapi.responses import HTMLResponse, RedirectResponse +from fastapi.staticfiles import StaticFiles +from fastapi.templating import Jinja2Templates + +from loguru import logger +from starlette.routing import WebSocketRoute + +import gdsfactory as gf +from gplugins.web.middleware import ProxiedHeadersMiddleware +from gdsfactory.config import PATH, GDSDIR_TEMP, CONF +from gplugins.web.server import LayoutViewServerEndpoint, get_layout_view + +from gdsfactory.watch import FileWatcher +from gdsfactory.cell import Settings + +module_path = Path(__file__).parent.absolute() + +app = FastAPI( + routes=[WebSocketRoute("/view/{cell_name}/ws", endpoint=LayoutViewServerEndpoint)] +) +app.add_middleware(ProxiedHeadersMiddleware) +# app = FastAPI() +app.mount("/static", StaticFiles(directory=PATH.web / "static"), name="static") + +# gdsfiles = StaticFiles(directory=home_path) +# app.mount("/gds_files", gdsfiles, name="gds_files") +templates = Jinja2Templates(directory=PATH.web / "templates") + + +def load_pdk() -> gf.Pdk: + pdk = os.environ.get("PDK", 
"generic") + + if pdk == "generic": + active_pdk = gf.get_active_pdk() + else: + active_module = importlib.import_module(pdk) + active_pdk = active_module.PDK + active_pdk.activate() + return active_pdk + + +def get_url(request: Request) -> str: + port_mod = "" + if request.url.port is not None and len(str(request.url).split(".")) < 3: + port_mod = f":{str(request.url.port)}" + + hostname = request.url.hostname + + if "github" in hostname: + port_mod = "" + + url = str( + request.url.scheme + + "://" + + (hostname or "localhost") + + port_mod + + request.url.path + ) + return url + + +@app.get("/", response_class=HTMLResponse) +async def root(request: Request): + return templates.TemplateResponse("index.html.j2", {"request": request}) + + +@app.get("/gds_list", response_class=HTMLResponse) +async def gds_list(request: Request): + """List all saved GDS files.""" + files_root = GDSDIR_TEMP + paths_list = glob(str(files_root / "*.gds")) + files_list = sorted(Path(gdsfile).stem for gdsfile in paths_list) + files_metadata = [ + {"name": file_name, "url": f"view/{file_name}"} for file_name in files_list + ] + return templates.TemplateResponse( + "file_browser.html.j2", + { + "request": request, + "message": f"GDS files in {str(files_root)!r}", + "files_root": files_root, + "files_metadata": files_metadata, + }, + ) + + +@app.get("/gds_current", response_class=HTMLResponse) +async def gds_current(request: Request) -> RedirectResponse: + """List all saved GDS files.""" + if CONF.last_saved_files: + return RedirectResponse(f"/view/{CONF.last_saved_files[-1].stem}") + else: + return RedirectResponse( + "/", + status_code=status.HTTP_302_FOUND, + ) + + +@app.get("/pdk", response_class=HTMLResponse) +async def pdk(request: Request): + if "preview.app.github" in str(request.url): + return RedirectResponse(str(request.url).replace(".preview", "")) + active_pdk = load_pdk() + pdk_name = active_pdk.name + components = list(active_pdk.cells.keys()) + return 
templates.TemplateResponse( + "pdk.html.j2", + { + "request": request, + "title": "Main", + "pdk_name": pdk_name, + "components": sorted(components), + }, + ) + + +LOADED_COMPONENTS = {} + + +@app.get("/view/{cell_name}", response_class=HTMLResponse) +async def view_cell(request: Request, cell_name: str, variant: Optional[str] = None): + gds_files = GDSDIR_TEMP.glob("*.gds") + gds_names = [gdspath.stem for gdspath in gds_files] + + if "preview.app.github" in str(request.url): + return RedirectResponse(str(request.url).replace(".preview", "")) + + if variant in LOADED_COMPONENTS: + component = LOADED_COMPONENTS[variant] + else: + try: + component = gf.get_component(cell_name) + except Exception as e: + if cell_name not in gds_names: + raise HTTPException( + status_code=400, detail=f"Component not found. {e}" + ) from e + + gdspath = GDSDIR_TEMP / cell_name + component = gf.import_gds(gdspath=gdspath.with_suffix(".gds")) + component.settings = Settings(name=component.name) + layout_view = get_layout_view(component) + pixel_data = layout_view.get_pixels_with_options(800, 400).to_png_data() + b64_data = base64.b64encode(pixel_data).decode("utf-8") + return templates.TemplateResponse( + "viewer.html.j2", + { + "request": request, + "cell_name": cell_name, + "variant": variant, + "title": "Viewer", + "initial_view": b64_data, + "component": component, + "url": get_url(request), + }, + ) + + +def _parse_value(value: str): + if not value.startswith("{") and not value.startswith("["): + return value + try: + return orjson.loads(value.replace("'", '"')) + except orjson.JSONDecodeError as e: + raise ValueError(f"Unable to decode parameter value, {value}: {e.msg}") from e + + +@app.post("/update/{cell_name}") +async def update_cell(request: Request, cell_name: str): + """Cell name is the name of the PCell function.""" + data = await request.form() + settings = {k: _parse_value(v) for k, v in data.items() if v != ""} + if not settings: + return RedirectResponse( + 
f"/view/{cell_name}", + status_code=status.HTTP_302_FOUND, + ) + component = gf.get_component({"component": cell_name, "settings": settings}) + variant = component.name + + LOADED_COMPONENTS[component.name] = component + return RedirectResponse( + f"/view/{cell_name}?variant={variant}", + status_code=status.HTTP_302_FOUND, + ) + + +@app.post("/search", response_class=RedirectResponse) +async def search(name: str = Form(...)): + logger.info(f"Searching for {name}...") + try: + gf.get_component(name) + except ValueError: + return RedirectResponse("/", status_code=status.HTTP_404_NOT_FOUND) + logger.info(f"Successfully found {name}! Redirecting...") + return RedirectResponse(f"/view/{name}", status_code=status.HTTP_302_FOUND) + + +######################### +# filewatcher +####################### + +watched_folder = None +watcher = None +output = "" +component = None + + +@app.get("/filewatcher", response_class=HTMLResponse) +async def filewatcher(request: Request): + global component + + if CONF.last_saved_files: + component = gf.import_gds(gf.CONF.last_saved_files[-1]) + component.settings = Settings(name=component.name) + else: + component = gf.components.straight() + + layout_view = get_layout_view(component) + pixel_data = layout_view.get_pixels_with_options(800, 400).to_png_data() + b64_data = base64.b64encode(pixel_data).decode("utf-8") + + return templates.TemplateResponse( + "filewatcher.html.j2", + { + "request": request, + "output": output, + "cell_name": str(component.name), + "variant": None, + "title": "Viewer", + "initial_view": b64_data, + "component": component, + "url": get_url(request), + }, + ) + + +@app.post("/filewatcher_start") +async def watch_folder(request: Request, folder_path: str = Form(...)): + global component + global output + global watched_folder + global watcher + + if folder_path is None or not folder_path.strip(): + raise HTTPException(status_code=400, detail="Folder path is required.") + if not os.path.exists(folder_path) or not 
os.path.isdir(folder_path): + raise HTTPException(status_code=400, detail="Folder does not exist.") + + watched_folder = folder_path + watched_folder = pathlib.Path(folder_path) + watcher = FileWatcher(path=folder_path) + watcher.start() + output += f"watching {watched_folder}\n" + return RedirectResponse( + "/filewatcher", + status_code=status.HTTP_302_FOUND, + ) + + +@app.get("/filewatcher_stop") +def stop_watcher(request: Request) -> str: + """Stops filewacher.""" + global watcher + global watched_folder + global output + + if watcher: + watcher.stop() + + message = f"stopped watching {watched_folder}\n" + output += message + return message diff --git a/gplugins/web/middleware.py b/gplugins/web/middleware.py new file mode 100644 index 00000000..8a1d7870 --- /dev/null +++ b/gplugins/web/middleware.py @@ -0,0 +1,59 @@ +from typing import List, Tuple + +from starlette.types import ASGIApp, Receive, Scope, Send + +Headers = List[Tuple[bytes, bytes]] + +# original developed by researchers at university of cambridge: +# https://pypi.org/project/fastapi-proxiedheadersmiddleware/ + +# modification to resolve Connection: keep-alive vs. upgrade conflict +# conflict described here: https://github.com/orgs/community/discussions/57596 +# doesn't seem to work as intended, as the upgrade decision is made +# before the request goes through the middleware + + +class ProxiedHeadersMiddleware: + """ + A middleware that modifies the request to ensure that FastAPI uses the + X-Forwarded-* headers when creating URLs used to reference this application. + + We are very permissive in allowing all X-Forwarded-* headers to be used, as + we know that this API will be published behind the API Gateway, and is + therefore not prone to redirect hijacking. 
+ + """ + + def __init__(self, app: ASGIApp): + self.app = app + + async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None: + scope["headers"] = self.remap_headers(scope.get("headers", {})) + + await self.app(scope, receive, send) + return + + def remap_headers(self, source: Headers) -> Headers: + """ + Map X-Forwarded-Host to host and X-Forwarded-Prefix to prefix. + + """ + upgrade = len( + [q for p, q in source if "connection" in str(p) and "upgrade" in str(q)] + ) + + source = dict(source) + if upgrade: # resolve conflict with priority of upgrade + source[b"connection"] = b"upgrade" + + if b"x-forwarded-host" in source: + source.update({b"host": source[b"x-forwarded-host"]}) + source.pop(b"x-forwarded-host") + + if b"x-forwarded-prefix" in source: + source.update({b"host": source[b"host"] + source[b"x-forwarded-prefix"]}) + source.pop(b"x-forwarded-prefix") + + source = [(k, v) for k, v in source.items()] + + return source diff --git a/gplugins/web/server.py b/gplugins/web/server.py new file mode 100755 index 00000000..5f68c024 --- /dev/null +++ b/gplugins/web/server.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python3 +# type: ignore + +import asyncio +import json +from typing import Optional + +import klayout.db as db +import klayout.lay as lay +from fastapi import WebSocket +from loguru import logger +from starlette.endpoints import WebSocketEndpoint + +import gdsfactory as gf +from gdsfactory.component import GDSDIR_TEMP + +host = "localhost" +port = 8765 + + +class LayoutViewServerEndpoint(WebSocketEndpoint): + encoding = "text" + + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + logger.info("Initialized websocket") + _params = self.scope["query_string"].decode("utf-8") + _params_splitted = _params.split("&") + params = {} + for _param in _params_splitted: + key, value = _param.split("=") + params[key] = value + + # print("args:", args) + # print("kwargs:", kwargs) + # self.url = 
params["gds_file"].replace('/', '\\') + # self.layer_props = params.get("layer_props", None) + lyp_path = GDSDIR_TEMP / "layer_props.lyp" + gf.get_active_pdk().layer_views.to_lyp(lyp_path) + self.layer_props = lyp_path + # path_params = args[0]['path_params'] + # cell_name = path_params["cell_name"] + cell_name = params["variant"] + # c = gf.get_component(cell_name) + gds_path = GDSDIR_TEMP / f"{cell_name}.gds" + # c.write_gds(gds_path) + self.url = self.gds_path = str(gds_path) + + async def on_connect(self, websocket) -> None: + await websocket.accept() + await self.connection(websocket) + + async def on_receive(self, websocket, data) -> None: + await self.reader(websocket, data) + + async def on_disconnect(self, websocket, close_code) -> None: + pass + + async def send_image(self, websocket, data) -> None: + await websocket.send_text(data) + + def image_updated(self, websocket) -> None: + pixel_buffer = self.layout_view.get_screenshot_pixels() + asyncio.create_task(self.send_image(websocket, pixel_buffer.to_png_data())) + + def mode_dump(self): + return self.layout_view.mode_names() + + def annotation_dump(self): + return [d[1] for d in self.layout_view.annotation_templates()] + + def layer_dump(self): + js = [] + for layer in self.layout_view.each_layer(): + js.append( + { + "dp": layer.eff_dither_pattern(), + "ls": layer.eff_line_style(), + "c": layer.eff_fill_color(), + "fc": layer.eff_frame_color(), + "m": layer.marked, + "s": layer.source, + "t": layer.transparent, + "va": layer.valid, + "v": layer.visible, + "w": layer.width, + "x": layer.xfill, + "name": layer.name, + "id": layer.id(), + } + ) + return js + + async def connection( + self, websocket: WebSocket, path: Optional[str] = None + ) -> None: + self.layout_view = lay.LayoutView() + url = self.url + self.layout_view.load_layout(url) + if self.layer_props is not None: + self.layout_view.load_layer_props(str(self.layer_props)) + self.layout_view.max_hier() + + await websocket.send_text( + json.dumps( 
+ { + "msg": "loaded", + "modes": self.mode_dump(), + "annotations": self.annotation_dump(), + "layers": self.layer_dump(), + } + ) + ) + + asyncio.create_task(self.timer(websocket)) + + async def timer(self, websocket) -> None: + self.layout_view.on_image_updated_event = lambda: self.image_updated(websocket) + while True: + self.layout_view.timer() + await asyncio.sleep(0.01) + + def buttons_from_js(self, js): + buttons = 0 + k = js["k"] + b = js["b"] + if (k & 1) != 0: + buttons |= lay.ButtonState.ShiftKey + if (k & 2) != 0: + buttons |= lay.ButtonState.ControlKey + if (k & 4) != 0: + buttons |= lay.ButtonState.AltKey + if (b & 1) != 0: + buttons |= lay.ButtonState.LeftButton + if (b & 2) != 0: + buttons |= lay.ButtonState.RightButton + if (b & 4) != 0: + buttons |= lay.ButtonState.MidButton + return buttons + + def wheel_event(self, function, js) -> None: + delta = 0 + dx = js["dx"] + dy = js["dy"] + if dx != 0: + delta = -dx + horizontal = True + elif dy != 0: + delta = -dy + horizontal = False + if delta != 0: + function( + delta, horizontal, db.Point(js["x"], js["y"]), self.buttons_from_js(js) + ) + + def mouse_event(self, function, js) -> None: + function(db.Point(js["x"], js["y"]), self.buttons_from_js(js)) + + async def reader(self, websocket, data: str) -> None: + js = json.loads(data) + msg = js["msg"] + if msg == "clear-annotations": + self.layout_view.clear_annotations() + elif msg == "initialize": + self.layout_view.resize(js["width"], js["height"]) + await websocket.send_text(json.dumps({"msg": "initialized"})) + elif msg == "layer-v": + layer_id = js["id"] + vis = js["value"] + for layer in self.layout_view.each_layer(): + if layer.id() == layer_id: + layer.visible = vis + elif msg == "layer-v-all": + vis = js["value"] + for layer in self.layout_view.each_layer(): + layer.visible = vis + elif msg == "mode_select": + self.layout_view.switch_mode(js["mode"]) + elif msg == "mouse_dblclick": + 
self.mouse_event(self.layout_view.send_mouse_double_clicked_event, js) + elif msg == "mouse_enter": + self.layout_view.send_enter_event() + elif msg == "mouse_leave": + self.layout_view.send_leave_event() + elif msg == "mouse_move": + self.mouse_event(self.layout_view.send_mouse_move_event, js) + elif msg == "mouse_pressed": + self.mouse_event(self.layout_view.send_mouse_press_event, js) + elif msg == "mouse_released": + self.mouse_event(self.layout_view.send_mouse_release_event, js) + elif msg == "quit": + return + elif msg == "resize": + self.layout_view.resize(js["width"], js["height"]) + elif msg == "select-mode": + mode = js["value"] + self.layout_view.switch_mode(mode) + elif msg == "select-ruler": + ruler = js["value"] + self.layout_view.set_config("current-ruler-template", str(ruler)) + elif msg == "wheel": + self.wheel_event(self.layout_view.send_wheel_event, js) + + +def get_layer_properties() -> str: + lyp_path = GDSDIR_TEMP / "layers.lyp" + lyp_path = gf.get_active_pdk().layer_views.to_lyp(lyp_path) + return str(lyp_path) + + +def get_layout_view(component: gf.Component) -> lay.LayoutView: + """Returns klayout layout view for a gdsfactory Component.""" + gds_path = GDSDIR_TEMP / f"{component.name}.gds" + component.write_gds(gdspath=str(gds_path)) + layout_view = lay.LayoutView() + layout_view.load_layout(str(gds_path)) + lyp_path = get_layer_properties() + layout_view.load_layer_props(str(lyp_path)) + layout_view.max_hier() + return layout_view diff --git a/gplugins/web/server_jupyter.py b/gplugins/web/server_jupyter.py new file mode 100755 index 00000000..48346213 --- /dev/null +++ b/gplugins/web/server_jupyter.py @@ -0,0 +1,28 @@ +import asyncio + +import uvicorn + +from gplugins.web.main import app + +global jupyter_server +jupyter_server = None + + +def _run(port: int = 8000) -> None: + global jupyter_server + + config = uvicorn.Config(app, port=port) + jupyter_server = uvicorn.Server(config) + loop = asyncio.get_event_loop() + 
loop.create_task(jupyter_server.serve()) + + +def _server_is_running() -> bool: + global jupyter_server + return False if jupyter_server is None else jupyter_server.started + + +def start(port: int = 8000) -> None: + """Start a jupyter_server if it's not already started.""" + if not _server_is_running(): + _run(port=port) diff --git a/gplugins/web/static/client.css b/gplugins/web/static/client.css new file mode 100644 index 00000000..a59ab40a --- /dev/null +++ b/gplugins/web/static/client.css @@ -0,0 +1,97 @@ + +body { + font-family: Helvetica, Arial; + font-size: 12pt; +} + +#layout_canvas { + width: 100%; + height: 100%; +} + +input, select { + background-color: white; + padding: 4pt; + font-size: 12pt; +} + +.checked { + background-color: #303030; + color: white; +} + +.unchecked { + background-color: white; +} + +div .menu-left-frame { + position: absolute; + left: 4pt; + top: 4pt; +} + +div .menu-right-frame { + position: absolute; + right: 4pt; + top: 4pt; +} + +.viewer-panel { + overflow: auto; + display: table; + border-spacing: 0.5rem; + padding: 0; +} + +.viewer-panel-sub { + display: table-row; +} + +.viewer-frame-cell { + position: relative; + display: table-cell; + vertical-align: top; + border: 1px solid #000000; + overflow: hidden; +} + +.viewer-frame { + width: 800px; + height: 500px; + resize: both; + overflow: hidden; + padding: 10px; + padding-top: 40pt; +} + +.viewer-layers-cell { + display: table-cell; + vertical-align: top; + border: 1px solid #000000; +} + +.viewer-layers { + min-width: 20%; + white-space: nowrap; + overflow-y: scroll; + padding: 0.5rem; + height: 500px; + padding: 10px; +} + +.layer-name-cell { + padding-left: 0.5rem; +} + +.layer-visible-cell { + padding-right: 0.5rem; +} + +a { + padding: 5px; + margin: 10px; + display: inline-block; + border: solid; + border-width: 2px; + color: red; +} diff --git a/gplugins/web/static/client.js b/gplugins/web/static/client.js new file mode 100644 index 00000000..929b918d --- /dev/null 
+++ b/gplugins/web/static/client.js @@ -0,0 +1,367 @@ + +// TODO: shouldn't be explicit here .. +// let url = 'ws://localhost:8765/ws'; + +let ws_url = current_url.replace("http://", "ws://"); +ws_url = ws_url.replace("https://", "wss://"); +ws_url += '/ws?' + "variant=" + cell_variant; +let url = ws_url; +console.log(url); + + +var canvas = document.getElementById("layout_canvas"); +var context = canvas.getContext("2d"); + +var message = document.getElementById("message"); + +// HTML5 does not have a resize event, so we need to poll +// to generate events for the canvas resize + +var lastCanvasWidth = 0; +var lastCanvasHeight = 0; + +setInterval(function () { + + var view = document.getElementById('layout-view'); + var w = view.clientWidth; + var h = view.clientHeight; + + if (lastCanvasWidth !== w || lastCanvasHeight !== h) { + + // this avoids flicker: + + var tempCanvas = document.createElement('canvas'); + tempCanvas.width = canvas.width; + tempCanvas.height = canvas.height; + var tempContext = tempCanvas.getContext("2d"); + tempContext.drawImage(context.canvas, 0, 0); + + lastCanvasWidth = w; + lastCanvasHeight = h; + canvas.width = canvas.clientWidth; + canvas.height = canvas.clientHeight; + + context.drawImage(tempContext.canvas, 0, 0); + if (socket.connected) { + socket.send(JSON.stringify({ msg: "resize", width: canvas.width, height: canvas.height })); + } + else { + console.log("Socket is not connected. 
Loading static image data...") + const img = new Image(); + img.src = initial_image_data; + img.onload = () => { + context.drawImage(img, 0, 0, img.width, img.height, // source rectangle + 0, 0, canvas.width, canvas.height); // destination rectangle + } + + // resizes the layer list: + + let layers = document.getElementById("layers"); + + var padding = 10; // padding in pixels + layers.style.height = (h - 2 * padding) + "px"; + + }; + } + +}, 10) + +let socket = new WebSocket(url); +socket.binaryType = "blob"; +var initialized = false; + +// Installs a handler called when the connection is established +socket.onopen = function (evt) { + + var ev = { msg: "initialize", width: canvas.width, height: canvas.height }; + socket.send(JSON.stringify(ev)); + + // Prevents the context menu to show up over the canvas area + canvas.addEventListener('contextmenu', function (evt) { + evt.preventDefault(); + }); + + canvas.addEventListener('mousemove', function (evt) { + sendMouseEvent(canvas, "mouse_move", evt); + evt.preventDefault(); + }, false); + + canvas.addEventListener('click', function (evt) { + sendMouseEvent(canvas, "mouse_click", evt); + evt.preventDefault(); + }, false); + + canvas.addEventListener('dblclick', function (evt) { + sendMouseEvent(canvas, "mouse_dblclick", evt); + evt.preventDefault(); + }, false); + + canvas.addEventListener('mousedown', function (evt) { + sendMouseEvent(canvas, "mouse_pressed", evt); + evt.preventDefault(); + }, false); + + canvas.addEventListener('mouseup', function (evt) { + sendMouseEvent(canvas, "mouse_released", evt); + evt.preventDefault(); + }, false); + + canvas.addEventListener('mouseenter', function (evt) { + sendMouseEvent(canvas, "mouse_enter", evt); + evt.preventDefault(); + }, false); + + canvas.addEventListener('mouseout', function (evt) { + sendMouseEvent(canvas, "mouse_leave", evt); + evt.preventDefault(); + }, false); + + canvas.addEventListener('wheel', function (evt) { + sendWheelEvent(canvas, "wheel", evt); + 
evt.preventDefault(); + }, false); +} + +// Installs a handler for the messages delivered by the web socket +socket.onmessage = function (evt) { + + let data = evt.data; + if (typeof (data) === "string") { + + // For debugging: + // message.textContent = data; + + // incoming messages are JSON objects + js = JSON.parse(data); + if (js.msg == "initialized") { + initialized = true; + } else if (js.msg == "loaded") { + showLayers(js.layers); + showMenu(js.modes, js.annotations); + } + + } else if (initialized) { + + // incoming blob messages are paint events + createImageBitmap(data).then(function (image) { + context.drawImage(image, 0, 0) + }); + + } + +}; + +socket.onclose = evt => console.log(`Closed ${evt.code}`); + +function mouseEventToJSON(canvas, type, evt) { + + var rect = canvas.getBoundingClientRect(); + var x = evt.clientX - rect.left; + var y = evt.clientY - rect.top; + var keys = 0; + if (evt.shiftKey) { + keys += 1; + } + if (evt.ctrlKey) { + keys += 2; + } + if (evt.altKey) { + keys += 4; + } + return { msg: type, x: x, y: y, b: evt.buttons, k: keys }; + +} + +function sendMouseEvent(canvas, type, evt) { + + if (socket.readyState == 1 /*OPEN*/) { + var ev = mouseEventToJSON(canvas, type, evt); + socket.send(JSON.stringify(ev)); + } + +} + +function sendWheelEvent(canvas, type, evt) { + + if (socket.readyState == 1 /*OPEN*/) { + var ev = mouseEventToJSON(canvas, type, evt); + ev.dx = evt.deltaX; + ev.dy = evt.deltaY; + ev.dm = evt.deltaMode; + socket.send(JSON.stringify(ev)); + } + +} + + + +// Updates the layer list +function showMenu(modes, annotations) { + + var modeElement = document.getElementById("modes"); + modeElement.childNodes = new Array(); + + var modeTable = document.createElement("table"); + modeTable.className = "modes-table"; + modeElement.appendChild(modeTable) + + var modeRow = document.createElement("tr"); + modeRow.className = "mode-row-header"; + modeRow.id = "mode-row"; + modeTable.appendChild(modeRow) + + var cell; + var inner; + 
+ modes.forEach(function (m) { + + cell = document.createElement("td"); + cell.className = "mode-cell"; + + var inner = document.createElement("input"); + inner.value = m; + inner.type = "button"; + inner.className = "unchecked"; + inner.onclick = function () { + var modeRow = document.getElementById("mode-row"); + modeRow.childNodes.forEach(function (e) { + e.firstChild.className = "unchecked"; + }); + inner.className = "checked"; + socket.send(JSON.stringify({ msg: "select-mode", value: m })); + }; + + cell.appendChild(inner); + modeRow.appendChild(cell); + + }); + + var menuElement = document.getElementById("menu"); + + var menuTable = document.createElement("table"); + menuTable.className = "menu-table"; + menuElement.appendChild(menuTable) + + var menuRow = document.createElement("tr"); + menuRow.className = "menu-row-header"; + menuTable.appendChild(menuRow) + + cell = document.createElement("td"); + cell.className = "menu-cell"; + menuRow.appendChild(cell); + + var rulersSelect = document.createElement("select"); + rulersSelect.onchange = function () { + socket.send(JSON.stringify({ msg: "select-ruler", value: rulersSelect.selectedIndex })); + }; + cell.appendChild(rulersSelect); + + cell = document.createElement("td"); + cell.className = "menu-cell"; + menuRow.appendChild(cell); + + var clearRulers = document.createElement("input"); + clearRulers.value = "Clear Rulers"; + clearRulers.type = "button"; + clearRulers.onclick = function () { + socket.send(JSON.stringify({ msg: "clear-annotations" })); + }; + cell.appendChild(clearRulers); + + var index = 0; + + annotations.forEach(function (a) { + + var option = document.createElement("option"); + option.value = index; + option.text = a; + + rulersSelect.appendChild(option); + + index += 1; + + }); +} + +// Updates the layer list +function showLayers(layers) { + + var layerElement = document.getElementById("layers"); + layerElement.childNodes = new Array(); + + var layerTable = document.createElement("table"); 
+ layerTable.className = "layer-table"; + layerElement.appendChild(layerTable) + + var cell; + var inner; + var s; + var visibilityCheckboxes = []; + + var layerRow = document.createElement("tr"); + layerRow.className = "layer-row-header"; + + // create a top level entry for resetting/setting all visible flags + + cell = document.createElement("td"); + cell.className = "layer-visible-cell"; + + inner = document.createElement("input"); + inner.type = "checkbox"; + inner.checked = true; + inner.onclick = function () { + var checked = this.checked; + visibilityCheckboxes.forEach(function (cb) { + cb.checked = checked; + }); + socket.send(JSON.stringify({ msg: "layer-v-all", value: checked })); + }; + cell.appendChild(inner); + + layerRow.appendChild(cell); + layerTable.appendChild(layerRow); + + // create table rows for each layer + + layers.forEach(function (l) { + + var layerRow = document.createElement("tr"); + layerRow.className = "layer-row"; + + cell = document.createElement("td"); + cell.className = "layer-visible-cell"; + + inner = document.createElement("input"); + visibilityCheckboxes.push(inner); + inner.type = "checkbox"; + inner.checked = l.v; + inner.onclick = function () { + socket.send(JSON.stringify({ msg: "layer-v", id: l.id, value: this.checked })); + }; + cell.appendChild(inner); + + layerRow.appendChild(cell); + + cell = document.createElement("td"); + cell.className = "layer-color-cell"; + s = "border-style: solid; border-width: " + (l.w < 0 ? 1 : l.w) + "px; border-color: #" + (l.fc & 0xffffff).toString(16) + ";"; + cell.style = s; + layerRow.appendChild(cell); + + inner = document.createElement("div"); + s = "width: 2rem; height: 1em;"; + s += "margin: 1px;"; + s += "background: #" + (l.c & 0xffffff).toString(16) + ";"; + inner.style = s; + cell.appendChild(inner); + + cell = document.createElement("td"); + cell.className = "layer-name-cell"; + cell.textContent = (l.name != 0 ? 
l.name : l.s); + layerRow.appendChild(cell); + + layerTable.appendChild(layerRow); + + }); + +} diff --git a/gplugins/web/templates/client.html.j2 b/gplugins/web/templates/client.html.j2 new file mode 100644 index 00000000..4ad10d6e --- /dev/null +++ b/gplugins/web/templates/client.html.j2 @@ -0,0 +1,32 @@ + + + + + + + + +
+
+
+ + +
+ +
+
+
+
+
+
+
+ +
+
+ + + + + + + diff --git a/gplugins/web/templates/file_browser.html.j2 b/gplugins/web/templates/file_browser.html.j2 new file mode 100644 index 00000000..3bf1757b --- /dev/null +++ b/gplugins/web/templates/file_browser.html.j2 @@ -0,0 +1,27 @@ +{% include 'header.html.j2' %} +{% include 'navbar.html.j2' %} + + + KWeb GDS File Browser + + + + +
+

+{{ message }} +

+
+ +
+

Files available:

+ +

File root: {{ files_root }}

+
+ + + diff --git a/gplugins/web/templates/filewatcher.html.j2 b/gplugins/web/templates/filewatcher.html.j2 new file mode 100644 index 00000000..a4c699dd --- /dev/null +++ b/gplugins/web/templates/filewatcher.html.j2 @@ -0,0 +1,66 @@ +{% include 'header.html.j2' %} +{% include 'navbar.html.j2' %} + + + Folder Watcher + + + +

Folder to watch for changing files

+
+ + + +
+ +
+
{{ output }}
+
+ + +
+

{{ cell_name }}

+ {% if variant %} +

({{ variant }})

+ {% endif %} + +
+
+
+ + +
+ +
+
+
+
+
+
+
+
+ +
+
+ + + + + +{% include 'footer.html.j2' %} diff --git a/gplugins/web/templates/footer.html.j2 b/gplugins/web/templates/footer.html.j2 new file mode 100644 index 00000000..fc1b9604 --- /dev/null +++ b/gplugins/web/templates/footer.html.j2 @@ -0,0 +1,3 @@ + + + diff --git a/gplugins/web/templates/gds_history.html.j2 b/gplugins/web/templates/gds_history.html.j2 new file mode 100644 index 00000000..6b3731aa --- /dev/null +++ b/gplugins/web/templates/gds_history.html.j2 @@ -0,0 +1,10 @@ +{% include 'header.html.j2' %} +{% include 'navbar.html.j2' %} +

gdspaths

+

+{% for component in components %} +{{ component }} +{% endfor %} +

+ +{% include 'footer.html.j2' %} diff --git a/gplugins/web/templates/header.html.j2 b/gplugins/web/templates/header.html.j2 new file mode 100644 index 00000000..7a0a010f --- /dev/null +++ b/gplugins/web/templates/header.html.j2 @@ -0,0 +1,11 @@ + + + + + + + + + Gdsfactory - {{ title | default("Page") }} + + diff --git a/gplugins/web/templates/index.html.j2 b/gplugins/web/templates/index.html.j2 new file mode 100644 index 00000000..d2f34f29 --- /dev/null +++ b/gplugins/web/templates/index.html.j2 @@ -0,0 +1,14 @@ +{% include 'header.html.j2' %} +{% include 'navbar.html.j2' %} + + + Homepage + + +

gdsfactory webapp

+

Pdk explorer

+

Last Component.show()

+

Component.show() list

+

filewatcher

+ + diff --git a/gplugins/web/templates/navbar.html.j2 b/gplugins/web/templates/navbar.html.j2 new file mode 100644 index 00000000..1bdedc42 --- /dev/null +++ b/gplugins/web/templates/navbar.html.j2 @@ -0,0 +1,19 @@ + diff --git a/gplugins/web/templates/pdk.html.j2 b/gplugins/web/templates/pdk.html.j2 new file mode 100644 index 00000000..938ccc7d --- /dev/null +++ b/gplugins/web/templates/pdk.html.j2 @@ -0,0 +1,10 @@ +{% include 'header.html.j2' %} +{% include 'navbar.html.j2' %} +

{{ pdk_name }} PDK explorer

+

+{% for component in components %} +

  • {{ component }}
  • +{% endfor %} +

    + +{% include 'footer.html.j2' %} diff --git a/gplugins/web/templates/viewer.html.j2 b/gplugins/web/templates/viewer.html.j2 new file mode 100644 index 00000000..645e637c --- /dev/null +++ b/gplugins/web/templates/viewer.html.j2 @@ -0,0 +1,44 @@ +{% include 'header.html.j2' %} +{% include 'navbar.html.j2' %} +
    +

    {{ cell_name }}

    + {% if variant %} +

    ({{ variant }})

    + {% endif %} + +
    +
    +
    + + +
    + +
    +
    +
    +
    +
    +
    +
    +

    Parameters

    +
    + + {% for setting_name, setting_value in component.settings.default.items() %} +
    + + {% if setting_name in component.settings.changed %} + + {% else %} + + {% endif %} +
    + {% endfor %} +
    +
    + + +{% include 'footer.html.j2' %} diff --git a/gplugins/widget/__init__.py b/gplugins/widget/__init__.py new file mode 100644 index 00000000..60c7eb45 --- /dev/null +++ b/gplugins/widget/__init__.py @@ -0,0 +1 @@ +"""Jupyter widget to view the layout using klayout python API in jupyter notebooks.""" diff --git a/gplugins/widget/interactive.py b/gplugins/widget/interactive.py new file mode 100644 index 00000000..743fb6ad --- /dev/null +++ b/gplugins/widget/interactive.py @@ -0,0 +1,439 @@ +"""Jupyter widget to view the layout using klayout python API in jupyter notebooks.""" + +try: + from pathlib import Path + from typing import Any + + from ipyevents import Event # type: ignore[import] + from IPython.display import Image as IPImage # type: ignore[import] + from IPython.display import display + from ipytree import Node, Tree # type: ignore[import] + from ipywidgets import ( # type: ignore[import] + Accordion, + AppLayout, + Box, + Button, + HBox, + Image, + Label, + Layout, + Output, + RadioButtons, + Tab, + ToggleButtons, + VBox, + ) + + from gdsfactory.config import CONF + from kfactory.kcell import KCell + from kfactory import kdb, lay + + +except ImportError as e: + print( + "You need install jupyter notebook plugin with `pip install gdsfactory[kfactory]`" + ) + raise e + + +def display_kcell(kc: KCell) -> None: + cell_dup = kc.kcl.dup()[kc.name] + cell_dup.draw_ports() + + match CONF.plotter: + case "widget": + lw = LayoutWidget(cell=cell_dup) + display(lw.widget) + case "image": + lipi = LayoutIPImage(cell=cell_dup) + display(lipi.image) + + +class LayoutImage: + def __init__( + self, + cell: KCell, + layer_properties: str | None = None, + ): + self.layout_view = lay.LayoutView() + self.layout_view.show_layout(cell.kcl, False) + self.layer_properties: Path | None = None + if layer_properties is not None: + self.layer_properties = Path(layer_properties) + if self.layer_properties.exists() and self.layer_properties.is_file(): + self.layer_properties = 
self.layer_properties + self.layout_view.load_layer_props(str(self.layer_properties)) + self.show_cell(cell._kdb_cell) + png_data = self.layout_view.get_screenshot_pixels().to_png_data() + + self.image = Image(value=png_data, format="png") + + def show_cell(self, cell: kdb.Cell) -> None: + self.layout_view.active_cellview().cell = cell + self.layout_view.max_hier() + self.layout_view.resize(800, 600) + self.layout_view.add_missing_layers() + self.layout_view.zoom_fit() + + +class LayoutIPImage: + def __init__( + self, + cell: KCell, + layer_properties: str | None = None, + ): + self.layout_view = lay.LayoutView() + self.layout_view.show_layout(cell.kcl, False) + self.layer_properties: Path | None = None + if layer_properties is not None: + self.layer_properties = Path(layer_properties) + if self.layer_properties.exists() and self.layer_properties.is_file(): + self.layer_properties = self.layer_properties + self.layout_view.load_layer_props(str(self.layer_properties)) + self.show_cell(cell._kdb_cell) + png_data = self.layout_view.get_screenshot_pixels().to_png_data() + self.image = IPImage( + data=png_data, format="png", embed=True, width=800, height=600 + ) + + def show_cell(self, cell: kdb.Cell) -> None: + self.layout_view.active_cellview().cell = cell + self.layout_view.max_hier() + self.layout_view.resize(800, 600) + self.layout_view.add_missing_layers() + self.layout_view.zoom_fit() + + +class LayoutWidget: + def __init__( + self, + cell: KCell, + layer_properties: str | None = None, + hide_unused_layers: bool = True, + with_layer_selector: bool = True, + ): + self.debug = Output() + + self.hide_unused_layers = hide_unused_layers + + self.layout_view = lay.LayoutView() + self.layout_view.show_layout(cell.kcl, False) + self.layer_properties: Path | None = None + if layer_properties is not None: + self.layer_properties = Path(layer_properties) + if self.layer_properties.exists() and self.layer_properties.is_file(): + self.layer_properties = self.layer_properties 
+ self.layout_view.load_layer_props(str(self.layer_properties)) + self.show_cell(cell._kdb_cell) + png_data = self.layout_view.get_screenshot_pixels().to_png_data() + + self.image = Image(value=png_data, format="png") + self.refresh() + scroll_event = Event(source=self.image, watched_events=["wheel"], wait=10) + scroll_event.on_dom_event(self.on_scroll) + + enter_event = Event(source=self.image, watched_events=["mouseenter"]) + leave_event = Event(source=self.image, watched_events=["mouseleave"]) + enter_event.on_dom_event(self.on_mouse_enter) + leave_event.on_dom_event(self.on_mouse_leave) + + self.layout_view.on_image_updated_event = self.refresh # type: ignore[attr-defined] + + mouse_event = Event( + source=self.image, + watched_events=[ + "mousedown", + "mouseup", + "mousemove", + "contextmenu", + ], + wait=10, + throttle_or_debounce="debounce", + prevent_default_action=True, + ) + mouse_event.on_dom_event(self.on_mouse) + + max_height = self.layout_view.viewport_height() + if with_layer_selector: + selector_tabs = self.build_selector(max_height=max_height) + else: + selector_tabs = None + + mode_selector = self.build_modes(max_height) + + def switch_mode(buttons: ToggleButtons) -> None: + self.layout_view.switch_mode(buttons.value) + self.refresh() + + self.widget = AppLayout( + left_sidebar=mode_selector, + center=self.image, + right_sidebar=selector_tabs, + connect_items="top", + justify_items="center", + footer=self.debug, + # footer=mode_selector, + pane_weights=[1, 3, 1], + ) + + def show_cell(self, cell: kdb.Cell) -> None: + self.layout_view.active_cellview().cell = cell + self.layout_view.max_hier() + self.layout_view.resize(800, 600) + self.layout_view.add_missing_layers() + self.layout_view.zoom_fit() + + def button_toggle(self, button: Button) -> None: + button.style.button_color = ( + "transparent" + if (button.style.button_color == button.default_color) + else button.default_color + ) + + CONF.logger.info("button toggle") + for props in 
self.layout_view.each_layer(): + if props == button.layer_props: + props.visible = not props.visible + props.name = button.name + button.layer_props = props + break + self.refresh() + + def build_layer_toggle(self, prop_iter: lay.LayerPropertiesIterator) -> HBox | None: + props = prop_iter.current() + layer_color = f"#{props.eff_fill_color():06x}" + # Would be nice to use LayoutView.icon_for_layer() rather than simple colored box + Layout( + width="5px", + height="20px", + border=f"solid 2px {layer_color}", + display="block", + ) + + # prop_iter.current().visible = False + + image = Image( + value=self.layout_view.icon_for_layer(prop_iter, 50, 25, 1).to_png_data(), + format="png", + width=50, + height=25, + ) + + image_event = Event(source=image, watched_events=["click"]) + _prop = prop_iter.dup() + + def on_layer_click(event: Event) -> None: + _prop.current().visible = not _prop.current().visible + self.layout_view.timer() # type: ignore[attr-defined] + image.value = self.layout_view.icon_for_layer( + _prop, 50, 25, 1 + ).to_png_data() + + self.refresh() + + image_event.on_dom_event(on_layer_click) + + if props.has_children(): + prop_iter = prop_iter.down_first_child() + n_children = prop_iter.num_siblings() + children = [] + for _i in range(n_children): + prop_iter = prop_iter.next() + _layer_toggle = self.build_layer_toggle(prop_iter) + if _layer_toggle is not None: + children.append(_layer_toggle) + + if not children: + return None + layer_label = Accordion([VBox(children)], titles=(props.name,)) + + else: + cell = self.layout_view.active_cellview().cell + if cell.bbox_per_layer(prop_iter.current().layer_index()).empty(): + return None + layer_label = ( + Label(props.name) + if props.name + else Label(f"{props.source_layer}/{props.source_datatype}") + ) + + return HBox([Box([image]), layer_label]) + + def build_modes(self, max_height: float) -> VBox: + def clear(event: Event) -> None: + self.layout_view.clear_annotations() + self.refresh() + + clear_button = 
Button(description="clear annotations") + clear_button.on_click(clear) + + def zoom_fit(even: Event) -> None: + self.layout_view.zoom_fit() + self.refresh() + + zoom_button = Button(description="zoom fit") + zoom_button.on_click(zoom_fit) + + mode_label = Label("mode selection:") + + modes = self.layout_view.mode_names() + tb = ToggleButtons(options=modes) + + def switch_mode(mode: dict[str, Any]) -> None: + self.layout_view.switch_mode(mode["new"]) + self.refresh() + + tb.observe(switch_mode, "value") + return VBox(children=(clear_button, zoom_button, mode_label, tb)) + + def build_cell_selector(self, cell: kdb.Cell) -> Node: + child_cells = [ + self.build_cell_selector( + self.layout_view.active_cellview().layout().cell(_cell) + ) + for _cell in cell.each_child_cell() + ] + + node = Node(cell.name, show_icon=False) + node.observe(self.on_select_cell, "selected") + + for cc in child_cells: + node.add_node(cc) + + return node + + def build_selector(self, max_height: float) -> Tab: + """Builds a widget for toggling layer displays. + + Args: + max_height: Maximum height to set for the widget (likely the height of the pixel buffer). 
+ """ + all_boxes = [] + + prop_iter = self.layout_view.begin_layers() + while not prop_iter.at_end(): + if layer_toggle := self.build_layer_toggle(prop_iter): + all_boxes.append(layer_toggle) + prop_iter.next() + + layout = Layout( + max_height=f"{max_height}px", overflow_y="auto", display="block" + ) + selector = VBox(all_boxes, layout=layout) + + cells: list[RadioButtons | Accordion] = [ + self.build_cell_selector(cell) + for cell in self.layout_view.active_cellview().layout().top_cells() + ] + tree = Tree(cells, multiple_selection=False, stripes=True) + + # For when tabs are implemented + selector_tabs = Tab([selector, tree]) + selector_tabs.set_title(0, "Layers") + selector_tabs.set_title(1, "Cells") + # selector_tabs.titles = ("Layers",) + + return selector_tabs + + def load_layout(self, filepath: str, layer_properties: str | None) -> None: + """Loads a GDS layout. + + Args: + filepath: path for the GDS layout. + layer_properties: Optional path for the layer_properties klayout file (lyp). 
+ """ + self.layout_view.load_layout(filepath) + self.layout_view.max_hier() + if layer_properties: + self.layout_view.load_layer_props(layer_properties) + + def refresh(self) -> None: + self.layout_view.timer() # type: ignore[attr-defined] + png_data = self.layout_view.get_screenshot_pixels().to_png_data() + self.image.value = png_data + self.layout_view.timer() # type: ignore[attr-defined] + + def _get_modifier_buttons(self, event: Event) -> int: + shift = event["shiftKey"] + alt = event["altKey"] + ctrl = event["ctrlKey"] + # meta = event["metaKey"] + + mouse_buttons = event["buttons"] + + buttons = 0 + if shift: + buttons |= lay.ButtonState.ShiftKey + if alt: + buttons |= lay.ButtonState.AltKey + if ctrl: + buttons |= lay.ButtonState.ControlKey + + if mouse_buttons & 1: + buttons |= lay.ButtonState.LeftButton + if mouse_buttons & 2: + buttons |= lay.ButtonState.RightButton + if mouse_buttons & 4: + buttons |= lay.ButtonState.MidButton + + return buttons + + def on_select_cell(self, event: Event) -> None: + self.show_cell( + self.layout_view.active_cellview().layout().cell(event["owner"].name) + ) + + self.layout_view.viewport_height() + + all_boxes = [] + prop_iter = self.layout_view.begin_layers() + while not prop_iter.at_end(): + if layer_toggle := self.build_layer_toggle(prop_iter): + all_boxes.append(layer_toggle) + prop_iter.next() + + tabs = self.widget.right_sidebar + vbox = tabs.children[0] + vbox.children = all_boxes + self.refresh() + + def on_scroll(self, event: Event) -> None: + self.layout_view.timer() # type: ignore[attr-defined] + delta = int(event["deltaY"]) + x = event["relativeX"] + y = event["relativeY"] + buttons = self._get_modifier_buttons(event) + self.layout_view.send_wheel_event(-delta, False, kdb.DPoint(x, y), buttons) + self.refresh() + + def on_mouse(self, event: Event) -> None: + self.layout_view.timer() # type: ignore[attr-defined] + x = event["relativeX"] + y = event["relativeY"] + buttons = self._get_modifier_buttons(event) + + 
match event["event"]: + case "mousedown": + self.layout_view.send_mouse_press_event( + kdb.DPoint(float(x), float(y)), buttons + ) + case "mouseup": + self.layout_view.send_mouse_release_event( + kdb.DPoint(float(x), float(y)), buttons + ) + case "mousemove": + self.layout_view.send_mouse_move_event( + kdb.DPoint(float(x), float(y)), buttons + ) + self.refresh() + self.layout_view.timer() # type: ignore[attr-defined] + + def on_mouse_enter(self, event: Event) -> None: + self.layout_view.timer() # type: ignore[attr-defined] + self.layout_view.send_enter_event() + self.refresh() + + def on_mouse_leave(self, event: Event) -> None: + self.layout_view.timer() # type: ignore[attr-defined] + self.layout_view.send_leave_event() + self.refresh() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..211d27f7 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,198 @@ +# https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html +[build-system] +requires = ["flit_core >=3.2,<4"] +build-backend = "flit_core.buildapi" + +[project] +name="gplugins" +description="gdsfactory plugins" +classifiers = [ + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", +] +version="0.0.1" +authors = [ + {name = "gdsfactory", email = "contact@gdsfactory.com"}, +] +keywords = ["python"] +license = {file = "LICENSE"} +dependencies = [ + "gdsfactory[cad]", +] +readme = "README.md" +requires-python = ">=3.10" + +[project.optional-dependencies] +dev = [ + "pre-commit", + "pytest", + "pytest-cov", + "pytest_regressions", + ] +docs = [ + "jupytext", + "autodoc_pydantic", + "matplotlib", + "jupyter-book==0.15.1", + "sphinx-autodoc-typehints", + "sphinx-click", + ] +database = [ + "sqlalchemy", + "sqlalchemy-utils", + "dagster", + "dagit", + "sqlmodel", + "boto3", + "pymysql" +] + +devsim = [ + "devsim", + "pyvista", + "tidy3d" +] +femwell = [ + "femwell==0.1.5" +] + +gmsh = [ + "gmsh", + "h5py", + 
"mapbox_earcut", + "meshio", + "pygmsh", + "pyvista", + "trimesh", + "shapely" +] +kfactory = [ + "kfactory[git,ipy]==0.7.5" +] +meow = [ + "meow-sim==0.7.1" +] +meshwell = [ + "meshwell==0.0.9" +] +ray = [ + "ray[tune,air]", + "hyperopt" +] +sax = [ + "sax==0.8.8", + "jaxlib", + "jax" +] +tidy3d = [ + "tidy3d==2.3.3" +] + + +[tool.setuptools.packages] +find = {} + +# [project.scripts] +# gplugins = "gplugins.cli:cli" + +[tool.black] +line-length = 88 +target-version = ['py310'] +include = '\.pyi?$' +exclude = ''' # Specify the files/dirs that should be ignored by the black formatter +/( + \.eggs + | \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | env + | _build + | buck-out + | build + | dist +)/ +''' + +[tool.pytest.ini_options] +testpaths = ["gplugins/", "tests"] +# addopts = --tb=no +addopts = '--tb=short' +python_files = ["gplugins/*.py", "notebooks/*.ipynb", "tests/*.py"] +norecursedirs = ["extra/*.py"] + +[tool.flake8] +max-line-length = 88 +max-complexity = 57 +select = ["B","C","E","F","W","T4","B9"] +ignore = [ "E501", "E503", "E722", "W503", "W503", "E203", "B950", "B305", "B018", "B902", "B020", "B905"] +extend-ignore = "RST303" + +exclude = [ + ".git", + "__pycache__", + "lib", + "docs/source/conf.py", + "build", + "dist", + ".ipynb_checkpoints", + ".tox", + "extra", + "deprecated", + ".mypy_cache", + "venv", + "devsim" + ] + +[tool.commitizen] +name = "cz_conventional_commits" +version = "0.1.0" +version_files = [ + "pyproject.toml:version", +] + +[tool.mypy] +python_version = "3.10" +strict = true + +[tool.pylsp-mypy] +enabled = true +live_mode = true +strict = true + +[tool.isort] +multi_line_output = 3 +line_length = 88 +include_trailing_comma = true +# skip = "gplugins/__init__.py" + +[tool.setuptools.package-data] +mypkg = ["*.csv", "*.yaml"] + +[tool.codespell] +ignore-words-list = "te, te/tm, te, ba, fpr, fpr_spacing, ro, nd, donot, schem" + +[tool.pydocstyle] +inherit = false +match = "(?!test).*\\.py" +add-ignore = 
["D100","D101","D102","D103","D104","D203","D405","D417"] +convention = "google" + +[tool.ruff] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "C", # flake8-comprehensions + "B", # flake8-bugbear +] +ignore = [ + "E501", # line too long, handled by black + "B008", # do not perform function calls in argument defaults + "C901", # too complex + "B905", # `zip()` without an explicit `strict=` parameter + "C408", # C408 Unnecessary `dict` call (rewrite as a literal) +]