From c7ae708806a4b163f6416dfae384530e4427029e Mon Sep 17 00:00:00 2001 From: Nick Bryan Date: Mon, 28 Feb 2022 23:02:22 -0800 Subject: [PATCH 1/4] adding initial docker for mac setup to try linux plugins on mac --- Dockerfile.audio | 81 ++++++++++++++++++++++++++++ README.md | 129 +++++++++++++++++++++++++++++++++++++++++++++ docker/client.conf | 37 +++++++++++++ docker/daemon.conf | 86 ++++++++++++++++++++++++++++++ docker/default.pa | 26 +++++++++ 5 files changed, 359 insertions(+) create mode 100644 Dockerfile.audio create mode 100644 docker/client.conf create mode 100644 docker/daemon.conf create mode 100644 docker/default.pa diff --git a/Dockerfile.audio b/Dockerfile.audio new file mode 100644 index 0000000..e0cd633 --- /dev/null +++ b/Dockerfile.audio @@ -0,0 +1,81 @@ +# Pulseaudio +# +# docker run -d \ +# -v /etc/localtime:/etc/localtime:ro \ +# --device /dev/snd \ +# --name pulseaudio \ +# -p 4713:4713 \ +# -v /var/run/dbus:/var/run/dbus \ +# -v /etc/machine-id:/etc/machine-id \ +# jess/pulseaudio +# +FROM ubuntu:18.04 +LABEL maintainer "Jessie Frazelle " + +RUN apt-get update && apt-get install -y \ + alsa-utils \ + libasound2 \ + libasound2-plugins \ + pulseaudio \ + pulseaudio-utils \ + curl \ + gnupg \ + --no-install-recommends \ + && rm -rf /var/lib/apt/lists/* + +# LV2 plugin installs +RUN apt-get update && apt-get install apt-utils -y && apt-get install pkg-config -y && apt-get install wget -y \ + && DEBIAN_FRONTEND=noninteractive apt-get install ubuntustudio-audio-plugins -y && apt-get install libsndfile-dev -y \ + && wget https://sourceforge.net/projects/lsp-plugins/files/lsp-plugins/1.1.19/Linux-x86_64/lsp-plugins-lv2-1.1.19-Linux-x86_64.tar.gz -P /home/code-base/ \ + && tar -C /home/code-base/ -xvf /home/code-base/lsp-plugins-lv2-1.1.19-Linux-x86_64.tar.gz \ + && cp -rf /home/code-base/lsp-plugins-lv2-1.1.19-Linux-x86_64/usr/local/lib/lv2/lsp-plugins.lv2 /usr/lib/lv2/ \ + && rm -rf /home/code-base/lsp-plugins-lv2-1.1.19-Linux-x86_64.tar.gz \ + && rm -rf /home/code-base/lsp-plugins-lv2-1.1.19-Linux-x86_64 \ + && apt-get install dh-autoreconf -y \ + && apt-get install meson -y + +# Install lilv from source and other tools +RUN apt-get update && apt-get install lv2proc -y \ + && apt-get install lilv-utils -y \ + && apt-get install lv2-dev -y \ + && apt-get install liblilv-dev -y \ + && apt-get install audacity -y + +# Install REAPER +RUN apt-get update && DEBIAN_FRONTEND=noninteractive \ + && wget https://www.reaper.fm/files/6.x/reaper649_linux_x86_64.tar.xz \ + && tar -xf reaper649_linux_x86_64.tar.xz \ + && cd reaper_linux_x86_64 \ + && ./install-reaper.sh --install /opt --integrate-desktop --usr-local-bin-symlink --quiet + +####### miniconda +ENV PATH /opt/conda/bin:$PATH +RUN wget --quiet https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda.sh +RUN /bin/bash ~/miniconda.sh -b -p /opt/conda +RUN rm ~/miniconda.sh +RUN ln -s /opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh +RUN echo ". 
/opt/conda/etc/profile.d/conda.sh" >> ~/.bashrc +RUN echo "conda activate base" >> ~/.bashrc + +RUN apt-get update && DEBIAN_FRONTEND=noninteractive \ + && wget https://github.com/falkTX/Carla/releases/download/v2.2.0/Carla_2.2.0-linux64.tar.xz \ + && tar -xf Carla_2.2.0-linux64.tar.xz + +# See https://github.com/jessfraz/dockerfiles/issues/253#issuecomment-313995830 +RUN apt-get update && apt-get install -y mesa-utils libgl1-mesa-glx + + +ENV HOME /home/pulseaudio +RUN useradd --create-home --home-dir $HOME pulseaudio \ + && usermod -aG audio,pulse,pulse-access pulseaudio \ + && chown -R pulseaudio:pulseaudio $HOME + +WORKDIR $HOME +USER pulseaudio + +COPY docker/default.pa /etc/pulse/default.pa +COPY docker/client.conf /etc/pulse/client.conf +COPY docker/daemon.conf /etc/pulse/daemon.conf + +ENTRYPOINT [ "pulseaudio" ] +CMD [ "--log-level=4", "--log-target=stderr", "-v" ] diff --git a/README.md b/README.md index 2a979e2..2a0c906 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,132 @@ ## msynth We are using a python package. Therefore, please run : pip install -e msynth , in the directory that is the parent of the msynth folder. + + + +## Developer + + +### Install pip package + +``` +# Create a clean conda env +conda create -n msynth python=3.7 +conda activate msynth + +cd + +# Install package +pip install -e . + +``` + + +## Run Linux Audio on MacOS + +* Install [Docker Desktop](https://www.docker.com/products/docker-desktop) +* Install X11 for macOS via [X Quartz](https://github.com/XQuartz/XQuartz/releases/download/XQuartz-2.8.1/XQuartz-2.8.1.dmg) +* Install PulseAudio via homebrew, start the PulseAudio deamon, and verify status: + +``` +# Install +brew install pulseaudio + +# Start daemon +pulseaudio --load=module-native-protocol-tcp --exit-idle-time=-1 --daemon + +# Check pulse audio status +pulseaudio --check -v +``` + +* Configure PulseAudio output via: + +``` +# List default outputs for pulse audio +pacmd list-sinks + +# Set the default output +pacmd set-default-sink +``` + +* Try to play an audio file via: + +``` +paplay -p .wav +``` + +* Build the docker audio image with tag name lv2 + +``` +cd +docker build --file Dockerfile.audio -t lv2 . 
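+
+# Optionally verify the build succeeded (assumes the image tag "lv2" used above)
+docker images lv2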
+``` + +* Start Xquartz + +``` +open -a Xquartz +``` + +* Configure Xquartz to "Allow connections from network clients" +* Attempt to allow OpenGL X11 forwarding (doesn't work for me yet) + +``` +defaults write `quartz-wm --help | awk '/default:.*X11/ { gsub(/\)/, "", $2); print $2}'` enable_iglx -bool true +``` +* Whitelist localhost for X11 network connections via paste below on command line + +``` +/opt/X11/bin/xhost + "127.0.0.1" +``` + +* Run (and ssh into) your image + +``` +# Basic +docker run -it -e PULSE_SERVER=docker.for.mac.localhost -e DISPLAY=host.docker.internal:0 -v ~/.config/pulse:/home/pulseaudio/.config/pulse -v ~/Desktop:/opt/Desktop --entrypoint /bin/bash --rm -u 0 lv2 +``` + +* Run and ssh into your image and do anything you want + +``` +docker run -it -e PULSE_SERVER=docker.for.mac.localhost \ + -e DISPLAY=host.docker.internal:0 \ + -v ~/.config/pulse:/home/pulseaudio/.config/pulse \ + -v ~/:/opt/mac --entrypoint /bin/bash -u 0 lv2 +``` + +* Try running reaper within your Linux container on mac via + +``` +reaper +``` + +* Create a container called "audacity" that runs audacity directly + +``` +docker run -it -e PULSE_SERVER=docker.for.mac.localhost \ + -e DISPLAY=host.docker.internal:0 \ + -v ~/.config/pulse:/home/pulseaudio/.config/pulse \ + -v ~/:/opt/mac --entrypoint audacity -u 0 --name audacity lv2 +``` + +* Start your container again + +``` +docker start audacity +``` + + diff --git a/docker/client.conf b/docker/client.conf new file mode 100644 index 0000000..cb5551d --- /dev/null +++ b/docker/client.conf @@ -0,0 +1,37 @@ +# This file is part of PulseAudio. +# +# PulseAudio is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# PulseAudio is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with PulseAudio; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 +# USA. + +## Configuration file for PulseAudio clients. See pulse-client.conf(5) for +## more information. Default values are commented out. Use either ; or # for +## commenting. + +; default-sink = +; default-source = +; default-server = +; default-dbus-server = + +autospawn = no +daemon-binary = /bin/true +; extra-arguments = --log-target=syslog + +; cookie-file = + +; enable-shm = yes +; shm-size-bytes = 0 # setting this 0 will use the system-default, usually 64 MiB + +; auto-connect-localhost = no +; auto-connect-display = no diff --git a/docker/daemon.conf b/docker/daemon.conf new file mode 100644 index 0000000..07b0a85 --- /dev/null +++ b/docker/daemon.conf @@ -0,0 +1,86 @@ +# This file is part of PulseAudio. +# +# PulseAudio is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# PulseAudio is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU +# General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public License +# along with PulseAudio; if not, see . + +## Configuration file for the PulseAudio daemon. See pulse-daemon.conf(5) for +## more information. Default values are commented out. Use either ; or # for +## commenting. + +; daemonize = no +; fail = yes +; allow-module-loading = yes +; allow-exit = yes +; use-pid-file = yes +; system-instance = no +; local-server-type = user +; enable-shm = yes +; shm-size-bytes = 0 # setting this 0 will use the system-default, usually 64 MiB +; lock-memory = no +; cpu-limit = no + +; high-priority = yes +; nice-level = -11 + +; realtime-scheduling = yes +; realtime-priority = 5 + +exit-idle-time = 180 +; scache-idle-time = 20 + +; dl-search-path = (depends on architecture) + +; load-default-script-file = yes +; default-script-file = /etc/pulse/default.pa + +; log-target = auto +; log-level = notice +; log-meta = no +; log-time = no +; log-backtrace = 0 + +; resample-method = speex-float-1 +; enable-remixing = yes +; enable-lfe-remixing = no + +flat-volumes = yes + +; rlimit-fsize = -1 +; rlimit-data = -1 +; rlimit-stack = -1 +; rlimit-core = -1 +; rlimit-as = -1 +; rlimit-rss = -1 +; rlimit-nproc = -1 +; rlimit-nofile = 256 +; rlimit-memlock = -1 +; rlimit-locks = -1 +; rlimit-sigpending = -1 +; rlimit-msgqueue = -1 +; rlimit-nice = 31 +; rlimit-rtprio = 9 +; rlimit-rttime = 200000 + +; default-sample-format = s16le +; default-sample-rate = 44100 +; alternate-sample-rate = 48000 +; default-sample-channels = 2 +; default-channel-map = front-left,front-right + +; default-fragments = 4 +; default-fragment-size-msec = 25 + +; enable-deferred-volume = yes +; deferred-volume-safety-margin-usec = 8000 +; deferred-volume-extra-delay-usec = 0 + diff --git a/docker/default.pa b/docker/default.pa new file mode 100644 index 0000000..699a0df --- /dev/null +++ b/docker/default.pa @@ -0,0 +1,26 @@ +# Replace the *entire* content of this file with these few lines and +# read the comments + +.fail + # Set tsched=0 here if you experience glitchy playback. This will + # revert back to interrupt-based scheduling and should fix it. + # + # Replace the device= part if you want pulse to use a specific device + # such as "dmix" and "dsnoop" so it doesn't lock an hw: device. + + # INPUT/RECORD + load-module module-alsa-source device="default" tsched=1 + + # OUTPUT/PLAYBACK + load-module module-alsa-sink device="default" tsched=1 + + # Accept clients -- very important + load-module module-native-protocol-unix + load-module module-native-protocol-tcp auth-ip-acl=127.0.0.1;192.168.0.0/24 auth-anonymous=1 + +.nofail +.ifexists module-x11-publish.so + # Publish to X11 so the clients know how to connect to Pulse. Will + # clear itself on unload. 
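+  # Publishing requires a reachable X11 display inside the container
+  # (e.g. DISPLAY=host.docker.internal:0 as used in the README); a failure
+  # here is tolerated because of the .nofail directive above.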
+ load-module module-x11-publish +.endif From a7a6ad265e8c297f0a4c4ae62d3741c2002739cf Mon Sep 17 00:00:00 2001 From: Nick Bryan Date: Tue, 1 Mar 2022 16:44:59 -0800 Subject: [PATCH 2/4] adding ignore --- .DS_Store | Bin 10244 -> 0 bytes .gitignore | 130 +++++++++++++++++++++- Dockerfile.audio => Dockerfile | 0 msynth.egg-info/PKG-INFO | 12 -- msynth/__pycache__/version.cpython-37.pyc | Bin 176 -> 0 bytes 5 files changed, 128 insertions(+), 14 deletions(-) delete mode 100644 .DS_Store rename Dockerfile.audio => Dockerfile (100%) delete mode 100644 msynth.egg-info/PKG-INFO delete mode 100644 msynth/__pycache__/version.cpython-37.pyc diff --git a/.DS_Store b/.DS_Store deleted file mode 100644 index 4ecfb60fedb6a917b27fab266da30918006b5a22..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 10244 zcmeHMU2GIp6h3Du?F>J5iu}z&=}H9(w$LK&7Wvz@%TM_cWJ_BLQf7CCcEavX*_mxA zwWcN-U&N@97$Gql9!wB<(kO|+2hb?SpBRi1g8_Wt(P%W$7vs72&bGVTE$}4%%xvbK znS0Ked+&F?J7?})0Kn$7UI!2W0Etc}m1$HIIGCQD;|`x&6CjD?5AmM5NX$y~jkEMB zJOn%hJOn%hJOn%h{wWAh&Sr-+RY-l%K?Pp|kSwBV ze$chTE=UGk8FJ-_kffmlg)2$nitZ8vg*(*;T)U7fM}!pafbQZ0-Imc^p&)Oao*&@m zfFU8h(nG*QU_1f=swx5#U? z&DtB7L7o7`kN_J7AxUlbMhEOZ_xYEjeR0!vYHPnm-=w0+#U+w7MVc~w%Hxq`-5O}N(n;Os z&xr_yB*~K8t*ApotAfGmK(KM;aCKm)uD-FcI-9x;y@cBf+;b=XyxZKa$R zZrEv(FRSZTW-$MqeU51*?RFz&la%GRrp+o}xcH&k`bRc|uS~zMR4JPw%UuLp(lHO2 zMrzN1sXM)UH9KY`bu-!1(Qn#jG;XvdwS-aTyCut4X3v>BUG=N;Gpc+SNvV`N(^2zC z+H?ka8w)B_dB09Q-(l&j6SjsW%d}0pPQ9uORS7b17%D@x;!a`dPK9b%51XJ1Vl)K? z;5j%B&%;SL4d>t;xBwr+XYd7l317igxB<7|XZQtvh2P*$EWw#L8|UC$d;n{3Ij+D) zT!$MlgiW{=cj9BX8zZTdgiqsfd;wp?m+%yx!L#@_p2PR>eLRmB z@e+QHU*q?phrSG@cZmB;iPz{VXX3`~es|*f4iuuSf79kI;d?^atJ4He@1HqqL1k6V z%HZ0j(Rk)XFq=x8KY}2fI2mH_5azIv#a#8zlglS!^d91!aa3aDQA&h$z`sx;z9^h9 zs(+DxiA1DUd|WFp^OL0}DU*cp3b{@qk}8vhc9q;96OEN3pBd@gAr`QcI?2t#K9QnU@soQ6grqDE{@<+#KdRtS(l41;|Y8XPvo%RnzYP~}{32SU*ckh)mRtQn&K&F0Ml!6V2 zlCB03O*KR~e?oNK(o#fy2XMkgS>wj6tpEQ4 DrO~Vn diff --git a/.gitignore b/.gitignore index e9d787c..646e246 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,128 @@ -tests/rendered_audio -soundfonts/piano.sf2 +# https://github.com/github/gitignore/blob/master/Python.gitignore +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# Mac / Jetbrain specific ignores +.idea +*.swp +*.DS_Store +src/models + +# C extensions +*.so + +# Distribution / packaging +.Python +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 
+# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/Dockerfile.audio b/Dockerfile similarity index 100% rename from Dockerfile.audio rename to Dockerfile diff --git a/msynth.egg-info/PKG-INFO b/msynth.egg-info/PKG-INFO deleted file mode 100644 index 309b243..0000000 --- a/msynth.egg-info/PKG-INFO +++ /dev/null @@ -1,12 +0,0 @@ -Metadata-Version: 2.1 -Name: msynth -Version: 0.0.1 -Summary: -Home-page: TBD -Author: -Author-email: -License: UNKNOWN -Description: ## msynth -Keywords: -Platform: UNKNOWN -Description-Content-Type: text/markdown diff --git a/msynth/__pycache__/version.cpython-37.pyc b/msynth/__pycache__/version.cpython-37.pyc deleted file mode 100644 index 5a5ceb51cff27b677a7ea592c77816ed04b8c2f7..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 176 zcmZ?b<>g`kg1O%%lCpvHV-N=h7=a82ATAaF5-AK(3@MCJj44b}OexI444N!eJYlIt z#hLke3YmFn`BlsYdInXjKtj*ZPm}o;dwhIKesX+#6mM}xeo;w$8AR Date: Tue, 1 Mar 2022 21:44:04 -0800 Subject: [PATCH 3/4] adding supervisord config --- docker/supervisord.conf | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 docker/supervisord.conf diff --git a/docker/supervisord.conf b/docker/supervisord.conf new file mode 100644 index 0000000..4e6af18 --- /dev/null +++ b/docker/supervisord.conf @@ -0,0 +1,28 @@ +[supervisord] +nodaemon=true +logfile=/var/log/supervisord/supervisord.log +childlogdir=/var/log/ + + +[program:jupyter] +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +command=/bin/bash -c "jupyter lab --ip=0.0.0.0 --no-browser --allow-root --config=/root/.jupyter/jupyter_notebook_config.py" + + +[program:code-server] +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +command=/bin/bash -c "code-server --bind-addr 0.0.0.0:8887" + + +[program:pulse] +stdout_logfile=/dev/stdout +stdout_logfile_maxbytes=0 +stderr_logfile=/dev/stderr +stderr_logfile_maxbytes=0 +command=/bin/bash -c "pulseaudio --system" \ No newline at end of file From 83a844a4a5e4bed4746c4ab1099618fe9a3a6b5f Mon Sep 17 00:00:00 2001 From: Nick Bryan Date: Wed, 2 Mar 2022 10:57:32 -0800 Subject: [PATCH 4/4] adding jupyter and code-server ide options --- Dockerfile | 79 ++- README.md | 77 +-- docker-compose.yml | 23 + docker/code-server-config.yaml | 5 + docker/jupyter_notebook_config.py | 905 ++++++++++++++++++++++++++++++ docker/settings.json | 7 + docker/supervisord.conf | 16 - 7 files changed, 1016 insertions(+), 96 deletions(-) create mode 100644 docker-compose.yml create mode 100644 docker/code-server-config.yaml create mode 100644 docker/jupyter_notebook_config.py create mode 100644 docker/settings.json diff --git a/Dockerfile b/Dockerfile index e0cd633..3ae9b09 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,16 +1,5 @@ -# Pulseaudio -# -# docker run -d \ -# -v /etc/localtime:/etc/localtime:ro \ -# --device /dev/snd \ -# --name pulseaudio \ -# 
-p 4713:4713 \ -# -v /var/run/dbus:/var/run/dbus \ -# -v /etc/machine-id:/etc/machine-id \ -# jess/pulseaudio -# -FROM ubuntu:18.04 -LABEL maintainer "Jessie Frazelle " +FROM ubuntu:20.04 +LABEL maintainer "Nick Bryan " RUN apt-get update && apt-get install -y \ alsa-utils \ @@ -26,11 +15,10 @@ RUN apt-get update && apt-get install -y \ # LV2 plugin installs RUN apt-get update && apt-get install apt-utils -y && apt-get install pkg-config -y && apt-get install wget -y \ && DEBIAN_FRONTEND=noninteractive apt-get install ubuntustudio-audio-plugins -y && apt-get install libsndfile-dev -y \ - && wget https://sourceforge.net/projects/lsp-plugins/files/lsp-plugins/1.1.19/Linux-x86_64/lsp-plugins-lv2-1.1.19-Linux-x86_64.tar.gz -P /home/code-base/ \ - && tar -C /home/code-base/ -xvf /home/code-base/lsp-plugins-lv2-1.1.19-Linux-x86_64.tar.gz \ - && cp -rf /home/code-base/lsp-plugins-lv2-1.1.19-Linux-x86_64/usr/local/lib/lv2/lsp-plugins.lv2 /usr/lib/lv2/ \ - && rm -rf /home/code-base/lsp-plugins-lv2-1.1.19-Linux-x86_64.tar.gz \ - && rm -rf /home/code-base/lsp-plugins-lv2-1.1.19-Linux-x86_64 \ + && wget https://sourceforge.net/projects/lsp-plugins/files/lsp-plugins/1.1.19/Linux-x86_64/lsp-plugins-lv2-1.1.19-Linux-x86_64.tar.gz -P /home/temp/ \ + && tar -C /home/temp/ -xvf /home/temp/lsp-plugins-lv2-1.1.19-Linux-x86_64.tar.gz \ + && cp -rf /home/temp/lsp-plugins-lv2-1.1.19-Linux-x86_64/usr/local/lib/lv2/lsp-plugins.lv2 /usr/lib/lv2/ \ + && rm -rf /home/temp \ && apt-get install dh-autoreconf -y \ && apt-get install meson -y @@ -46,7 +34,9 @@ RUN apt-get update && DEBIAN_FRONTEND=noninteractive \ && wget https://www.reaper.fm/files/6.x/reaper649_linux_x86_64.tar.xz \ && tar -xf reaper649_linux_x86_64.tar.xz \ && cd reaper_linux_x86_64 \ - && ./install-reaper.sh --install /opt --integrate-desktop --usr-local-bin-symlink --quiet + && ./install-reaper.sh --install /opt --integrate-desktop --usr-local-bin-symlink --quiet \ + && rm -rf reaper_linux_x86_64 \ + && rm -rf reaper649_linux_x86_64.tar.xz ####### miniconda ENV PATH /opt/conda/bin:$PATH @@ -57,25 +47,56 @@ RUN ln -s /opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh RUN echo ". 
/opt/conda/etc/profile.d/conda.sh" >> ~/.bashrc RUN echo "conda activate base" >> ~/.bashrc -RUN apt-get update && DEBIAN_FRONTEND=noninteractive \ - && wget https://github.com/falkTX/Carla/releases/download/v2.2.0/Carla_2.2.0-linux64.tar.xz \ - && tar -xf Carla_2.2.0-linux64.tar.xz +# Install Carla +# RUN apt-get update && DEBIAN_FRONTEND=noninteractive \ +# && wget https://github.com/falkTX/Carla/releases/download/v2.2.0/Carla_2.2.0-linux64.tar.xz \ +# && tar -xf Carla_2.2.0-linux64.tar.xz +# Attempt to install OpenGL stuff # See https://github.com/jessfraz/dockerfiles/issues/253#issuecomment-313995830 RUN apt-get update && apt-get install -y mesa-utils libgl1-mesa-glx -ENV HOME /home/pulseaudio -RUN useradd --create-home --home-dir $HOME pulseaudio \ - && usermod -aG audio,pulse,pulse-access pulseaudio \ - && chown -R pulseaudio:pulseaudio $HOME +# Supervisor setup +RUN apt-get update && apt-get install -y supervisor openssh-client +RUN mkdir -p /var/log/supervisord +COPY docker/supervisord.conf /etc/supervisor/conf.d/supervisord.conf +COPY docker/jupyter_notebook_config.py /root/.jupyter/jupyter_notebook_config.py + +# Upgrade pip +RUN /bin/bash -c "python -m pip install --upgrade pip" + +# Install the JupyterLab IDE +RUN /bin/bash -c "pip install jupyterlab" +# ####### INSTALL CODE SERVER +# # via https://github.com/cdr/code-server/issues/2341#issuecomment-740892890 +# RUN /bin/bash -c "curl -fL https://github.com/cdr/code-server/releases/download/v3.8.0/code-server-3.8.0-linux-amd64.tar.gz | tar -C /usr/local/bin -xz" +# RUN /bin/bash -c "mv /usr/local/bin/code-server-3.8.0-linux-amd64 /usr/local/bin/code-server-3.8.0" +# RUN /bin/bash -c "ln -s /usr/local/bin/code-server-3.8.0/bin/code-server /usr/local/bin/code-server" +# # Install Python extension +# RUN /bin/bash -c "wget https://github.com/microsoft/vscode-python/releases/download/2020.10.332292344/ms-python-release.vsix \ +# && code-server --install-extension ./ms-python-release.vsix || true" +# # Install C++ extension +# RUN /bin/bash -c "wget https://github.com/microsoft/vscode-cpptools/releases/download/1.1.3/cpptools-linux.vsix \ +# && code-server --install-extension ./cpptools-linux.vsix || true" +# # Set VS Code password to None +# #RUN /bin/bash -c "sed -i.bak 's/auth: password/auth: none/' ~/.config/code-server/config.yaml" +# COPY docker/code-server-config.yaml /root/.config/code-server/config.yaml +# # Fix broken python plugin # https://github.com/cdr/code-server/issues/2341 +# RUN /bin/bash -c "mkdir -p ~/.local/share/code-server/ && mkdir -p ~/.local/share/code-server/User" +# COPY docker/settings.json /root/.local/share/code-server/User/settings.json +# ####### DONE INSTALL CODE SERVER + +ENV HOME /home +RUN usermod -aG audio,pulse,pulse-access root \ + && chown -R root:root $HOME WORKDIR $HOME -USER pulseaudio COPY docker/default.pa /etc/pulse/default.pa COPY docker/client.conf /etc/pulse/client.conf COPY docker/daemon.conf /etc/pulse/daemon.conf -ENTRYPOINT [ "pulseaudio" ] -CMD [ "--log-level=4", "--log-target=stderr", "-v" ] +EXPOSE 8080 8888 8887 443 + +ENTRYPOINT ["/usr/bin/supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf"] diff --git a/README.md b/README.md index 2a0c906..3182bcd 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ We are using a python package. Therefore, please run : pip install -e msynth , i ``` # Create a clean conda env -conda create -n msynth python=3.7 +conda create -n msynth python=3.7 -y conda activate msynth cd @@ -22,7 +22,8 @@ pip install -e . 
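 
 # Optionally confirm the editable install resolves
 # (assumes the package name "msynth" provided by this repo)
 python -c "import msynth; print(msynth.__file__)"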
``` -## Run Linux Audio on MacOS + +## Linux Development on MacOS * Install [Docker Desktop](https://www.docker.com/products/docker-desktop) * Install X11 for macOS via [X Quartz](https://github.com/XQuartz/XQuartz/releases/download/XQuartz-2.8.1/XQuartz-2.8.1.dmg) @@ -55,13 +56,6 @@ pacmd set-default-sink paplay -p .wav ``` -* Build the docker audio image with tag name lv2 - -``` -cd -docker build --file Dockerfile.audio -t lv2 . -``` - * Start Xquartz ``` @@ -69,64 +63,45 @@ open -a Xquartz ``` * Configure Xquartz to "Allow connections from network clients" -* Attempt to allow OpenGL X11 forwarding (doesn't work for me yet) + + + * Whitelist localhost for X11 network connections via paste below on command line ``` /opt/X11/bin/xhost + "127.0.0.1" ``` -* Run (and ssh into) your image - -``` -# Basic -docker run -it -e PULSE_SERVER=docker.for.mac.localhost -e DISPLAY=host.docker.internal:0 -v ~/.config/pulse:/home/pulseaudio/.config/pulse -v ~/Desktop:/opt/Desktop --entrypoint /bin/bash --rm -u 0 lv2 -``` - -* Run and ssh into your image and do anything you want +* Build the docker image into a container +docker-compose up --build -d +* Specify a shared data directory between your local machine and docker to share data ``` -docker run -it -e PULSE_SERVER=docker.for.mac.localhost \ - -e DISPLAY=host.docker.internal:0 \ - -v ~/.config/pulse:/home/pulseaudio/.config/pulse \ - -v ~/:/opt/mac --entrypoint /bin/bash -u 0 lv2 +export MSYNTH= ``` -* Try running reaper within your Linux container on mac via - -``` -reaper -``` - -* Create a container called "audacity" that runs audacity directly - +* Run an existing image ``` -docker run -it -e PULSE_SERVER=docker.for.mac.localhost \ - -e DISPLAY=host.docker.internal:0 \ - -v ~/.config/pulse:/home/pulseaudio/.config/pulse \ - -v ~/:/opt/mac --entrypoint audacity -u 0 --name audacity lv2 +docker-compose up ``` -* Start your container again +* Open your IDE of choice + * For Jupyter, open a web browser at http://127.0.0.1:8888 (password is dsp). + * For VS Code, install the Docker extension, then attach to the msyth container and use VS Code. + * For command line SSH access, open a second terminal, find the running container id, and enter it + ``` + docker container ls + docker exec -it bash + ``` +* Run Linux applications with audio and graphics forwarding. -``` -docker start audacity -``` - - +audacity +reaper +``` \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..4d8a9a7 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,23 @@ +version: "3.9" +services: + dev: + build: . 
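+    # Assumes $MSYNTH_DATA is exported on the host shell for the data volume
+    # below; port 8888 serves JupyterLab and 8887 code-server, as configured
+    # in docker/supervisord.conf.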
+ ports: + - 8887:8887 + - 8888:8888 + - 22700:22700 + - 8896:8896 + - 6006:6006 + tty: true + privileged: true + volumes: + - $PWD:/home/code + - $MSYNTH_DATA/:/home/data + - $HOME/.config/pulse:/home/.config/pulse + - $HOME/:/home/mac + environment: + - PULSE_SERVER=docker.for.mac.localhost + - DISPLAY=host.docker.internal:0 + user: "0" +volumes: + data: diff --git a/docker/code-server-config.yaml b/docker/code-server-config.yaml new file mode 100644 index 0000000..9872bf8 --- /dev/null +++ b/docker/code-server-config.yaml @@ -0,0 +1,5 @@ +bind-addr: 127.0.0.1:8877 +auth: password +cert: false +password: stuff +hashed-password: 77909277a1ffc4b06a39d95f00d94b495a5c099351155c60d552c4dc8822cf9a diff --git a/docker/jupyter_notebook_config.py b/docker/jupyter_notebook_config.py new file mode 100644 index 0000000..5a59b34 --- /dev/null +++ b/docker/jupyter_notebook_config.py @@ -0,0 +1,905 @@ +# Configuration file for jupyter-notebook. + +#------------------------------------------------------------------------------ +# Application(SingletonConfigurable) configuration +#------------------------------------------------------------------------------ + +## This is an application. + +## The date format used by logging formatters for %(asctime)s +#c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S' + +## The Logging format template +#c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s' + +## Set the log level by value or name. +#c.Application.log_level = 30 + +#------------------------------------------------------------------------------ +# JupyterApp(Application) configuration +#------------------------------------------------------------------------------ + +## Base class for Jupyter applications + +## Answer yes to any prompts. +#c.JupyterApp.answer_yes = False + +## Full path of a config file. +#c.JupyterApp.config_file = '' + +## Specify a config file to load. +#c.JupyterApp.config_file_name = '' + +## Generate default config file. +#c.JupyterApp.generate_config = False + +#------------------------------------------------------------------------------ +# NotebookApp(JupyterApp) configuration +#------------------------------------------------------------------------------ + +## Set the Access-Control-Allow-Credentials: true header +#c.NotebookApp.allow_credentials = False + +## Set the Access-Control-Allow-Origin header +# +# Use '*' to allow any origin to access your server. +# +# Takes precedence over allow_origin_pat. +#c.NotebookApp.allow_origin = '' + +## Use a regular expression for the Access-Control-Allow-Origin header +# +# Requests from an origin matching the expression will get replies with: +# +# Access-Control-Allow-Origin: origin +# +# where `origin` is the origin of the request. +# +# Ignored if allow_origin is set. +#c.NotebookApp.allow_origin_pat = '' + +## Allow password to be changed at login for the notebook server. +# +# While loggin in with a token, the notebook server UI will give the opportunity +# to the user to enter a new password at the same time that will replace the +# token login mechanism. +# +# This can be set to false to prevent changing password from the UI/API. +#c.NotebookApp.allow_password_change = True + +## Allow requests where the Host header doesn't point to a local server +# +# By default, requests get a 403 forbidden response if the 'Host' header shows +# that the browser thinks it's on a non-local domain. Setting this option to +# True disables this check. 
+# +# This protects against 'DNS rebinding' attacks, where a remote web server +# serves you a page and then changes its DNS to send later requests to a local +# IP, bypassing same-origin checks. +# +# Local IP addresses (such as 127.0.0.1 and ::1) are allowed as local, along +# with hostnames configured in local_hostnames. +#c.NotebookApp.allow_remote_access = False + +## Whether to allow the user to run the notebook as root. +#c.NotebookApp.allow_root = False + +## Reload the webapp when changes are made to any Python src files. +#c.NotebookApp.autoreload = False + +## DEPRECATED use base_url +#c.NotebookApp.base_project_url = '/' + +## The base URL for the notebook server. +# +# Leading and trailing slashes can be omitted, and will automatically be added. +#c.NotebookApp.base_url = '/' + +## Specify what command to use to invoke a web browser when opening the notebook. +# If not specified, the default browser will be determined by the `webbrowser` +# standard library module, which allows setting of the BROWSER environment +# variable to override it. +#c.NotebookApp.browser = '' + +## The full path to an SSL/TLS certificate file. +#c.NotebookApp.certfile = '' + +## The full path to a certificate authority certificate for SSL/TLS client +# authentication. +#c.NotebookApp.client_ca = '' + +## The config manager class to use +#c.NotebookApp.config_manager_class = 'notebook.services.config.manager.ConfigManager' + +## The notebook manager class to use. +#c.NotebookApp.contents_manager_class = 'notebook.services.contents.largefilemanager.LargeFileManager' + +## Extra keyword arguments to pass to `set_secure_cookie`. See tornado's +# set_secure_cookie docs for details. +#c.NotebookApp.cookie_options = {} + +## The random bytes used to secure cookies. By default this is a new random +# number every time you start the Notebook. Set it to a value in a config file +# to enable logins to persist across server sessions. +# +# Note: Cookie secrets should be kept private, do not share config files with +# cookie_secret stored in plaintext (you can read the value from a file). +#c.NotebookApp.cookie_secret = b'' + +## The file where the cookie secret is stored. +#c.NotebookApp.cookie_secret_file = '' + +## Override URL shown to users. +# +# Replace actual URL, including protocol, address, port and base URL, with the +# given value when displaying URL to the users. Do not change the actual +# connection URL. If authentication token is enabled, the token is added to the +# custom URL automatically. +# +# This option is intended to be used when the URL to display to the user cannot +# be determined reliably by the Jupyter notebook server (proxified or +# containerized setups for example). +#c.NotebookApp.custom_display_url = '' + +## The default URL to redirect to from `/` +#c.NotebookApp.default_url = '/tree' + +## Disable cross-site-request-forgery protection +# +# Jupyter notebook 4.3.1 introduces protection from cross-site request +# forgeries, requiring API requests to either: +# +# - originate from pages served by this server (validated with XSRF cookie and +# token), or - authenticate with a token +# +# Some anonymous compute resources still desire the ability to run code, +# completely without authentication. These services can disable all +# authentication and security checks, with the full knowledge of what that +# implies. +#c.NotebookApp.disable_check_xsrf = False + +## Whether to enable MathJax for typesetting math/TeX +# +# MathJax is the javascript library Jupyter uses to render math/LaTeX. 
It is +# very large, so you may want to disable it if you have a slow internet +# connection, or for offline use of the notebook. +# +# When disabled, equations etc. will appear as their untransformed TeX source. +#c.NotebookApp.enable_mathjax = True + +## extra paths to look for Javascript notebook extensions +#c.NotebookApp.extra_nbextensions_path = [] + +## handlers that should be loaded at higher priority than the default services +#c.NotebookApp.extra_services = [] + +## Extra paths to search for serving static files. +# +# This allows adding javascript/css to be available from the notebook server +# machine, or overriding individual files in the IPython +#c.NotebookApp.extra_static_paths = [] + +## Extra paths to search for serving jinja templates. +# +# Can be used to override templates from notebook.templates. +#c.NotebookApp.extra_template_paths = [] + +## +#c.NotebookApp.file_to_run = '' + +## Extra keyword arguments to pass to `get_secure_cookie`. See tornado's +# get_secure_cookie docs for details. +#c.NotebookApp.get_secure_cookie_kwargs = {} + +## Deprecated: Use minified JS file or not, mainly use during dev to avoid JS +# recompilation +#c.NotebookApp.ignore_minified_js = False + +## (bytes/sec) Maximum rate at which stream output can be sent on iopub before +# they are limited. +#c.NotebookApp.iopub_data_rate_limit = 1000000 + +## (msgs/sec) Maximum rate at which messages can be sent on iopub before they are +# limited. +#c.NotebookApp.iopub_msg_rate_limit = 1000 + +## The IP address the notebook server will listen on. +#c.NotebookApp.ip = 'localhost' + +## Supply extra arguments that will be passed to Jinja environment. +#c.NotebookApp.jinja_environment_options = {} + +## Extra variables to supply to jinja templates when rendering. +#c.NotebookApp.jinja_template_vars = {} + +## The kernel manager class to use. +#c.NotebookApp.kernel_manager_class = 'notebook.services.kernels.kernelmanager.MappingKernelManager' + +## The kernel spec manager class to use. Should be a subclass of +# `jupyter_client.kernelspec.KernelSpecManager`. +# +# The Api of KernelSpecManager is provisional and might change without warning +# between this version of Jupyter and the next stable one. +#c.NotebookApp.kernel_spec_manager_class = 'jupyter_client.kernelspec.KernelSpecManager' + +## The full path to a private key file for usage with SSL/TLS. +#c.NotebookApp.keyfile = '' + +## Hostnames to allow as local when allow_remote_access is False. +# +# Local IP addresses (such as 127.0.0.1 and ::1) are automatically accepted as +# local as well. +#c.NotebookApp.local_hostnames = ['localhost'] + +## The login handler class to use. +#c.NotebookApp.login_handler_class = 'notebook.auth.login.LoginHandler' + +## The logout handler class to use. +#c.NotebookApp.logout_handler_class = 'notebook.auth.logout.LogoutHandler' + +## The MathJax.js configuration file that is to be used. +#c.NotebookApp.mathjax_config = 'TeX-AMS-MML_HTMLorMML-full,Safe' + +## A custom url for MathJax.js. Should be in the form of a case-sensitive url to +# MathJax, for example: /static/components/MathJax/MathJax.js +#c.NotebookApp.mathjax_url = '' + +## Sets the maximum allowed size of the client request body, specified in the +# Content-Length request header field. If the size in a request exceeds the +# configured value, a malformed HTTP message is returned to the client. +# +# Note: max_body_size is applied even in streaming mode. 
+#c.NotebookApp.max_body_size = 536870912 + +## Gets or sets the maximum amount of memory, in bytes, that is allocated for use +# by the buffer manager. +#c.NotebookApp.max_buffer_size = 536870912 + +## Gets or sets a lower bound on the open file handles process resource limit. +# This may need to be increased if you run into an OSError: [Errno 24] Too many +# open files. This is not applicable when running on Windows. +#c.NotebookApp.min_open_files_limit = 0 + +## Dict of Python modules to load as notebook server extensions.Entry values can +# be used to enable and disable the loading ofthe extensions. The extensions +# will be loaded in alphabetical order. +#c.NotebookApp.nbserver_extensions = {} + +## The directory to use for notebooks and kernels. +#c.NotebookApp.notebook_dir = '' + +## Whether to open in a browser after starting. The specific browser used is +# platform dependent and determined by the python standard library `webbrowser` +# module, unless it is overridden using the --browser (NotebookApp.browser) +# configuration option. +c.NotebookApp.open_browser = False + +## Hashed password to use for web authentication. +# +# To generate, type in a python/IPython shell: +# +# from IPython.lib import passwd; passwd() +# +# The string should be of the form type:salt:hashed-password. +c.NotebookApp.password = 'sha1:864544ae55bc:3f840ba19107ea2d4d8cd690bf1397953f89ba2b' + +## Forces users to use a password for the Notebook server. This is useful in a +# multi user environment, for instance when everybody in the LAN can access each +# other's machine through ssh. +# +# In such a case, serving the notebook server on localhost is not secure since +# any user can connect to the notebook server via ssh. +c.NotebookApp.password_required = False + +## The port the notebook server will listen on (env: JUPYTER_PORT). +#c.NotebookApp.port = 8888 + +## The number of additional ports to try if the specified port is not available +# (env: JUPYTER_PORT_RETRIES). +#c.NotebookApp.port_retries = 50 + +## DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib. +#c.NotebookApp.pylab = 'disabled' + +## If True, display a button in the dashboard to quit (shutdown the notebook +# server). +#c.NotebookApp.quit_button = True + +## (sec) Time window used to check the message and data rate limits. +#c.NotebookApp.rate_limit_window = 3 + +## Reraise exceptions encountered loading server extensions? +#c.NotebookApp.reraise_server_extension_failures = False + +## DEPRECATED use the nbserver_extensions dict instead +#c.NotebookApp.server_extensions = [] + +## The session manager class to use. +#c.NotebookApp.session_manager_class = 'notebook.services.sessions.sessionmanager.SessionManager' + +## Shut down the server after N seconds with no kernels or terminals running and +# no activity. This can be used together with culling idle kernels +# (MappingKernelManager.cull_idle_timeout) to shutdown the notebook server when +# it's not in use. This is not precisely timed: it may shut down up to a minute +# later. 0 (the default) disables this automatic shutdown. +#c.NotebookApp.shutdown_no_activity_timeout = 0 + +## The UNIX socket the notebook server will listen on. +#c.NotebookApp.sock = '' + +## The permissions mode for UNIX socket creation (default: 0600). +#c.NotebookApp.sock_mode = '0600' + +## Supply SSL options for the tornado HTTPServer. See the tornado docs for +# details. +#c.NotebookApp.ssl_options = {} + +## Supply overrides for terminado. Currently only supports "shell_command". 
On +# Unix, if "shell_command" is not provided, a non-login shell is launched by +# default when the notebook server is connected to a terminal, a login shell +# otherwise. +c.NotebookApp.terminado_settings = {'shell_command': ['bash']} + +## Set to False to disable terminals. +# +# This does *not* make the notebook server more secure by itself. Anything the +# user can in a terminal, they can also do in a notebook. +# +# Terminals may also be automatically disabled if the terminado package is not +# available. +#c.NotebookApp.terminals_enabled = True + +## Token used for authenticating first-time connections to the server. +# +# The token can be read from the file referenced by JUPYTER_TOKEN_FILE or set +# directly with the JUPYTER_TOKEN environment variable. +# +# When no password is enabled, the default is to generate a new, random token. +# +# Setting to an empty string disables authentication altogether, which is NOT +# RECOMMENDED. +#c.NotebookApp.token = '' + +## Supply overrides for the tornado.web.Application that the Jupyter notebook +# uses. +#c.NotebookApp.tornado_settings = {} + +## Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded- +# For headerssent by the upstream reverse proxy. Necessary if the proxy handles +# SSL +#c.NotebookApp.trust_xheaders = False + +## Disable launching browser by redirect file +# +# For versions of notebook > 5.7.2, a security feature measure was added that +# prevented the authentication token used to launch the browser from being +# visible. This feature makes it difficult for other users on a multi-user +# system from running code in your Jupyter session as you. +# +# However, some environments (like Windows Subsystem for Linux (WSL) and +# Chromebooks), launching a browser using a redirect file can lead the browser +# failing to load. This is because of the difference in file structures/paths +# between the runtime and the browser. +# +# Disabling this setting to False will disable this behavior, allowing the +# browser to launch by using a URL and visible token (as before). +#c.NotebookApp.use_redirect_file = True + +## DEPRECATED, use tornado_settings +#c.NotebookApp.webapp_settings = {} + +## Specify Where to open the notebook on startup. This is the `new` argument +# passed to the standard library method `webbrowser.open`. The behaviour is not +# guaranteed, but depends on browser support. Valid values are: +# +# - 2 opens a new tab, +# - 1 opens a new window, +# - 0 opens in an existing window. +# +# See the `webbrowser.open` documentation for details. +#c.NotebookApp.webbrowser_open_new = 2 + +## Set the tornado compression options for websocket connections. +# +# This value will be returned from +# :meth:`WebSocketHandler.get_compression_options`. None (default) will disable +# compression. A dict (even an empty one) will enable compression. +# +# See the tornado docs for WebSocketHandler.get_compression_options for details. +#c.NotebookApp.websocket_compression_options = None + +## The base URL for websockets, if it differs from the HTTP server (hint: it +# almost certainly doesn't). 
+# +# Should be in the form of an HTTP origin: ws[s]://hostname[:port] +#c.NotebookApp.websocket_url = '' + +#------------------------------------------------------------------------------ +# ConnectionFileMixin(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ + +## Mixin for configurable classes that work with connection files + +## JSON file in which to store connection info [default: kernel-.json] +# +# This file will contain the IP, ports, and authentication key needed to connect +# clients to this kernel. By default, this file will be created in the security +# dir of the current profile, but can be specified by absolute path. +#c.ConnectionFileMixin.connection_file = '' + +## set the control (ROUTER) port [default: random] +#c.ConnectionFileMixin.control_port = 0 + +## set the heartbeat port [default: random] +#c.ConnectionFileMixin.hb_port = 0 + +## set the iopub (PUB) port [default: random] +#c.ConnectionFileMixin.iopub_port = 0 + +## Set the kernel's IP address [default localhost]. If the IP address is +# something other than localhost, then Consoles on other machines will be able +# to connect to the Kernel, so be careful! +#c.ConnectionFileMixin.ip = '' + +## set the shell (ROUTER) port [default: random] +#c.ConnectionFileMixin.shell_port = 0 + +## set the stdin (ROUTER) port [default: random] +#c.ConnectionFileMixin.stdin_port = 0 + +## +#c.ConnectionFileMixin.transport = 'tcp' + +#------------------------------------------------------------------------------ +# KernelManager(ConnectionFileMixin) configuration +#------------------------------------------------------------------------------ + +## Manages a single kernel in a subprocess on this host. +# +# This version starts kernels with Popen. + +## Should we autorestart the kernel if it dies. +#c.KernelManager.autorestart = True + +## DEPRECATED: Use kernel_name instead. +# +# The Popen Command to launch the kernel. Override this if you have a custom +# kernel. If kernel_cmd is specified in a configuration file, Jupyter does not +# pass any arguments to the kernel, because it cannot make any assumptions about +# the arguments that the kernel understands. In particular, this means that the +# kernel does not receive the option --debug if it given on the Jupyter command +# line. +#c.KernelManager.kernel_cmd = [] + +## Time to wait for a kernel to terminate before killing it, in seconds. +#c.KernelManager.shutdown_wait_time = 5.0 + +#------------------------------------------------------------------------------ +# Session(Configurable) configuration +#------------------------------------------------------------------------------ + +## Object for handling serialization and sending of messages. +# +# The Session object handles building messages and sending them with ZMQ sockets +# or ZMQStream objects. Objects can communicate with each other over the +# network via Session objects, and only need to work with the dict-based IPython +# message spec. The Session will handle serialization/deserialization, security, +# and metadata. +# +# Sessions support configurable serialization via packer/unpacker traits, and +# signing with HMAC digests via the key/keyfile traits. +# +# Parameters ---------- +# +# debug : bool +# whether to trigger extra debugging statements +# packer/unpacker : str : 'json', 'pickle' or import_string +# importstrings for methods to serialize message parts. If just +# 'json' or 'pickle', predefined JSON and pickle packers will be used. 
+# Otherwise, the entire importstring must be used. +# +# The functions must accept at least valid JSON input, and output *bytes*. +# +# For example, to use msgpack: +# packer = 'msgpack.packb', unpacker='msgpack.unpackb' +# pack/unpack : callables +# You can also set the pack/unpack callables for serialization directly. +# session : bytes +# the ID of this Session object. The default is to generate a new UUID. +# username : unicode +# username added to message headers. The default is to ask the OS. +# key : bytes +# The key used to initialize an HMAC signature. If unset, messages +# will not be signed or checked. +# keyfile : filepath +# The file containing a key. If this is set, `key` will be initialized +# to the contents of the file. + +## Threshold (in bytes) beyond which an object's buffer should be extracted to +# avoid pickling. +#c.Session.buffer_threshold = 1024 + +## Whether to check PID to protect against calls after fork. +# +# This check can be disabled if fork-safety is handled elsewhere. +#c.Session.check_pid = True + +## Threshold (in bytes) beyond which a buffer should be sent without copying. +#c.Session.copy_threshold = 65536 + +## Debug output in the Session +#c.Session.debug = False + +## The maximum number of digests to remember. +# +# The digest history will be culled when it exceeds this value. +#c.Session.digest_history_size = 65536 + +## The maximum number of items for a container to be introspected for custom +# serialization. Containers larger than this are pickled outright. +#c.Session.item_threshold = 64 + +## execution key, for signing messages. +#c.Session.key = b'' + +## path to file containing execution key. +#c.Session.keyfile = '' + +## Metadata dictionary, which serves as the default top-level metadata dict for +# each message. +#c.Session.metadata = {} + +## The name of the packer for serializing messages. Should be one of 'json', +# 'pickle', or an import name for a custom callable serializer. +#c.Session.packer = 'json' + +## The UUID identifying this session. +#c.Session.session = '' + +## The digest scheme used to construct the message signatures. Must have the form +# 'hmac-HASH'. +#c.Session.signature_scheme = 'hmac-sha256' + +## The name of the unpacker for unserializing messages. Only used with custom +# functions for `packer`. +#c.Session.unpacker = 'json' + +## Username for the Session. Default is your system username. +#c.Session.username = 'username' + +#------------------------------------------------------------------------------ +# MultiKernelManager(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ + +## A class for managing multiple kernels. + +## The name of the default kernel to start +#c.MultiKernelManager.default_kernel_name = 'python3' + +## The kernel manager class. This is configurable to allow subclassing of the +# KernelManager for customized behavior. +#c.MultiKernelManager.kernel_manager_class = 'jupyter_client.ioloop.IOLoopKernelManager' + +## Share a single zmq.Context to talk to all my kernels +#c.MultiKernelManager.shared_context = True + +#------------------------------------------------------------------------------ +# MappingKernelManager(MultiKernelManager) configuration +#------------------------------------------------------------------------------ + +## A KernelManager that handles notebook mapping and HTTP error handling + +## White list of allowed kernel message types. When the list is empty, all +# message types are allowed. 
+#c.MappingKernelManager.allowed_message_types = [] + +## Whether messages from kernels whose frontends have disconnected should be +# buffered in-memory. When True (default), messages are buffered and replayed on +# reconnect, avoiding lost messages due to interrupted connectivity. Disable if +# long-running kernels will produce too much output while no frontends are +# connected. +#c.MappingKernelManager.buffer_offline_messages = True + +## Whether to consider culling kernels which are busy. Only effective if +# cull_idle_timeout > 0. +#c.MappingKernelManager.cull_busy = False + +## Whether to consider culling kernels which have one or more connections. Only +# effective if cull_idle_timeout > 0. +#c.MappingKernelManager.cull_connected = False + +## Timeout (in seconds) after which a kernel is considered idle and ready to be +# culled. Values of 0 or lower disable culling. Very short timeouts may result +# in kernels being culled for users with poor network connections. +#c.MappingKernelManager.cull_idle_timeout = 0 + +## The interval (in seconds) on which to check for idle kernels exceeding the +# cull timeout value. +#c.MappingKernelManager.cull_interval = 300 + +## Timeout for giving up on a kernel (in seconds). On starting and restarting +# kernels, we check whether the kernel is running and responsive by sending +# kernel_info_requests. This sets the timeout in seconds for how long the kernel +# can take before being presumed dead. This affects the MappingKernelManager +# (which handles kernel restarts) and the ZMQChannelsHandler (which handles the +# startup). +#c.MappingKernelManager.kernel_info_timeout = 60 + +## +#c.MappingKernelManager.root_dir = '' + +#------------------------------------------------------------------------------ +# KernelSpecManager(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ + +## If there is no Python kernelspec registered and the IPython kernel is +# available, ensure it is added to the spec list. +#c.KernelSpecManager.ensure_native_kernel = True + +## The kernel spec class. This is configurable to allow subclassing of the +# KernelSpecManager for customized behavior. +#c.KernelSpecManager.kernel_spec_class = 'jupyter_client.kernelspec.KernelSpec' + +## Whitelist of allowed kernel names. +# +# By default, all installed kernels are allowed. +#c.KernelSpecManager.whitelist = set() + +#------------------------------------------------------------------------------ +# ContentsManager(LoggingConfigurable) configuration +#------------------------------------------------------------------------------ + +## Base class for serving files and directories. +# +# This serves any text or binary file, as well as directories, with special +# handling for JSON notebook documents. +# +# Most APIs take a path argument, which is always an API-style unicode path, and +# always refers to a directory. +# +# - unicode, not url-escaped +# - '/'-separated +# - leading and trailing '/' will be stripped +# - if unspecified, path defaults to '', +# indicating the root path. + +## Allow access to hidden files +#c.ContentsManager.allow_hidden = False + +## +#c.ContentsManager.checkpoints = None + +## +#c.ContentsManager.checkpoints_class = 'notebook.services.contents.checkpoints.Checkpoints' + +## +#c.ContentsManager.checkpoints_kwargs = {} + +## handler class to use when serving raw file requests. 
+# +# Default is a fallback that talks to the ContentsManager API, which may be +# inefficient, especially for large files. +# +# Local files-based ContentsManagers can use a StaticFileHandler subclass, which +# will be much more efficient. +# +# Access to these files should be Authenticated. +#c.ContentsManager.files_handler_class = 'notebook.files.handlers.FilesHandler' + +## Extra parameters to pass to files_handler_class. +# +# For example, StaticFileHandlers generally expect a `path` argument specifying +# the root directory from which to serve files. +#c.ContentsManager.files_handler_params = {} + +## Glob patterns to hide in file and directory listings. +#c.ContentsManager.hide_globs = ['__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~'] + +## Python callable or importstring thereof +# +# To be called on a contents model prior to save. +# +# This can be used to process the structure, such as removing notebook outputs +# or other side effects that should not be saved. +# +# It will be called as (all arguments passed by keyword):: +# +# hook(path=path, model=model, contents_manager=self) +# +# - model: the model to be saved. Includes file contents. +# Modifying this dict will affect the file that is stored. +# - path: the API path of the save destination +# - contents_manager: this ContentsManager instance +#c.ContentsManager.pre_save_hook = None + +## +#c.ContentsManager.root_dir = '/' + +## The base name used when creating untitled directories. +#c.ContentsManager.untitled_directory = 'Untitled Folder' + +## The base name used when creating untitled files. +#c.ContentsManager.untitled_file = 'untitled' + +## The base name used when creating untitled notebooks. +#c.ContentsManager.untitled_notebook = 'Untitled' + +#------------------------------------------------------------------------------ +# FileManagerMixin(Configurable) configuration +#------------------------------------------------------------------------------ + +## Mixin for ContentsAPI classes that interact with the filesystem. +# +# Provides facilities for reading, writing, and copying both notebooks and +# generic files. +# +# Shared by FileContentsManager and FileCheckpoints. +# +# Note ---- Classes using this mixin must provide the following attributes: +# +# root_dir : unicode +# A directory against against which API-style paths are to be resolved. +# +# log : logging.Logger + +## By default notebooks are saved on disk on a temporary file and then if +# successfully written, it replaces the old ones. This procedure, namely +# 'atomic_writing', causes some bugs on file system without operation order +# enforcement (like some networked fs). If set to False, the new notebook is +# written directly on the old one which could fail (eg: full filesystem or quota +# ) +#c.FileManagerMixin.use_atomic_writing = True + +#------------------------------------------------------------------------------ +# FileContentsManager(FileManagerMixin,ContentsManager) configuration +#------------------------------------------------------------------------------ + +## If True (default), deleting files will send them to the platform's +# trash/recycle bin, where they can be recovered. If False, deleting files +# really deletes them. +#c.FileContentsManager.delete_to_trash = True + +## Python callable or importstring thereof +# +# to be called on the path of a file just saved. +# +# This can be used to process the file on disk, such as converting the notebook +# to a script or HTML via nbconvert. 
+##
+#c.ContentsManager.root_dir = '/'
+
+## The base name used when creating untitled directories.
+#c.ContentsManager.untitled_directory = 'Untitled Folder'
+
+## The base name used when creating untitled files.
+#c.ContentsManager.untitled_file = 'untitled'
+
+## The base name used when creating untitled notebooks.
+#c.ContentsManager.untitled_notebook = 'Untitled'
+
+#------------------------------------------------------------------------------
+# FileManagerMixin(Configurable) configuration
+#------------------------------------------------------------------------------
+
+## Mixin for ContentsAPI classes that interact with the filesystem.
+#
+# Provides facilities for reading, writing, and copying both notebooks and
+# generic files.
+#
+# Shared by FileContentsManager and FileCheckpoints.
+#
+# Note
+# ----
+# Classes using this mixin must provide the following attributes:
+#
+# root_dir : unicode
+#     A directory against which API-style paths are to be resolved.
+#
+# log : logging.Logger
+
+## By default, notebooks are saved to a temporary file and then, if written
+# successfully, that file replaces the old one. This procedure, called
+# 'atomic writing', causes some bugs on file systems that do not enforce
+# operation order (such as some networked filesystems). If set to False, the
+# new notebook is written directly over the old one, which can fail (e.g. on a
+# full filesystem or when a quota is exceeded).
+#c.FileManagerMixin.use_atomic_writing = True
+
+#------------------------------------------------------------------------------
+# FileContentsManager(FileManagerMixin,ContentsManager) configuration
+#------------------------------------------------------------------------------
+
+## If True (default), deleting files will send them to the platform's
+# trash/recycle bin, where they can be recovered. If False, deleting files
+# really deletes them.
+#c.FileContentsManager.delete_to_trash = True
+
+## Python callable or importstring thereof
+#
+# to be called on the path of a file just saved.
+#
+# This can be used to process the file on disk, such as converting the notebook
+# to a script or HTML via nbconvert.
+#
+# It will be called as (all arguments passed by keyword)::
+#
+#     hook(os_path=os_path, model=model, contents_manager=instance)
+#
+# - os_path: the filesystem path to the file just written
+# - model: the model representing the file
+# - contents_manager: this ContentsManager instance
+#c.FileContentsManager.post_save_hook = None
+
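+# Illustrative sketch of a post_save_hook (also left commented out): export
+# each saved notebook to a sibling script with nbconvert. This assumes
+# nbconvert is available in the image; the function name is illustrative.
+#
+# import os
+#
+# def export_script_post_save(model, os_path, **kwargs):
+#     """Write a script next to every saved notebook."""
+#     if model['type'] != 'notebook':
+#         return
+#     from nbconvert.exporters.script import ScriptExporter
+#     script, resources = ScriptExporter().from_filename(os_path)
+#     base, _ = os.path.splitext(os_path)
+#     out_path = base + resources.get('output_extension', '.py')
+#     with open(out_path, 'w', encoding='utf-8') as f:
+#         f.write(script)
+#
+# c.FileContentsManager.post_save_hook = export_script_post_save
+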
+##
+#c.FileContentsManager.root_dir = ''
+
+## DEPRECATED, use post_save_hook. Will be removed in Notebook 5.0
+#c.FileContentsManager.save_script = False
+
+#------------------------------------------------------------------------------
+# NotebookNotary(LoggingConfigurable) configuration
+#------------------------------------------------------------------------------
+
+## A class for computing and verifying notebook signatures.
+
+## The hashing algorithm used to sign notebooks.
+#c.NotebookNotary.algorithm = 'sha256'
+
+## The sqlite file in which to store notebook signatures. By default, this will
+# be in your Jupyter data directory. You can set it to ':memory:' to disable
+# sqlite writing to the filesystem.
+#c.NotebookNotary.db_file = ''
+
+## The secret key with which notebooks are signed.
+#c.NotebookNotary.secret = b''
+
+## The file where the secret key is stored.
+#c.NotebookNotary.secret_file = ''
+
+## A callable returning the storage backend for notebook signatures. The default
+# uses an SQLite database.
+#c.NotebookNotary.store_factory = traitlets.Undefined
+
+#------------------------------------------------------------------------------
+# GatewayKernelManager(MappingKernelManager) configuration
+#------------------------------------------------------------------------------
+
+## Kernel manager that supports remote kernels hosted by Jupyter Kernel or
+# Enterprise Gateway.
+
+#------------------------------------------------------------------------------
+# GatewayKernelSpecManager(KernelSpecManager) configuration
+#------------------------------------------------------------------------------
+
+#------------------------------------------------------------------------------
+# GatewayClient(SingletonConfigurable) configuration
+#------------------------------------------------------------------------------
+
+## This class manages the configuration. It's its own singleton class so that we
+# can share these values across all objects. It also contains some helper methods
+# to build request arguments out of the various config options.
+
+## The authorization token used in the HTTP headers. (JUPYTER_GATEWAY_AUTH_TOKEN
+# env var)
+#c.GatewayClient.auth_token = None
+
+## The filename of CA certificates or None to use defaults.
+# (JUPYTER_GATEWAY_CA_CERTS env var)
+#c.GatewayClient.ca_certs = None
+
+## The filename for client SSL certificate, if any. (JUPYTER_GATEWAY_CLIENT_CERT
+# env var)
+#c.GatewayClient.client_cert = None
+
+## The filename for client SSL key, if any. (JUPYTER_GATEWAY_CLIENT_KEY env var)
+#c.GatewayClient.client_key = None
+
+## The time allowed for HTTP connection establishment with the Gateway server.
+# (JUPYTER_GATEWAY_CONNECT_TIMEOUT env var)
+#c.GatewayClient.connect_timeout = 40.0
+
+## A comma-separated list of environment variable names that will be included,
+# along with their values, in the kernel startup request. The corresponding
+# `env_whitelist` configuration value must also be set on the Gateway server -
+# since that configuration value indicates which environment values to make
+# available to the kernel. (JUPYTER_GATEWAY_ENV_WHITELIST env var)
+#c.GatewayClient.env_whitelist = ''
+
+## Additional HTTP headers to pass on the request. This value will be converted
+# to a dict. (JUPYTER_GATEWAY_HEADERS env var)
+#c.GatewayClient.headers = '{}'
+
+## The password for HTTP authentication. (JUPYTER_GATEWAY_HTTP_PWD env var)
+#c.GatewayClient.http_pwd = None
+
+## The username for HTTP authentication. (JUPYTER_GATEWAY_HTTP_USER env var)
+#c.GatewayClient.http_user = None
+
+## The gateway API endpoint for accessing kernel resources
+# (JUPYTER_GATEWAY_KERNELS_ENDPOINT env var)
+#c.GatewayClient.kernels_endpoint = '/api/kernels'
+
+## The gateway API endpoint for accessing kernelspecs
+# (JUPYTER_GATEWAY_KERNELSPECS_ENDPOINT env var)
+#c.GatewayClient.kernelspecs_endpoint = '/api/kernelspecs'
+
+## The gateway endpoint for accessing kernelspecs resources
+# (JUPYTER_GATEWAY_KERNELSPECS_RESOURCE_ENDPOINT env var)
+#c.GatewayClient.kernelspecs_resource_endpoint = '/kernelspecs'
+
+## The time allowed for HTTP request completion. (JUPYTER_GATEWAY_REQUEST_TIMEOUT
+# env var)
+#c.GatewayClient.request_timeout = 40.0
+
+## The url of the Kernel or Enterprise Gateway server where kernel specifications
+# are defined and kernel management takes place. If defined, this Notebook
+# server acts as a proxy for all kernel management and kernel specification
+# retrieval. (JUPYTER_GATEWAY_URL env var)
+#c.GatewayClient.url = None
+
+## For HTTPS requests, determines if the server's certificate should be validated
+# or not. (JUPYTER_GATEWAY_VALIDATE_CERT env var)
+#c.GatewayClient.validate_cert = True
+
+## The websocket url of the Kernel or Enterprise Gateway server. If not
+# provided, this value will correspond to the value of the Gateway url with 'ws'
+# in place of 'http'. (JUPYTER_GATEWAY_WS_URL env var)
+#c.GatewayClient.ws_url = None
+
+#------------------------------------------------------------------------------
+# TerminalManager(LoggingConfigurable,NamedTermManager) configuration
+#------------------------------------------------------------------------------
+
+##
+
+## Timeout (in seconds) after which an inactive terminal is considered ready to
+# be culled. Values of 0 or lower disable culling.
+#c.TerminalManager.cull_inactive_timeout = 0
+
+## The interval (in seconds) on which to check for terminals exceeding the
+# inactive timeout value.
+#c.TerminalManager.cull_interval = 300 diff --git a/docker/settings.json b/docker/settings.json new file mode 100644 index 0000000..9a27238 --- /dev/null +++ b/docker/settings.json @@ -0,0 +1,7 @@ +{ + "extensions.autoCheckUpdates": false, + "extensions.autoUpdate": false, + "python.dataScience.alwaysTrustNotebooks": true, + "terminal.integrated.shell.linux": "/bin/bash", + "workbench.colorTheme": "Default Dark+" +} diff --git a/docker/supervisord.conf b/docker/supervisord.conf index 4e6af18..809ab30 100644 --- a/docker/supervisord.conf +++ b/docker/supervisord.conf @@ -10,19 +10,3 @@ stdout_logfile_maxbytes=0 stderr_logfile=/dev/stderr stderr_logfile_maxbytes=0 command=/bin/bash -c "jupyter lab --ip=0.0.0.0 --no-browser --allow-root --config=/root/.jupyter/jupyter_notebook_config.py" - - -[program:code-server] -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 -stderr_logfile=/dev/stderr -stderr_logfile_maxbytes=0 -command=/bin/bash -c "code-server --bind-addr 0.0.0.0:8887" - - -[program:pulse] -stdout_logfile=/dev/stdout -stdout_logfile_maxbytes=0 -stderr_logfile=/dev/stderr -stderr_logfile_maxbytes=0 -command=/bin/bash -c "pulseaudio --system" \ No newline at end of file