From 867253fb8d25c4558b86f37635b12e4f78fbe5f1 Mon Sep 17 00:00:00 2001 From: cs_zhlou <1065168006@qq.com> Date: Thu, 11 Jul 2024 15:51:44 +0800 Subject: [PATCH] Supplement and organize the dependency requirements.txt files and README.md files for each kernel; Unified the startup method of all kernels. (#1589) --- Dockerfile | 32 ++++++++-------- notebook/node_vm2_pycaller.ipynb | 8 ++-- package.json | 1 + pycjs/README.md | 59 ++++++++++++++++++++++++++++++ pycjs/requirements.txt | 6 ++- python/README.md | 8 ++-- python/db/clickhouse.py | 2 +- python/requirements.txt | 9 +++++ python_v2/README.md | 10 ++--- python_v2/config.py | 14 +++---- python_v2/db/clickhouse_wrapper.py | 2 +- python_v2/requirements.txt | 8 ++-- requirements_python.txt | 9 ----- sample_data/README.md | 41 +++++++++++++++++---- 14 files changed, 149 insertions(+), 60 deletions(-) create mode 100644 pycjs/README.md create mode 100644 python/requirements.txt delete mode 100644 requirements_python.txt diff --git a/Dockerfile b/Dockerfile index e5404aaf6..69ef6257d 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,21 +1,21 @@ -FROM continuumio/miniconda3 - -LABEL maintainer="Yike Cheng" - -RUN mkdir python_kernel \ -&& mkdir python_kernel/notebook \ -&& pip install -i https://pypi.tuna.tsinghua.edu.cn/simple/ easydict==1.9 \ -&& pip install -i https://pypi.tuna.tsinghua.edu.cn/simple/ py2neo==2021.2.3 \ -&& pip install -i https://pypi.tuna.tsinghua.edu.cn/simple/ plotly==5.9.0 \ -&& pip install -i https://pypi.tuna.tsinghua.edu.cn/simple/ clickhouse-driver==0.2.3 \ -&& pip install -i https://pypi.tuna.tsinghua.edu.cn/simple/ numpy==1.23.2 \ -&& pip install -i https://pypi.tuna.tsinghua.edu.cn/simple/ jupyterlab==3.4.5 \ -&& pip install -i https://pypi.tuna.tsinghua.edu.cn/simple/ matplotlib==3.5.3 \ -&& pip install -i https://pypi.tuna.tsinghua.edu.cn/simple/ pandas==1.4.3 \ -&& pip install -i https://pypi.tuna.tsinghua.edu.cn/simple/ pyyaml==6.0 +ARG BASE_IMAGE + +FROM ${BASE_IMAGE} +# FROM 
registry.cn-beijing.aliyuncs.com/open-digger/open-digger-js-notebook:1.0 +# FROM continuumio/miniconda3 # LABEL maintainer="Yike Cheng" + +USER root + +RUN mkdir -p /python_kernel/notebook WORKDIR /python_kernel/notebook +ARG KER_REL_PATH # Kernel Relative Path e.g. './pycjs' + +COPY ${KER_REL_PATH}/requirements.txt ${KER_REL_PATH}/requirements.txt + +RUN pip install -r ${KER_REL_PATH}/requirements.txt -i https://pypi.tuna.tsinghua.edu.cn/simple/ + EXPOSE 8888 -CMD jupyter lab --notebook-dir=/python_kernel/notebook --ip='*' --port=8888 --allow-root --no-browser +CMD jupyter lab --notebook-dir=${WORKDIR} --ip='*' --port=8888 --allow-root --no-browser diff --git a/notebook/node_vm2_pycaller.ipynb b/notebook/node_vm2_pycaller.ipynb index dc15cb2cd..416694341 100644 --- a/notebook/node_vm2_pycaller.ipynb +++ b/notebook/node_vm2_pycaller.ipynb @@ -636,7 +636,7 @@ "\n", "def processTechFieldRepoOpenrank(options, title):\n", " data = openDigger.index.openrank.getRepoOpenrank(\n", - " dict(**baseOptions, \n", + " dict(baseOptions, \n", " **dict(options)\n", " )\n", " )\n", @@ -880,7 +880,7 @@ " }\n", "def processTechFieldRepoActivity(options, title):\n", " data = openDigger.index.activity.getRepoActivity(\n", - " dict(**baseOptions, \n", + " dict(baseOptions, \n", " **dict(options)\n", " )\n", " )\n", @@ -1121,10 +1121,10 @@ " \"type\": 'scatter'\n", " }\n", "def processRegionsCompanyRepoActivity(options, title):\n", - " data = openDigger.index.activity.getRepoActivity({\n", + " data = openDigger.index.activity.getRepoActivity(dict({\n", " \"startYear\": startYear, \"endYear\": endYear, \"startMonth\": startMonth, \"endMonth\": endMonth,\n", " \"groupBy\": 'Company', \"groupTimeRange\": 'year', \"order\": 'DESC',\n", - " })\n", + " }, **options))\n", " \n", " data_records = pd.DataFrame(data).to_dict('records')\n", " data_periodranks = openDigger.getRank(data_records, lambda x: x[\"name\"], lambda x: x[\"activity\"])\n", diff --git a/package.json b/package.json index 
5848dc9f6..5fdbb1bfc 100644 --- a/package.json +++ b/package.json @@ -11,6 +11,7 @@ "build": "tsc", "notebook": "npm run build && docker pull registry.cn-beijing.aliyuncs.com/open-digger/open-digger-js-notebook:1.0 && docker run -it --rm -p 8888:8888 -v $(pwd):/home/node/notebook registry.cn-beijing.aliyuncs.com/open-digger/open-digger-js-notebook:1.0", "notebook:win": "npm run build && docker pull registry.cn-beijing.aliyuncs.com/open-digger/open-digger-js-notebook:1.0 && docker run -it --rm -p 8888:8888 -v %cd%:/home/node/notebook registry.cn-beijing.aliyuncs.com/open-digger/open-digger-js-notebook:1.0", + "notebook-pycjs": "npm run build && docker build --build-arg KER_REL_PATH=./pycjs --build-arg BASE_IMAGE=registry.cn-beijing.aliyuncs.com/open-digger/open-digger-js-notebook:1.0 -t opendigger-jupyter-python:1.0 . && docker run -i -t --name python_notebook_name --rm -p 8888:8888 -v .:/python_kernel/notebook opendigger-jupyter-python:1.0", "pull-label-file-test": "tsc && node lib/ci/pull_label_file_test.js", "cron": "npm run build && node --max-old-space-size=25600 lib/cron/index.js", "test": "CLICKHOUSE_HOST=http://ci.open-digger.cn:8123 CLICKHOUSE_USERNAME=default CLICKHOUSE_PASSWORD= mocha" diff --git a/pycjs/README.md b/pycjs/README.md new file mode 100644 index 000000000..a608aa5e0 --- /dev/null +++ b/pycjs/README.md @@ -0,0 +1,59 @@ +# Getting Start + +## If you want to do some data analysis work: +Start your ClickHouse container, which should be set up in [Clickhouse-sample-data](../sample_data/README.md) + +1. Clone OpenDigger `git clone https://github.com/X-lab2017/open-digger.git` + +2. Enter the repo path `cd open-digger` + + Install the necessary packages `npm install`. + +3. Go to the `src` folder(pycjs does not implement any bottom layer details) in the open-digger root directory, create a file named 'local_config.py'(this file has already added into `.gitignore` file.) 
for Python Kernel with the following contents: + + ```python + local_config = { + 'db': { + 'clickhouse': { + 'host':'172.17.0.1', + 'user':'default' + }, + 'neo4j':{ + 'port': '7687', + } + } + } + ``` + the `host` above is the host of the ClickHouse server. We can find it using `docker inspect container_name`(the container_name is set by command docker run --name xxx), and copy the `Gateway` like this: + + ```shell + $ docker inspect container_name | grep Gateway + "Gateway": "172.17.0.1", + "IPv6Gateway": "", + "Gateway": "172.17.0.1", + "IPv6Gateway": "", + ``` + If you use your own data, you can also change `host` field to your own host IP + + Return the repo path `cd open-digger`. + + Build ts `npm run build`. Since the npm run build command is important to active every settings change, the kernel pycjs supports `npm run notebook-pycjs` to execute the *npm run build, docker build and docker run* command automatically, instead of manually executing them step by step as below. + +4. Use `docker build --build-arg KER_REL_PATH='./pycjs' --build-arg BASE_IMAGE='registry.cn-beijing.aliyuncs.com/open-digger/open-digger-js-notebook:1.0' -t opendigger-jupyter-python:1.0 $(pwd)` to make a docker image, this image is based on `miniconda`. You can check the `Dockerfile` in root directory. + + > If you are using **Windows CMD**, all the `$(pwd)` here should be replaced by `%cd%`. And if you are using **Windows Powershell**, all the `$(pwd)` here should be replaced by `${pwd}`. + > + > **Notice:** Pathnames of directories like "pwd" may use `\` to join the directory in some versions of Windows. We recommend using absolute paths. + +5. Then we can use `docker run -i -t --name python_notebook_name --rm -p 8888:8888 -v "$(pwd):/python_kernel/notebook" opendigger-jupyter-python:1.0` to create and run the container. + +6. Open the link in console log like `http://127.0.0.1:8888/lab?token=xxxxx`. + +7. 
If the source code under `python` folder changed, you need to stop the notebook docker using `docker stop python_notebook_name` and restart the notebook kernel using `docker run -i -t --name python_notebook_name --rm -p 8888:8888 -v "$(pwd):/python_kernel/notebook" opendigger-jupyter-python:1.0` to reload the sorce code. + +8. You can find the notebook folder, where we provide demos in the handbook. You can create a new file, and happy data exploring! + Attention: you need to do this work in `notebook` or other parallel folder. If you run in root directory, it can't work because of python import rules. + +## If you are a developer: + +You can also make `workspace.py` in `python` folder. and run it. diff --git a/pycjs/requirements.txt b/pycjs/requirements.txt index 24b2004b8..7169c1c08 100644 --- a/pycjs/requirements.txt +++ b/pycjs/requirements.txt @@ -1,8 +1,10 @@ +clickhouse-driver>=0.2.8 ipynbname==2023.2.0.0 ipython==8.0.1 ipython-genutils==0.2.0 jupyterlab>=3.2.8 matplotlib>=3.5.3 node-vm2==0.4.7 -numpy>=1.21.5 -pandas>=1.4.4 \ No newline at end of file +numpy>=1.23.2 +pandas>=1.4.3 +tabulate==0.9.0 \ No newline at end of file diff --git a/python/README.md b/python/README.md index e2621e6be..1e3b947e4 100644 --- a/python/README.md +++ b/python/README.md @@ -22,7 +22,7 @@ Start your ClickHouse container, which should be set up in [Clickhouse-sample-da } } ``` - the `host` above is the host of the ClickHouse server. We can find it using `docker inspect containert_name`, and copy the `Gateway` like this: + the `host` above is the host of the ClickHouse server. We can find it using `docker inspect container_name`(the container_name is set by command docker run --name xxx), and copy the `Gateway` like this: ```shell $ docker inspect container_name | grep Gateway @@ -32,17 +32,17 @@ Start your ClickHouse container, which should be set up in [Clickhouse-sample-da "IPv6Gateway": "", ``` If you use your own data, you can also change `host` field to your own host IP -4. 
Use `docker build -t opendigger-jupyter-python:1.0 $(pwd)` to make a docker image, this image is based on `miniconda`. You can check the `Dockerfile` in root directory. +4. Use `docker build --build-arg KER_REL_PATH='./python' --build-arg BASE_IMAGE='continuumio/miniconda3' -t opendigger-jupyter-python:1.0 $(pwd)` to make a docker image, this image is based on `miniconda`. You can check the `Dockerfile` in root directory. > If you are using **Windows CMD**, all the `$(pwd)` here should be replaced by `%cd%`. And if you are using **Windows Powershell**, all the `$(pwd)` here should be replaced by `${pwd}`. > > **Notice:** Pathnames of directories like "pwd" may use `\` to join the directory in some versions of Windows. We recommend using absolute paths. -5. Then we can use `docker run -it --name python_notebook_name --rm -p 8888:8888 -v $(pwd):/python_kernel/notebook opendigger-jupyter-python:1.0` to create and run the container. +5. Then we can use `docker run -i -t --name python_notebook_name --rm -p 8888:8888 -v "$(pwd):/python_kernel/notebook" opendigger-jupyter-python:1.0` to create and run the container. 6. Open the link in console log like `http://127.0.0.1:8888/lab?token=xxxxx`. -7. If the source code under `python` folder changed, you need to stop the notebook docker using `docker stop python_notebook_name` and restart the notebook kernel using `docker run -it --name python_notebook_name --rm -p 8888:8888 -v $(pwd):/python_kernel/notebook opendigger-jupyter-python:1.0` to reload the sorce code. +7. If the source code under `python` folder changed, you need to stop the notebook docker using `docker stop python_notebook_name` and restart the notebook kernel using `docker run -i -t --name python_notebook_name --rm -p 8888:8888 -v "$(pwd):/python_kernel/notebook" opendigger-jupyter-python:1.0` to reload the source code. 8. You can find the notebook folder, where we provide demos in the handbook. You can create a new file, and happy data exploring!
Attention: you need to do this work in `notebook` or other parallel folder. If you run in root directory, it can't work because of python import rules. diff --git a/python/db/clickhouse.py b/python/db/clickhouse.py index 036607876..9dcd3c543 100644 --- a/python/db/clickhouse.py +++ b/python/db/clickhouse.py @@ -15,4 +15,4 @@ def query(q): return client.execute(q) def queryDataframe(q): client = getClient() - return client.query_dataframe(q) + return client.query_dataframe(q, replace_nonwords=False) diff --git a/python/requirements.txt b/python/requirements.txt new file mode 100644 index 000000000..1ff59d325 --- /dev/null +++ b/python/requirements.txt @@ -0,0 +1,9 @@ +easydict==1.9 +py2neo>=2021.2.3 +plotly==5.9.0 +clickhouse-driver>=0.2.8 +numpy>=1.23.2 +jupyterlab==3.4.5 +matplotlib>=3.5.3 +pandas>=1.4.3 +pyyaml>=6.0 \ No newline at end of file diff --git a/python_v2/README.md b/python_v2/README.md index e2621e6be..1a2fc072c 100644 --- a/python_v2/README.md +++ b/python_v2/README.md @@ -7,7 +7,7 @@ Start your ClickHouse container, which should be set up in [Clickhouse-sample-da 2. Enter the repo path `cd open-digger` -3. Go to the `python` folder in the open-digger root directory, create a file named 'local_config.py'(this file has already added into `.gitignore` file.) for Python Kernel with the following contents: +3. Go to the `python_v2` folder in the open-digger root directory, create a file named 'local_config.py'(this file has already added into `.gitignore` file.) for Python Kernel with the following contents: ```python local_config = { @@ -22,7 +22,7 @@ Start your ClickHouse container, which should be set up in [Clickhouse-sample-da } } ``` - the `host` above is the host of the ClickHouse server. We can find it using `docker inspect containert_name`, and copy the `Gateway` like this: + the `host` above is the host of the ClickHouse server. 
We can find it using `docker inspect container_name`(the container_name is set by command docker run --name xxx), and copy the `Gateway` like this: ```shell $ docker inspect container_name | grep Gateway @@ -32,17 +32,17 @@ Start your ClickHouse container, which should be set up in [Clickhouse-sample-da "IPv6Gateway": "", ``` If you use your own data, you can also change `host` field to your own host IP -4. Use `docker build -t opendigger-jupyter-python:1.0 $(pwd)` to make a docker image, this image is based on `miniconda`. You can check the `Dockerfile` in root directory. +4. Use `docker build --build-arg KER_REL_PATH='./python_v2' --build-arg BASE_IMAGE='continuumio/miniconda3' -t opendigger-jupyter-python:1.0 $(pwd)` to make a docker image, this image is based on `miniconda`. You can check the `Dockerfile` in root directory. > If you are using **Windows CMD**, all the `$(pwd)` here should be replaced by `%cd%`. And if you are using **Windows Powershell**, all the `$(pwd)` here should be replaced by `${pwd}`. > > **Notice:** Pathnames of directories like "pwd" may use `\` to join the directory in some versions of Windows. We recommend using absolute paths. -5. Then we can use `docker run -it --name python_notebook_name --rm -p 8888:8888 -v $(pwd):/python_kernel/notebook opendigger-jupyter-python:1.0` to create and run the container. +5. Then we can use `docker run -i -t --name python_notebook_name --rm -p 8888:8888 -v "$(pwd):/python_kernel/notebook" opendigger-jupyter-python:1.0` to create and run the container. 6. Open the link in console log like `http://127.0.0.1:8888/lab?token=xxxxx`. -7. If the source code under `python` folder changed, you need to stop the notebook docker using `docker stop python_notebook_name` and restart the notebook kernel using `docker run -it --name python_notebook_name --rm -p 8888:8888 -v $(pwd):/python_kernel/notebook opendigger-jupyter-python:1.0` to reload the sorce code. +7. 
If the source code under `python` folder changed, you need to stop the notebook docker using `docker stop python_notebook_name` and restart the notebook kernel using `docker run -i -t --name python_notebook_name --rm -p 8888:8888 -v "$(pwd):/python_kernel/notebook" opendigger-jupyter-python:1.0` to reload the sorce code. 8. You can find the notebook folder, where we provide demos in the handbook. You can create a new file, and happy data exploring! Attention: you need to do this work in `notebook` or other parallel folder. If you run in root directory, it can't work because of python import rules. diff --git a/python_v2/config.py b/python_v2/config.py index 6b343b722..02cdfe1ed 100644 --- a/python_v2/config.py +++ b/python_v2/config.py @@ -34,17 +34,17 @@ } } def mergeConfig(base_config, local_config): - for key in base_config.keys(): - if isinstance(base_config[key], dict) and isinstance(local_config[key], dict): - mergeConfig(base_config[key], local_config[key]) - else: - base_config[key] = local_config[key] + for key, val in local_config.items(): + if isinstance(val, dict): + mergeConfig(base_config[key], val) + else: + base_config[key] = val return base_config -def getConfig(local_config=None): - local_config = local_config or {} +def getConfig(): global config if not inited: try: + from local_config import local_config config = mergeConfig(config, local_config) return config except: diff --git a/python_v2/db/clickhouse_wrapper.py b/python_v2/db/clickhouse_wrapper.py index e7e15afcf..5c16878a3 100644 --- a/python_v2/db/clickhouse_wrapper.py +++ b/python_v2/db/clickhouse_wrapper.py @@ -21,4 +21,4 @@ def query(self, q): return self.client.execute(q) def queryDataframe(self,q): - return self.client.query_dataframe(q) + return self.client.query_dataframe(q, replace_nonwords=False) diff --git a/python_v2/requirements.txt b/python_v2/requirements.txt index 4cbf4dd95..b4171a668 100644 --- a/python_v2/requirements.txt +++ b/python_v2/requirements.txt @@ -1,11 +1,11 @@ 
DateTime==5.4 -clickhouse-driver==0.2.6 +clickhouse-driver>=0.2.8 easydict==1.11 ipynbname==2023.2.0.0 -jupyterlab +jupyterlab>=3.2.8 matplotlib>=3.5.3 -numpy>=1.21.5 -pandas==1.4.4 +numpy>=1.23.2 +pandas>=1.4.3 plotly==5.9.0 py2neo==2021.2.4 typing==3.7.4.3 diff --git a/requirements_python.txt b/requirements_python.txt deleted file mode 100644 index 6bd089b3e..000000000 --- a/requirements_python.txt +++ /dev/null @@ -1,9 +0,0 @@ -easydict==1.9 -py2neo==2021.2.3 -plotly==5.9.0 -clickhouse-driver==0.2.3 -numpy==1.23.2 -jupyterlab==3.4.5 -matplotlib==3.5.3 -pandas==1.4.3 -pyyaml==6.0 \ No newline at end of file diff --git a/sample_data/README.md b/sample_data/README.md index 0b4fe23dd..a3e987a29 100644 --- a/sample_data/README.md +++ b/sample_data/README.md @@ -21,7 +21,8 @@ To use sample data from OSS service, you need to follow the steps: - Linux/MacOS: `docker run -d --name container_name -m 8G -p 8123:8123 -p 9000:9000 --ulimit nofile=262144:262144 --volume=$(pwd)/folder_name/:/data/ registry.cn-beijing.aliyuncs.com/open-digger/open-digger-clickhouse-base:v2`; - Windows: `docker run -d --name container_name -m 8G -p 8123:8123 -p 9000:9000 --ulimit nofile=262144:262144 --volume=%cd%/folder_name/:/data/ registry.cn-beijing.aliyuncs.com/open-digger/open-digger-clickhouse-base:v2`. - + + In the above command lines, `$(pwd)` or `%cd%` makes sure the `host-src` be an absolute path. > **Notice**: As referred in [Docker's Doc](https://docs.docker.com/engine/reference/run/#volume-shared-filesystems), the `host-src` in `--volume=[host-src:]container-dest[:]` must be an absolute path or a name value. 
> @@ -47,6 +48,24 @@ To use the sample data, at minimum 8 GB memory should be allocated to the contai ### Use Notebook image +#### Kernels + +| kernel_name | kernel_src_dir | kernel_requirements | kernel_readme | kernel_build_run | kernel_base_img | +| ----------- | -------------- | -------------------------------- | ------------------------- | ---------------- | ------------------------------------------------------------ | +| node.js | "./src" | "./package.json"->"dependencies" | "./sample_data/README.md" | "./package.json" | "./package.json"->"scripts"."notebook"->"docker pull {kernel_base_img}" | +| python | "./python" | "./python/requirements.txt" | "./python/README.md" | docker command | "continuumio/miniconda3" | +| python_v2 | "./python_v2" | "./python_v2/requirements.txt" | "./python_v2/README.md" | docker command | "continuumio/miniconda3" | +| pycjs | "./pycjs" | "./pycjs/requirements.txt" | "./pycjs/README.md" | "./package.json" | "./package.json"->"scripts"."notebook"->"docker pull {kernel_base_img}" | + +The kernel node.js only depends on the JavaScript environment, remains up-to-date. + +The kernel python and kernel python_v2 only depend on the Python environment, *stopped updating*. The python_v2 is more updated. + +The kernel pycjs depends on the JavaScript environment and a node_vm2 Python package, always automatically updates synchronously with the kernel node.js, which is a Python interface where the values of variables are retrieved from node.js sandbox created by the VM. + +Recommended kernels: kernel node.js for TypeScript/JavaScript language, kernel pycjs for python language. Warning: Make sure the implementations of metrics are consistent with their definitions when using the kernel python or the kernel python_v2. + + #### Node.js Version Start your ClickHouse container, which should be set up in the last step. Now: @@ -57,7 +76,7 @@ Start your ClickHouse container, which should be set up in the last step. Now: 3.
Install the necessary packages `npm install` -4. Go to the src folder in the open-digger root directory, create a file named 'local_config.ts' with the following contents: +4. Go to the `src` folder in the open-digger root directory, create a file named 'local_config.ts' with the following contents: ```typescript export default { @@ -79,13 +98,17 @@ Start your ClickHouse container, which should be set up in the last step. Now: #### Python Version +The format `$${}` represents the values of a chosen Python kernel in the Kernels table. + Start your ClickHouse container, which should be set up in the last step. Now: 1. Clone OpenDigger `git clone https://github.com/X-lab2017/open-digger.git` 2. Enter the repo path `cd open-digger` -3. Go to the `src` folder in the open-digger root directory, create a file named 'local_config.py' for Python Kernel with the following contents: + *If use the kernel pycjs: Install the necessary packages `npm install`. + +3. Go to the `$${kernel_src_dir}` folder in the open-digger root directory, create a file named 'local_config.py' for Python Kernel with the following contents: ```python local_config = { @@ -100,7 +123,7 @@ Start your ClickHouse container, which should be set up in the last step. Now: } } ``` - the `host` above is the host of the ClickHouse server. We can find it using `docker inspect containert_name`, and copy the `Gateway` like this: + the `host` above is the host of the ClickHouse server. We can find it using `docker inspect container_name`(the container_name is set by command docker run --name xxx), and copy the `Gateway` like this: ```shell $ docker inspect container_name | grep Gateway @@ -110,17 +133,21 @@ Start your ClickHouse container, which should be set up in the last step. Now: "IPv6Gateway": "", ``` -4. Use `docker build -t opendigger-jupyter-python:1.0 $(pwd)` to make a docker image, this image is based on `miniconda`. You can check the `Dockerfile` in root directory. 
+ Return the repo path `cd open-digger`. + + *If use the kernel pycjs: Build ts `npm run build`. Since the npm run build command is important to active every settings change, the kernel pycjs supports `npm run notebook-pycjs` to execute the *npm run build, docker build and docker run* command automatically, instead of manually executing them step by step as below. + +4. Use `docker build --build-arg KER_REL_PATH='$${kernel_src_dir}' --build-arg BASE_IMAGE='$${kernel_base_img}' -t opendigger-jupyter-python:1.0 $(pwd)` to make a docker image. The base python image is based on `miniconda`. You can check the `Dockerfile` in root directory. > If you are using **Windows CMD**, all the `$(pwd)` here should be replaced by `%cd%`. And if you are using **Windows Powershell**, all the `$(pwd)` here should be replaced by `${pwd}`. > > **Notice:** Pathnames of directories like "pwd" may use `\` to join the directory in some versions of Windows. We recommend using absolute paths. -5. Then we can use `docker run -it --name python_notebook_name --rm -p 8888:8888 -v $(pwd):/python_kernel/notebook opendigger-jupyter-python:1.0` to create and run the container. +5. Then we can use `docker run -i -t --name python_notebook_name --rm -p 8888:8888 -v "$(pwd):/python_kernel/notebook" opendigger-jupyter-python:1.0` to create and run the container. 6. Open the link in console log like `http://127.0.0.1:8888/lab?token=xxxxx`. -7. If the source code under `src` folder changed, you need to stop the notebook docker using `docker stop python_notebook_name` and restart the notebook kernel using `docker run -it --name python_notebook_name --rm -p 8888:8888 -v $(pwd):/python_kernel/notebook opendigger-jupyter-python:1.0` to reload the sorce code. +7. 
If the source code under `src` folder changed, you need to stop the notebook docker using `docker stop python_notebook_name` and restart the notebook kernel using `docker run -i -t --name python_notebook_name --rm -p 8888:8888 -v "$(pwd):/python_kernel/notebook" opendigger-jupyter-python:1.0` to reload the source code. 8. You can find the notebook folder, where we provide demos in the handbook. You can create a new file, and happy data exploring!