Commit b512a59
resolved merge conflicts
ssh51117 committed Nov 8, 2023
2 parents 5353565 + 13d1957 commit b512a59
Showing 18 changed files with 533 additions and 681 deletions.
4 changes: 2 additions & 2 deletions .github/pull_request_template.md
@@ -6,7 +6,7 @@ Github Issue Number Here: <YOUR_GITHUB_ISSUE_NUMBER_HERE (include the hashtag)>
**What solution does this PR provide?**

**Testing Methodology**
> How did you test your changes and verify that existing
> functionality is not broken
How did you test your changes and verify that existing
functionality is not broken

**Any other considerations**
84 changes: 23 additions & 61 deletions .github/workflows/backend.yml
@@ -12,37 +12,22 @@ on:

jobs:
lint:
runs-on: ubuntu-latest
runs-on: ubuntu-22.04
steps:
#----------------------------------------------
# check-out repo and set-up mamba env
#----------------------------------------------
- name: Check out repository
uses: actions/checkout@v3

- name: Setup Mamba
uses: conda-incubator/setup-miniconda@v2
with:
miniforge-version: latest
use-mamba: true
activate-environment: dlp

- name: Cache Mamba env
id: cached-mamba-env
uses: actions/cache@v3
env:
# Increase this value to reset cache if etc/example-environment.yml has not changed
CACHE_NUMBER: 0

- uses: mamba-org/setup-micromamba@v1
with:
path: /usr/share/miniconda3/envs/
key:
mamba-${{ runner.os }}-${{env.CACHE_NUMBER }}-${{hashFiles('**/training/environment.yml') }}

- name: Update environment
run:
mamba env update -n dlp -f
training/environment.yml
if: steps.cached-mamba-env.outputs.cache-hit != 'true'
micromamba-version: '1.4.5-0'
environment-file: training/environment.yml
init-shell: >-
bash
cache-environment: true
cache-environment-key: mamba-env-${{ runner.os }}-${{ hashFiles('**/training/environment.yml') }}
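Assembled from the added lines above (the removed `setup-miniconda`, `actions/cache`, and `mamba env update` steps from the old workflow are interleaved in this hunk), the new environment-setup step would read roughly as follows — a sketch, with step ordering inferred rather than copied from the final file:

```yaml
# Sketch of the new setup step, assembled from the added lines in the hunk above.
- uses: mamba-org/setup-micromamba@v1
  with:
    micromamba-version: '1.4.5-0'
    environment-file: training/environment.yml
    init-shell: >-
      bash
    cache-environment: true
    cache-environment-key: mamba-env-${{ runner.os }}-${{ hashFiles('**/training/environment.yml') }}
```

Because `setup-micromamba` creates the environment declared in `training/environment.yml` and caches it when `cache-environment: true` is set, the separate `actions/cache` step and the manual `mamba env update` are no longer needed.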

#----------------------------------------------
# load cached venv if cache exists
@@ -51,7 +36,7 @@ jobs:
id: cached-poetry-dependencies
uses: actions/cache@v3
with:
path: /usr/share/miniconda3/envs/dlp
path: /home/runner/micromamba/envs/dlp/
key: training-venv-${{ runner.os }}-${{ hashFiles('**/training/poetry.lock') }}
restore-keys: training-venv-${{ runner.os }}-

@@ -62,11 +47,8 @@
- name: Install dependencies
if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
run: pwd && cd training && poetry install --no-interaction --no-root && poetry env info -p
shell: bash -el {0}
shell: micromamba-shell {0}

- name: Add Poetry env to PATH
run: cd training && echo "$(poetry env info --path)/bin" >> $GITHUB_PATH
shell: bash -el {0}
#----------------------------------------------
# pyright static checker
#----------------------------------------------
@@ -76,6 +58,7 @@ jobs:
with:
github_token: ${{ secrets.GITHUB_TOKEN }} # You need this
reporter: github-pr-review # Change reporter.
lib: true

#----------------------------------------------
# black formatter
@@ -102,54 +85,33 @@ jobs:
#----------------------------------------------
- name: Check out repository
uses: actions/checkout@v3

- name: Setup Mamba
uses: conda-incubator/setup-miniconda@v2
with:
miniforge-version: latest
use-mamba: true
activate-environment: dlp

- name: Cache Mamba env
id: cached-mamba-env
uses: actions/cache@v3
env:
# Increase this value to reset cache if etc/example-environment.yml has not changed
CACHE_NUMBER: 0
- uses: mamba-org/setup-micromamba@v1
with:
path: /usr/share/miniconda3/envs/
key:
mamba-${{ runner.os }}-${{env.CACHE_NUMBER }}-${{hashFiles('**/training/environment.yml') }}

- name: Update environment
run:
mamba env update -n dlp -f
training/environment.yml
if: steps.cached-mamba-env.outputs.cache-hit != 'true'
micromamba-version: '1.4.5-0'
environment-file: training/environment.yml
init-shell: >-
bash
cache-environment: true
cache-environment-key: mamba-env-${{ runner.os }}-${{ hashFiles('**/training/environment.yml') }}

#----------------------------------------------
# load cached venv if cache exists
#----------------------------------------------

- name: Load cached venv
id: cached-poetry-dependencies
uses: actions/cache@v3
with:
path: /usr/share/miniconda3/envs/dlp
path: /home/runner/micromamba/envs/dlp/
key: training-venv-${{ runner.os }}-${{ hashFiles('**/training/poetry.lock') }}
restore-keys: training-venv-${{ runner.os }}-

#----------------------------------------------
# install dependencies if cache does not exist
#----------------------------------------------
- name: Install dependencies
if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true'
run: cd training && poetry install --no-interaction --no-root
shell: bash -el {0}

- name: Add Poetry env to PATH
run: cd training && echo "$(poetry env info --path)/bin" >> $GITHUB_PATH
shell: bash -el {0}
shell: micromamba-shell {0}
#----------------------------------------------
# run test suite
#----------------------------------------------
@@ -158,6 +120,6 @@ jobs:
export AWS_ACCESS_KEY_ID=testing
export AWS_SECRET_ACCESS_KEY=testing
export AWS_DEFAULT_REGION=us-west-2
cd training && poetry run pytest tests
shell: bash -el {0}
# cd training && pytest tests
shell: micromamba-shell {0}
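Putting the added lines together, the test job's final step presumably runs the suite through `micromamba-shell` instead of `bash -el` — a sketch (the step name is an assumption, since it is not shown in the hunk; the commands are taken from the diff above):

```yaml
# Sketch only: the step name is an assumption; the commands come from the diff above.
- name: Run test suite
  run: |
    export AWS_ACCESS_KEY_ID=testing
    export AWS_SECRET_ACCESS_KEY=testing
    export AWS_DEFAULT_REGION=us-west-2
    cd training && poetry run pytest tests
    # cd training && pytest tests
  shell: micromamba-shell {0}
```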

22 changes: 0 additions & 22 deletions README.md
@@ -92,28 +92,6 @@ Run the following commands in the project directory (the root folder created aft

Make sure to run the above two commands in separate terminals.

## AWS Setup
If you will be working on tasks that interface with AWS resources/services, follow the steps below (install the AWS CLI first using this [link](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html)):

1. Request an AWS Account for Deep Learning Playground by messaging Faris, Karthik, or Daniel in the DLP Discord. Please include your GitHub username along with your personal email account.
1. Once an AWS Account has been created, you will receive an email from AWS that will require you to set up a password.
1. When you log in, you should see that the account you're added under is `Data Science Initiative Inc`.
1. Click on the dropdown to expand the `Data Science Initiative Inc` entry and select the `Command Line or programmatic access` button.
1. Open your terminal and navigate to the DLP directory.
1. Run `aws configure sso`. Follow the prompts to enter the SSO Start URL (this comes from step 2) and the values below:
```
sso_region = us-east-1
sso_session = dlp
sso_registration_scopes = sso:account:access
default output format = None
cli profile name = just press enter (use the default one provided)
```
1. Follow the instructions in the terminal to make sure your credentials are set correctly (e.g., answer "yes" when asked to allow botocore to access data).
1. Run `cat ~/.aws/config` to confirm the SSO profile was configured (a sample is sketched after these steps).
1. Run `export AWS_PROFILE=<sso_profile_name from step 6>`.

Please message in the DLP Discord if you have any difficulty with these steps.
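For reference, after `aws configure sso` completes, the SSO profile that step 8 inspects typically looks like the sketch below. The profile name, account ID, role name, and start URL are placeholders, not the real DLP values:

```ini
# Hypothetical example of ~/.aws/config after `aws configure sso`;
# every value below is a placeholder.
[profile dlp-dev]
sso_session = dlp
sso_account_id = 123456789012
sso_role_name = DeveloperAccess

[sso-session dlp]
sso_start_url = https://example.awsapps.com/start
sso_region = us-east-1
sso_registration_scopes = sso:account:access
```

The profile name in brackets is what you pass to `export AWS_PROFILE=...` in the last step.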
# Architecture

See [Architecture.md](./.github/Architecture.md)
8 changes: 4 additions & 4 deletions backend/poetry.lock

Some generated files are not rendered by default.

13 changes: 13 additions & 0 deletions backend/tests/test_model.py
@@ -1,5 +1,6 @@
import pytest
import torch.nn as nn
from torch.autograd import Variable
from dl.dl_model import *


@@ -12,7 +13,19 @@
],
)
def test_dlmodel(input_list):
print("input_list: " + str(input_list) + " is of type " + str(type(input_list)))
my_model = DLModel(input_list)
print("my_model: " + str(my_model) + " is of type " + str(type(my_model)))
print(
"[module for module in my_model.model.modules() if not isinstance(module, nn.Sequential)]: "
+ str(
[
module
for module in my_model.model.modules()
if not isinstance(module, nn.Sequential)
]
)
)
assert [
module
for module in my_model.model.modules()
6 changes: 6 additions & 0 deletions backend/tests/test_model_parser.py
@@ -1,5 +1,6 @@
import pytest
import torch.nn as nn
from torch.autograd import Variable
from dl.dl_model_parser import *

# come up with the expected parsing
@@ -26,4 +27,9 @@
],
)
def test_parse_user_architecture(user_model, expected):
print(
"parse_user_architecture(user_model): "
+ str(parse_deep_user_architecture(user_model))
)
print("expected: " + str(expected))
assert [i == j for i, j in zip(parse_deep_user_architecture(user_model), expected)]
25 changes: 10 additions & 15 deletions serverless/yarn.lock
@@ -1610,11 +1610,6 @@
resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz#786c5f41f043b07afb1af37683d7c33668858f6d"
integrity sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ==

"@fastify/busboy@^2.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.0.0.tgz#f22824caff3ae506b18207bad4126dbc6ccdb6b8"
integrity sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ==

"@graphql-tools/executor@^0.0.18":
version "0.0.18"
resolved "https://registry.yarnpkg.com/@graphql-tools/executor/-/executor-0.0.18.tgz#5b9f881d59484ea55278de3b4743ece8d68bc6e7"
@@ -4531,9 +4526,9 @@ pkg-up@^3.1.0:
find-up "^3.0.0"

postcss@^8.4.27:
version "8.4.31"
resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.31.tgz#92b451050a9f914da6755af352bdc0192508656d"
integrity sha512-PS08Iboia9mts/2ygV3eLpY5ghnUcfLV/EXTOW1E2qYxJKGGBUtNjN76FYHnMs36RmARn41bC0AZmn+rR0OVpQ==
version "8.4.28"
resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.28.tgz#c6cc681ed00109072816e1557f889ef51cf950a5"
integrity sha512-Z7V5j0cq8oEKyejIKfpD8b4eBy9cwW2JWPk0+fB1HOAMsfHbnAXLLS+PfVWlzMSLQaWttKDt607I0XHmpE67Vw==
dependencies:
nanoid "^3.3.6"
picocolors "^1.0.0"
@@ -5195,11 +5190,11 @@ ultron@~1.1.0:
integrity sha512-UIEXBNeYmKptWH6z8ZnqTeS8fV74zG0/eRU9VGkpzz+LIJNs8W/zM/L+7ctCkRrgbNnnR0xxw4bKOr0cW0N0Og==

undici@^5.12.0:
version "5.26.3"
resolved "https://registry.yarnpkg.com/undici/-/undici-5.26.3.tgz#ab3527b3d5bb25b12f898dfd22165d472dd71b79"
integrity sha512-H7n2zmKEWgOllKkIUkLvFmsJQj062lSm3uA4EYApG8gLuiOM0/go9bIoC3HVaSnfg4xunowDE2i9p8drkXuvDw==
version "5.23.0"
resolved "https://registry.yarnpkg.com/undici/-/undici-5.23.0.tgz#e7bdb0ed42cebe7b7aca87ced53e6eaafb8f8ca0"
integrity sha512-1D7w+fvRsqlQ9GscLBwcAJinqcZGHUKjbOmXdlE/v8BvEGXjeWAax+341q44EuTcHXXnfyKNbKRq4Lg7OzhMmg==
dependencies:
"@fastify/busboy" "^2.0.0"
busboy "^1.6.0"

universalify@^2.0.0:
version "2.0.0"
@@ -5542,6 +5537,6 @@ zip-stream@^4.1.0:
readable-stream "^3.6.0"

zod@^3.21.4:
version "3.22.3"
resolved "https://registry.yarnpkg.com/zod/-/zod-3.22.3.tgz#2fbc96118b174290d94e8896371c95629e87a060"
integrity sha512-EjIevzuJRiRPbVH4mGc8nApb/lVLKVpmUhAaR5R5doKGfAnGJ6Gr3CViAVjP+4FWSxCsybeWQdcgCtbX+7oZug==
version "3.22.1"
resolved "https://registry.yarnpkg.com/zod/-/zod-3.22.1.tgz#815f850baf933fef96c1061322dbe579b1a80c27"
integrity sha512-+qUhAMl414+Elh+fRNtpU+byrwjDFOS1N7NioLY+tSlcADTx4TkCUua/hxJvxwDXcV4397/nZ420jy4n4+3WUg==