
Revert "Merge pull request #190 from aidotse/subpackages"
This reverts commit cf89641, reversing
changes made to f30b53a.
fazelehh committed Jan 13, 2025
1 parent cf89641 commit 3453f65
Showing 25 changed files with 2,653 additions and 1,212 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/run_tests.yml
@@ -18,11 +18,11 @@ jobs:
uses: actions/setup-python@v4
with:
python-version: 3.9
- name: Install dependencies from pyproject.toml

- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install .[dev]
pip install -r requirements.txt
- name: Ruff Linting
run: |
38 changes: 38 additions & 0 deletions environment.yml
@@ -0,0 +1,38 @@
name: leakpro
channels:
- conda-forge
dependencies:
# Core Python
- python=3.9.0

# Numerical and Data Analysis
- numpy
- pandas
- scipy
- scikit-learn

# Visualization
- matplotlib
- seaborn
- pillow

# Deep Learning
- pytorch
- torchvision
- torchmetrics

# Utilities
- dotmap
- loguru
- jinja2
- tqdm
- pyyaml
- numba
- pydantic
- joblib

# Development and Testing
- pytest
- pytest-mock


5 changes: 5 additions & 0 deletions examples/mia/LOS/ReadMe.md
@@ -0,0 +1,5 @@
# Length-of-Stay Use Case
In this use case, we focus on attacking length-of-stay classifier models. As part of the example, we train a Logistic Regression model and a Gated Recurrent Unit with Decay (GRU-D).

## MIMIC-III Data Preprocessing
To prepare the data, refer to the instructions in `mimic_prepration/ReadMe.md`.
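
For orientation, the logistic-regression model this example trains can be as small as a single linear layer in PyTorch (which the environment.yml above provides). The sketch below is only an illustration of that idea; the class name, input width, and number of classes are assumptions, not the repository's actual `utils/model_LR.py`:

```python
import torch
from torch import nn


class LogisticRegressionLOS(nn.Module):
    """Illustrative logistic-regression classifier for flattened length-of-stay features."""

    def __init__(self, num_features: int, num_classes: int = 2) -> None:
        super().__init__()
        # A single linear layer; the softmax is applied implicitly by
        # nn.CrossEntropyLoss during training.
        self.linear = nn.Linear(num_features, num_classes)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.linear(x)
```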
27 changes: 14 additions & 13 deletions examples/mia/LOS/audit.yaml
@@ -1,15 +1,15 @@
audit: # Configurations for auditing
random_seed: 1234 # Integer specifying the random seed
attack_list:
# rmia:
# training_data_fraction: 0.5 # Fraction of the auxiliary dataset to use for this attack (in each shadow model training)
# attack_data_fraction: 0.5 # Fraction of auxiliary dataset to sample from during attack
# num_shadow_models: 8 # Number of shadow models to train
# online: True # perform online or offline attack
# temperature: 2
# gamma: 1.0
# offline_a: 0.33 # parameter from which we compute p(x) from p_OUT(x) such that p_IN(x) = a p_OUT(x) + b.
# offline_b: 0.66
rmia:
training_data_fraction: 0.5 # Fraction of the auxiliary dataset to use for this attack (in each shadow model training)
attack_data_fraction: 0.5 # Fraction of auxiliary dataset to sample from during attack
num_shadow_models: 8 # Number of shadow models to train
online: True # perform online or offline attack
temperature: 2
gamma: 1.0
offline_a: 0.33 # parameter from which we compute p(x) from p_OUT(x) such that p_IN(x) = a p_OUT(x) + b.
offline_b: 0.66
# qmia:
# training_data_fraction: 1.0 # Fraction of the auxiliary dataset (data without train and test indices) to use for training the quantile regressor
# epochs: 5 # Number of training epochs for quantile regression
@@ -32,12 +32,13 @@ audit: # Configurations for auditing

target:
# Target model path
module_path: "utils/model.py"
model_class: "MimicLR"
module_path: "utils/model_LR.py" # either model_grud.py or model_LR.py for logestic regression
model_class: "LR" # LR/GRUD
# Data paths
target_folder: "./target"
data_path: "./data/dataset.pkl"
target_folder: "./target_LR" # either target_GRUD or target_LR
data_path: "./data/flattened/dataset.pkl" #unflattened dataset for GRUD and flattened dataset for LR

shadow_model:
model_class: # LR/GRUD

distillation_model:
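
As a side note on the rmia settings above: the offline_a / offline_b comment describes the interpolation p_IN(x) = a * p_OUT(x) + b. Below is a minimal sketch of reading those two values from this file with PyYAML (also listed in environment.yml) and applying that formula; the helper function is added here for illustration and is not part of LeakPro's API:

```python
import yaml

# Parse the audit configuration shown in the diff above
# (path assumed relative to the example directory).
with open("audit.yaml") as f:
    config = yaml.safe_load(f)

rmia_cfg = config["audit"]["attack_list"]["rmia"]
a = rmia_cfg["offline_a"]  # 0.33
b = rmia_cfg["offline_b"]  # 0.66


def p_in(p_out: float) -> float:
    """Interpolation from the config comment: p_IN(x) = a * p_OUT(x) + b."""
    return a * p_out + b


print(p_in(0.5))  # 0.33 * 0.5 + 0.66 = 0.825
```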
