Skip to content

Commit

Permalink
update
Browse files Browse the repository at this point in the history
  • Loading branch information
wgc-research committed Jul 18, 2023
1 parent 138c5d7 commit 99534bf
Show file tree
Hide file tree
Showing 419 changed files with 36,654 additions and 0 deletions.
18 changes: 18 additions & 0 deletions fgssl/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
from __future__ import absolute_import, division, print_function

__version__ = '0.2.1'


def _setup_logger():
    """Attach a stream handler with a standard format to the package logger.

    Configures the "federatedscope" logger once at import time so records
    look like ``<time> (<module>:<lineno>) <LEVEL>: <message>``.
    Propagation is disabled so records are not printed a second time by
    the root logger.
    """
    import logging

    # NOTE: the concatenated literals previously lacked a separator space,
    # rendering "...:%(lineno)d)%(levelname)s" run together; fixed here.
    logging_fmt = "%(asctime)s (%(module)s:%(lineno)d) " \
                  "%(levelname)s: %(message)s"
    logger = logging.getLogger("federatedscope")
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(logging_fmt))
    logger.addHandler(handler)
    # Avoid duplicate output via the root logger's handlers.
    logger.propagate = False


_setup_logger()
Empty file added fgssl/attack/__init__.py
Empty file.
32 changes: 32 additions & 0 deletions fgssl/attack/auxiliary/MIA_get_target_data.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
import torch
from federatedscope.attack.auxiliary.utils import get_data_info


def get_target_data(dataset_name, pth=None):
    """Return target data for a membership-inference attack.

    Args:
        dataset_name (str): the dataset name; only 'femnist' currently has
            synthetic-data generation implemented.
        pth (str): the path storing the target data. Loading from disk is
            not implemented yet ("JUST FOR SHOWCASE"); when given, ``None``
            is returned.

    Returns:
        list: ``[syn_data, syn_label]`` where ``syn_data`` is a random
        float tensor of shape ``[20, *data_feature_dim]`` and ``syn_label``
        holds 20 random integer class ids; ``None`` when ``pth`` is given.

    Raises:
        ValueError: if no path is given and ``dataset_name`` is
            unsupported (the original silently fell through, leaving
            ``syn_data`` unbound).
    """
    # JUST FOR SHOWCASE
    if pth is not None:
        # TODO: load the stored target data from `pth`; explicit return
        # replaces the original bare `pass`, which yielded None implicitly.
        return None

    # generate the synthetic data
    if dataset_name != 'femnist':
        # Fail loudly instead of returning an unbound/implicit result.
        raise ValueError(
            'No synthetic target data available for dataset {}'.format(
                dataset_name))

    data_feature_dim, num_class, is_one_hot_label = get_data_info(
        dataset_name)

    # generate random data: shape is [num_syn_data, *data_feature_dim]
    num_syn_data = 20
    data_dim = [num_syn_data]
    data_dim.extend(data_feature_dim)
    syn_data = torch.randn(data_dim)
    syn_label = torch.randint(low=0,
                              high=num_class,
                              size=(num_syn_data, ))
    return [syn_data, syn_label]
16 changes: 16 additions & 0 deletions fgssl/attack/auxiliary/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
from federatedscope.attack.auxiliary.utils import *
from federatedscope.attack.auxiliary.attack_trainer_builder \
import wrap_attacker_trainer
from federatedscope.attack.auxiliary.backdoor_utils import *
from federatedscope.attack.auxiliary.poisoning_data import *
from federatedscope.attack.auxiliary.create_edgeset import *

# Public API of the auxiliary sub-package.
# NOTE: the original list contained duplicates
# ('get_passive_PIA_auxiliary_dataset' and 'create_ardis_poisoned_dataset'
# each appeared twice); each name is now listed exactly once.
__all__ = [
    'get_passive_PIA_auxiliary_dataset', 'iDLG_trick', 'cos_sim',
    'get_classifier', 'get_data_info', 'get_data_sav_fn', 'get_info_diff_loss',
    'sav_femnist_image', 'get_reconstructor', 'get_generator',
    'get_data_property', 'load_poisoned_dataset_edgeset',
    'load_poisoned_dataset_pixel', 'selectTrigger', 'poisoning',
    'create_ardis_poisoned_dataset', 'create_ardis_test_dataset'
]
23 changes: 23 additions & 0 deletions fgssl/attack/auxiliary/attack_trainer_builder.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
def wrap_attacker_trainer(base_trainer, config):
    """Wrap the trainer for an attack client.

    Args:
        base_trainer (core.trainers.GeneralTorchTrainer): the trainer that
            will be wrapped;
        config (federatedscope.core.configs.config.CN): the configure;

    Returns:
        The wrapped trainer; Type: core.trainers.GeneralTorchTrainer

    Raises:
        ValueError: if ``config.attack.attack_method`` is not one of
            'gan_attack', 'gradascent' or 'backdoor' (case-insensitive).
    """
    # Normalize once instead of calling .lower() in every branch.
    attack_method = config.attack.attack_method.lower()
    # Imports are deferred per branch so importing this module does not
    # pull in every attack-trainer implementation.
    if attack_method == 'gan_attack':
        from federatedscope.attack.trainer import wrap_GANTrainer
        return wrap_GANTrainer(base_trainer)
    elif attack_method == 'gradascent':
        from federatedscope.attack.trainer import wrap_GradientAscentTrainer
        return wrap_GradientAscentTrainer(base_trainer)
    elif attack_method == 'backdoor':
        from federatedscope.attack.trainer import wrap_backdoorTrainer
        return wrap_backdoorTrainer(base_trainer)
    else:
        raise ValueError('Trainer {} is not provided'.format(
            config.attack.attack_method))
Loading

0 comments on commit 99534bf

Please sign in to comment.