Skip to content
This repository has been archived by the owner on Jul 22, 2024. It is now read-only.

Commit

Permalink
Add 1-round FedMA for MNIST with LeNet
Browse files Browse the repository at this point in the history
  • Loading branch information
hwang595 committed Jul 14, 2020
1 parent 7e6cfff commit 4b586a5
Show file tree
Hide file tree
Showing 2 changed files with 41 additions and 12 deletions.
37 changes: 25 additions & 12 deletions matching/pfnm.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,6 +168,8 @@ def block_patching(w_j, L_next, assignment_j_c, layer_index, model_meta_data,
shape_estimator = ModerateCNNContainerConvBlocks(num_filters=matching_shapes)
elif dataset == "mnist":
shape_estimator = ModerateCNNContainerConvBlocksMNIST(num_filters=matching_shapes)
elif network_name == "lenet":
shape_estimator = LeNetContainer(num_filters=matching_shapes, kernel_size=5)

if dataset in ("cifar10", "cinic10"):
dummy_input = torch.rand(1, 3, 32, 32)
Expand Down Expand Up @@ -890,7 +892,9 @@ def layer_wise_group_descent(batch_weights, layer_index, batch_frequencies, sigm
#sigma_inv_layer = [np.array([1 / sigma_bias] + (weights_bias[j].shape[1] - 1) * [1 / sigma]) for j in range(J)]

#sigma_inv_layer = [np.array((matching_shapes[layer_index - 2]) * [1 / sigma] + [1 / sigma_bias] + [y / sigma for y in last_layer_const[j]]) for j in range(J)]
sigma_inv_layer = [np.array((matching_shapes[layer_index - 2]) * [1 / sigma] + [1 / sigma_bias]) for j in range(J)]

#sigma_inv_layer = [np.array((matching_shapes[layer_index - 2]) * [1 / sigma] + [1 / sigma_bias]) for j in range(J)]
sigma_inv_layer = [np.array([1 / sigma_bias] + (weights_bias[j].shape[1] - 1) * [1 / sigma]) for j in range(J)]

elif (layer_index > 1 and layer_index < (n_layers - 1)):
layer_type = model_layer_type[2 * layer_index - 2]
Expand Down Expand Up @@ -966,19 +970,28 @@ def layer_wise_group_descent(batch_weights, layer_index, batch_frequencies, sigm
# softmax_inv_sigma]

# remove fitting the last layer
if first_fc_identifier:
global_weights_out = [global_weights_c[:, 0:-softmax_bias.shape[0]-1].T,
global_weights_c[:, -softmax_bias.shape[0]-1]]
# if first_fc_identifier:
# global_weights_out = [global_weights_c[:, 0:-softmax_bias.shape[0]-1].T,
# global_weights_c[:, -softmax_bias.shape[0]-1]]

global_inv_sigmas_out = [global_sigmas_c[:, 0:-softmax_bias.shape[0]-1].T,
global_sigmas_c[:, -softmax_bias.shape[0]-1]]
else:
global_weights_out = [global_weights_c[:, 0:matching_shapes[layer_index - 1 - 1]].T,
global_weights_c[:, matching_shapes[layer_index - 1 - 1]]]
# global_inv_sigmas_out = [global_sigmas_c[:, 0:-softmax_bias.shape[0]-1].T,
# global_sigmas_c[:, -softmax_bias.shape[0]-1]]
# else:
# global_weights_out = [global_weights_c[:, 0:matching_shapes[layer_index - 1 - 1]].T,
# global_weights_c[:, matching_shapes[layer_index - 1 - 1]]]

global_inv_sigmas_out = [global_sigmas_c[:, 0:matching_shapes[layer_index - 1 - 1]].T,
global_sigmas_c[:, matching_shapes[layer_index - 1 - 1]]]
logger.info("Branch B, Layer index: {}, Global weights out shapes: {}".format(layer_index, [gwo.shape for gwo in global_weights_out]))
# global_inv_sigmas_out = [global_sigmas_c[:, 0:matching_shapes[layer_index - 1 - 1]].T,
# global_sigmas_c[:, matching_shapes[layer_index - 1 - 1]]]
layer_type = model_layer_type[2 * layer_index - 2]
gwc_shape = global_weights_c.shape
if "conv" in layer_type or 'features' in layer_type:
global_weights_out = [global_weights_c[:, 0:gwc_shape[1]-1], global_weights_c[:, gwc_shape[1]-1]]
global_inv_sigmas_out = [global_sigmas_c[:, 0:gwc_shape[1]-1], global_sigmas_c[:, gwc_shape[1]-1]]
elif "fc" in layer_type or 'classifier' in layer_type:
global_weights_out = [global_weights_c[:, 0:gwc_shape[1]-1].T, global_weights_c[:, gwc_shape[1]-1]]
global_inv_sigmas_out = [global_sigmas_c[:, 0:gwc_shape[1]-1].T, global_sigmas_c[:, gwc_shape[1]-1]]

logger.info("#### Branch B, Layer index: {}, Global weights out shapes: {}".format(layer_index, [gwo.shape for gwo in global_weights_out]))

elif (layer_index > 1 and layer_index < (n_layers - 1)):
layer_type = model_layer_type[2 * layer_index - 2]
Expand Down
16 changes: 16 additions & 0 deletions matching/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,4 +101,20 @@ def __init__(self, num_filters, output_dim=10):

def forward(self, x):
    """Apply the convolutional feature-extractor stack to *x* and return its output."""
    return self.conv_layer(x)


class LeNetContainer(nn.Module):
    """Convolutional trunk of LeNet (two conv + max-pool stages, no activations).

    Used purely to estimate intermediate feature-map shapes during matching,
    which is why the ReLU nonlinearities of a full LeNet are omitted — they do
    not affect output shape.

    Args:
        num_filters: sequence with at least two entries; ``num_filters[0]`` and
            ``num_filters[1]`` are the channel counts of the two conv layers.
        kernel_size: square kernel size shared by both conv layers (default 5).
    """

    def __init__(self, num_filters, kernel_size=5):
        super(LeNetContainer, self).__init__()
        # Single-channel (MNIST) input; stride 1 on both convolutions.
        self.conv1 = nn.Conv2d(1, num_filters[0], kernel_size, 1)
        self.conv2 = nn.Conv2d(num_filters[0], num_filters[1], kernel_size, 1)

    def forward(self, x):
        # Each stage: convolution followed by 2x2 max-pooling with stride 2.
        for conv in (self.conv1, self.conv2):
            x = F.max_pool2d(conv(x), 2, 2)
        return x

0 comments on commit 4b586a5

Please sign in to comment.