Add random.seed() #419

Merged · 3 commits · Jul 13, 2023
Changes from all commits
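The substance of the PR is a single idiom applied across the test suite: reset BrainPy's shared random state at the top of each test body, so no test depends on RNG state left behind by another. A minimal sketch of the idiom (assuming brainpy's public API as it appears in the diffs below; the fixed seed value is illustrative, not from this PR):

```python
import brainpy as bp
import brainpy.math as bm

# Reset the shared default RNG. With no argument a fresh seed is drawn;
# passing an integer pins the stream for a reproducible run.
bm.random.seed()
bm.random.seed(42)  # illustrative fixed seed

x = bm.random.randn(2)  # samples now come from the freshly seeded RNG
y = bp.dnn.ReLU()(x)    # the layers under test consume such samples
```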
4 changes: 2 additions & 2 deletions .github/workflows/CI.yml
@@ -169,8 +169,8 @@ jobs:
python -m pip install --upgrade pip
python -m pip install flake8 pytest
python -m pip install numpy>=1.21.0
python -m pip install "jaxlib==0.4.10" -f https://whls.blob.core.windows.net/unstable/index.html --use-deprecated legacy-resolver
python -m pip install jax==0.4.10
python -m pip install "jaxlib==0.4.11" -f https://whls.blob.core.windows.net/unstable/index.html --use-deprecated legacy-resolver
python -m pip install jax==0.4.11
python -m pip install -r requirements-dev.txt
python -m pip install tqdm brainpylib
pip uninstall brainpy -y
27 changes: 27 additions & 0 deletions brainpy/_src/dnn/tests/test_activation.py
@@ -22,6 +22,7 @@ def test_Threshold(self, inplace):
inplace=[True, False]
)
def test_ReLU(self, inplace):
+ bm.random.seed()
ReLU_layer = bp.dnn.ReLU(inplace)
input = bm.random.randn(2)
if inplace == True:
@@ -33,6 +34,7 @@ def test_ReLU(self, inplace):
inplace=[True, False]
)
def test_RReLU(self, inplace):
+ bm.random.seed()
RReLU_layer = bp.dnn.RReLU(lower=0, upper=1, inplace=inplace)
input = bm.random.randn(2)
if inplace == True:
@@ -44,6 +46,7 @@ def test_RReLU(self, inplace):
inplace=[True, False]
)
def test_Hardtanh(self, inplace):
+ bm.random.seed()
Hardtanh_layer = bp.dnn.Hardtanh(min_val=0, max_val=1, inplace=inplace)
input = bm.random.randn(2)
if inplace == True:
@@ -55,6 +58,7 @@ def test_Hardtanh(self, inplace):
inplace=[True, False]
)
def test_ReLU6(self, inplace):
+ bm.random.seed()
ReLU6_layer = bp.dnn.ReLU6(inplace=inplace)
input = bm.random.randn(2)
if inplace == True:
@@ -63,6 +67,7 @@ def test_ReLU6(self, inplace):
output = ReLU6_layer(input)

def test_Sigmoid(self):
+ bm.random.seed()
Sigmoid_layer = bp.dnn.Sigmoid()
input = bm.random.randn(2)
output = Sigmoid_layer(input)
@@ -71,6 +76,7 @@ def test_Sigmoid(self):
inplace=[True, False]
)
def test_Hardsigmoid(self, inplace):
+ bm.random.seed()
Hardsigmoid_layer = bp.dnn.Hardsigmoid(inplace=inplace)
input = bm.random.randn(2)
if inplace == True:
@@ -79,6 +85,7 @@ def test_Hardsigmoid(self, inplace):
output = Hardsigmoid_layer(input)

def test_Tanh(self):
+ bm.random.seed()
Tanh_layer = bp.dnn.Tanh()
input = bm.random.randn(2)
output = Tanh_layer(input)
@@ -87,6 +94,7 @@ def test_Tanh(self):
inplace=[True, False]
)
def test_SiLU(self, inplace):
+ bm.random.seed()
SiLU_layer = bp.dnn.SiLU(inplace=inplace)
input = bm.random.randn(2)
if inplace == True:
@@ -98,6 +106,7 @@ def test_SiLU(self, inplace):
inplace=[True, False]
)
def test_Mish(self, inplace):
+ bm.random.seed()
Mish_layer = bp.dnn.Mish(inplace=inplace)
input = bm.random.randn(2)
if inplace == True:
@@ -109,6 +118,7 @@ def test_Mish(self, inplace):
inplace=[True, False]
)
def test_Hardswish(self, inplace):
+ bm.random.seed()
Hardswish_layer = bp.dnn.Hardswish(inplace=inplace)
input = bm.random.randn(2)
if inplace == True:
@@ -120,6 +130,7 @@ def test_Hardswish(self, inplace):
inplace=[True, False]
)
def test_ELU(self, inplace):
+ bm.random.seed()
ELU_layer = bp.dnn.ELU(alpha=0.5, inplace=inplace)
input = bm.random.randn(2)
if inplace == True:
@@ -131,6 +142,7 @@ def test_ELU(self, inplace):
inplace=[True, False]
)
def test_CELU(self, inplace):
+ bm.random.seed()
CELU_layer = bp.dnn.CELU(alpha=0.5, inplace=inplace)
input = bm.random.randn(2)
if inplace == True:
@@ -142,6 +154,7 @@ def test_CELU(self, inplace):
inplace=[True, False]
)
def test_SELU(self, inplace):
+ bm.random.seed()
SELU_layer = bp.dnn.SELU(inplace=inplace)
input = bm.random.randn(2)
if inplace == True:
@@ -150,6 +163,7 @@ def test_SELU(self, inplace):
output = SELU_layer(input)

def test_GLU(self):
+ bm.random.seed()
GLU_layer = bp.dnn.GLU()
input = bm.random.randn(4, 2)
output = GLU_layer(input)
@@ -158,11 +172,13 @@ def test_GLU(self):
approximate=['tanh', 'none']
)
def test_GELU(self, approximate):
+ bm.random.seed()
GELU_layer = bp.dnn.GELU()
input = bm.random.randn(2)
output = GELU_layer(input)

def test_Hardshrink(self):
+ bm.random.seed()
Hardshrink_layer = bp.dnn.Hardshrink(lambd=1)
input = bm.random.randn(2)
output = Hardshrink_layer(input)
@@ -171,6 +187,7 @@ def test_Hardshrink(self):
inplace=[True, False]
)
def test_LeakyReLU(self, inplace):
+ bm.random.seed()
LeakyReLU_layer = bp.dnn.LeakyReLU(inplace=inplace)
input = bm.random.randn(2)
if inplace == True:
@@ -179,6 +196,7 @@ def test_LeakyReLU(self, inplace):
output = LeakyReLU_layer(input)

def test_LogSigmoid(self):
+ bm.random.seed()
LogSigmoid_layer = bp.dnn.LogSigmoid()
input = bm.random.randn(2)
output = LogSigmoid_layer(input)
@@ -188,46 +206,55 @@ def test_LogSigmoid(self):
threshold=[20, 21, 22]
)
def test_Softplus(self, beta, threshold):
+ bm.random.seed()
Softplus_layer = bp.dnn.Softplus(beta=beta, threshold=threshold)
input = bm.random.randn(2)
output = Softplus_layer(input)

def test_Softshrink(self):
+ bm.random.seed()
Softshrink_layer = bp.dnn.Softshrink(lambd=1)
input = bm.random.randn(2)
output = Softshrink_layer(input)

def test_PReLU(self):
+ bm.random.seed()
PReLU_layer = bp.dnn.PReLU(num_parameters=2, init=0.5)
input = bm.random.randn(2)
output = PReLU_layer(input)

def test_Softsign(self):
+ bm.random.seed()
Softsign_layer = bp.dnn.Softsign()
input = bm.random.randn(2)
output = Softsign_layer(input)

def test_Tanhshrink(self):
+ bm.random.seed()
Tanhshrink_layer = bp.dnn.Tanhshrink()
input = bm.random.randn(2)
output = Tanhshrink_layer(input)

def test_Softmin(self):
+ bm.random.seed()
Softmin_layer = bp.dnn.Softmin(dim=2)
input = bm.random.randn(2, 3, 4)
output = Softmin_layer(input)

def test_Softmax(self):
+ bm.random.seed()
Softmax_layer = bp.dnn.Softmax(dim=2)
input = bm.random.randn(2, 3, 4)
output = Softmax_layer(input)

def test_Softmax2d(self):
+ bm.random.seed()
Softmax2d_layer = bp.dnn.Softmax2d()
input = bm.random.randn(2, 3, 12, 13)
output = Softmax2d_layer(input)

def test_LogSoftmax(self):
+ bm.random.seed()
LogSoftmax_layer = bp.dnn.LogSoftmax(dim=2)
input = bm.random.randn(2, 3, 4)
output = LogSoftmax_layer(input)
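Before the convolution tests, a self-contained sketch of the test shape these hunks produce; the `@parameterized.product(...)` decorator is inferred from the `inplace=[True, False]` fragments above, so treat it as a reconstruction rather than the verbatim file:

```python
import brainpy as bp
import brainpy.math as bm
from absl.testing import absltest, parameterized


class TestActivation(parameterized.TestCase):

  @parameterized.product(inplace=[True, False])
  def test_ReLU(self, inplace):
    bm.random.seed()              # the line this PR adds to every test
    layer = bp.dnn.ReLU(inplace)
    x = bm.random.randn(2)
    layer(x)                      # behaviour branches on inplace, per the hunks above


if __name__ == '__main__':
  absltest.main()
```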
23 changes: 18 additions & 5 deletions brainpy/_src/dnn/tests/test_conv_layers.py
@@ -4,12 +4,13 @@
from absl.testing import absltest
import jax.numpy as jnp
import brainpy.math as bm

+ from absl.testing import parameterized
import brainpy as bp


- class TestConv(bp.testing.UnitTestCase):
+ class TestConv(parameterized.TestCase):
def test_Conv2D_img(self):
+ bm.random.seed()
img = jnp.zeros((2, 200, 198, 4))
for k in range(4):
x = 30 + 60 * k
@@ -28,6 +29,7 @@ def test_Conv2D_img(self):
# plt.show()

def test_conv1D(self):
+ bm.random.seed()
with bp.math.training_environment():
model = bp.layers.Conv1d(in_channels=3, out_channels=32, kernel_size=(3,))

@@ -41,6 +43,7 @@ def test_conv1D(self):
# plt.show()

def test_conv2D(self):
+ bm.random.seed()
with bp.math.training_environment():
model = bp.layers.Conv2d(in_channels=3, out_channels=32, kernel_size=(3, 3))

@@ -54,15 +57,17 @@ def test_conv2D(self):
# plt.show()

def test_conv3D(self):
+ bm.random.seed()
with bp.math.training_environment():
model = bp.layers.Conv3d(in_channels=3, out_channels=32, kernel_size=(3, 3, 3))
input = bp.math.ones((2, 5, 5, 5, 3))
out = model(input)
print("out shape: ", out.shape)


- class TestConvTranspose1d(bp.testing.UnitTestCase):
+ class TestConvTranspose1d(parameterized.TestCase):
def test_conv_transpose(self):
+ bm.random.seed()
x = bm.ones((1, 8, 3))
for use_bias in [True, False]:
conv_transpose_module = bp.layers.ConvTranspose1d(
@@ -92,6 +97,7 @@ def test_conv_transpose(self):
self.assertTrue(bm.allclose(y, correct_ans))

def test_single_input_masked_conv_transpose(self):
+ bm.random.seed()
x = jnp.ones((1, 8, 3))
m = jnp.tril(jnp.ones((3, 3, 4)))
conv_transpose_module = bp.layers.ConvTranspose1d(
@@ -120,6 +126,7 @@ def test_single_input_masked_conv_transpose(self):
self.assertTrue(bm.allclose(y, correct_ans))

def test_computation_padding_same(self):
+ bm.random.seed()
data = jnp.ones([1, 3, 1])
for use_bias in [True, False]:
net = bp.layers.ConvTranspose1d(
@@ -141,8 +148,9 @@ def test_computation_padding_same(self):
self.assertTrue(bm.allclose(out, expected_out, rtol=1e-5))


- class TestConvTranspose2d(bp.testing.UnitTestCase):
+ class TestConvTranspose2d(parameterized.TestCase):
def test_conv_transpose(self):
+ bm.random.seed()
x = bm.ones((1, 8, 8, 3))
for use_bias in [True, False]:
conv_transpose_module = bp.layers.ConvTranspose2d(
@@ -159,6 +167,7 @@ def test_conv_transpose(self):
print(y.shape)

def test_single_input_masked_conv_transpose(self):
+ bm.random.seed()
x = jnp.ones((1, 8, 8, 3))
m = jnp.tril(jnp.ones((3, 3, 3, 4)))
conv_transpose_module = bp.layers.ConvTranspose2d(
@@ -174,6 +183,7 @@ def test_single_input_masked_conv_transpose(self):
print(y.shape)

def test_computation_padding_same(self):
+ bm.random.seed()
x = bm.ones((1, 8, 8, 3))
for use_bias in [True, False]:
conv_transpose_module = bp.layers.ConvTranspose2d(
@@ -191,8 +201,9 @@ def test_computation_padding_same(self):
print(y.shape)


- class TestConvTranspose3d(bp.testing.UnitTestCase):
+ class TestConvTranspose3d(parameterized.TestCase):
def test_conv_transpose(self):
+ bm.random.seed()
x = bm.ones((1, 8, 8, 8, 3))
for use_bias in [True, False]:
conv_transpose_module = bp.layers.ConvTranspose3d(
@@ -208,6 +219,7 @@ def test_conv_transpose(self):
print(y.shape)

def test_single_input_masked_conv_transpose(self):
+ bm.random.seed()
x = jnp.ones((1, 8, 8, 8, 3))
m = jnp.tril(jnp.ones((3, 3, 3, 3, 4)))
conv_transpose_module = bp.layers.ConvTranspose3d(
@@ -223,6 +235,7 @@ def test_single_input_masked_conv_transpose(self):
print(y.shape)

def test_computation_padding_same(self):
+ bm.random.seed()
x = bm.ones((1, 8, 8, 8, 3))
for use_bias in [True, False]:
conv_transpose_module = bp.layers.ConvTranspose3d(
5 changes: 4 additions & 1 deletion brainpy/_src/dnn/tests/test_function.py
@@ -5,12 +5,14 @@
import jax.numpy as jnp
import brainpy.math as bm
from absl.testing import absltest
+ from absl.testing import parameterized
import brainpy as bp


- class TestFunction(bp.testing.UnitTestCase):
+ class TestFunction(parameterized.TestCase):

def test_flatten_batching_mode(self):
+ bm.random.seed()
layer = bp.dnn.Flatten(mode=bm.BatchingMode())
input = bm.random.randn(20, 10, 10, 6)

@@ -20,6 +22,7 @@ def test_flatten_batching_mode(self):
self.assertEqual(output.shape, expected_shape)

def test_flatten_non_batching_mode(self):
+ bm.random.seed()
layer = bp.dnn.Flatten(mode=bm.NonBatchingMode())
input = bm.random.randn(10, 10, 6)

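For reference, a standalone sketch of what the two Flatten tests exercise; the printed shapes are assumptions from the input shapes above (batching mode preserves the leading batch axis, non-batching mode flattens every axis):

```python
import brainpy as bp
import brainpy.math as bm

bm.random.seed()

# Batching mode: the leading axis is the batch and is kept.
flat = bp.dnn.Flatten(mode=bm.BatchingMode())
print(flat(bm.random.randn(20, 10, 10, 6)).shape)  # assumed: (20, 600)

# Non-batching mode: all axes are flattened into one.
flat = bp.dnn.Flatten(mode=bm.NonBatchingMode())
print(flat(bm.random.randn(10, 10, 6)).shape)      # assumed: (600,)
```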