Move tests outside of code directory

Armavica committed Feb 21, 2023
1 parent ec30a2f commit 1ef1f95

Showing 93 changed files with 159 additions and 155 deletions.
122 changes: 61 additions & 61 deletions .github/workflows/tests.yml
@@ -38,65 +38,65 @@ jobs:
python-version: ["3.10"]
test-subset:
- |
pymc/tests/test_util.py
pymc/tests/test_pytensorf.py
pymc/tests/test_math.py
pymc/tests/backends/test_base.py
pymc/tests/backends/test_ndarray.py
pymc/tests/step_methods/hmc/test_hmc.py
pymc/tests/test_func_utils.py
pymc/tests/distributions/test_shape_utils.py
pymc/tests/distributions/test_mixture.py
tests/test_util.py
tests/test_pytensorf.py
tests/test_math.py
tests/backends/test_base.py
tests/backends/test_ndarray.py
tests/step_methods/hmc/test_hmc.py
tests/test_func_utils.py
tests/distributions/test_shape_utils.py
tests/distributions/test_mixture.py
- |
pymc/tests/distributions/test_continuous.py
pymc/tests/distributions/test_multivariate.py
tests/distributions/test_continuous.py
tests/distributions/test_multivariate.py
- |
pymc/tests/distributions/test_bound.py
pymc/tests/distributions/test_censored.py
pymc/tests/distributions/test_simulator.py
pymc/tests/sampling/test_forward.py
pymc/tests/sampling/test_population.py
pymc/tests/stats/test_convergence.py
pymc/tests/stats/test_log_likelihood.py
pymc/tests/distributions/test_distribution.py
pymc/tests/distributions/test_discrete.py
tests/distributions/test_bound.py
tests/distributions/test_censored.py
tests/distributions/test_simulator.py
tests/sampling/test_forward.py
tests/sampling/test_population.py
tests/stats/test_convergence.py
tests/stats/test_log_likelihood.py
tests/distributions/test_distribution.py
tests/distributions/test_discrete.py
- |
pymc/tests/tuning/test_scaling.py
pymc/tests/tuning/test_starting.py
pymc/tests/distributions/test_dist_math.py
pymc/tests/distributions/test_transform.py
pymc/tests/sampling/test_mcmc.py
pymc/tests/sampling/test_parallel.py
pymc/tests/test_printing.py
tests/tuning/test_scaling.py
tests/tuning/test_starting.py
tests/distributions/test_dist_math.py
tests/distributions/test_transform.py
tests/sampling/test_mcmc.py
tests/sampling/test_parallel.py
tests/test_printing.py
- |
pymc/tests/distributions/test_timeseries.py
pymc/tests/gp/test_cov.py
pymc/tests/gp/test_gp.py
pymc/tests/gp/test_mean.py
pymc/tests/gp/test_util.py
pymc/tests/test_model.py
pymc/tests/test_model_graph.py
pymc/tests/ode/test_ode.py
pymc/tests/ode/test_utils.py
pymc/tests/step_methods/hmc/test_quadpotential.py
tests/distributions/test_timeseries.py
tests/gp/test_cov.py
tests/gp/test_gp.py
tests/gp/test_mean.py
tests/gp/test_util.py
tests/test_model.py
tests/test_model_graph.py
tests/ode/test_ode.py
tests/ode/test_utils.py
tests/step_methods/hmc/test_quadpotential.py
- |
pymc/tests/distributions/test_truncated.py
pymc/tests/logprob/test_abstract.py
pymc/tests/logprob/test_censoring.py
pymc/tests/logprob/test_composite_logprob.py
pymc/tests/logprob/test_cumsum.py
pymc/tests/logprob/test_joint_logprob.py
pymc/tests/logprob/test_mixture.py
pymc/tests/logprob/test_rewriting.py
pymc/tests/logprob/test_scan.py
pymc/tests/logprob/test_tensor.py
pymc/tests/logprob/test_transforms.py
pymc/tests/logprob/test_utils.py
tests/distributions/test_truncated.py
tests/logprob/test_abstract.py
tests/logprob/test_censoring.py
tests/logprob/test_composite_logprob.py
tests/logprob/test_cumsum.py
tests/logprob/test_joint_logprob.py
tests/logprob/test_mixture.py
tests/logprob/test_rewriting.py
tests/logprob/test_scan.py
tests/logprob/test_tensor.py
tests/logprob/test_transforms.py
tests/logprob/test_utils.py
fail-fast: false
runs-on: ${{ matrix.os }}
@@ -164,10 +164,10 @@ jobs:
floatx: [float64]
python-version: ["3.8"]
test-subset:
- pymc/tests/variational/test_approximations.py pymc/tests/variational/test_callbacks.py pymc/tests/variational/test_inference.py pymc/tests/variational/test_opvi.py pymc/tests/test_initial_point.py
- pymc/tests/test_model.py pymc/tests/sampling/test_mcmc.py
- pymc/tests/gp/test_cov.py pymc/tests/gp/test_gp.py pymc/tests/gp/test_mean.py pymc/tests/gp/test_util.py pymc/tests/ode/test_ode.py pymc/tests/ode/test_utils.py pymc/tests/smc/test_smc.py pymc/tests/sampling/test_parallel.py
- pymc/tests/step_methods/test_metropolis.py pymc/tests/step_methods/test_slicer.py pymc/tests/step_methods/hmc/test_nuts.py pymc/tests/step_methods/test_compound.py pymc/tests/step_methods/hmc/test_hmc.py
- tests/variational/test_approximations.py tests/variational/test_callbacks.py tests/variational/test_inference.py tests/variational/test_opvi.py tests/test_initial_point.py
- tests/test_model.py tests/sampling/test_mcmc.py
- tests/gp/test_cov.py tests/gp/test_gp.py tests/gp/test_mean.py tests/gp/test_util.py tests/ode/test_ode.py tests/ode/test_utils.py tests/smc/test_smc.py tests/sampling/test_parallel.py
- tests/step_methods/test_metropolis.py tests/step_methods/test_slicer.py tests/step_methods/hmc/test_nuts.py tests/step_methods/test_compound.py tests/step_methods/hmc/test_hmc.py

fail-fast: false
runs-on: ${{ matrix.os }}
@@ -238,16 +238,16 @@ jobs:
python-version: ["3.9"]
test-subset:
- |
pymc/tests/sampling/test_parallel.py
pymc/tests/test_data.py
pymc/tests/test_model.py
tests/sampling/test_parallel.py
tests/test_data.py
tests/test_model.py
- |
pymc/tests/sampling/test_mcmc.py
tests/sampling/test_mcmc.py
- |
pymc/tests/backends/test_arviz.py
pymc/tests/variational/test_updates.py
tests/backends/test_arviz.py
tests/variational/test_updates.py
fail-fast: false
runs-on: ${{ matrix.os }}
env:
@@ -312,7 +312,7 @@ jobs:
floatx: [float64]
python-version: ["3.9"]
test-subset:
- pymc/tests/sampling/test_jax.py pymc/tests/sampling/test_mcmc_external.py
- tests/sampling/test_jax.py tests/sampling/test_mcmc_external.py
fail-fast: false
runs-on: ${{ matrix.os }}
env:
@@ -383,7 +383,7 @@ jobs:
floatx: [float32]
python-version: ["3.10"]
test-subset:
- pymc/tests/sampling/test_mcmc.py pymc/tests/ode/test_ode.py pymc/tests/ode/test_utils.py
- tests/sampling/test_mcmc.py tests/ode/test_ode.py tests/ode/test_utils.py
fail-fast: false
runs-on: ${{ matrix.os }}
env:
2 changes: 1 addition & 1 deletion MANIFEST.in
@@ -1,4 +1,4 @@
recursive-include pymc/tests/data *
recursive-include tests/data *
recursive-include source *
# because of an upload-size limit by PyPI, we're temporarily removing docs from the tarball:
recursive-exclude docs *
2 changes: 1 addition & 1 deletion codecov.yml
@@ -20,7 +20,7 @@ coverage:
base: auto

ignore:
- "pymc/tests/*"
- "tests/*"
- "pymc/_version.py"

comment:
18 changes: 9 additions & 9 deletions docs/source/contributing/implementing_distribution.md
@@ -233,14 +233,14 @@ pm.logcdf(blah, [-0.5, 1.5]).eval()

## 3. Adding tests for the new `RandomVariable`

Tests for new `RandomVariables` are mostly located in `pymc/tests/distributions/test_*.py`.
Tests for new `RandomVariables` are mostly located in `tests/distributions/test_*.py`.
Most tests can be accommodated by the default `BaseTestDistributionRandom` class, which provides default tests for checking:
1. Expected inputs are passed to the `rv_op` by the `dist` `classmethod`, via `check_pymc_params_match_rv_op`
1. Expected (exact) draws are being returned, via `check_pymc_draws_match_reference`
1. Shape variable inference is correct, via `check_rv_size`

```python
from pymc.tests.distributions.util import BaseTestDistributionRandom, seeded_scipy_distribution_builder
from tests.distributions.util import BaseTestDistributionRandom, seeded_scipy_distribution_builder

class TestBlah(BaseTestDistributionRandom):

@@ -279,7 +279,7 @@ class TestBlahAltParam2(BaseTestDistributionRandom):

```

Custom tests can also be added to the class as is done for the {class}`~pymc.tests.distributions.test_continuous.TestFlat`.
Custom tests can also be added to the class as is done for the {class}`~tests.distributions.test_continuous.TestFlat`.

### Note on `check_rv_size` test:

@@ -292,7 +292,7 @@ tests_to_run = ["check_rv_size"]
```

This is usually needed for Multivariate distributions.
You can see an example in {class}`~pymc.tests.distributions.test_multivariate.TestDirichlet`.
You can see an example in {class}`~tests.distributions.test_multivariate.TestDirichlet`.
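As a rough sketch (reusing the hypothetical `Blah` distribution from the sections above, not an actual PyMC distribution), such a restricted test class could look like:

```python
import pymc as pm

from tests.distributions.util import BaseTestDistributionRandom


class TestBlahSizeOnly(BaseTestDistributionRandom):
    # Hypothetical example: only the size/shape inference check is run,
    # e.g. because the exact draws have no convenient external reference.
    pymc_dist = pm.Blah
    pymc_dist_params = {"param1": 1.0, "param2": 2.0}
    expected_rv_op_params = {"param1": 1.0, "param2": 2.0}
    tests_to_run = ["check_rv_size"]
```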

### Notes on `check_pymcs_draws_match_reference` test

@@ -302,17 +302,17 @@ The latter kind of test (if warranted) can be performed with the aid of `pymc_ra
This kind of test only makes sense if there is a good independent generator reference (i.e., not just the same composition of NumPy / SciPy calls that is done inside `rng_fn`).

Finally, when your `rng_fn` is doing something more than just calling a NumPy or SciPy method, you will need to set up an equivalent seeded function with which to compare for the exact draws (instead of relying on `seeded_[scipy|numpy]_distribution_builder`).
You can find an example in {class}`~pymc.tests.distributions.test_continuous.TestWeibull`, whose `rng_fn` returns `beta * np.random.weibull(alpha, size=size)`.
You can find an example in {class}`~tests.distributions.test_continuous.TestWeibull`, whose `rng_fn` returns `beta * np.random.weibull(alpha, size=size)`.
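For intuition, a minimal sketch of such a seeded reference could look like the following (illustrative only; `seeded_weibull_reference` is not a helper from the test suite):

```python
import numpy as np


def seeded_weibull_reference(alpha, beta, seed=20230221):
    # Mirrors an rng_fn of the form beta * np.random.weibull(alpha, size=size),
    # but draws from an explicitly seeded Generator so the reference draws are
    # reproducible and can be compared exactly against the PyMC implementation.
    rng = np.random.default_rng(seed)

    def ref_rand(size):
        return beta * rng.weibull(alpha, size=size)

    return ref_rand
```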


## 4. Adding tests for the `logp` / `logcdf` methods

Tests for the `logp` and `logcdf` mostly make use of the helpers `check_logp`, `check_logcdf`, and
`check_selfconsistency_discrete_logcdf` implemented in `~pymc.tests.distributions.util`
`check_selfconsistency_discrete_logcdf` implemented in `~tests.distributions.util`

```python
from pymc.tests.distributions.util import check_logp, check_logcdf, Domain
from pymc.tests.helpers import select_by_precision
from tests.helpers import select_by_precision
from tests.distributions.util import check_logp, check_logcdf, Domain

R = Domain([-np.inf, -2.1, -1, -0.01, 0.0, 0.01, 1, 2.1, np.inf])
Rplus = Domain([0, 0.01, 0.1, 0.9, 0.99, 1, 1.5, 2, 100, np.inf])
@@ -382,7 +382,7 @@ which checks if:

import pytest
from pymc.distributions import Blah
from pymc.tests.distributions.util import assert_moment_is_expected
from tests.distributions.util import assert_moment_is_expected

@pytest.mark.parametrize(
"param1, param2, size, expected",
6 changes: 3 additions & 3 deletions docs/source/contributing/running_the_test_suite.md
@@ -19,7 +19,7 @@ Therefore, we recommend to run just specific tests that target the parts of the

To run all tests from a single file:
```bash
pytest -v pymc/tests/test_model.py
pytest -v tests/test_model.py
```

```{tip}
@@ -31,7 +31,7 @@ By using the `-k` flag, you can filter for test cases that match a certain patte
For example, the following command runs all test cases from `test_model.py` that have "coord" in their name:

```bash
pytest -v pymc/tests/test_model.py -k coord
pytest -v tests/test_model.py -k coord
```


@@ -40,7 +40,7 @@ Note that because you are not running the entire test suite, the coverage will b
But you can still watch for specific line numbers of the code that you're working on.

```bash
pytest -v --cov=pymc --cov-report term-missing pymc/tests/<name of test>.py
pytest -v --cov=pymc --cov-report term-missing tests/<name of test>.py
```
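Under the new layout you can also point the same command at a whole test subdirectory, for example (assuming that directory exists in your checkout):

```bash
pytest -v --cov=pymc --cov-report term-missing tests/step_methods/
```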

When you are reasonably confident about the changes you made, you can push the changes and open a pull request.
2 changes: 1 addition & 1 deletion scripts/check_all_tests_are_covered.py
@@ -23,7 +23,7 @@ def find_testfiles():
dp_repo = Path(__file__).parent.parent
all_tests = {
str(fp.relative_to(dp_repo)).replace(os.sep, "/")
for fp in (dp_repo / "pymc" / "tests").glob("**/test_*.py")
for fp in (dp_repo / "tests").glob("**/test_*.py")
}
_log.info("Found %i tests in total.", len(all_tests))
return all_tests
2 changes: 1 addition & 1 deletion scripts/run_mypy.py
@@ -159,7 +159,7 @@ def check_no_unexpected_results(mypy_lines: Iterator[str]):
args, _ = parser.parse_known_args()

cp = subprocess.run(
["mypy", "--show-error-codes", "--exclude", "pymc/tests", "pymc"],
["mypy", "--show-error-codes", "--exclude", "tests", "pymc"],
capture_output=True,
)
output = cp.stdout.decode()
2 changes: 1 addition & 1 deletion setup.cfg
@@ -1,5 +1,5 @@
[tool:pytest]
testpaths = pymc/tests
testpaths = tests

[coverage:run]
omit = *examples*
4 changes: 4 additions & 0 deletions pymc/tests/variational/__init__.py → tests/__init__.py
@@ -11,3 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pymc as pm

_log = pm._log
File renamed without changes.
@@ -22,7 +22,7 @@
import pytest

from pymc.backends import base
from pymc.tests import models
from tests import models


class ModelBackendSetupTestCase:
@@ -326,7 +326,7 @@ def test_overwrite_model_coords_dims(self):
assert np.all(idata2.constant_data.x.dim2.values == np.array(["c1", "c2"]))

def test_missing_data_model(self):
# source pymc/pymc/tests/test_missing.py
# source tests/test_missing.py
data = ma.masked_values([1, 2, -1, 4, -1], value=-1)
model = pm.Model()
with model:
File renamed without changes.
@@ -17,7 +17,7 @@
import pytest

from pymc.backends import base, ndarray
from pymc.tests.backends import fixtures as bf
from tests.backends import fixtures as bf

STATS1 = [{"a": np.float64, "b": bool}]

File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -32,7 +32,7 @@
from pymc.logprob.joint_logprob import logp
from pymc.logprob.utils import ParameterValueError
from pymc.pytensorf import floatX
from pymc.tests.distributions.util import (
from tests.distributions.util import (
BaseTestDistributionRandom,
Circ,
Domain,
@@ -49,8 +49,8 @@
seeded_numpy_distribution_builder,
seeded_scipy_distribution_builder,
)
from pymc.tests.helpers import select_by_precision
from pymc.tests.logprob.utils import create_pytensor_params, scipy_logprob_tester
from tests.helpers import select_by_precision
from tests.logprob.utils import create_pytensor_params, scipy_logprob_tester

try:
from polyagamma import polyagamma_cdf, polyagamma_pdf, random_polyagamma
@@ -33,7 +33,8 @@
from pymc.logprob.joint_logprob import logp
from pymc.logprob.utils import ParameterValueError
from pymc.pytensorf import floatX
from pymc.tests.distributions.util import (
from pymc.vartypes import discrete_types
from tests.distributions.util import (
BaseTestDistributionRandom,
Bool,
Domain,
@@ -57,8 +58,7 @@
seeded_numpy_distribution_builder,
seeded_scipy_distribution_builder,
)
from pymc.tests.logprob.utils import create_pytensor_params, scipy_logprob_tester
from pymc.vartypes import discrete_types
from tests.logprob.utils import create_pytensor_params, scipy_logprob_tester


def discrete_weibull_logpmf(value, q, beta):
@@ -37,8 +37,8 @@
)
from pymc.logprob.utils import ParameterValueError
from pymc.pytensorf import floatX
from pymc.tests.checks import close_to
from pymc.tests.helpers import verify_grad
from tests.checks import close_to
from tests.helpers import verify_grad


@pytest.mark.parametrize(
@@ -50,8 +50,8 @@
from pymc.logprob.joint_logprob import logp
from pymc.model import Model
from pymc.sampling import draw, sample
from pymc.tests.distributions.util import assert_moment_is_expected
from pymc.util import _FutureWarningValidatingScratchpad
from tests.distributions.util import assert_moment_is_expected


class TestBugfixes: