feat: github actions for publishing #141

Merged · 8 commits · Nov 8, 2022

Changes from all commits
22 changes: 22 additions & 0 deletions .github/workflows/conventional-prs.yml
@@ -0,0 +1,22 @@
name: Title
on:
pull_request_target:
types:
- opened
- reopened
- edited
- synchronize

permissions:
contents: read

jobs:
title-format:
permissions:
pull-requests: read # for amannn/action-semantic-pull-request to analyze PRs
statuses: write # for amannn/action-semantic-pull-request to mark status of analyzed PR
runs-on: ubuntu-latest
steps:
- uses: amannn/action-semantic-pull-request@v4.6.0
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
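
For context, amannn/action-semantic-pull-request validates that the pull request title follows the Conventional Commits format: a type prefix such as "feat", "fix", or "docs", a colon, and a short description. This PR's own title is one conforming example; the second title below is purely hypothetical, for illustration:

feat: github actions for publishing
fix: correct a typo in the docs
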
67 changes: 67 additions & 0 deletions .github/workflows/release.yml
@@ -0,0 +1,67 @@
name: Release

on:
push:
branches:
- main

jobs:
release:
name: Release
runs-on: ubuntu-latest
steps:
- uses: GoogleCloudPlatform/release-please-action@v3
id: release
with:
release-type: python
package-name: haptools

- uses: actions/checkout@v3
if: ${{ steps.release.outputs.release_created }}
with:
fetch-depth: 2

- name: Set up Python
if: ${{ steps.release.outputs.release_created }}
uses: actions/setup-python@v4
with:
python-version: '3.7'

- name: Upgrade pip
if: ${{ steps.release.outputs.release_created }}
run: |
pip install --constraint=.github/workflows/constraints.txt pip
pip --version

- name: Install Poetry
if: ${{ steps.release.outputs.release_created }}
run: |
pip install --constraint=.github/workflows/constraints.txt poetry
poetry --version

- name: Bump version for developmental release
if: ${{ steps.release.outputs.release_created }}
env:
version: ${{ steps.release.outputs.tag_name }}
run: |
poetry version $version

- name: Build package
if: ${{ steps.release.outputs.release_created }}
run: |
poetry build --ansi

- name: Publish package on TestPyPI
if: ${{ steps.release.outputs.release_created }}
uses: pypa/gh-action-pypi-publish@v1.5.0
with:
user: __token__
password: ${{ secrets.TEST_PYPI_TOKEN }}
repository_url: https://test.pypi.org/legacy/

- name: Publish package on PyPI
if: ${{ steps.release.outputs.release_created }}
uses: pypa/gh-action-pypi-publish@v1.5.0
with:
user: __token__
password: ${{ secrets.PYPI_TOKEN }}
17 changes: 8 additions & 9 deletions .github/workflows/tests.yml
@@ -1,13 +1,12 @@
name: Tests

on:
- push
- pull_request

jobs:
tests:
name: ${{"{{"}} matrix.session {{"}}"}} ${{"{{"}} matrix.python {{"}}"}} / ${{"{{"}} matrix.os {{"}}"}}
runs-on: ${{"{{"}} matrix.os {{"}}"}}
name: ${{ matrix.session }} ${{ matrix.python }} / ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
@@ -17,22 +16,22 @@ jobs:
- { python: "3.8", os: "ubuntu-latest", session: "tests" }
- { python: "3.9", os: "ubuntu-latest", session: "tests" }
- { python: "3.10", os: "ubuntu-latest", session: "tests" }
- { python: "3.10", os: "windows-latest", session: "tests" }
# - { python: "3.10", os: "windows-latest", session: "tests" }
- { python: "3.10", os: "macos-latest", session: "tests" }

env:
NOXSESSION: ${{"{{"}} matrix.session {{"}}"}}
NOXSESSION: ${{ matrix.session }}
FORCE_COLOR: "1"
PRE_COMMIT_COLOR: "always"

steps:
- name: Check out the repository
uses: actions/checkout@v3

- name: Set up Python ${{"{{"}} matrix.python {{"}}"}}
- name: Set up Python ${{ matrix.python }}
uses: actions/setup-python@v3
with:
python-version: ${{"{{"}} matrix.python {{"}}"}}
python-version: ${{ matrix.python }}

- name: Upgrade pip
run: |
@@ -56,7 +55,7 @@ jobs:
nox --version
- name: Run Nox
run: |
nox --python=${{"{{"}} matrix.python {{"}}"}}
nox --python=${{ matrix.python }}
- name: Upload coverage data
if: always() && matrix.session == 'tests'
uses: "actions/upload-artifact@v3"
@@ -101,4 +100,4 @@ jobs:
# run: |
# nox --session=coverage -- xml
# - name: Upload coverage report
# uses: codecov/codecov-action@v3.1.0
# uses: codecov/codecov-action@v3.1.0
Empty file added CHANGELOG.md
Empty file.
13 changes: 11 additions & 2 deletions docs/commands/simphenotype.rst
@@ -82,14 +82,21 @@ By default, all of the haplotypes in the ``.hap`` file will be encoded as causal
haptools transform tests/data/example.vcf.gz tests/data/simphenotype.hap | \
haptools simphenotype --id 'chr21.q.3365*1' /dev/stdin tests/data/simphenotype.hap

To simulate ancestry-specific effects from a genotypes file with population labels, use the ``--ancestry`` switch when running ``transform``:
To simulate ancestry-specific effects from a genotypes file with population labels, use the ``--ancestry`` switch when running ``transform``.

.. code-block:: bash

haptools transform --ancestry tests/data/simple-ancestry.vcf tests/data/simple.hap | \
haptools simphenotype --id H1 /dev/stdin tests/data/simple.hap

Simulate two replicates of a case/control trait that occurs in 60% of your samples with a heritability of 0.8. Encode all of the haplotypes in ``tests/data/example.hap.gz`` as independent causal variables.
If speed is important, it's generally faster to use PGEN files than VCFs.

.. code-block:: bash

haptools transform --output simple-haps.pgen tests/data/simple.pgen tests/data/simple.hap
haptools simphenotype --id H1 simple-haps.pgen tests/data/simple.hap

Let's simulate two replicates of a case/control trait that occurs in 60% of samples with a heritability of 0.8. We'll encode only two of the haplotypes in ``tests/data/simphenotype.hap`` as independent causal variables.

.. code-block:: bash

@@ -98,6 +105,8 @@ Simulate two replicates of a case/control trait that occurs in 60% of your sampl
--replications 2 \
--heritability 0.8 \
--prevalence 0.6 \
--id 'chr21.q.3365*10' \
--id 'chr21.q.3365*11' \
--output simulated.pheno \
/dev/stdin tests/data/simphenotype.hap

4 changes: 2 additions & 2 deletions docs/formats/genotypes.rst
@@ -18,9 +18,9 @@ Genotype files must be specified as VCF or BCF files. They can be bgzip-compress
PLINK2 PGEN
-----------

There is also experimental support for `PLINK2 PGEN <https://github.com/chrchang/plink-ng/blob/master/pgen_spec/pgen_spec.pdf>`_ files in some commands. These files can be loaded much more quickly than VCFs, so we highly recommend using them if you're working with large datasets. See the documentation for the :class:`GenotypesPLINK` class in :ref:`the API docs <api-data-genotypesplink>` for more information.
There is also experimental support for `PLINK2 PGEN <https://github.com/chrchang/plink-ng/blob/master/pgen_spec/pgen_spec.pdf>`_ files in some commands. These files can be loaded and created much more quickly than VCFs, so we highly recommend using them if you're working with large datasets. See the documentation for the :class:`GenotypesPLINK` class in :ref:`the API docs <api-data-genotypesplink>` for more information.

If you run out of memory when using PGEN files, consider reading variants from the file in chunks via the ``--chunk-size`` parameter.
If you run out of memory when using PGEN files, consider reading/writing variants from the file in chunks via the ``--chunk-size`` parameter.
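
As a rough sketch (the chunk size of 500 is an arbitrary choice, and this assumes ``--chunk-size`` is accepted alongside the other ``transform`` options shown elsewhere in these docs), processing a PGEN file 500 variants at a time might look like:

.. code-block:: bash

haptools transform --chunk-size 500 --output simple-haps.pgen tests/data/simple.pgen tests/data/simple.hap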

.. note::
PLINK2 support depends on the ``Pgenlib`` python library. This can be installed automatically with ``haptools`` if you specify the "files" extra requirements during installation.
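
For instance (assuming a standard pip installation from PyPI), the "files" extra can be requested like so:

.. code-block:: bash

pip install 'haptools[files]'
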
20 changes: 10 additions & 10 deletions docs/project_info/contributing.rst
@@ -96,29 +96,29 @@ For example, to add a pypi dependency to our list and install it, just run
-----------
Code Checks
-----------
Before creating your pull request, please run our code checks through ``nox``.

.. code-block:: bash

nox

You can also execute each of our code checks individually.
Before creating your pull request, please run each of our code checks.

1. Format the code correctly

.. code-block:: bash

nox --session=lint
black .

2. If you made changes to the docs, check that they appear correctly.

.. code-block:: bash

nox --session=docs
open docs/_build/index.html
sphinx-build docs docs/_build
open docs/_build/index.html

3. Run all of the tests

.. code-block:: bash

pytest tests/

You can also build the package and run the tests from the built version using ``nox``. This will fully simulate installing the package from PyPI.

.. code-block:: bash

nox --session=tests
75 changes: 48 additions & 27 deletions noxfile.py
@@ -19,14 +19,7 @@
)


# detect whether mamba is installed
conda_cmd = "conda"
if (Path(os.getenv("CONDA_EXE")).parent / "mamba").exists():
conda_cmd = "mamba"
conda_args = ["-c", "conda-forge"]


@session(python=False)
@session(python=python_versions[0])
def docs(session: Session) -> None:
"""Build the documentation."""
args = session.posargs or ["docs", "docs/_build"]
@@ -40,30 +33,58 @@ def docs(session: Session) -> None:
session.run("sphinx-build", *args)


@session(python=False)
@session(python=python_versions[0])
def lint(session: Session) -> None:
"""Lint our code."""
session.install("black")
session.run("black", "--check", ".")


@session(venv_backend=conda_cmd, venv_params=conda_args, python=python_versions)
def tests(session: Session) -> None:
"""Run the test suite."""
session.conda_install(
"coverage[toml]", "pytest", "numpy>=1.20.0", channel="conda-forge"
)
# TODO: change this to ".[files]" once plink-ng Alpha 3.8 is released
# https://github.com/chrchang/plink-ng/releases
session.install(".")

try:
session.run("coverage", "run", "--parallel", "-m", "pytest", *session.posargs)
finally:
if session.interactive:
session.notify("coverage", posargs=[])


@session(python=False)
# detect whether conda/mamba is installed
if os.getenv("CONDA_EXE"):
conda_cmd = "conda"
if (Path(os.getenv("CONDA_EXE")).parent / "mamba").exists():
conda_cmd = "mamba"
conda_args = ["-c", "conda-forge"]

@session(venv_backend=conda_cmd, venv_params=conda_args, python=python_versions)
def tests(session: Session) -> None:
"""Run the test suite."""
session.conda_install(
"coverage[toml]", "pytest", "numpy>=1.20.0", channel="conda-forge"
)
# TODO: change this to ".[files]" once plink-ng Alpha 3.8 is released
# https://github.com/chrchang/plink-ng/releases
session.install(".")

try:
session.run(
"coverage", "run", "--parallel", "-m", "pytest", *session.posargs
)
finally:
if session.interactive:
session.notify("coverage", posargs=[])

else:

@session(python=python_versions)
def tests(session: Session) -> None:
"""Run the test suite."""
session.install("coverage[toml]", "pytest")
# TODO: change this to ".[files]" once plink-ng Alpha 3.8 is released
# https://github.com/chrchang/plink-ng/releases
session.install(".")

try:
session.run(
"coverage", "run", "--parallel", "-m", "pytest", *session.posargs
)
finally:
if session.interactive:
session.notify("coverage", posargs=[])


@session(python=python_versions[0])
def coverage(session: Session) -> None:
"""Produce the coverage report."""
args = session.posargs or ["report"]