diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..d830695
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,18 @@
+on:
+  release:
+    types: [created]
+jobs:
+  deploy-to-pypi:
+    runs-on: ubuntu-latest
+    env:
+      TWINE_USERNAME: __token__
+      TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: actions/setup-python@v5
+        with:
+          python-version: '3.x'
+      - run: |
+          pip install build wheel twine
+          python -m build
+          twine upload dist/*
\ No newline at end of file
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 2ecdd14..64a7313 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -5,16 +5,21 @@ on:
   pull_request:
 
 jobs:
-  Linux:
+  run-tests:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0  # important for Coveralls to read commit history
       - uses: actions/setup-python@v5
         with:
           python-version: '3.x'
-
-      - name: tests
+      - name: tests and coverage
         run: |
-          pip install -e .[advanced,dev]
-          pytest pynumdiff
-
+          pip install -e .[advanced,dev] coveralls
+          coverage run --source=pynumdiff --omit='pynumdiff/_version.py' -m pytest -s
+          coverage xml
+      - uses: coverallsapp/github-action@v2
+        with:
+          format: cobertura
+          file: coverage.xml
diff --git a/README.md b/README.md
index 68f9d10..9288b52 100644
--- a/README.md
+++ b/README.md
@@ -9,10 +9,13 @@ Python methods for numerical differentiation of noisy data, including multi-obje

+[Documentation Status badge]
+
+[Coverage Status badge]
-[PyPI version badge]
+[PyPI DOI badge]
@@ -24,12 +27,12 @@ Python methods for numerical differentiation of noisy data, including multi-obje
 PyNumDiff is a Python package that implements various methods for computing numerical derivatives of noisy data, which can be a critical step in developing dynamic models or designing control. There are seven different families of methods implemented in this repository:
 
 1. convolutional smoothing followed by finite difference calculation
-2. polynomial-fit-based methods
-3. iterated finite differencing
-4. total variation regularization of a finite difference derivative
-5. Kalman (RTS) smoothing
-6. basis-function-based methods
-7. linear local approximation with linear model
+2. polynomial fit methods
+3. basis function fit methods
+4. iterated finite differencing
+5. total variation regularization of a finite difference derivative
+6. Kalman (RTS) smoothing
+7. local approximation with linear model
 
 Most of these methods have multiple parameters, so we take a principled approach and propose a multi-objective optimization framework for choosing parameters that minimize a loss function to balance the faithfulness and smoothness of the derivative estimate. For more details, refer to [this paper](https://doi.org/10.1109/ACCESS.2020.3034077).
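
As context for the README text above, a minimal sketch of estimating a derivative from noisy data with the package is shown below. The call pynumdiff.finite_difference.first_order(x, dt) and its (x_hat, dxdt_hat) return convention are assumptions based on the package's documented API and may differ between versions; verify against the docs before relying on them.

    # Minimal usage sketch (assumed API): derivative estimate from noisy samples.
    import numpy as np
    import pynumdiff

    dt = 0.01
    t = np.arange(0, 4, dt)
    x = np.sin(t) + 0.05 * np.random.randn(len(t))  # noisy measurements of sin(t)

    # Assumed to return a smoothed signal and its derivative estimate.
    x_hat, dxdt_hat = pynumdiff.finite_difference.first_order(x, dt)

    # Faithfulness check against the known derivative cos(t).
    rmse = np.sqrt(np.mean((dxdt_hat - np.cos(t)) ** 2))
    print(f"derivative RMSE: {rmse:.3f}")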