diff --git a/.circleci/config.yml b/.circleci/config.yml
index eeada39f4b2b..27f37c14c2c6 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -78,7 +78,7 @@ jobs:
pip install numpy==1.21.5
pip install -r doc_requirements.txt
# `asv` pin because of slowdowns reported in gh-15568
- pip install mpmath gmpy2 "asv==0.4.2" pythran ninja meson
+ pip install mpmath gmpy2 "asv==0.4.2" pythran ninja meson click rich-click doit pydevtool pooch
pip install pybind11
# extra benchmark deps
pip install pyfftw cffi pytest
@@ -86,7 +86,7 @@ jobs:
- run:
name: build SciPy
command: |
- python dev.py --build-only -j2
+ python dev.py build -j2
- save_cache:
key: deps_ccache-{{ .Branch }}
@@ -119,16 +119,16 @@ jobs:
no_output_timeout: 25m
command: |
export PYTHONPATH=$PWD/build-install/lib/python3.8/site-packages
- python dev.py -n -j2 --doc html-scipyorg
+ python dev.py --no-build doc -j2
- store_artifacts:
- path: doc/build/html-scipyorg
- destination: html-scipyorg
+ path: doc/build/html
+ destination: html
- persist_to_workspace:
root: doc/build
paths:
- - html-scipyorg
+ - html
# Run benchmarks
@@ -185,7 +185,7 @@ jobs:
(git checkout --orphan tmp && git branch -D gh-pages || true);
git checkout --orphan gh-pages;
git reset --hard;
- cp -R /tmp/build/html-scipyorg/. .;
+ cp -R /tmp/build/html/. .;
touch .nojekyll;
git config --global user.email "scipy-circleci-bot@nomail";
git config --global user.name "scipy-circleci-bot";
diff --git a/.cirrus.star b/.cirrus.star
new file mode 100644
index 000000000000..9056082c84bf
--- /dev/null
+++ b/.cirrus.star
@@ -0,0 +1,41 @@
+# The guide to programming cirrus-ci tasks using starlark is found at
+# https://cirrus-ci.org/guide/programming-tasks/
+#
+# In this starlark script we check the conditions that decide whether a
+# CI run should go ahead. If the conditions are met, we return the yaml
+# containing the tasks to be run.
+
+load("cirrus", "env", "fs", "http")
+
+def main(ctx):
+ ######################################################################
+ # Should wheels be built?
+ # Only test on the scipy/scipy repository
+ # Test if the run was triggered by:
+ # - a cron job called "nightly". The cron job is not set in this file,
+ # but on the cirrus-ci repo page
+ # - commit message containing [wheel build]
+ ######################################################################
+
+ if env.get("CIRRUS_REPO_FULL_NAME") != "scipy/scipy":
+ return []
+
+ if env.get("CIRRUS_CRON", "") == "nightly":
+ return fs.read("ci/cirrus_wheels.yml")
+
+ # Obtain commit message for the event. Unfortunately CIRRUS_CHANGE_MESSAGE
+ # only contains the actual commit message on a non-PR trigger event.
+ # For a PR event it contains the PR title and description.
+ SHA = env.get("CIRRUS_CHANGE_IN_REPO")
+ url = "https://api.github.com/repos/scipy/scipy/git/commits/" + SHA
+ dct = http.get(url).json()
+ if "[wheel build]" in dct["message"]:
+ return fs.read("ci/cirrus_wheels.yml")
+
+ # This configuration runs a single linux_aarch64 + macosx_arm64 run.
+ # There's no need to do this during a wheel run, as wheel runs already
+ # build and test over a wider range of Pythons.
+ if "[skip cirrus]" in dct["message"] or "[skip ci]" in dct["message"]:
+ return []
+
+ return fs.read("ci/cirrus_general_ci.yml")
diff --git a/.gitattributes b/.gitattributes
index d0d9a45f3343..f0b109233911 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,3 +1,25 @@
+# Excluding files from an sdist generated by meson-python
+#########################################################
+
+# Note: when adding to this list, be aware that you need to commit your changes
+# before they take effect (can be confusing during testing)
+.circleci/* export-ignore
+.github/* export-ignore
+ci/* export-ignore
+.coveragerc export-ignore
+.git* export-ignore
+*.yml export-ignore
+*.yaml export-ignore
+.mailmap export-ignore
+MANIFEST.in export-ignore
+pavement.py export-ignore
+tools/ci/* export-ignore
+tools/docker_dev/* export-ignore
+
+
+# Dealing with line endings
+###########################
+
* text=auto
tools/win32/build_scripts/nsis_scripts/*.nsi.in eol=crlf
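As the note above warns, `export-ignore` rules only take effect once committed. A quick way to verify them is to list what `git archive` (which sdist generation is based on) would include; a sketch, assuming it runs from a clean checkout at the repository root:

```python
# List the contents of the archive git would produce and check that
# export-ignored paths are indeed absent.
import io
import subprocess
import tarfile

archive = subprocess.run(["git", "archive", "HEAD"],
                         capture_output=True, check=True).stdout
names = tarfile.open(fileobj=io.BytesIO(archive)).getnames()
assert not any(name.startswith(".circleci/") for name in names)
assert ".mailmap" not in names
```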
diff --git a/.github/label-globs.yml b/.github/label-globs.yml
new file mode 100644
index 000000000000..afa5b1232da1
--- /dev/null
+++ b/.github/label-globs.yml
@@ -0,0 +1,84 @@
+scipy.cluster:
+- scipy/cluster/**/*
+
+scipy.constants:
+- scipy/constants/**/*
+
+scipy.fft:
+- scipy/fft/**/*
+
+scipy.fftpack:
+- scipy/fftpack/**/*
+
+scipy.integrate:
+- scipy/integrate/**/*
+
+scipy.interpolate:
+- scipy/interpolate/**/*
+
+scipy.io:
+- scipy/io/**/*
+
+scipy._lib:
+- scipy/_lib/**/*
+
+scipy.linalg:
+- scipy/linalg/**/*
+
+scipy.misc:
+- scipy/misc/**/*
+
+scipy.ndimage:
+- scipy/ndimage/**/*
+
+scipy.odr:
+- scipy/odr/**/*
+
+scipy.optimize:
+- scipy/optimize/**/*
+
+scipy.signal:
+- scipy/signal/**/*
+
+scipy.sparse:
+- all: ["scipy/sparse/**/*", "!scipy/sparse/csgraph/**/*", "!scipy/sparse/linalg/**/*"]
+
+scipy.sparse.csgraph:
+- scipy/sparse/csgraph/**/*
+
+scipy.sparse.linalg:
+- scipy/sparse/linalg/**/*
+
+scipy.spatial:
+- scipy/spatial/**/*
+
+scipy.special:
+- scipy/special/**/*
+
+scipy.stats:
+- scipy/stats/**/*
+
+scipy.weave:
+- scipy/weave/**/*
+
+Cython:
+- scipy/**/*.pyx*
+- scipy/**/*.pxd*
+- scipy/**/*.pxi*
+
+Fortran:
+- scipy/**/*.f
+- scipy/**/*.f.src
+- scipy/**/*.F
+- scipy/**/*.f90
+
+C/C++:
+- scipy/**/*.c
+- scipy/**/*.c.in
+- scipy/**/*.c.old
+- scipy/**/*.h
+- scipy/**/*.h.in
+- scipy/**/*.cpp
+- scipy/**/*.cc
+- scipy/**/*.cxx
+- scipy/**/*.hpp
diff --git a/.github/workflows/circle_artifacts.yml b/.github/workflows/circle_artifacts.yml
index 4881af9fd1c2..9b11b95251f1 100644
--- a/.github/workflows/circle_artifacts.yml
+++ b/.github/workflows/circle_artifacts.yml
@@ -2,12 +2,13 @@ on: [status]
jobs:
circleci_artifacts_redirector_job:
runs-on: ubuntu-20.04
+ if: "github.repository == 'scipy/scipy' && !contains(github.event.head_commit.message, '[skip circle]') && github.event.context == 'ci/circleci: build_docs'"
name: Run CircleCI artifacts redirector
steps:
- name: GitHub Action step
uses: larsoner/circleci-artifacts-redirector-action@master
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
- artifact-path: 0/html-scipyorg/index.html
+ artifact-path: 0/html/index.html
circleci-jobs: build_docs
job-title: Check the rendered docs here!
diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml
index 9118434d6598..1c10657e8090 100644
--- a/.github/workflows/docker.yml
+++ b/.github/workflows/docker.yml
@@ -9,6 +9,9 @@ on:
paths:
- "environment.yml"
+permissions:
+ contents: read # to fetch code (actions/checkout)
+
jobs:
build-base-docker:
name: Build base Docker image
@@ -31,12 +34,12 @@ jobs:
shell: bash
run: |
export raw_branch=${GITHUB_REF#refs/heads/}
- echo "::set-output name=branch::${raw_branch//\//-}"
- echo "::set-output name=date::$(date +'%Y%m%d')"
- echo "::set-output name=sha8::$(echo ${GITHUB_SHA} | cut -c1-8)"
+ echo "branch=${raw_branch//\//-}" >> $GITHUB_OUTPUT
+ echo "date=$(date +'%Y%m%d')" >> $GITHUB_OUTPUT
+ echo "sha8=$(echo ${GITHUB_SHA} | cut -c1-8)" >> $GITHUB_OUTPUT
id: getrefs
- - name: Set up Docker Buildx
+ - name: Setup Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
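The `echo "key=value" >> $GITHUB_OUTPUT` lines replace the deprecated `::set-output` workflow command: step outputs are now appended as `key=value` lines to the file whose path the runner exports in `GITHUB_OUTPUT`. For reference, the same pattern from a Python step (a sketch; the output keys are examples):

```python
# Emit step outputs by appending key=value lines to the GITHUB_OUTPUT file.
import datetime
import os

with open(os.environ["GITHUB_OUTPUT"], "a") as out:
    out.write(f"sha8={os.environ['GITHUB_SHA'][:8]}\n")
    out.write(f"date={datetime.date.today():%Y%m%d}\n")
```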
diff --git a/.github/workflows/gitpod.yml b/.github/workflows/gitpod.yml
index 40a4c90539aa..40d7e5405cd8 100644
--- a/.github/workflows/gitpod.yml
+++ b/.github/workflows/gitpod.yml
@@ -4,6 +4,9 @@ on:
schedule:
- cron: "0 0 * * *" # nightly
+permissions:
+ contents: read # to fetch code (actions/checkout)
+
jobs:
build:
name: Build Gitpod Docker image
@@ -22,11 +25,11 @@ jobs:
shell: bash
run: |
export raw_branch=${GITHUB_REF#refs/heads/}
- echo "::set-output name=branch::${raw_branch//\//-}"
- echo "::set-output name=date::$(date +'%Y%m%d')"
- echo "::set-output name=sha8::$(echo ${GITHUB_SHA} | cut -c1-8)"
+ echo "branch=${raw_branch//\//-}" >> $GITHUB_OUTPUT
+ echo "name=date=$(date +'%Y%m%d')" >> $GITHUB_OUTPUT
+ echo "sha8=$(echo ${GITHUB_SHA} | cut -c1-8)" >> $GITHUB_OUTPUT
id: getrefs
- - name: Set up Docker Buildx
+ - name: Setup Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Cache Docker layers
uses: actions/cache@v3
diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml
index ff6bb68ac7c5..792dd8c42f45 100644
--- a/.github/workflows/linux.yml
+++ b/.github/workflows/linux.yml
@@ -10,6 +10,9 @@ on:
- main
- maintenance/**
+permissions:
+ contents: read # to fetch code (actions/checkout)
+
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
@@ -18,7 +21,7 @@ jobs:
Python-38-dbg:
name: Python 3.8-dbg
if: "github.repository == 'scipy/scipy' || github.repository == ''"
- runs-on: ubuntu-18.04
+ runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
with:
@@ -26,7 +29,7 @@ jobs:
- name: Configuring Test Environment
run: |
sudo apt-get update
- sudo apt install python3.8-dbg python3.8-dev libatlas-base-dev liblapack-dev gfortran libgmp-dev libmpfr-dev libsuitesparse-dev ccache swig libmpc-dev
+ sudo apt install python3.8-dbg python3.8-dev libatlas-base-dev liblapack-dev gfortran libgmp-dev libmpfr-dev ccache swig libmpc-dev
free -m
python3.8-dbg --version # just to check
export NPY_NUM_BUILD_JOBS=2
@@ -40,8 +43,8 @@ jobs:
run: |
python3.8-dbg -c 'import sys; print("Python debug build:", hasattr(sys, "gettotalrefcount"))'
python3.8-dbg -m pip install --upgrade pip "setuptools<60.0" wheel
- python3.8-dbg -m pip install --upgrade numpy cython pytest pytest-xdist pybind11
- python3.8-dbg -m pip install --upgrade mpmath gmpy2 pythran threadpoolctl
+ python3.8-dbg -m pip install --upgrade numpy cython pytest pytest-xdist pytest-timeout pybind11
+ python3.8-dbg -m pip install --upgrade mpmath gmpy2 pythran threadpoolctl pooch
python3.8-dbg -m pip uninstall -y nose
cd ..
- name: Building SciPy
@@ -49,49 +52,8 @@ jobs:
- name: Testing SciPy
if: ${{ github.event_name == 'pull_request' }}
run: |
- python3.8-dbg -u runtests.py -n -g -j2 -m fast -- -rfEX --durations=10 2>&1 | tee runtests.log
+ python3.8-dbg -u runtests.py -n -g -j2 -m fast -- -rfEX --durations=10 --timeout=60 2>&1 | tee runtests.log
python3.8-dbg tools/validate_runtests_log.py fast < runtests.log
- name: Dynamic symbol hiding check on Linux
if: ${{ github.event_name == 'pull_request' }}
run: ./tools/check_pyext_symbol_hiding.sh build
-
- test_numpy_main:
- name: NumPy main ${{ matrix.python-version }}
- if: "github.repository == 'scipy/scipy' && !contains(github.event.head_commit.message, '[ci skip]') && !contains(github.event.head_commit.message, '[skip ci]') && !contains(github.event.head_commit.message, '[skip github]') && !contains(github.ref, 'maintenance/') && !contains(github.base_ref, 'maintenance/')"
- runs-on: ubuntu-latest
- strategy:
- matrix:
- python-version: ["3.11-dev"]
-
- steps:
- - uses: actions/checkout@v3
- with:
- submodules: recursive
-
- - name: Set up Python
- uses: actions/setup-python@v3
- with:
- python-version: ${{ matrix.python-version }}
- architecture: x64
-
- - name: Install other build dependencies
- run: |
- sudo apt-get install libatlas-base-dev liblapack-dev gfortran libgmp-dev libmpfr-dev libsuitesparse-dev ccache libmpc-dev
-
- - name: Install packages
- run: |
- pip install --user git+https://github.com/numpy/numpy.git
- python -m pip install --user "setuptools<60.0" wheel cython pytest pybind11 pytest-xdist
- pip install --user git+https://github.com/serge-sans-paille/pythran.git
- python -m pip install -r mypy_requirements.txt
-
- # Packages that are only needed for their annotations
- python -m pip install --user types-psutil pybind11 sphinx
-
- - name: Mypy
- run: |
- python -u runtests.py --mypy
-
- - name: Test SciPy
- run: |
- python -u runtests.py -n -m fast
diff --git a/.github/workflows/linux_meson.yml b/.github/workflows/linux_meson.yml
index df2bdf694184..d83db15a8bbe 100644
--- a/.github/workflows/linux_meson.yml
+++ b/.github/workflows/linux_meson.yml
@@ -14,6 +14,9 @@ on:
- main
- maintenance/**
+permissions:
+ contents: read # to fetch code (actions/checkout)
+
env:
CCACHE_DIR: "${{ github.workspace }}/.ccache"
INSTALLDIR: "build-install"
@@ -27,10 +30,15 @@ jobs:
name: Meson build
# If using act to run CI locally the github object does not exist and the usual skipping should not be enforced
if: "github.repository == 'scipy/scipy' || github.repository == ''"
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
strategy:
matrix:
- python-version: [3.9]
+ python-version: ['3.9', '3.11-dev']
+ maintenance-branch:
+ - ${{ contains(github.ref, 'maintenance/') || contains(github.base_ref, 'maintenance/') }}
+ exclude:
+ - maintenance-branch: true
+ python-version: '3.11-dev'
steps:
- uses: actions/checkout@v3
@@ -38,9 +46,11 @@ jobs:
submodules: recursive
- name: Setup Python
- uses: actions/setup-python@v3
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
+ cache: 'pip'
+ cache-dependency-path: 'environment.yml'
- name: Install Ubuntu dependencies
run: |
@@ -48,25 +58,27 @@ jobs:
sudo apt-get update
sudo apt-get install -y libopenblas-dev libatlas-base-dev liblapack-dev gfortran libgmp-dev libmpfr-dev libsuitesparse-dev ccache libmpc-dev
- - name: Caching Python dependencies
- uses: actions/cache@v3
- id: cache
- with:
- path: ~/.cache/pip
- key: ${{ runner.os }}-pip
-
- name: Install Python packages
+ if: matrix.python-version == '3.9'
+ run: |
+ python -m pip install numpy cython pytest pytest-xdist pytest-timeout pybind11 mpmath gmpy2 pythran ninja meson click rich-click doit pydevtool pooch
+
+ - name: Install Python packages from repositories
+ if: matrix.python-version == '3.11-dev'
run: |
- python -m pip install numpy setuptools wheel cython pytest pytest-xdist pybind11 mpmath gmpy2 pythran ninja meson
+ python -m pip install git+https://github.com/numpy/numpy.git
+ python -m pip install ninja cython pytest pybind11 pytest-xdist pytest-timeout click rich-click doit pydevtool pooch
+ python -m pip install git+https://github.com/serge-sans-paille/pythran.git
+ python -m pip install git+https://github.com/mesonbuild/meson.git
- name: Prepare compiler cache
id: prep-ccache
shell: bash
run: |
mkdir -p "${CCACHE_DIR}"
- echo "::set-output name=dir::$CCACHE_DIR"
+ echo "dir=$CCACHE_DIR" >> $GITHUB_OUTPUT
NOW=$(date -u +"%F-%T")
- echo "::set-output name=timestamp::${NOW}"
+ echo "timestamp=${NOW}" >> $GITHUB_OUTPUT
- name: Setup compiler cache
uses: actions/cache@v3
@@ -86,7 +98,7 @@ jobs:
- name: Setup build and install scipy
run: |
- python dev.py -j 2 --build-only --werror
+ python dev.py build -j 2 --werror
- name: Ccache performance
shell: bash -l {0}
@@ -96,34 +108,39 @@ jobs:
run: |
pushd tools
python check_installation.py ${{ env.INSTALLDIR }}
+ ./check_pyext_symbol_hiding.sh ../build
popd
+ - name: Mypy
+ if: matrix.python-version == '3.9'
+ run: |
+ # Packages that are only needed for their annotations
+ python -m pip install -r mypy_requirements.txt
+ python -m pip install types-psutil pybind11 sphinx
+
+ python -u dev.py mypy
- name: Test SciPy
run: |
export OMP_NUM_THREADS=2
- python dev.py -n -j 2
+ export SCIPY_USE_PROPACK=1
+ python dev.py --no-build test -j 2 -- --durations 10 --timeout=60
test_venv_install:
name: Pip install into venv
if: "github.repository == 'scipy/scipy' || github.repository == ''"
- runs-on: ubuntu-latest
+ runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v3
with:
submodules: recursive
- - name: Setup Python
- uses: actions/setup-python@v3
- with:
- python-version: "3.10"
-
- name: Install Ubuntu dependencies
run: |
# We're not running the full test suite here, only testing the install
# into a venv is working, so leave out optional dependencies. That's
# also why we can get away with an old version of OpenBLAS from Ubuntu
sudo apt-get update
- sudo apt-get install -y libopenblas-dev pkg-config gfortran
+ sudo apt-get install -y python3-dev libopenblas-dev pkg-config gfortran
- name: Create venv, install SciPy
run: |
diff --git a/.github/workflows/macos.yml b/.github/workflows/macos.yml
index 326810471789..89342de7f017 100644
--- a/.github/workflows/macos.yml
+++ b/.github/workflows/macos.yml
@@ -10,6 +10,9 @@ on:
- main
- maintenance/**
+permissions:
+ contents: read # to fetch code (actions/checkout)
+
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
@@ -29,10 +32,12 @@ jobs:
- uses: actions/checkout@v3
with:
submodules: recursive
- - name: Set up Python ${{ matrix.python-version }}
- uses: actions/setup-python@v3
+ - name: Setup Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
+ cache: 'pip'
+ cache-dependency-path: 'environment.yml'
- name: Setup gfortran
run: |
@@ -80,9 +85,9 @@ jobs:
- name: Install packages
run: |
pip install ${{ matrix.numpy-version }}
- pip install setuptools==59.8.0 wheel cython pytest pytest-xdist pybind11 pytest-xdist mpmath gmpy2 pythran
+ pip install setuptools==59.8.0 wheel cython pytest pytest-xdist pytest-timeout pybind11 mpmath gmpy2 pythran pooch
- name: Test SciPy
run: |
export LIBRARY_PATH="$LIBRARY_PATH:/Library/Developer/CommandLineTools/SDKs/MacOSX.sdk/usr/lib"
- SCIPY_USE_PYTHRAN=`test ${{ matrix.python-version }} != 3.9; echo $?` python -u runtests.py
+ SCIPY_USE_PYTHRAN=`test ${{ matrix.python-version }} != 3.9; echo $?` python -u runtests.py -- --durations=10 --timeout=60
diff --git a/.github/workflows/macos_meson.yml b/.github/workflows/macos_meson.yml
index a4006e279b16..8829e90610b4 100644
--- a/.github/workflows/macos_meson.yml
+++ b/.github/workflows/macos_meson.yml
@@ -10,6 +10,9 @@ on:
- main
- maintenance/**
+permissions:
+ contents: read # to fetch code (actions/checkout)
+
env:
INSTALLDIR: "build-install"
CCACHE_DIR: "${{ github.workspace }}/.ccache"
@@ -33,11 +36,6 @@ jobs:
with:
submodules: recursive
- - name: Setup Python
- uses: actions/setup-python@v3
- with:
- python-version: ${{ matrix.python-version }}
-
- name: Install Ccache
run: |
brew install ccache
@@ -47,9 +45,9 @@ jobs:
shell: bash -l {0}
run: |
mkdir -p "${CCACHE_DIR}"
- echo "::set-output name=dir::$CCACHE_DIR"
+ echo "dir=$CCACHE_DIR" >> $GITHUB_OUTPUT
NOW=$(date -u +"%F-%T")
- echo "::set-output name=timestamp::${NOW}"
+ echo "timestamp=${NOW}" >> $GITHUB_OUTPUT
- name: Setup compiler cache
uses: actions/cache@v3
@@ -73,11 +71,18 @@ jobs:
uses: conda-incubator/setup-miniconda@v2
with:
python-version: ${{ matrix.python-version }}
- mamba-version: "*"
channels: conda-forge
channel-priority: true
activate-environment: scipy-dev
use-only-tar-bz2: true
+ miniforge-variant: Mambaforge
+ miniforge-version: latest
+ use-mamba: true
+
+ - name: Get Date
+ id: get-date
+ run: echo "today=$(/bin/date -u '+%Y%m%d')" >> $GITHUB_OUTPUT
+ shell: bash
- name: Cache conda
uses: actions/cache@v3
@@ -85,19 +90,23 @@ jobs:
# Increase this value to reset cache if environment.yml has not changed
CACHE_NUMBER: 0
with:
- path: /usr/local/miniconda/envs/scipy-dev
+ path: ${{ env.CONDA }}/envs/scipy-dev
key:
- ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-${{ hashFiles('environment.yml') }}
+ ${{ runner.os }}--${{ steps.get-date.outputs.today }}-conda-${{ env.CACHE_NUMBER }}-${{ hashFiles('environment.yml') }}
id: envcache
- name: Update Conda Environment
run: mamba env update -n scipy-dev -f environment.yml
+ if: steps.envcache.outputs.cache-hit != 'true'
- name: Build and Install SciPy
shell: bash -l {0}
run: |
conda activate scipy-dev
- python -m pip install meson==0.61.1
+
+ # optional test dependencies
+ conda install scikit-umfpack scikit-sparse
+
# Python.org installers still use 10.9, so let's use that too. Note
# that scikit-learn already changed to 10.13 in Jan 2021, so increasing
# this number in the future (if needed) should not be a problem.
@@ -105,14 +114,15 @@ jobs:
# https://conda-forge.org/docs/maintainer/knowledge_base.html#requiring-newer-macos-sdks
export MACOSX_DEPLOYMENT_TARGET=10.9
export MACOSX_SDK_VERSION=10.9
- CC="ccache $CC" python do.py build -j 2
+ CC="ccache $CC" python dev.py build -j 2
- name: Test SciPy
shell: bash -l {0}
run: |
conda activate scipy-dev
export OMP_NUM_THREADS=2
- python dev.py -n -j 2
+ export SCIPY_USE_PROPACK=1
+ python dev.py -n test -j 2
- name: Ccache statistics
shell: bash -l {0}
diff --git a/.github/workflows/pull-request-labeler.yml b/.github/workflows/pull-request-labeler.yml
new file mode 100644
index 000000000000..3e76fbaac776
--- /dev/null
+++ b/.github/workflows/pull-request-labeler.yml
@@ -0,0 +1,19 @@
+name: "Pull Request Labeler"
+on:
+ pull_request_target:
+ types: [opened]
+
+permissions:
+ pull-requests: write # to add labels
+
+jobs:
+
+ label_pull_request:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: thomasjpfan/labeler@v2.5.0
+ continue-on-error: true
+ if: github.repository == 'scipy/scipy'
+ with:
+ repo-token: "${{ secrets.GITHUB_TOKEN }}"
+ configuration-path: ".github/label-globs.yml"
diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml
new file mode 100644
index 000000000000..bb1fcf8f2b67
--- /dev/null
+++ b/.github/workflows/wheels.yml
@@ -0,0 +1,173 @@
+# Workflow to build and test wheels.
+# To work on the wheel building infrastructure on a fork, comment out:
+#
+# if: github.repository == 'scipy/scipy'
+#
+# in the get_commit_message job. Include [wheel build] in your commit
+# message to trigger the build. All files related to wheel building are located
+# at tools/wheels/
+name: Wheel builder
+
+on:
+ schedule:
+ # ┌───────────── minute (0 - 59)
+ # │ ┌───────────── hour (0 - 23)
+ # │ │ ┌───────────── day of the month (1 - 31)
+ # │ │ │ ┌───────────── month (1 - 12 or JAN-DEC)
+ # │ │ │ │ ┌───────────── day of the week (0 - 6 or SUN-SAT)
+ # │ │ │ │ │
+ - cron: "9 9 * * 6"
+ push:
+ branches:
+ - maintenance/**
+ pull_request:
+ branches:
+ - main
+ - maintenance/**
+ workflow_dispatch:
+
+permissions:
+ contents: read # to fetch code (actions/checkout)
+
+concurrency:
+ group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
+ cancel-in-progress: true
+
+jobs:
+ get_commit_message:
+ name: Get commit message
+ runs-on: ubuntu-latest
+ if: github.repository == 'scipy/scipy'
+ outputs:
+ message: ${{ steps.commit_message.outputs.message }}
+ steps:
+ - name: Checkout scipy
+ uses: actions/checkout@v3
+ # Gets the correct commit message for pull request
+ with:
+ ref: ${{ github.event.pull_request.head.sha }}
+ - name: Get commit message
+ id: commit_message
+ run: |
+ set -xe
+ COMMIT_MSG=$(git log --no-merges -1)
+ RUN="0"
+ if [[ "$COMMIT_MSG" == *"[wheel build]"* ]]; then
+ RUN="1"
+ fi
+ echo "message=$RUN" >> $GITHUB_OUTPUT
+ echo github.ref ${{ github.ref }}
+
+ build_wheels:
+ name: Build wheel for ${{ matrix.python[0] }}-${{ matrix.buildplat[1] }} ${{ matrix.buildplat[2] }}
+ needs: get_commit_message
+ if: >-
+ contains(needs.get_commit_message.outputs.message, '1') ||
+ github.event_name == 'schedule' ||
+ github.event_name == 'workflow_dispatch'
+ runs-on: ${{ matrix.buildplat[0] }}
+
+ strategy:
+ # Ensure that a wheel builder finishes even if another fails
+ fail-fast: false
+ matrix:
+ # Github Actions doesn't support pairing matrix values together, let's improvise
+ # https://github.com/github/feedback/discussions/7835#discussioncomment-1769026
+ buildplat:
+ # should also be able to do multi-archs on a single entry, e.g.
+ # [windows-2019, win*, "AMD64 x86"]. However, those two require a different compiler setup
+ # so easier to separate out here.
+ - [ubuntu-20.04, manylinux, x86_64]
+
+ # When the macos-10.15 image is retired the gfortran/openblas chain
+ # may have to be reworked because the gfortran-4.9.0 compiler currently
+ # used in CI doesn't work in the macos-11.0 image. This will require a more
+ # recent gfortran (gfortran-9 is present on the macOS-11.0 image), and
+ # will probably require that the prebuilt openBLAS is updated.
+ # xref https://github.com/andyfaff/scipy/pull/28#issuecomment-1203496836
+ - [macos-10.15, macosx, x86_64]
+ - [windows-2019, win, AMD64]
+
+ python: [["cp38", "3.8"], ["cp39", "3.9"], ["cp310", "3.10"], ["cp311", "3.11.0-alpha - 3.11.0"]]
+ # python[0] is used to specify the python versions made by cibuildwheel
+
+ env:
+ IS_32_BIT: ${{ matrix.buildplat[2] == 'x86' }}
+ # upload to staging if it's a push to a maintenance branch and the last
+ # commit message contains '[wheel build]'
+ IS_PUSH: ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/heads/maintenance') && contains(needs.get_commit_message.outputs.message, '1') }}
+ IS_SCHEDULE_DISPATCH: ${{ github.event_name == 'schedule' || github.event_name == 'workflow_dispatch' }}
+
+ steps:
+ - name: Checkout scipy
+ uses: actions/checkout@v3
+ with:
+ submodules: true
+ fetch-depth: 0
+
+ - uses: actions/setup-python@v4.2.0
+ with:
+ python-version: 3.8
+
+ - name: win_amd64 - install rtools
+ run: |
+ # mingw-w64
+ choco install rtools --no-progress
+ echo "c:\rtools40\ucrt64\bin;" >> $env:GITHUB_PATH
+ if: ${{ runner.os == 'Windows' && env.IS_32_BIT == 'false' }}
+
+# - name: win32 - configure mingw for 32-bit builds
+# run: |
+# # taken from numpy wheels.yml script
+# # Force 32-bit mingw. v 8.1.0 is the current version used to build
+# # the 32 bit openBLAS library (not sure if that matters)
+# choco uninstall mingw
+# choco install -y mingw --forcex86 --force --version=8.1.0
+# echo "C:\ProgramData\chocolatey\lib\mingw\tools\install\mingw32\bin;" >> $env:GITHUB_PATH
+# echo $(gfortran --version)
+# echo $(gcc --version)
+# if: ${{ runner.os == 'Windows' && env.IS_32_BIT == 'true' }}
+
+ - name: Build wheels
+ uses: pypa/cibuildwheel@v2.11.3
+ # Build all wheels here, apart from macosx_arm64, linux_aarch64
+ # cibuildwheel is currently unable to pass configuration flags to
+ # CIBW_BUILD_FRONTEND https://github.com/pypa/cibuildwheel/issues/1227
+ # (pip/build). Cross compilation with meson requires an initial
+ # configuration step to create a build directory. The subsequent wheel
+ # build then needs to use that directory. This can be done with pip
+ # using a command like:
+ # python -m pip wheel --config-settings builddir=build .
+ if: >-
+ ( ! contains(matrix.buildplat[2], 'arm64' ) )
+ env:
+ CIBW_BUILD: ${{ matrix.python[0] }}-${{ matrix.buildplat[1] }}*
+ CIBW_ARCHS: ${{ matrix.buildplat[2] }}
+ CIBW_ENVIRONMENT_PASS_LINUX: RUNNER_OS
+
+ - uses: actions/upload-artifact@v3
+ with:
+ path: ./wheelhouse/*.whl
+ name: ${{ matrix.python[0] }}-${{ matrix.buildplat[1] }}
+
+ - name: Upload wheels
+ if: success()
+ shell: bash
+ env:
+ SCIPY_STAGING_UPLOAD_TOKEN: ${{ secrets.SCIPY_STAGING_UPLOAD_TOKEN }}
+ SCIPY_NIGHTLY_UPLOAD_TOKEN: ${{ secrets.SCIPY_NIGHTLY_UPLOAD_TOKEN }}
+ run: |
+ source tools/wheels/upload_wheels.sh
+ set_upload_vars
+ # For cron jobs (restricted to main branch) or "Run workflow" trigger
+ # an upload to:
+ #
+ # https://anaconda.org/scipy-wheels-nightly/scipy
+ #
+ # Pushes to a maintenance branch that contain '[wheel build]' will
+ # cause wheels to be built and uploaded to:
+ #
+ # https://anaconda.org/multibuild-wheels-staging/scipy
+ #
+ # The tokens were originally generated at anaconda.org
+ upload_wheels
diff --git a/.github/workflows/windows.yml b/.github/workflows/windows.yml
index d3b9221fb723..2875c06f2107 100644
--- a/.github/workflows/windows.yml
+++ b/.github/workflows/windows.yml
@@ -10,6 +10,9 @@ on:
- main
- maintenance/**
+permissions:
+ contents: read # to fetch code (actions/checkout)
+
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
@@ -22,16 +25,17 @@ jobs:
if: "github.repository == 'scipy/scipy' || github.repository == ''"
runs-on: windows-2019
steps:
- - name: Set up Python
- uses: actions/setup-python@v3
+ - name: Checkout
+ uses: actions/checkout@v3
+ with:
+ submodules: recursive
+ - name: Setup Python
+ uses: actions/setup-python@v4
with:
python-version: '3.9'
architecture: 'x64'
- - name: Checkout
- uses: actions/checkout@v3
- - name: show-python-version
- run: |
- python --version
+ cache: 'pip'
+ cache-dependency-path: 'environment.yml'
- name: install-rtools
run: |
choco install rtools --no-progress
@@ -42,7 +46,7 @@ jobs:
gfortran --version
- name: pip-packages
run: |
- pip install numpy==1.22.2 cython pybind11 pythran meson ninja pytest pytest-xdist
+ pip install numpy==1.22.2 cython pybind11 pythran meson ninja pytest pytest-xdist pytest-timeout pooch
- name: openblas-libs
run: |
# Download and install pre-built OpenBLAS library
@@ -55,8 +59,7 @@ jobs:
echo "PKG_CONFIG_PATH=c:\opt\openblas\if_32\64\lib\pkgconfig;" >> $env:GITHUB_ENV
- name: meson-configure
run: |
- git submodule update --init --recursive
- meson build --prefix=$PWD\build
+ meson setup build --prefix=$PWD\build
- name: meson-build
run: |
ninja -j 2 -C build
@@ -79,6 +82,7 @@ jobs:
- name: prep-test
run: |
echo "PYTHONPATH=${env:installed_path}" >> $env:GITHUB_ENV
+ echo "SCIPY_USE_PROPACK=1" >> $env:GITHUB_ENV
- name: test
run: |
mkdir tmp
diff --git a/.gitignore b/.gitignore
index bf730c4ac248..826e090c93f9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,6 +12,7 @@
.pydevproject
*.rej
.settings/
+.spyproject/
.*.sw[nop]
.sw[nop]
*.tmp
@@ -271,7 +272,7 @@ scipy/special/_comb.c
scipy/special/_ellip_harm_2.c
scipy/special/_ellip_harm_2.h
scipy/special/_logit.c
-scipy/special/_test_round.c
+scipy/special/_test_internal.c
scipy/special/_ufuncs.c
scipy/special/_ufuncs.h
scipy/special/_ufuncs.pyx
@@ -307,6 +308,8 @@ scipy/stats/_qmc_cy.cxx
scipy/stats/_hypotests_pythran.cpp
scipy/stats/_unuran/unuran_wrapper.pyx
scipy/stats/_unuran/unuran_wrapper.c
+scipy/stats/_rcont/rcont.c
+scipy/stats/_stats_pythran.cpp
scipy/version.py
scipy/special/_exprel.c
scipy/optimize/_group_columns.c
diff --git a/.mailmap b/.mailmap
index 0f6ad8752bb8..af11468a0a04 100644
--- a/.mailmap
+++ b/.mailmap
@@ -18,6 +18,8 @@
@endolith endolith
@FormerPhysicist FormerPhysicist
@gaulinmp Mac
+@h-vetinari h-vetinari
+@h-vetinari H. Vetinari
@ksemb ksemb
@kshitij12345 kshitij12345
@luzpaz Unknown
@@ -73,6 +75,7 @@ Anne Archibald Anne Archibald
Antonio Horta Ribeiro antonio
Antonio Horta Ribeiro Antonio H Ribeiro
Ariel Rokem ariel.rokem
+Arno Marty korneix
Arno Onken Arno Onken
Arthur Volant Arthur <37664438+V0lantis@users.noreply.github.com>
Ashwin Pathak ashwinpathak20
@@ -102,6 +105,7 @@ Charles Masson charlesmasson Chelsea
Chelsea Liu chelsea.l
Chris Burns chris.burns
+Christoph Hohnerlein chohner
Chris Lasher <> gotgenes <>
Christian Clauss cclauss
Christoph Baumgarten chrisb83 <33071866+chrisb83@users.noreply.github.com>
@@ -161,6 +165,7 @@ Dávid Bodnár bdvd
Ed Schofield edschofield
Egor Zemlyanoy egorz734
Egor Zemlyanoy Egorz734
+Egor Zemlyanoy Egor
Eric Larson Eric89GXL
Eric Quintero e-q
Eric Quintero Eric Quintero
@@ -190,6 +195,8 @@ G Young gfyoung
Gagandeep Singh czgdp1807
Garrett Reynolds Garrett-R
Gaël Varoquaux Gael varoquaux
+Gavin Zhang GavinZhang
+Gavin Zhang Gavin Zhang
Geordie McBain G. D. McBain
Gang Zhao zhaog6 <31978442+zhaog6@users.noreply.github.com>
Gina Helfrich Gina
@@ -367,6 +374,7 @@ Nicholas McKibben mckib2
Nickolai Belakovski nbelakovski
Nicky van Foreest Nicky van Foreest
Nicola Montecchio nicola montecchio
+Nikita Karetnikov Nikita Karetnikov (ニキータ カレートニコフ)
Nikolai Nowaczyk Nikolai
Nikolas Moya nmoya
Nikolay Mayorov Nikolay Mayorov
@@ -426,6 +434,7 @@ Ryan Gibson ragibson Sam Lewis
Sam McCormack Sam McCormack
Sam Mason Sam Mason
+Sam Rosen <7624861+SamGRosen@users.noreply.github.com> SamGRosen <7624861+SamGRosen@users.noreply.github.com>
Samuel Wallan <44255917+swallan@users.noreply.github.com> swallan <44255917+swallan@users.noreply.github.com>
Samuel Wallan <44255917+swallan@users.noreply.github.com> Sam Wallan <44255917+swallan@users.noreply.github.com>
Santi Hernandez santiher
@@ -443,6 +452,7 @@ Shivnaren Srinivasan srinivasan
Skipper Seabold skip
Shinya SUZUKI Shinya SUZUKI
Smit Lunagariya <55887635+Smit-create@users.noreply.github.com> Smit-create <55887635+Smit-create@users.noreply.github.com>
+Smit Lunagariya Smit-create
Sourav Singh Sourav Singh <4314261+souravsingh@users.noreply.github.com>
Srikiran sriki18
Stefan Endres stefan-endres
diff --git a/HACKING.rst.txt b/HACKING.rst.txt
index 1f609cd780bb..0484eb8bdf8f 100644
--- a/HACKING.rst.txt
+++ b/HACKING.rst.txt
@@ -286,7 +286,7 @@ improvements, and submit your first PR!
.. _Pytest: https://pytest.org/
-.. _mailing lists: https://www.scipy.org/mailing-lists/
+.. _mailing lists: https://scipy.org/community/#scipy-mailing-list
.. _Spyder: https://www.spyder-ide.org/
diff --git a/INSTALL.rst.txt b/INSTALL.rst.txt
index 5f500cb52c38..f40d1f4a2759 100644
--- a/INSTALL.rst.txt
+++ b/INSTALL.rst.txt
@@ -35,15 +35,15 @@ SciPy requires the following software installed for your platform:
__ https://www.python.org
-2) NumPy__ >= 1.18.5
+2) NumPy__ >= 1.19.5
__ https://www.numpy.org/
If building from source, SciPy also requires:
-3) setuptools__ < 60.0
+3) Meson__ >= 0.62.1
-__ https://github.com/pypa/setuptools
+__ https://github.com/mesonbuild/meson
4) pybind11__ >= 2.4.3
@@ -57,6 +57,17 @@ __ http://www.sphinx-doc.org/
__ http://cython.org/
+7) Pythran__ >= 0.11.0
+
+__ https://pythran.readthedocs.io/en/latest/
+
+8) Ninja__
+
+__ https://ninja-build.org/
+
+9) If you want to install via wheels: `meson-python` and `wheel`
+
+
Windows
-------
diff --git a/LICENSES_bundled.txt b/LICENSES_bundled.txt
index 505a0609066d..aecc299ec572 100644
--- a/LICENSES_bundled.txt
+++ b/LICENSES_bundled.txt
@@ -1,10 +1,9 @@
+
+----
+
The SciPy repository and source distributions bundle a number of libraries that
are compatibly licensed. We list these here.
-Name: scipy-sphinx-theme
-Files: doc/scipy-sphinx-theme/*
-License: 3-clause BSD, PSF and Apache 2.0
- For details, see doc/sphinxext/LICENSE.txt
Name: Decorator
Files: scipy/_lib/decorator.py
@@ -251,3 +250,9 @@ Name: UNU.RAN
Files: scipy/_lib/unuran/*
License 3-Clause BSD
For details, see scipy/_lib/unuran/license.txt
+
+Name: NumPy
+Files: scipy/stats/_rcont/[logfactorial.h,logfactorial.c]
+License 3-Clause BSD
+ For details, see header inside scipy/stats/_rcont/logfactorial.h
+ and scipy/stats/_rcont/logfactorial.c
diff --git a/MANIFEST.in b/MANIFEST.in
index e8e27dcdd73f..201843ad4c10 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -2,6 +2,7 @@ include MANIFEST.in
include *.txt
# Top-level build script
include setup.py
+include meson.build
include pyproject.toml
# All source files
recursive-include scipy *
@@ -9,10 +10,11 @@ recursive-include benchmarks *
# All documentation
recursive-include doc *
# Add build and testing tools
-include runtests.py
+include runtests.py dev.py
include .coveragerc
include site.cfg.example
-include tox.ini pytest.ini
+include tox.ini pytest.ini mypy.ini
+include CITATION.bib
recursive-include tools *
# Exclude what we don't want to include
recursive-exclude scipy/linalg/src/id_dist/src *_subr_*.f
diff --git a/README.rst b/README.rst
index d18cf9bede4a..27f872630bfc 100644
--- a/README.rst
+++ b/README.rst
@@ -1,11 +1,8 @@
-.. raw:: html
-
-
-
-
- SciPy
-
-
+.. image:: doc/source/_static/logo.svg
+ :target: https://scipy.org
+ :width: 100
+ :height: 100
+ :align: left
.. image:: https://img.shields.io/badge/powered%20by-NumFOCUS-orange.svg?style=flat&colorA=E1523D&colorB=007D8A
:target: https://numfocus.org
@@ -28,12 +25,13 @@ integration, linear algebra, Fourier transforms, signal and image processing,
ODE solvers, and more.
- **Website:** https://scipy.org
-- **Documentation:** https://docs.scipy.org/
+- **Documentation:** https://docs.scipy.org/doc/scipy/
+- **Development version of the documentation:** https://scipy.github.io/devdocs
- **Mailing list:** https://mail.python.org/mailman3/lists/scipy-dev.python.org/
- **Source code:** https://github.com/scipy/scipy
- **Contributing:** https://scipy.github.io/devdocs/dev/index.html
- **Bug reports:** https://github.com/scipy/scipy/issues
-- **Code of Conduct:** https://scipy.github.io/devdocs/dev/conduct/code_of_conduct.html
+- **Code of Conduct:** https://docs.scipy.org/doc/scipy/dev/conduct/code_of_conduct.html
- **Report a security vulnerability:** https://tidelift.com/docs/security
- **Citing in your work:** https://www.scipy.org/citing-scipy/
@@ -47,7 +45,7 @@ manipulate numbers on a computer and display or publish the results, give
SciPy a try!
For the installation instructions, see `our install
-guide `__.
+guide `__.
Call for Contributions
@@ -55,7 +53,7 @@ Call for Contributions
We appreciate and welcome contributions. Small improvements or fixes are always appreciated; issues labeled as "good
first issue" may be a good starting point. Have a look at `our contributing
-guide `__.
+guide `__.
Writing code isn’t the only way to contribute to SciPy. You can also:
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 2fa95b3a3b7c..75796d28ac98 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -102,9 +102,9 @@ stages:
- template: ci/azure-travis-template.yaml
parameters:
test_mode: fast
- numpy_spec: "numpy==1.19.3"
+ numpy_spec: "numpy==1.19.5"
use_sdist: true
- - job: wheel_optimized_gcc6
+ - job: wheel_optimized_gcc8
timeoutInMinutes: 90
pool:
vmImage: 'ubuntu-18.04'
@@ -114,14 +114,15 @@ stages:
# flag. This environment variable starts all Py instances in -OO mode.
PYTHONOPTIMIZE: 2
- # Use gcc version 6
+ # Use gcc version 8
- CC: gcc-6
- CXX: g++-6
+ CC: gcc-8
+ CXX: g++-8
+ FC: gfortran-8
steps:
- script: |
set -euo pipefail
sudo apt update -y
- sudo apt install -y g++-6 gcc-6
- displayName: 'Install GCC 6'
+ sudo apt install -y g++-8 gcc-8 gfortran-8
+ displayName: 'Install GCC 8'
- task: UsePythonVersion@0
inputs:
versionSpec: '3.8'
@@ -130,7 +131,7 @@ stages:
- template: ci/azure-travis-template.yaml
parameters:
test_mode: fast
- numpy_spec: "numpy==1.18.5"
+ numpy_spec: "numpy==1.19.5"
use_wheel: true
- job: Lint
condition: and(succeeded(), ne(variables['Build.SourceBranch'], 'refs/heads/main')) # skip for PR merges
@@ -182,17 +183,17 @@ stages:
curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
python3.8 get-pip.py && \
pip3 --version && \
- pip3 install setuptools==59.6.0 wheel numpy==1.18.5 cython==0.29.21 pybind11 pytest pytest-timeout pytest-xdist pytest-env pytest-cov Pillow mpmath pythran==0.10.0 && \
- apt-get -y install gcc-5 g++-5 gfortran-8 wget && \
+ pip3 install setuptools==59.6.0 wheel numpy==1.19.5 cython==0.29.21 pybind11 pytest pytest-timeout pytest-xdist pytest-env pytest-cov Pillow mpmath pythran pooch && \
+ apt-get -y install gcc-8 g++-8 gfortran-8 wget && \
cd .. && \
mkdir openblas && cd openblas && \
target=\$(python3.8 ../scipy/tools/openblas_support.py) && \
cp -r \$target/lib/* /usr/lib && \
cp \$target/include/* /usr/include && \
cd ../scipy && \
- CC=gcc-5 CXX=g++-5 F77=gfortran-8 F90=gfortran-8 python3.8 setup.py install && \
+ CC=gcc-8 CXX=g++-8 F77=gfortran-8 F90=gfortran-8 python3.8 setup.py install && \
python3.8 tools/openblas_support.py --check_version $(openblas_version) && \
- python3.8 runtests.py -n --mode=full -- -n auto -s --junitxml=junit/test-results.xml --cov-config=.coveragerc --cov-report=xml --cov-report=html"
+ python3.8 runtests.py -n --mode=full -- -n auto -s --junitxml=junit/test-results.xml --cov-config=.coveragerc --cov-report=xml --cov-report=html --durations=10 --timeout=60"
displayName: 'Run 32-bit Ubuntu Docker Build / Tests'
- task: PublishTestResults@2
condition: succeededOrFailed()
@@ -295,13 +296,14 @@ stages:
numpy==1.21.4
Pillow
pybind11
- pythran==0.10.0
+ pythran==0.12.0
pytest
pytest-cov
pytest-env
pytest-timeout
pytest-xdist
threadpoolctl
+ pooch
displayName: 'Install dependencies'
# DLL resolution mechanics were changed in
# Python 3.8: https://bugs.python.org/issue36085
@@ -337,6 +339,7 @@ stages:
refreshenv
}
$env:PATH = "C:\\ProgramData\\chocolatey\\lib\\mingw\\tools\\install\\mingw$(BITS)\\bin;" + $env:PATH
+ $env:SCIPY_USE_PYTHRAN=$(SCIPY_USE_PYTHRAN)
# Still testing distutils here (`pip wheel --no-use-pep517` cannot be
# used, so back to `setup.py` it is ...)
@@ -347,8 +350,7 @@ stages:
displayName: 'Build SciPy'
- powershell: |
$env:PATH = "C:\\ProgramData\\chocolatey\\lib\\mingw\\tools\\install\\mingw$(BITS)\\bin;" + $env:PATH
- $env:SCIPY_USE_PYTHRAN=$(SCIPY_USE_PYTHRAN)
- python runtests.py -n --mode=$(TEST_MODE) -- -n 2 --junitxml=junit/test-results.xml --durations=10
+ python runtests.py -n --mode=$(TEST_MODE) -- -n 2 --junitxml=junit/test-results.xml --durations=10 --timeout=60
displayName: 'Run SciPy Test Suite'
- task: PublishTestResults@2
condition: succeededOrFailed()
diff --git a/benchmarks/asv.conf.json b/benchmarks/asv.conf.json
index 77b2c71edce5..73430e8983ef 100644
--- a/benchmarks/asv.conf.json
+++ b/benchmarks/asv.conf.json
@@ -42,6 +42,7 @@
"pytest": [],
"pythran": [],
"pybind11": [],
+ "meson-python": [],
},
// The directory (relative to the current directory) that benchmarks are
diff --git a/benchmarks/benchmarks/cutest/calfun.py b/benchmarks/benchmarks/cutest/calfun.py
new file mode 100644
index 000000000000..3d8c0a986c48
--- /dev/null
+++ b/benchmarks/benchmarks/cutest/calfun.py
@@ -0,0 +1,59 @@
+# This is a python implementation of calfun.m,
+# provided at https://github.com/POptUS/BenDFO
+import numpy as np
+from .dfovec import dfovec
+
+
+def norm(x, type=2):
+ if type == 1:
+ return np.sum(np.abs(x))
+ elif type == 2:
+ return np.sqrt(np.sum(x ** 2))
+ else: # type==np.inf:
+ return max(np.abs(x))
+
+
+def calfun(x, m, nprob, probtype="smooth", noise_level=1e-3):
+ n = len(x)
+
+ # Restrict domain for some nondiff problems
+ xc = x
+ if probtype == "nondiff":
+ if (
+ nprob == 8
+ or nprob == 9
+ or nprob == 13
+ or nprob == 16
+ or nprob == 17
+ or nprob == 18
+ ):
+ xc = np.maximum(x, 0)
+
+ # Generate the vector
+ fvec = dfovec(m, n, xc, nprob)
+
+ # Calculate the function value
+ if probtype == "noisy3":
+ sigma = noise_level
+ u = sigma * (-np.ones(m) + 2 * np.random.rand(m))
+ fvec = fvec * (1 + u)
+ y = np.sum(fvec ** 2)
+ elif probtype == "wild3":
+ sigma = noise_level
+ phi = 0.9 * np.sin(100 * norm(x, 1)) * np.cos(
+ 100 * norm(x, np.inf)
+ ) + 0.1 * np.cos(norm(x, 2))
+ phi = phi * (4 * phi ** 2 - 3)
+ y = (1 + sigma * phi) * sum(fvec ** 2)
+ elif probtype == "smooth":
+ y = np.sum(fvec ** 2)
+ elif probtype == "nondiff":
+ y = np.sum(np.abs(fvec))
+ else:
+ print(f"invalid probtype {probtype}")
+ return None
+ # Never return nan. Return inf instead so that
+ # optimization algorithms treat it as out of bounds.
+ if np.isnan(y):
+ return np.inf
+ return y
diff --git a/benchmarks/benchmarks/cutest/dfo.txt b/benchmarks/benchmarks/cutest/dfo.txt
new file mode 100644
index 000000000000..9703ec3d53f7
--- /dev/null
+++ b/benchmarks/benchmarks/cutest/dfo.txt
@@ -0,0 +1,53 @@
+ 1 9 45 0
+ 1 9 45 1
+ 2 7 35 0
+ 2 7 35 1
+ 3 7 35 0
+ 3 7 35 1
+ 4 2 2 0
+ 4 2 2 1
+ 5 3 3 0
+ 5 3 3 1
+ 6 4 4 0
+ 6 4 4 1
+ 7 2 2 0
+ 7 2 2 1
+ 8 3 15 0
+ 8 3 15 1
+ 9 4 11 0
+ 10 3 16 0
+ 11 6 31 0
+ 11 6 31 1
+ 11 9 31 0
+ 11 9 31 1
+ 11 12 31 0
+ 11 12 31 1
+ 12 3 10 0
+ 13 2 10 0
+ 14 4 20 0
+ 14 4 20 1
+ 15 6 6 0
+ 15 7 7 0
+ 15 8 8 0
+ 15 9 9 0
+ 15 10 10 0
+ 15 11 11 0
+ 16 10 10 0
+ 17 5 33 0
+ 18 11 65 0
+ 18 11 65 1
+ 19 8 8 0
+ 19 10 12 0
+ 19 11 14 0
+ 19 12 16 0
+ 20 5 5 0
+ 20 6 6 0
+ 20 8 8 0
+ 21 5 5 0
+ 21 5 5 1
+ 21 8 8 0
+ 21 10 10 0
+ 21 12 12 0
+ 21 12 12 1
+ 22 8 8 0
+ 22 8 8 1
\ No newline at end of file
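Each row of `dfo.txt` follows the BenDFO convention `nprob n m s`: problem number, dimension, number of residuals, and an exponent `s` such that the starting point is `10**s` times the standard one from `dfoxs`. A parsing sketch under that assumption:

```python
# Parse dfo.txt into (nprob, n, m, factor) tuples; the last column is the
# power of ten applied to the standard starting point (BenDFO convention).
problems = []
with open("dfo.txt") as fh:
    for line in fh:
        nprob, n, m, s = map(int, line.split())
        problems.append((nprob, n, m, 10.0 ** s))
```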
diff --git a/benchmarks/benchmarks/cutest/dfovec.py b/benchmarks/benchmarks/cutest/dfovec.py
new file mode 100644
index 000000000000..2224f998d373
--- /dev/null
+++ b/benchmarks/benchmarks/cutest/dfovec.py
@@ -0,0 +1,377 @@
+# This is a python implementation of dfovec.m,
+# provided at https://github.com/POptUS/BenDFO
+import numpy as np
+
+
+def dfovec(m, n, x, nprob):
+ # Set lots of constants:
+ c13 = 1.3e1
+ c14 = 1.4e1
+ c29 = 2.9e1
+ c45 = 4.5e1
+ v = [
+ 4.0e0,
+ 2.0e0,
+ 1.0e0,
+ 5.0e-1,
+ 2.5e-1,
+ 1.67e-1,
+ 1.25e-1,
+ 1.0e-1,
+ 8.33e-2,
+ 7.14e-2,
+ 6.25e-2,
+ ]
+ y1 = [
+ 1.4e-1,
+ 1.8e-1,
+ 2.2e-1,
+ 2.5e-1,
+ 2.9e-1,
+ 3.2e-1,
+ 3.5e-1,
+ 3.9e-1,
+ 3.7e-1,
+ 5.8e-1,
+ 7.3e-1,
+ 9.6e-1,
+ 1.34e0,
+ 2.1e0,
+ 4.39e0,
+ ]
+ y2 = [
+ 1.957e-1,
+ 1.947e-1,
+ 1.735e-1,
+ 1.6e-1,
+ 8.44e-2,
+ 6.27e-2,
+ 4.56e-2,
+ 3.42e-2,
+ 3.23e-2,
+ 2.35e-2,
+ 2.46e-2,
+ ]
+ y3 = [
+ 3.478e4,
+ 2.861e4,
+ 2.365e4,
+ 1.963e4,
+ 1.637e4,
+ 1.372e4,
+ 1.154e4,
+ 9.744e3,
+ 8.261e3,
+ 7.03e3,
+ 6.005e3,
+ 5.147e3,
+ 4.427e3,
+ 3.82e3,
+ 3.307e3,
+ 2.872e3,
+ ]
+ y4 = [
+ 8.44e-1,
+ 9.08e-1,
+ 9.32e-1,
+ 9.36e-1,
+ 9.25e-1,
+ 9.08e-1,
+ 8.81e-1,
+ 8.5e-1,
+ 8.18e-1,
+ 7.84e-1,
+ 7.51e-1,
+ 7.18e-1,
+ 6.85e-1,
+ 6.58e-1,
+ 6.28e-1,
+ 6.03e-1,
+ 5.8e-1,
+ 5.58e-1,
+ 5.38e-1,
+ 5.22e-1,
+ 5.06e-1,
+ 4.9e-1,
+ 4.78e-1,
+ 4.67e-1,
+ 4.57e-1,
+ 4.48e-1,
+ 4.38e-1,
+ 4.31e-1,
+ 4.24e-1,
+ 4.2e-1,
+ 4.14e-1,
+ 4.11e-1,
+ 4.06e-1,
+ ]
+ y5 = [
+ 1.366e0,
+ 1.191e0,
+ 1.112e0,
+ 1.013e0,
+ 9.91e-1,
+ 8.85e-1,
+ 8.31e-1,
+ 8.47e-1,
+ 7.86e-1,
+ 7.25e-1,
+ 7.46e-1,
+ 6.79e-1,
+ 6.08e-1,
+ 6.55e-1,
+ 6.16e-1,
+ 6.06e-1,
+ 6.02e-1,
+ 6.26e-1,
+ 6.51e-1,
+ 7.24e-1,
+ 6.49e-1,
+ 6.49e-1,
+ 6.94e-1,
+ 6.44e-1,
+ 6.24e-1,
+ 6.61e-1,
+ 6.12e-1,
+ 5.58e-1,
+ 5.33e-1,
+ 4.95e-1,
+ 5.0e-1,
+ 4.23e-1,
+ 3.95e-1,
+ 3.75e-1,
+ 3.72e-1,
+ 3.91e-1,
+ 3.96e-1,
+ 4.05e-1,
+ 4.28e-1,
+ 4.29e-1,
+ 5.23e-1,
+ 5.62e-1,
+ 6.07e-1,
+ 6.53e-1,
+ 6.72e-1,
+ 7.08e-1,
+ 6.33e-1,
+ 6.68e-1,
+ 6.45e-1,
+ 6.32e-1,
+ 5.91e-1,
+ 5.59e-1,
+ 5.97e-1,
+ 6.25e-1,
+ 7.39e-1,
+ 7.1e-1,
+ 7.29e-1,
+ 7.2e-1,
+ 6.36e-1,
+ 5.81e-1,
+ 4.28e-1,
+ 2.92e-1,
+ 1.62e-1,
+ 9.8e-2,
+ 5.4e-2,
+ ]
+
+ # Initialize things
+ fvec = np.zeros(m)
+ total = 0
+
+ if nprob == 1: # Linear function - full rank.
+ for j in range(n):
+ total = total + x[j]
+ temp = 2 * total / m + 1
+ for i in range(m):
+ fvec[i] = -temp
+ if i < n:
+ fvec[i] = fvec[i] + x[i]
+ elif nprob == 2: # Linear function - rank 1.
+ for j in range(n):
+ total = total + (j + 1) * x[j]
+ for i in range(m):
+ fvec[i] = (i + 1) * total - 1
+ elif nprob == 3: # Linear function - rank 1 with zero columns and rows.
+ for j in range(1, n - 1):
+ total = total + (j + 1) * x[j]
+ for i in range(m - 1):
+ fvec[i] = i * total - 1
+ fvec[m - 1] = -1
+ elif nprob == 4: # Rosenbrock function.
+ fvec[0] = 10 * (x[1] - x[0] * x[0])
+ fvec[1] = 1 - x[0]
+ elif nprob == 5: # Helical valley function.
+ if x[0] > 0:
+ th = np.arctan(x[1] / x[0]) / (2 * np.pi)
+ elif x[0] < 0:
+ th = np.arctan(x[1] / x[0]) / (2 * np.pi) + 0.5
+ elif x[0] == x[1] and x[1] == 0:
+ th = 0.0
+ else:
+ th = 0.25
+ r = np.sqrt(x[0] * x[0] + x[1] * x[1])
+ fvec[0] = 10 * (x[2] - 10 * th)
+ fvec[1] = 10 * (r - 1)
+ fvec[2] = x[2]
+ elif nprob == 6: # Powell singular function.
+ fvec[0] = x[0] + 10 * x[1]
+ fvec[1] = np.sqrt(5) * (x[2] - x[3])
+ fvec[2] = (x[1] - 2 * x[2]) ** 2
+ fvec[3] = np.sqrt(10) * (x[0] - x[3]) ** 2
+ elif nprob == 7: # Freudenstein and Roth function.
+ fvec[0] = -c13 + x[0] + ((5 - x[1]) * x[1] - 2) * x[1]
+ fvec[1] = -c29 + x[0] + ((1 + x[1]) * x[1] - c14) * x[1]
+ elif nprob == 8: # Bard function.
+ for i in range(15):
+ tmp1 = i + 1
+ tmp2 = 15 - i
+ tmp3 = tmp1
+ if i > 7:
+ tmp3 = tmp2
+ fvec[i] = y1[i] - (x[0] + tmp1 / (x[1] * tmp2 + x[2] * tmp3))
+ elif nprob == 9: # Kowalik and Osborne function.
+ for i in range(11):
+ tmp1 = v[i] * (v[i] + x[1])
+ tmp2 = v[i] * (v[i] + x[2]) + x[3]
+ fvec[i] = y2[i] - x[0] * tmp1 / tmp2
+ elif nprob == 10: # Meyer function.
+ for i in range(16):
+ temp = 5 * (i + 1) + c45 + x[2]
+ tmp1 = x[1] / temp
+ tmp2 = np.exp(tmp1)
+ fvec[i] = x[0] * tmp2 - y3[i]
+ elif nprob == 11: # Watson function.
+ for i in range(29):
+ div = (i + 1) / c29
+ s1 = 0
+ dx = 1
+ for j in range(1, n):
+ s1 = s1 + j * dx * x[j]
+ dx = div * dx
+ s2 = 0
+ dx = 1
+ for j in range(n):
+ s2 = s2 + dx * x[j]
+ dx = div * dx
+ fvec[i] = s1 - s2 * s2 - 1
+ fvec[29] = x[0]
+ fvec[30] = x[1] - x[0] * x[0] - 1
+ elif nprob == 12: # Box 3-dimensional function.
+ for i in range(m):
+ temp = i + 1
+ tmp1 = temp / 10
+ fvec[i] = (
+ np.exp(-tmp1 * x[0])
+ - np.exp(-tmp1 * x[1])
+ + (np.exp(-temp) - np.exp(-tmp1)) * x[2]
+ )
+ elif nprob == 13: # Jennrich and Sampson function.
+ for i in range(m):
+ temp = i + 1
+ fvec[i] = 2 + 2 * temp - np.exp(temp * x[0]) - np.exp(temp * x[1])
+ elif nprob == 14: # Brown and Dennis function.
+ for i in range(m):
+ temp = (i + 1) / 5
+ tmp1 = x[0] + temp * x[1] - np.exp(temp)
+ tmp2 = x[2] + np.sin(temp) * x[3] - np.cos(temp)
+ fvec[i] = tmp1 * tmp1 + tmp2 * tmp2
+ elif nprob == 15: # Chebyquad function.
+ for j in range(n):
+ t1 = 1
+ t2 = 2 * x[j] - 1
+ t = 2 * t2
+ for i in range(m):
+ fvec[i] = fvec[i] + t2
+ th = t * t2 - t1
+ t1 = t2
+ t2 = th
+ iev = -1
+ for i in range(m):
+ fvec[i] = fvec[i] / n
+ if iev > 0:
+ fvec[i] = fvec[i] + 1 / ((i + 1) ** 2 - 1)
+ iev = -iev
+ elif nprob == 16: # Brown almost-linear function.
+ total1 = -(n + 1)
+ prod1 = 1
+ for j in range(n):
+ total1 = total1 + x[j]
+ prod1 = x[j] * prod1
+ for i in range(n - 1):
+ fvec[i] = x[i] + total1
+ fvec[n - 1] = prod1 - 1
+ elif nprob == 17: # Osborne 1 function.
+ for i in range(33):
+ temp = 10 * i
+ tmp1 = np.exp(-x[3] * temp)
+ tmp2 = np.exp(-x[4] * temp)
+ fvec[i] = y4[i] - (x[0] + x[1] * tmp1 + x[2] * tmp2)
+ elif nprob == 18: # Osborne 2 function.
+ for i in range(65):
+ temp = i / 10
+ tmp1 = np.exp(-x[4] * temp)
+ tmp2 = np.exp(-x[5] * (temp - x[8]) ** 2)
+ tmp3 = np.exp(-x[6] * (temp - x[9]) ** 2)
+ tmp4 = np.exp(-x[7] * (temp - x[10]) ** 2)
+ fvec[i] = y5[i] - (x[0] * tmp1 + x[1] * tmp2 + x[2] * tmp3 + x[3] * tmp4) # noqa
+ elif nprob == 19: # Bdqrtic
+ # n >= 5, m = (n-4)*2
+ for i in range(n - 4):
+ fvec[i] = -4 * x[i] + 3.0
+ fvec[n - 4 + i] = (
+ x[i] ** 2
+ + 2 * x[i + 1] ** 2
+ + 3 * x[i + 2] ** 2
+ + 4 * x[i + 3] ** 2
+ + 5 * x[n - 1] ** 2
+ )
+ elif nprob == 20: # Cube
+ # n = 2, m = n
+ fvec[0] = x[0] - 1.0
+ for i in range(1, n):
+ fvec[i] = 10 * (x[i] - x[i - 1] ** 3)
+ elif nprob == 21: # Mancino
+ # n = 2, m = n
+ for i in range(n):
+ ss = 0
+ for j in range(n):
+ v2 = np.sqrt(x[i] ** 2 + (i + 1) / (j + 1))
+ ss = ss + v2 * ((np.sin(np.log(v2))) ** 5 + (np.cos(np.log(v2))) ** 5) # noqa
+ fvec[i] = 1400 * x[i] + (i - 49) ** 3 + ss
+ elif nprob == 22: # Heart8ls
+ # m = n = 8
+ fvec[0] = x[0] + x[1] + 0.69
+ fvec[1] = x[2] + x[3] + 0.044
+ fvec[2] = x[4] * x[0] + x[5] * x[1] - x[6] * x[2] - x[7] * x[3] + 1.57
+ fvec[3] = x[6] * x[0] + x[7] * x[1] + x[4] * x[2] + x[5] * x[3] + 1.31
+ fvec[4] = (
+ x[0] * (x[4] ** 2 - x[6] ** 2)
+ - 2.0 * x[2] * x[4] * x[6]
+ + x[1] * (x[5] ** 2 - x[7] ** 2)
+ - 2.0 * x[3] * x[5] * x[7]
+ + 2.65
+ )
+ fvec[5] = (
+ x[2] * (x[4] ** 2 - x[6] ** 2)
+ + 2.0 * x[0] * x[4] * x[6]
+ + x[3] * (x[5] ** 2 - x[7] ** 2)
+ + 2.0 * x[1] * x[5] * x[7]
+ - 2.0
+ )
+ fvec[6] = (
+ x[0] * x[4] * (x[4] ** 2 - 3.0 * x[6] ** 2)
+ + x[2] * x[6] * (x[6] ** 2 - 3.0 * x[4] ** 2)
+ + x[1] * x[5] * (x[5] ** 2 - 3.0 * x[7] ** 2)
+ + x[3] * x[7] * (x[7] ** 2 - 3.0 * x[5] ** 2)
+ + 12.6
+ )
+ fvec[7] = (
+ x[2] * x[4] * (x[4] ** 2 - 3.0 * x[6] ** 2)
+ - x[0] * x[6] * (x[6] ** 2 - 3.0 * x[4] ** 2)
+ + x[3] * x[5] * (x[5] ** 2 - 3.0 * x[7] ** 2)
+ - x[1] * x[7] * (x[7] ** 2 - 3.0 * x[5] ** 2)
+ - 9.48
+ )
+ else:
+ print(f"unrecognized function number {nprob}")
+ return None
+ return fvec
diff --git a/benchmarks/benchmarks/cutest/dfoxs.py b/benchmarks/benchmarks/cutest/dfoxs.py
new file mode 100644
index 000000000000..69b55dd14451
--- /dev/null
+++ b/benchmarks/benchmarks/cutest/dfoxs.py
@@ -0,0 +1,94 @@
+# This is a python implementation of dfoxs.m,
+# provided at https://github.com/POptUS/BenDFO
+import numpy as np
+
+
+def dfoxs(n, nprob, factor):
+ x = np.zeros(n)
+
+ if nprob == 1 or nprob == 2 or nprob == 3: # Linear functions.
+ x = np.ones(n)
+ elif nprob == 4: # Rosenbrock function.
+ x[0] = -1.2
+ x[1] = 1
+ elif nprob == 5: # Helical valley function.
+ x[0] = -1
+ elif nprob == 6: # Powell singular function.
+ x[0] = 3
+ x[1] = -1
+ x[2] = 0
+ x[3] = 1
+ elif nprob == 7: # Freudenstein and Roth function.
+ x[0] = 0.5
+ x[1] = -2
+ elif nprob == 8: # Bard function.
+ x[0] = 1
+ x[1] = 1
+ x[2] = 1
+ elif nprob == 9: # Kowalik and Osborne function.
+ x[0] = 0.25
+ x[1] = 0.39
+ x[2] = 0.415
+ x[3] = 0.39
+ elif nprob == 10: # Meyer function.
+ x[0] = 0.02
+ x[1] = 4000
+ x[2] = 250
+ elif nprob == 11: # Watson function.
+ x = 0.5 * np.ones(n)
+ elif nprob == 12: # Box 3-dimensional function.
+ x[0] = 0
+ x[1] = 10
+ x[2] = 20
+ elif nprob == 13: # Jennrich and Sampson function.
+ x[0] = 0.3
+ x[1] = 0.4
+ elif nprob == 14: # Brown and Dennis function.
+ x[0] = 25
+ x[1] = 5
+ x[2] = -5
+ x[3] = -1
+ elif nprob == 15: # Chebyquad function.
+ for k in range(n):
+ x[k] = (k + 1) / (n + 1)
+ elif nprob == 16: # Brown almost-linear function.
+ x = 0.5 * np.ones(n)
+ elif nprob == 17: # Osborne 1 function.
+ x[0] = 0.5
+ x[1] = 1.5
+ x[2] = 1
+ x[3] = 0.01
+ x[4] = 0.02
+ elif nprob == 18: # Osborne 2 function.
+ x[0] = 1.3
+ x[1] = 0.65
+ x[2] = 0.65
+ x[3] = 0.7
+ x[4] = 0.6
+ x[5] = 3
+ x[6] = 5
+ x[7] = 7
+ x[8] = 2
+ x[9] = 4.5
+ x[10] = 5.5
+ elif nprob == 19: # Bdqrtic.
+ x = np.ones(n)
+ elif nprob == 20: # Cube.
+ x = 0.5 * np.ones(n)
+ elif nprob == 21: # Mancino.
+ for i in range(n):
+ ss = 0
+ for j in range(n):
+ frac = (i + 1) / (j + 1)
+ ss = ss + np.sqrt(frac) * (
+ (np.sin(np.log(np.sqrt(frac)))) ** 5
+ + (np.cos(np.log(np.sqrt(frac)))) ** 5
+ )
+ x[i] = -8.710996e-4 * ((i - 49) ** 3 + ss)
+ elif nprob == 22: # Heart8ls.
+ x = np.asarray([-0.3, -0.39, 0.3, -0.344, -1.2, 2.69, 1.59, -1.5])
+ else:
+ print(f"unrecognized function number {nprob}")
+ return None
+
+ return factor * x
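The three modules above are used together: `dfoxs` provides the standard starting point, `dfovec` the residual vector, and `calfun` turns that into a scalar objective. A minimal end-to-end sketch (the flat import paths are an assumption; in the benchmark suite these live under `benchmarks.cutest`):

```python
# Evaluate one BenDFO problem: nprob=4 is Rosenbrock with n = m = 2
# (see the corresponding row of dfo.txt above).
from calfun import calfun
from dfoxs import dfoxs

nprob, n, m = 4, 2, 2
x0 = dfoxs(n, nprob, factor=1)                # standard start: [-1.2, 1.0]
f0 = calfun(x0, m, nprob, probtype="smooth")
# the smooth objective is sum(fvec**2): (10*(1 - 1.44))**2 + (1 + 1.2)**2
print(f0)  # 24.2
```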
diff --git a/benchmarks/benchmarks/interpolate.py b/benchmarks/benchmarks/interpolate.py
index 9322938d613e..cc30a5dba32f 100644
--- a/benchmarks/benchmarks/interpolate.py
+++ b/benchmarks/benchmarks/interpolate.py
@@ -243,3 +243,127 @@ def time_interpolate(self, n_samples, module):
interpolate.interp1d(self.x, self.y, kind="linear")
else:
np.interp(self.z, self.x, self.y)
+
+
+class RegularGridInterpolator(Benchmark):
+ """
+ Benchmark RegularGridInterpolator with method="linear".
+ """
+ param_names = ['ndim', 'max_coord_size', 'n_samples', 'flipped']
+ params = [
+ [2, 3, 4],
+ [10, 40, 200],
+ [10, 100, 1000, 10000],
+ [1, -1]
+ ]
+
+ def setup(self, ndim, max_coord_size, n_samples, flipped):
+ rng = np.random.default_rng(314159)
+
+ # coordinates halve in size over the dimensions
+ coord_sizes = [max_coord_size // 2**i for i in range(ndim)]
+ self.points = [np.sort(rng.random(size=s))[::flipped]
+ for s in coord_sizes]
+ self.values = rng.random(size=coord_sizes)
+
+ # choose in-bounds sample points xi
+ bounds = [(p.min(), p.max()) for p in self.points]
+ xi = [rng.uniform(low, high, size=n_samples)
+ for low, high in bounds]
+ self.xi = np.array(xi).T
+
+ self.interp = interpolate.RegularGridInterpolator(
+ self.points,
+ self.values,
+ )
+
+ def time_rgi_setup_interpolator(self, ndim, max_coord_size,
+ n_samples, flipped):
+ self.interp = interpolate.RegularGridInterpolator(
+ self.points,
+ self.values,
+ )
+
+ def time_rgi(self, ndim, max_coord_size, n_samples, flipped):
+ self.interp(self.xi)
+
+
+class RegularGridInterpolatorValues(interpolate.RegularGridInterpolator):
+ def __init__(self, points, xi, **kwargs):
+ # create fake values for initialization
+ values = np.zeros(tuple([len(pt) for pt in points]))
+ super().__init__(points, values, **kwargs)
+ self._is_initialized = False
+ # precompute values
+ (self.xi, self.xi_shape, self.ndim,
+ self.nans, self.out_of_bounds) = self._prepare_xi(xi)
+ self.indices, self.norm_distances = self._find_indices(xi.T)
+ self._is_initialized = True
+
+ def _prepare_xi(self, xi):
+ if not self._is_initialized:
+ return super()._prepare_xi(xi)
+ else:
+ # just give back precomputed values
+ return (self.xi, self.xi_shape, self.ndim,
+ self.nans, self.out_of_bounds)
+
+ def _find_indices(self, xi):
+ if not self._is_initialized:
+ return super()._find_indices(xi)
+ else:
+ # just give back pre-computed values
+ return self.indices, self.norm_distances
+
+ def __call__(self, values, method=None):
+ values = self._check_values(values)
+ # check fillvalue
+ self._check_fill_value(values, self.fill_value)
+ # check dimensionality
+ self._check_dimensionality(self.grid, values)
+ # flip, if needed
+ self.values = np.flip(values, axis=self._descending_dimensions)
+ return super().__call__(self.xi, method=method)
+
+
+class RegularGridInterpolatorSubclass(Benchmark):
+ """
+ Benchmark RegularGridInterpolator with method="linear".
+ """
+ param_names = ['ndim', 'max_coord_size', 'n_samples', 'flipped']
+ params = [
+ [2, 3, 4],
+ [10, 40, 200],
+ [10, 100, 1000, 10000],
+ [1, -1]
+ ]
+
+ def setup(self, ndim, max_coord_size, n_samples, flipped):
+ rng = np.random.default_rng(314159)
+
+ # coordinates halve in size over the dimensions
+ coord_sizes = [max_coord_size // 2**i for i in range(ndim)]
+ self.points = [np.sort(rng.random(size=s))[::flipped]
+ for s in coord_sizes]
+ self.values = rng.random(size=coord_sizes)
+
+ # choose in-bounds sample points xi
+ bounds = [(p.min(), p.max()) for p in self.points]
+ xi = [rng.uniform(low, high, size=n_samples)
+ for low, high in bounds]
+ self.xi = np.array(xi).T
+
+ self.interp = RegularGridInterpolatorValues(
+ self.points,
+ self.xi,
+ )
+
+ def time_rgi_setup_interpolator(self, ndim, max_coord_size,
+ n_samples, flipped):
+ self.interp = RegularGridInterpolatorValues(
+ self.points,
+ self.xi,
+ )
+
+ def time_rgi(self, ndim, max_coord_size, n_samples, flipped):
+ self.interp(self.values)
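
For context, a minimal sketch of the pattern these benchmarks time (standard `RegularGridInterpolator` usage; grid sizes and seeds are arbitrary):

```python
import numpy as np
from scipy.interpolate import RegularGridInterpolator

x = np.linspace(0, 1, 40)                        # grid coordinates
y = np.linspace(0, 1, 20)
values = np.random.default_rng(0).random((40, 20))

rgi = RegularGridInterpolator((x, y), values, method="linear")
xi = np.random.default_rng(1).random((1000, 2))  # in-bounds query points
out = rgi(xi)                                    # shape (1000,)

# The RegularGridInterpolatorValues subclass above inverts this pattern:
# xi is fixed at construction and `values` change per call, so the index
# search (_find_indices) runs once instead of on every evaluation.
```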
diff --git a/benchmarks/benchmarks/optimize.py b/benchmarks/benchmarks/optimize.py
index 7533dd8fbe16..3aea481fba00 100644
--- a/benchmarks/benchmarks/optimize.py
+++ b/benchmarks/benchmarks/optimize.py
@@ -16,8 +16,10 @@
import scipy.optimize
from scipy.optimize.optimize import rosen, rosen_der, rosen_hess
from scipy.optimize import (leastsq, basinhopping, differential_evolution,
- dual_annealing)
+ dual_annealing, shgo, direct)
from scipy.optimize._minimize import MINIMIZE_METHODS
+ from .cutest.calfun import calfun
+ from .cutest.dfoxs import dfoxs
class _BenchOptimizers(Benchmark):
@@ -109,6 +111,11 @@ def average_results(self):
newres.mean_njev = np.mean([r.njev for r in result_list])
newres.mean_nhev = np.mean([r.nhev for r in result_list])
newres.mean_time = np.mean([r.time for r in result_list])
+ funs = [r.fun for r in result_list]
+ newres.max_obj = np.max(funs)
+ newres.min_obj = np.min(funs)
+ newres.mean_obj = np.mean(funs)
+
newres.ntrials = len(result_list)
newres.nfail = len([r for r in result_list if not r.success])
newres.nsuccess = len([r for r in result_list if r.success])
@@ -167,6 +174,38 @@ def run_basinhopping(self):
res.nfev = self.function.nfev
self.add_result(res, t1 - t0, 'basinh.')
+ def run_direct(self):
+ """
+ Do an optimization run for direct
+ """
+ self.function.nfev = 0
+
+ t0 = time.time()
+
+ res = direct(self.fun,
+ self.bounds)
+
+ t1 = time.time()
+ res.success = self.function.success(res.x)
+ res.nfev = self.function.nfev
+ self.add_result(res, t1 - t0, 'DIRECT')
+
+ def run_shgo(self):
+ """
+ Do an optimization run for shgo
+ """
+ self.function.nfev = 0
+
+ t0 = time.time()
+
+ res = shgo(self.fun,
+ self.bounds)
+
+ t1 = time.time()
+ res.success = self.function.success(res.x)
+ res.nfev = self.function.nfev
+ self.add_result(res, t1 - t0, 'SHGO')
+
def run_differentialevolution(self):
"""
Do an optimization run for differential_evolution
@@ -206,14 +245,21 @@ def bench_run_global(self, numtrials=50, methods=None):
"""
if methods is None:
- methods = ['DE', 'basinh.', 'DA']
+ methods = ['DE', 'basinh.', 'DA', 'DIRECT', 'SHGO']
+
+ stochastic_methods = ['DE', 'basinh.', 'DA']
method_fun = {'DE': self.run_differentialevolution,
'basinh.': self.run_basinhopping,
- 'DA': self.run_dualannealing,}
-
- for i in range(numtrials):
- for m in methods:
+ 'DA': self.run_dualannealing,
+ 'DIRECT': self.run_direct,
+ 'SHGO': self.run_shgo, }
+
+ for m in methods:
+ if m in stochastic_methods:
+ for i in range(numtrials):
+ method_fun[m]()
+ else:
method_fun[m]()
def bench_run(self, x0, methods=None, **minimizer_kwargs):
@@ -223,10 +269,10 @@ def bench_run(self, x0, methods=None, **minimizer_kwargs):
if methods is None:
methods = MINIMIZE_METHODS
- # L-BFGS-B, BFGS, trust-constr can use gradients, but examine
+ # L-BFGS-B, BFGS, trust-constr, SLSQP can use gradients, but examine
# performance when numerical differentiation is used.
fonly_methods = ["COBYLA", 'Powell', 'nelder-mead', 'L-BFGS-B', 'BFGS',
- 'trust-constr']
+ 'trust-constr', 'SLSQP']
for method in fonly_methods:
if method not in methods:
continue
@@ -451,7 +497,7 @@ class BenchGlobal(Benchmark):
params = [
list(_functions.keys()),
["success%", ""],
- ['DE', 'basinh.', 'DA'],
+ ['DE', 'basinh.', 'DA', 'DIRECT', 'SHGO'],
]
param_names = ["test function", "result type", "solver"]
@@ -523,3 +569,51 @@ def setup_cache(self):
# create the logfile to start with
with open(self.dump_fn, 'w') as f:
json.dump({}, f, indent=2)
+
+
+class BenchDFO(Benchmark):
+ """
+ Benchmark the optimizers with the CUTEST DFO benchmark of Moré and Wild.
+ The original benchmark suite is available at
+ https://github.com/POptUS/BenDFO
+ """
+
+ params = [
+ list(range(53)), # adjust which problems to solve
+ ["COBYLA", "SLSQP", "Powell", "nelder-mead", "L-BFGS-B", "BFGS",
+ "trust-constr"], # note: methods must also be listed in bench_run
+ ["mean_nfev", "min_obj"], # defined in average_results
+ ]
+ param_names = ["DFO benchmark problem number", "solver", "result type"]
+
+ def setup(self, prob_number, method_name, ret_val):
+ probs = np.loadtxt(os.path.join(os.path.dirname(__file__),
+ "cutest", "dfo.txt"))
+ params = probs[prob_number]
+ nprob = int(params[0])
+ n = int(params[1])
+ m = int(params[2])
+ s = params[3]
+ factor = 10 ** s
+
+ def func(x):
+ return calfun(x, m, nprob)
+
+ x0 = dfoxs(n, nprob, factor)
+ b = getattr(self, "run_cutest")(
+ func, x0, prob_number=prob_number, methods=[method_name]
+ )
+ r = b.average_results().get(method_name)
+ if r is None:
+ raise NotImplementedError()
+ self.result = getattr(r, ret_val)
+
+ def track_all(self, prob_number, method_name, ret_val):
+ return self.result
+
+ def run_cutest(self, func, x0, prob_number, methods=None):
+ if methods is None:
+ methods = MINIMIZE_METHODS
+ b = _BenchOptimizers(f"DFO benchmark problem {prob_number}", fun=func)
+ b.bench_run(x0, methods=methods)
+ return b
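
For reference, a minimal sketch of the two new global runs on a toy bounded problem (assumes a SciPy with `optimize.direct`, available since 1.9; both solvers are deterministic, which is why `bench_run_global` gives them a single trial):

```python
from scipy.optimize import direct, shgo, rosen

bounds = [(-2.0, 2.0), (-2.0, 2.0)]   # box constraints, 2-D Rosenbrock
res_direct = direct(rosen, bounds)    # deterministic DIRECT search
res_shgo = shgo(rosen, bounds)        # deterministic simplicial homology
print(res_direct.x, res_direct.fun)
print(res_shgo.x, res_shgo.fun)
```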
diff --git a/benchmarks/benchmarks/peak_finding.py b/benchmarks/benchmarks/peak_finding.py
index 3bf7f69edf8b..799dbf02705a 100644
--- a/benchmarks/benchmarks/peak_finding.py
+++ b/benchmarks/benchmarks/peak_finding.py
@@ -4,7 +4,7 @@
with safe_import():
from scipy.signal import find_peaks, peak_prominences, peak_widths
- from scipy.misc import electrocardiogram
+ from scipy.datasets import electrocardiogram
class FindPeaks(Benchmark):
diff --git a/benchmarks/benchmarks/sparse.py b/benchmarks/benchmarks/sparse.py
index c9de26418a3f..7302e3e57a13 100644
--- a/benchmarks/benchmarks/sparse.py
+++ b/benchmarks/benchmarks/sparse.py
@@ -14,7 +14,7 @@
with safe_import():
from scipy import sparse
- from scipy.sparse import (csr_matrix, coo_matrix, dia_matrix, lil_matrix,
+ from scipy.sparse import (coo_matrix, dia_matrix, lil_matrix,
dok_matrix, rand, SparseEfficiencyWarning)
diff --git a/benchmarks/benchmarks/sparse_csgraph_dijkstra.py b/benchmarks/benchmarks/sparse_csgraph_dijkstra.py
new file mode 100755
index 000000000000..02a04852c75d
--- /dev/null
+++ b/benchmarks/benchmarks/sparse_csgraph_dijkstra.py
@@ -0,0 +1,42 @@
+"""benchmarks for the scipy.sparse.csgraph module"""
+import numpy as np
+import scipy.sparse
+
+from .common import Benchmark, safe_import
+
+with safe_import():
+ from scipy.sparse.csgraph import dijkstra
+
+
+class Dijkstra(Benchmark):
+ params = [
+ [30, 300, 900],
+ [True, False],
+ ['random', 'star']
+ ]
+ param_names = ['n', 'min_only', 'format']
+
+ def setup(self, n, min_only, format):
+ rng = np.random.default_rng(1234)
+ if format == 'random':
+ # make a random connectivity matrix
+ data = scipy.sparse.rand(n, n, density=0.2, format='csc',
+ random_state=42, dtype=np.bool_)
+ data.setdiag(np.zeros(n, dtype=np.bool_))
+ self.data = data
+ elif format == 'star':
+ rows = [0 for i in range(n - 1)] + [i + 1 for i in range(n - 1)]
+ cols = [i + 1 for i in range(n - 1)] + [0 for i in range(n - 1)]
+ weights = [i + 1 for i in range(n - 1)] * 2
+ self.data = scipy.sparse.csr_matrix((weights, (rows, cols)),
+ shape=(n, n))
+ # choose some random vertices
+ v = np.arange(n)
+ rng.shuffle(v)
+ self.indices = v[:int(n*.1)]
+
+ def time_dijkstra_multi(self, n, min_only, format):
+ dijkstra(self.data,
+ directed=False,
+ indices=self.indices,
+ min_only=min_only)
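
A minimal sketch of the new `'star'` layout (vertex 0 is the hub, spoke i carries weight i in both directions) and the `min_only` call being timed:

```python
import scipy.sparse
from scipy.sparse.csgraph import dijkstra

n = 6
rows = [0] * (n - 1) + list(range(1, n))   # hub -> spokes
cols = list(range(1, n)) + [0] * (n - 1)   # spokes -> hub
weights = list(range(1, n)) * 2            # spoke i has weight i
graph = scipy.sparse.csr_matrix((weights, (rows, cols)), shape=(n, n))

# min_only=True returns a single distance per node (to the nearest source)
# instead of one row per source index.
dist = dijkstra(graph, directed=False, indices=[1, 2], min_only=True)
print(dist)
```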
diff --git a/benchmarks/benchmarks/sparse_csgraph_djisktra.py b/benchmarks/benchmarks/sparse_csgraph_djisktra.py
deleted file mode 100644
index eaed3e83e3e8..000000000000
--- a/benchmarks/benchmarks/sparse_csgraph_djisktra.py
+++ /dev/null
@@ -1,34 +0,0 @@
-"""benchmarks for the scipy.sparse.csgraph module"""
-import numpy as np
-import scipy.sparse
-
-from .common import Benchmark, safe_import
-
-with safe_import():
- from scipy.sparse.csgraph import dijkstra
-
-
-class Dijkstra(Benchmark):
- params = [
- [30, 300, 900],
- [True, False]
- ]
- param_names = ['n', 'min_only']
-
- def setup(self, n, min_only):
- rng = np.random.default_rng(1234)
- # make a random connectivity matrix
- data = scipy.sparse.rand(n, n, density=0.2, format='csc',
- random_state=42, dtype=np.bool_)
- data.setdiag(np.zeros(n, dtype=np.bool_))
- self.data = data
- # choose some random vertices
- v = np.arange(n)
- rng.shuffle(v)
- self.indices = v[:int(n*.1)]
-
- def time_dijkstra_multi(self, n, min_only):
- dijkstra(self.data,
- directed=False,
- indices=self.indices,
- min_only=min_only)
diff --git a/benchmarks/benchmarks/stats.py b/benchmarks/benchmarks/stats.py
index d8204181c29e..c95c18489513 100644
--- a/benchmarks/benchmarks/stats.py
+++ b/benchmarks/benchmarks/stats.py
@@ -162,6 +162,33 @@ def time_kruskal(self):
stats.mstats.kruskal(self.a, self.b)
+# Benchmark data for the truncnorm stats() method.
+# The data in each row is:
+# a, b, mean, variance, skewness, excess kurtosis. Generated using
+# https://gist.github.com/WarrenWeckesser/636b537ee889679227d53543d333a720
+truncnorm_cases = [[-20, -19, -19.052343945976656, 0.002725073018195613,
+ -1.9838693623377885, 5.871801893091683],
+ [-30, -29, -29.034401237736176, 0.0011806604886186853,
+ -1.9929615171469608, 5.943905539773037],
+ [-40, -39, -39.02560741993011, 0.0006548827702932775,
+ -1.9960847672775606, 5.968744357649675],
+ [39, 40, 39.02560741993011, 0.0006548827702932775,
+ 1.9960847672775606, 5.968744357649675]]
+truncnorm_cases = np.array(truncnorm_cases)
+
+
+class TruncnormStats(Benchmark):
+ param_names = ['case', 'moment']
+ params = [list(range(len(truncnorm_cases))), ['m', 'v', 's', 'k']]
+
+ def track_truncnorm_stats_error(self, case, moment):
+ result_indices = dict(zip(['m', 'v', 's', 'k'], range(2, 6)))
+ ref = truncnorm_cases[case, result_indices[moment]]
+ a, b = truncnorm_cases[case, 0:2]
+ res = stats.truncnorm(a, b).stats(moments=moment)
+ return np.abs((res - ref)/ref)
+
+
class DistributionsAll(Benchmark):
# all distributions are in this list. A conversion to a set is used to
# remove duplicates that appear more than once in either `distcont` or
@@ -402,7 +429,11 @@ def time_mode(self, n_levels):
class GaussianKDE(Benchmark):
- def setup(self):
+ param_names = ['points']
+ params = [10, 6400]
+
+ def setup(self, points):
+ self.length = points
rng = np.random.default_rng(12345678)
n = 2000
m1 = rng.normal(size=n)
@@ -413,18 +444,16 @@ def setup(self):
ymin = m2.min()
ymax = m2.max()
- X, Y = np.mgrid[xmin:xmax:200j, ymin:ymax:200j]
+ X, Y = np.mgrid[xmin:xmax:80j, ymin:ymax:80j]
self.positions = np.vstack([X.ravel(), Y.ravel()])
values = np.vstack([m1, m2])
self.kernel = stats.gaussian_kde(values)
- def time_gaussian_kde_evaluate_few_points(self):
- # test gaussian_kde evaluate on a small number of points
- self.kernel(self.positions[:, :10])
+ def time_gaussian_kde_evaluate(self, length):
+ self.kernel(self.positions[:, :self.length])
- def time_gaussian_kde_evaluate_many_points(self):
- # test gaussian_kde evaluate on many points
- self.kernel(self.positions)
+ def time_gaussian_kde_logpdf(self, length):
+ self.kernel.logpdf(self.positions[:, :self.length])
class GroupSampling(Benchmark):
@@ -470,29 +499,37 @@ def time_binned_statistic_dd_reuse_bin(self, statistic):
class ContinuousFitAnalyticalMLEOverride(Benchmark):
# list of distributions to time
- dists = ["pareto", "laplace", "rayleigh",
- "invgauss", "gumbel_r", "gumbel_l"]
+ dists = ["pareto", "laplace", "rayleigh", "invgauss", "gumbel_r",
+ "gumbel_l", "powerlaw"]
# add custom values for rvs and fit, if desired, for any distribution:
# key should match name in dists and value should be list of loc, scale,
# and shapes
custom_input = {}
fnames = ['floc', 'fscale', 'f0', 'f1', 'f2']
fixed = {}
- distcont = dict(distcont)
- param_names = ["distribution", "loc_fixed", "scale_fixed",
+ param_names = ["distribution", "case", "loc_fixed", "scale_fixed",
"shape1_fixed", "shape2_fixed", "shape3_fixed"]
- params = [dists, * [[True, False]] * 5]
-
- def setup(self, dist_name, loc_fixed, scale_fixed, shape1_fixed,
- shape2_fixed, shape3_fixed):
+ # in the `_distr_params.py` list, some distributions have multiple sets of
+ # "sane" shape combinations. `case` needs to be an enumeration of the
+ # maximum number of cases for a benchmarked distribution; the maximum is
+ # currently two. Should a benchmarked distribution have more cases in the
+ # `_distr_params.py` list, this will need to be increased.
+ params = [dists, range(2), * [[True, False]] * 5]
+
+ def setup(self, dist_name, case, loc_fixed, scale_fixed,
+ shape1_fixed, shape2_fixed, shape3_fixed):
self.distn = eval("stats." + dist_name)
# default `loc` and `scale` are .834 and 4.342, and shapes are from
- # `_distr_params.py`
- default_shapes = self.distcont[dist_name]
- param_values = self.custom_input.get(dist_name, [.834, 4.342,
- *default_shapes])
+ # `_distr_params.py`. If there are multiple cases of valid shapes in
+ # `distcont`, they are benchmarked separately.
+ default_shapes_n = [s[1] for s in distcont if s[0] == dist_name]
+ if case >= len(default_shapes_n):
+ raise NotImplementedError("no alternate case for this dist")
+ default_shapes = default_shapes_n[case]
+ param_values = self.custom_input.get(dist_name, [*default_shapes,
+ .834, 4.342])
# separate relevant and non-relevant parameters for this distribution
# based on the number of shapes
nparam = len(param_values)
@@ -507,13 +544,17 @@ def setup(self, dist_name, loc_fixed, scale_fixed, shape1_fixed,
raise NotImplementedError("skip non-relevant case")
# add fixed values if fixed in relevant_parameters to self.fixed
- # with keys from self.fnames and values from parameter_values
+ # with keys from self.fnames and values in the same order as `fnames`.
+ fixed_values = self.custom_input.get(dist_name, [.834, 4.342,
+ *default_shapes])
self.fixed = dict(zip(compress(self.fnames, relevant_parameters),
- compress(param_values, relevant_parameters)))
- self.data = self.distn.rvs(*param_values, size=1000)
+ compress(fixed_values, relevant_parameters)))
+ self.param_values = param_values
+ self.data = self.distn.rvs(*param_values, size=1000,
+ random_state=np.random.default_rng(4653465))
- def time_fit(self, dist_name, loc_fixed, scale_fixed, shape1_fixed,
- shape2_fixed, shape3_fixed):
+ def time_fit(self, dist_name, case, loc_fixed, scale_fixed,
+ shape1_fixed, shape2_fixed, shape3_fixed):
self.distn.fit(self.data, **self.fixed)
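
A minimal sketch of the call being timed (standard `scipy.stats` fit API; the distribution and the fixed parameter are arbitrary picks from the lists above):

```python
import numpy as np
from scipy import stats

rng = np.random.default_rng(4653465)
data = stats.laplace.rvs(loc=.834, scale=4.342, size=1000, random_state=rng)

# Fix loc via `floc`; the analytical-MLE override then only fits scale.
params = stats.laplace.fit(data, floc=.834)
print(params)   # (loc, scale)
```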
@@ -637,3 +678,63 @@ def setup(self, n_size):
def time_somersd(self, n_size):
res = stats.somersd(self.x, self.y)
+
+
+class KolmogorovSmirnov(Benchmark):
+ param_names = ['alternative', 'mode', 'size']
+ # No auto since it defaults to exact for 20 samples
+ params = [
+ ['two-sided', 'less', 'greater'],
+ ['exact', 'approx', 'asymp'],
+ [19, 20, 21]
+ ]
+
+ def setup(self, alternative, mode, size):
+ np.random.seed(12345678)
+ a = stats.norm.rvs(size=20)
+ self.a = a
+
+ def time_ks(self, alternative, mode, size):
+ stats.kstest(self.a, 'norm', alternative=alternative,
+ mode=mode, N=size)
+
+
+class KolmogorovSmirnovTwoSamples(Benchmark):
+ param_names = ['alternative', 'mode', 'size']
+ # No auto since it defaults to exact for 20 samples
+ params = [
+ ['two-sided', 'less', 'greater'],
+ ['exact', 'asymp'],
+ [(21, 20), (20, 20)]
+ ]
+
+ def setup(self, alternative, mode, size):
+ np.random.seed(12345678)
+ a = stats.norm.rvs(size=size[0])
+ b = stats.norm.rvs(size=size[1])
+ self.a = a
+ self.b = b
+
+ def time_ks2(self, alternative, mode, size):
+ stats.ks_2samp(self.a, self.b, alternative=alternative, mode=mode)
+
+
+class RandomTable(Benchmark):
+ param_names = ["method", "ntot", "ncell"]
+ params = [
+ ["boyett", "patefield"],
+ [10, 100, 1000, 10000],
+ [4, 64, 256, 1024]
+ ]
+
+ def setup(self, method, ntot, ncell):
+ self.rng = np.random.default_rng(12345678)
+ k = int(ncell ** 0.5)
+ assert k ** 2 == ncell
+ p = np.ones(k) / k
+ row = self.rng.multinomial(ntot, p)
+ col = self.rng.multinomial(ntot, p)
+ self.dist = stats.random_table(row, col)
+
+ def time_method(self, method, ntot, ncell):
+ self.dist.rvs(1000, method=method, random_state=self.rng)
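
A minimal sketch of the `RandomTable` call being timed (assumes SciPy >= 1.10, where `scipy.stats.random_table` and its `method` keyword were added):

```python
import numpy as np
from scipy import stats

rng = np.random.default_rng(12345678)
row = rng.multinomial(100, [0.25] * 4)   # 4x4 table with 100 total counts
col = rng.multinomial(100, [0.25] * 4)   # margins must share the same total
dist = stats.random_table(row, col)

boyett = dist.rvs(5, method="boyett", random_state=rng)
patefield = dist.rvs(5, method="patefield", random_state=rng)
print(boyett.shape, patefield.shape)     # (5, 4, 4) each
```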
diff --git a/ci/azure-travis-template.yaml b/ci/azure-travis-template.yaml
index 1379e7f66e3f..39a448f1f3d8 100644
--- a/ci/azure-travis-template.yaml
+++ b/ci/azure-travis-template.yaml
@@ -64,6 +64,8 @@ steps:
displayName: 'Install common apt dependencies'
- script: 'echo "##vso[task.prependpath]/usr/lib/ccache"'
displayName: 'Add ccache to path'
+# Use pytest-xdist 2.5.0 until https://github.com/pytest-dev/pytest-cov/issues/557
+# is resolved.
- script: >-
pip install --upgrade ${{parameters.numpy_spec}} &&
pip install --upgrade pip setuptools==59.6.0 wheel build meson meson-python &&
@@ -72,19 +74,27 @@ steps:
gmpy2
threadpoolctl
mpmath
+ pooch
pythran
pybind11
pytest
- pytest-xdist
+ pytest-xdist==2.5.0
+ pytest-timeout
displayName: 'Install common python dependencies'
- ${{ if eq(parameters.test_mode, 'full') }}:
- - script: pip install matplotlib scikit-umfpack scikit-sparse
- displayName: 'Install full mode dependencies'
+ - script: >-
+ pip install matplotlib pooch &&
+ pip install scikit-umfpack scikit-sparse --no-deps --no-build-isolation
+ # The above two scikits both depend on scipy, so use `--no-deps`
+ # Also note that they don't provide wheels, so we build them from
+ # source (only in this job, it's a small optional dependency so test
+ # only in a single place)
+ displayName: 'Install full mode optional dependencies'
- ${{ if eq(parameters.coverage, true) }}:
- script: pip install pytest-cov coverage codecov
displayName: 'Install coverage dependencies'
- ${{ if eq(parameters.refguide_check, true) }}:
- - script: pip install matplotlib sphinx numpydoc
+ - script: pip install matplotlib sphinx numpydoc pooch
displayName: 'Install documentation dependencies'
- script: sudo apt-get install -y wamerican-small
displayName: 'Install word list (for csgraph tutorial)'
@@ -138,7 +148,7 @@ steps:
displayName: 'Build SciPy'
- script: |
set -euo pipefail
- python -u runtests.py -g -j2 -m ${{ parameters.test_mode }} ${COVERAGE:-} ${USE_WHEEL_BUILD:-} -- -rfEX --durations=10 2>&1 | tee runtests.log
+ python -u runtests.py -g -j2 -m ${{ parameters.test_mode }} ${COVERAGE:-} ${USE_WHEEL_BUILD:-} -- -rfEX --durations=10 --timeout=60 2>&1 | tee runtests.log
tools/validate_runtests_log.py ${{ parameters.test_mode }} < runtests.log
env:
${{ if eq(parameters.coverage, true) }}:
diff --git a/ci/cirrus_general_ci.yml b/ci/cirrus_general_ci.yml
new file mode 100644
index 000000000000..6fbca2e75018
--- /dev/null
+++ b/ci/cirrus_general_ci.yml
@@ -0,0 +1,145 @@
+# Regular CI for testing musllinux, linux_aarch64 and macosx_arm64 natively
+# This only runs if cirrus is not building wheels. The rationale is that
+# cibuildwheel already runs the tests during the wheel build process, so
+# there's no need to duplicate them here.
+
+modified_clone: &MODIFIED_CLONE
+ # makes sure that for a PR the CI runs against a merged main
+ clone_script: |
+ if [ -z "$CIRRUS_PR" ]; then
+ # if you're not in a PR then clone against the branch name that was pushed to.
+ git clone --recursive --branch=$CIRRUS_BRANCH https://x-access-token:${CIRRUS_REPO_CLONE_TOKEN}@github.com/${CIRRUS_REPO_FULL_NAME}.git $CIRRUS_WORKING_DIR
+ git reset --hard $CIRRUS_CHANGE_IN_REPO
+ else
+ # it's a PR so clone the main branch then merge the changes from the PR
+ git clone --recursive https://x-access-token:${CIRRUS_REPO_CLONE_TOKEN}@github.com/${CIRRUS_REPO_FULL_NAME}.git $CIRRUS_WORKING_DIR
+ git fetch origin pull/$CIRRUS_PR/head:pull/$CIRRUS_PR
+
+ # CIRRUS_BASE_BRANCH will probably be `main` for the majority of the time
+ # However, if you do a PR against a maintenance branch we will want to
+ # merge the PR into the maintenance branch, not main
+ git checkout $CIRRUS_BASE_BRANCH
+
+ # alpine git package needs default user.name and user.email to be set before a merge
+ git -c user.email="you@example.com" merge --no-commit pull/$CIRRUS_PR
+ fi
+
+
+linux_aarch64_test_task:
+ compute_engine_instance:
+ image_project: cirrus-images
+ image: family/docker-builder-arm64
+ architecture: arm64
+ platform: linux
+ cpu: 4
+ memory: 16G
+
+ <<: *MODIFIED_CLONE
+
+ pip_cache:
+ folder: ~/.cache/pip
+
+ test_script: |
+ apt-get update
+ apt-get install -y --no-install-recommends software-properties-common gcc g++ gfortran pkg-config
+ apt-get install -y --no-install-recommends libopenblas-dev libatlas-base-dev liblapack-dev
+
+ # When this task was written the linux image used ubuntu:jammy, for which
+ # python3.10 is the default. If required different versions can be
+ # installed using the deadsnakes apt repository.
+ # add-apt-repository -y ppa:deadsnakes/ppa
+ # apt-get update
+ # DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends tzdata
+
+ apt-get install -y python3.10 python3.10-venv
+ # python3.10 -m ensurepip --default-pip --user
+
+ ln -s $(which python3.10) python
+ export PATH=$PWD:$PATH
+
+ python -m pip install meson ninja numpy cython pybind11 pythran
+ python -m pip install click rich_click doit pydevtool
+ python -m pip install pytest pooch
+
+ python dev.py test
+
+
+musllinux_amd64_test_task:
+ container:
+ image: alpine
+ cpu: 8
+ memory: 32G
+
+ env:
+ PATH: $PWD:$PATH
+
+ setup_script: |
+ # The alpine image doesn't have a git client. The first step is to get
+ # git, then clone in the *MODIFIED_CLONE step. To make sure the clone step
+ # works we have to delete CIRRUS_WORKING_DIR (alpine doesn't have pushd).
+ # Because this is the default working directory, we cd back to that folder
+ # in a subsequent script.
+
+ apk update
+ apk add openblas-dev python3 python3-dev openblas build-base gfortran git py3-pip
+ ln -sf $(which python3.10) python
+
+ _CWD=$PWD
+ echo "_CWD=$(_CWD)" >> $CIRRUS_ENV
+ cd $CIRRUS_WORKING_DIR/..
+ rm -rf $CIRRUS_WORKING_DIR
+
+ pip_cache:
+ folder: ~/.cache/pip
+
+ <<: *MODIFIED_CLONE
+
+ python_dependencies_script: |
+ cd $_CWD
+ python -m pip install cython
+ python -m pip install -vvv --upgrade numpy
+ python -m pip install meson ninja pybind11 pythran pytest
+ python -m pip install click rich_click doit pydevtool pooch
+
+ # pin setuptools to get around https://github.com/scipy/scipy/issues/17475
+ python -m pip install "setuptools<65.6.0"
+
+ build_script: |
+ python dev.py build
+
+ test_script: |
+ set -xe -o
+ python dev.py test
+
+
+macos_arm64_test_task:
+ macos_instance:
+ image: ghcr.io/cirruslabs/macos-monterey-xcode:13.3.1
+
+ <<: *MODIFIED_CLONE
+
+ pip_cache:
+ folder: ~/.cache/pip
+
+ test_script: |
+ brew install python@3.10
+
+ export PATH=/opt/homebrew/opt/python@3.10/libexec/bin:$PATH
+ python --version
+
+ # used for installing OpenBLAS/gfortran
+ bash tools/wheels/cibw_before_build_macos.sh $PWD
+
+ export PKG_CONFIG_PATH=/opt/arm64-builds/lib/pkgconfig
+ export CMAKE_PREFIX_PATH=/opt/arm64-builds/
+
+ pushd ~/
+ python -m venv scipy-dev
+ source scipy-dev/bin/activate
+ popd
+
+ python -m pip install meson ninja numpy cython pybind11 pythran
+ python -m pip install click rich_click doit pydevtool
+ python -m pip install pytest pooch
+ export DYLD_LIBRARY_PATH=/usr/local/gfortran/lib:/opt/arm64-builds/lib
+ python dev.py test
diff --git a/ci/cirrus_wheels.yml b/ci/cirrus_wheels.yml
new file mode 100644
index 000000000000..26afe067db76
--- /dev/null
+++ b/ci/cirrus_wheels.yml
@@ -0,0 +1,138 @@
+build_and_store_wheels: &BUILD_AND_STORE_WHEELS
+ install_cibuildwheel_script:
+ - python -m pip install cibuildwheel==2.11.3
+ cibuildwheel_script:
+ - cibuildwheel
+ wheels_artifacts:
+ path: "wheelhouse/*"
+
+
+######################################################################
+# Build linux_aarch64 natively
+######################################################################
+
+cirrus_wheels_linux_aarch64_task:
+ compute_engine_instance:
+ image_project: cirrus-images
+ image: family/docker-builder-arm64
+ architecture: arm64
+ platform: linux
+ cpu: 4
+ memory: 8G
+ matrix:
+ # build in a matrix because building and testing all four wheels in a
+ # single task takes longer than 60 mins (the default time limit for a
+ # cirrus-ci task).
+ - env:
+ CIBW_BUILD: cp38-* cp39-*
+ - env:
+ CIBW_BUILD: cp310-* cp311-*
+ build_script: |
+ apt install -y python3-venv python-is-python3
+ which python
+ echo $CIRRUS_CHANGE_MESSAGE
+ # needed for submodules
+ git submodule update --init
+ <<: *BUILD_AND_STORE_WHEELS
+
+
+######################################################################
+# Build macosx_arm64 natively
+######################################################################
+
+cirrus_wheels_macos_arm64_task:
+ macos_instance:
+ image: ghcr.io/cirruslabs/macos-monterey-xcode:13.3.1
+ matrix:
+ - env:
+ CIBW_BUILD: cp38-*
+ CIBW_BEFORE_ALL: bash tools/wheels/cibw_before_all_cp38_macosx_arm64.sh
+ - env:
+ CIBW_BUILD: cp39-*
+ - env:
+ CIBW_BUILD: cp310-* cp311-*
+ env:
+ PATH: /opt/homebrew/opt/python@3.10/bin:$PATH
+ CIBW_ENVIRONMENT: MACOSX_DEPLOYMENT_TARGET=12.0 _PYTHON_HOST_PLATFORM="macosx-12.0-arm64"
+ PKG_CONFIG_PATH: /opt/arm64-builds/lib/pkgconfig
+ # assumes that the cmake config is in /usr/local/lib/cmake
+ CMAKE_PREFIX_PATH: /opt/arm64-builds/
+ REPAIR_PATH: /usr/local/gfortran/lib:/opt/arm64-builds/lib
+ CIBW_REPAIR_WHEEL_COMMAND_MACOS: >
+ DYLD_LIBRARY_PATH=/usr/local/gfortran/lib:/opt/arm64-builds/lib delocate-listdeps {wheel} &&
+ DYLD_LIBRARY_PATH=/usr/local/gfortran/lib:/opt/arm64-builds/lib delocate-wheel --require-archs {delocate_archs} -w {dest_dir} {wheel}
+
+ install_pre_requirements_script:
+ - brew install python@3.10
+ - ln -s python3 /opt/homebrew/opt/python@3.10/bin/python
+
+ build_script:
+ - which python
+ # needed for submodules
+ - git submodule update --init
+ - uname -m
+ - python -c "import platform;print(platform.python_version());print(platform.system());print(platform.machine())"
+ - clang --version
+ <<: *BUILD_AND_STORE_WHEELS
+
+
+######################################################################
+# Upload all wheels
+######################################################################
+
+cirrus_wheels_upload_task:
+ # Artifacts don't seem to be persistent from task to task.
+ # Rather than upload wheels at the end of each cibuildwheel run we do a
+ # final upload here. This is because a run may be on a different OS,
+ # where bash, etc., may not be present.
+ depends_on:
+ - cirrus_wheels_linux_aarch64
+ - cirrus_wheels_macos_arm64
+ compute_engine_instance:
+ image_project: cirrus-images
+ image: family/docker-builder
+ platform: linux
+
+ env:
+ # created as SCIPY_STAGING_UPLOAD_TOKEN_CIRRUS and SCIPY_NIGHTLY_UPLOAD_TOKEN_CIRRUS
+ SCIPY_STAGING_UPLOAD_TOKEN: ENCRYPTED[5940af5e589adf1647fb2832be2c912812de22b5b6551d93e177600da69b2103ffdf0ee1cf16671cf5e76f69a966bc30]
+ SCIPY_NIGHTLY_UPLOAD_TOKEN: ENCRYPTED[377be83afdaf9e8fa8bac105022bb1d362f72fff8501c2b5e827018b82270111828fb542fd7bd56bbfac9745603bd535]
+
+ upload_script: |
+ apt-get install -y python3-venv python-is-python3 curl
+ export IS_SCHEDULE_DISPATCH="false"
+ export IS_PUSH="false"
+
+ # cron job
+ if [[ "$CIRRUS_CRON" == "nightly" ]]; then
+ export IS_SCHEDULE_DISPATCH="true"
+ fi
+
+ # If it's a push event to a maintenance branch, and the commit message contains
+ # '[wheel build]' then upload to staging
+ COMMIT_MSG=$(git log --no-merges -1)
+ if [[ "$COMMIT_MSG" == *"[wheel build]"* ]] && [[ $CIRRUS_BRANCH == maintenance* ]]; then
+ export IS_PUSH="true"
+ fi
+
+ # The name of the zip file is derived from the `wheels_artifact` line.
+ # If you change the artifact line to `myfile_artifact` then it would be
+ # called myfile.zip
+
+ curl https://api.cirrus-ci.com/v1/artifact/build/$CIRRUS_BUILD_ID/wheels.zip --output wheels.zip
+ unzip wheels.zip
+
+ source tools/wheels/upload_wheels.sh
+ set_upload_vars
+ # For cron jobs (restricted to the main branch), wheels are
+ # uploaded to:
+ #
+ # https://anaconda.org/scipy-wheels-nightly/scipy
+ #
+ # Pushes to a maintenance branch that contain '[wheel build]' will
+ # cause wheels to be built and uploaded to:
+ #
+ # https://anaconda.org/multibuild-wheels-staging/scipy
+ #
+ # The tokens were originally generated at anaconda.org
+ upload_wheels
diff --git a/dev.py b/dev.py
index 6600e6141f54..57b323f98db9 100644
--- a/dev.py
+++ b/dev.py
@@ -1,433 +1,444 @@
-#!/usr/bin/env python
-"""
-dev.py [OPTIONS] [-- ARGS]
+#! /usr/bin/env python3
-Run tests, building the project first with Meson
+'''
+Developer CLI: building (meson), tests, benchmark, etc.
-Examples::
+This file contains tasks definitions for doit (https://pydoit.org).
+And also a CLI interface using click (https://click.palletsprojects.com).
- $ python dev.py
- $ python dev.py -s {SAMPLE_SUBMODULE}
- $ python dev.py -t {SAMPLE_TEST}
- $ python dev.py --ipython
- $ python dev.py --python somescript.py
- $ python dev.py --bench
- $ python dev.py --no-build --bench signal.LTI
+The CLI is ideal for project contributors, while the
+doit interface is better suited for authoring the development tasks.
-Run a debugger:
+REQUIREMENTS:
+--------------
+- see environment.yml: doit, pydevtool, click, rich-click
- $ gdb --args python dev.py [...other args...]
+# USAGE:
-Generate C code coverage listing under build/lcov/:
-(requires http://ltp.sourceforge.net/coverage/lcov.php)
+## 1 - click API
- $ python dev.py --gcov [...other args...]
- $ python dev.py --lcov-html
+Commands can be added using the default Click API, e.g.
-"""
+```
+@cli.command()
+@click.argument('extra_argv', nargs=-1)
+@click.pass_obj
+def python(ctx_obj, extra_argv):
+ """Start a Python shell with PYTHONPATH set"""
+```
-#
-# This is a generic test runner script for projects using NumPy's test
-# framework. Change the following values to adapt to your project:
-#
+## 2 - class based Click command definition
-PROJECT_MODULE = "scipy"
-PROJECT_ROOT_FILES = ['scipy', 'LICENSE.txt', 'meson.build']
-SAMPLE_TEST = "scipy.fftpack.tests.test_real_transforms::TestIDSTIIIInt"
-SAMPLE_SUBMODULE = "optimize"
+`CliGroup` provides an alternative class based API to create Click commands.
-EXTRA_PATH = ['/usr/lib/ccache', '/usr/lib/f90cache',
- '/usr/local/lib/ccache', '/usr/local/lib/f90cache']
+Just use the `cls_cmd` decorator and define a `run()` method:
-# ---------------------------------------------------------------------
+```
+@cli.cls_cmd('test')
+class Test():
+ """Run tests"""
+ @classmethod
+ def run(cls):
+ print('Running tests...')
+```
-if __doc__ is None:
- __doc__ = "Run without -OO if you want usage info"
-else:
- __doc__ = __doc__.format(**globals())
+- A command may make use of a Click.Group context by defining a `ctx` class attribute
+- Command options are also defined as class attributes
+```
+@cli.cls_cmd('test')
+class Test():
+ """Run tests"""
+ ctx = CONTEXT
+
+ verbose = Option(
+ ['--verbose', '-v'], default=False, is_flag=True, help="verbosity")
+
+ @classmethod
+ def run(cls, **kwargs): # kwargs contains options from class and CONTEXT
+ print('Running tests...')
+```
+
+## 3 - class based interface can be run as a doit task by subclassing from Task
+
+- Extra doit task metadata can be defined as the class attribute `TASK_META`.
+- The `run()` method will be used as the python-action of the task.
+
+```
+@cli.cls_cmd('test')
+class Test(Task): # Task base class, doit will create a task
+ """Run tests"""
+ ctx = CONTEXT
+
+ TASK_META = {
+ 'task_dep': ['build'],
+ }
+
+ @classmethod
+ def run(cls, **kwargs):
+ pass
+```
+
+## 4 - doit tasks with cmd-action "shell" or dynamic metadata
+
+Define method `task_meta()` instead of `run()`:
+
+```
+@cli.cls_cmd('refguide-check')
+class RefguideCheck(Task):
+ @classmethod
+ def task_meta(cls, **kwargs):
+ return {
+```
+
+'''
-import sys
import os
-import warnings # noqa: E402
-from pathlib import Path
+import subprocess
+import sys
+import warnings
+import shutil
+import json
+import datetime
+import time
import platform
-# the following multiprocessing import is necessary to prevent tests that use
-# multiprocessing from hanging on >= Python3.8 (macOS) using pytest. Just the
-# import is enough...
-import multiprocessing
+import importlib.util
+import errno
+import contextlib
+from sysconfig import get_path
# distutils is required to infer meson install path
# if this needs to be replaced for Python 3.12 support and there's no
-# stdlib alternative, use the hack discussed in gh-16058
+# stdlib alternative, use CmdAction and the hack discussed in gh-16058
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
from distutils import dist
from distutils.command.install import INSTALL_SCHEMES
-# In case we are run from the source directory, we don't want to import the
-# project from there:
-sys.path.pop(0)
-current_sys_path = sys.path.copy()
+from pathlib import Path
+from collections import namedtuple
+from types import ModuleType as new_module
+from dataclasses import dataclass
+
+import click
+from click import Option, Argument
+from doit import task_params
+from doit.cmd_base import ModuleTaskLoader
+from doit.reporter import ZeroReporter
+from doit.exceptions import TaskError
+from doit.api import run_tasks
+from pydevtool.cli import UnifiedContext, CliGroup, Task
+from rich.console import Console
+from rich.panel import Panel
+from rich.theme import Theme
+from rich_click import rich_click
+
+DOIT_CONFIG = {
+ 'verbosity': 2,
+ 'minversion': '0.36.0',
+}
+
+
+console_theme = Theme({
+ "cmd": "italic gray50",
+})
+
+
+class EMOJI:
+ cmd = ":computer:"
+
+
+rich_click.STYLE_ERRORS_SUGGESTION = "yellow italic"
+rich_click.SHOW_ARGUMENTS = True
+rich_click.GROUP_ARGUMENTS_OPTIONS = False
+rich_click.SHOW_METAVARS_COLUMN = True
+rich_click.USE_MARKDOWN = True
+rich_click.OPTION_GROUPS = {
+ "dev.py": [
+ {
+ "name": "Options",
+ "options": [
+ "--help", "--build-dir", "--no-build", "--install-prefix"],
+ },
+ ],
+
+ "dev.py test": [
+ {
+ "name": "Options",
+ "options": ["--help", "--verbose", "--parallel", "--coverage",
+ "--durations"],
+ },
+ {
+ "name": "Options: test selection",
+ "options": ["--submodule", "--tests", "--mode"],
+ },
+ ],
+}
+rich_click.COMMAND_GROUPS = {
+ "dev.py": [
+ {
+ "name": "build & testing",
+ "commands": ["build", "test"],
+ },
+ {
+ "name": "static checkers",
+ "commands": ["lint", "mypy"],
+ },
+ {
+ "name": "environments",
+ "commands": ["shell", "python", "ipython"],
+ },
+ {
+ "name": "documentation",
+ "commands": ["doc", "refguide-check"],
+ },
+ {
+ "name": "release",
+ "commands": ["notes", "authors"],
+ },
+ {
+ "name": "benchmarking",
+ "commands": ["bench"],
+ },
+ ]
+}
+
+
+class ErrorOnlyReporter(ZeroReporter):
+ desc = """Report errors only"""
+
+ def runtime_error(self, msg):
+ console = Console()
+ console.print("[red bold] msg")
+
+ def add_failure(self, task, fail_info):
+ console = Console()
+ if isinstance(fail_info, TaskError):
+ console.print(f'[red]Task Error - {task.name}'
+ f' => {fail_info.message}')
+ if fail_info.traceback:
+ console.print(Panel(
+ "".join(fail_info.traceback),
+ title=f"{task.name}",
+ subtitle=fail_info.message,
+ border_style="red",
+ ))
+
+
+CONTEXT = UnifiedContext({
+ 'build_dir': Option(
+ ['--build-dir'], metavar='BUILD_DIR',
+ default='build', show_default=True,
+ help=':wrench: Relative path to the build directory.'),
+ 'no_build': Option(
+ ["--no-build", "-n"], default=False, is_flag=True,
+ help=(":wrench: Do not build the project"
+ " (note event python only modification require build).")),
+ 'install_prefix': Option(
+ ['--install-prefix'], default=None, metavar='INSTALL_DIR',
+ help=(":wrench: Relative path to the install directory."
+ " Default is -install.")),
+})
+
+
+def run_doit_task(tasks):
+ """
+ :param tasks: (dict) task_name -> {options}
+ """
+ loader = ModuleTaskLoader(globals())
+ doit_config = {
+ 'verbosity': 2,
+ 'reporter': ErrorOnlyReporter,
+ }
+ return run_tasks(loader, tasks, extra_config={'GLOBAL': doit_config})
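+
+# Usage sketch (illustrative): run_doit_task({'build': {}}) would run the
+# 'build' task with default options and return the doit run result.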
-from argparse import ArgumentParser, REMAINDER
-import shutil
-import subprocess
-import time
-import datetime
-import importlib.util
-import json # noqa: E402
-from sysconfig import get_path
-from types import ModuleType as new_module # noqa: E402
-ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__)))
+class CLI(CliGroup):
+ context = CONTEXT
+ run_doit_task = run_doit_task
-def import_module_from_path(mod_name, mod_path):
- """Import module with name `mod_name` from file path `mod_path`"""
- spec = importlib.util.spec_from_file_location(mod_name, mod_path)
- mod = importlib.util.module_from_spec(spec)
- spec.loader.exec_module(mod)
- return mod
+@click.group(cls=CLI)
+@click.pass_context
+def cli(ctx, **kwargs):
+ """Developer Tool for SciPy
+ \bCommands that require a built/installed instance are marked with :wrench:.
-# Import runtests.py
-runtests = import_module_from_path('runtests', Path(ROOT_DIR) / 'runtests.py')
-
-# Reassign sys.path as it is changed by the `runtests` import above
-sys.path = current_sys_path
-
-
-def main(argv):
- parser = ArgumentParser(usage=__doc__.lstrip())
- parser.add_argument("--verbose", "-v", action="count", default=1,
- help="more verbosity")
- parser.add_argument("--no-build", "-n", action="store_true", default=False,
- help="do not build the project (use system installed version)")
- parser.add_argument("--werror", action="store_true", default=False,
- help="Treat warnings as errors")
- parser.add_argument("--build-only", "-b", action="store_true", default=False,
- help="just build, do not run any tests")
- parser.add_argument("--doctests", action="store_true", default=False,
- help="Run doctests in module")
- parser.add_argument("--refguide-check", action="store_true", default=False,
- help="Run refguide check (do not run regular tests.)")
- parser.add_argument("--coverage", action="store_true", default=False,
- help=("report coverage of project code. HTML output"
- " goes under build/coverage"))
- parser.add_argument("--gcov", action="store_true", default=False,
- help=("enable C code coverage via gcov (requires GCC)."
- " gcov output goes to build/**/*.gc*"))
- parser.add_argument("--lcov-html", action="store_true", default=False,
- help=("produce HTML for C code coverage information "
- "from a previous run with --gcov. "
- "HTML output goes to build/lcov/"))
- parser.add_argument("--mode", "-m", default="fast",
- help="'fast', 'full', or something that could be "
- "passed to `pytest -m` as a marker expression "
- "[default: fast]")
- parser.add_argument("--submodule", "-s", default=None,
- help="Submodule whose tests to run (cluster,"
- " constants, ...)")
- parser.add_argument("--pythonpath", "-p", default=None,
- help="Paths to prepend to PYTHONPATH")
- parser.add_argument("--tests", "-t", action='append',
- help="Specify tests to run")
- parser.add_argument("--python", action="store_true",
- help="Start a Python shell with PYTHONPATH set")
- parser.add_argument("--ipython", "-i", action="store_true",
- help="Start IPython shell with PYTHONPATH set")
- parser.add_argument("--shell", action="store_true",
- help="Start Unix shell with PYTHONPATH set")
- parser.add_argument("--debug", "-g", action="store_true",
- help="Debug build")
- parser.add_argument("--parallel", "-j", type=int, default=1,
- help="Number of parallel jobs for build and testing")
- parser.add_argument("--show-build-log", action="store_true",
- help="Show build output rather than using a log file")
- parser.add_argument("--bench", action="store_true",
- help="Run benchmark suite instead of test suite")
- parser.add_argument("--bench-compare", action="append", metavar="BEFORE",
- help=("Compare benchmark results of current HEAD to"
- " BEFORE. Use an additional "
- "--bench-compare=COMMIT to override HEAD with"
- " COMMIT. Note that you need to commit your "
- "changes first!"
- ))
- parser.add_argument("args", metavar="ARGS", default=[], nargs=REMAINDER,
- help="Arguments to pass to Nose, Python or shell")
- parser.add_argument("--pep8", action="store_true", default=False,
- help="Perform pep8 check with flake8.")
- parser.add_argument("--mypy", action="store_true", default=False,
- help="Run mypy on the codebase")
- parser.add_argument("--doc", action="append", nargs="?",
- const="html-scipyorg", help="Build documentation")
- parser.add_argument("--win-cp-openblas", action="store_true",
- help="If set, and on Windows, copy OpenBLAS lib to "
- "install directory after meson install. "
- "Note: this argument may be removed in the future "
- "once a `site.cfg`-like mechanism to select BLAS/LAPACK "
- "libraries is implemented for Meson")
- parser.add_argument("--build-dir", default="build",
- help="Relative path to the build directory. "
- "Default is 'build'")
- parser.add_argument("--install-prefix", default=None,
- help="Relative path to the install directory. "
- "Default is -install.")
- args = parser.parse_args(argv)
-
- global PATH_INSTALLED
- build_dir = Path(args.build_dir)
- install_dir = args.install_prefix
- if not install_dir:
- install_dir = build_dir.parent / (build_dir.stem + "-install")
- PATH_INSTALLED = os.path.join(
- os.path.abspath(os.path.dirname(__file__)),
- install_dir
- )
- if args.win_cp_openblas and platform.system() != 'Windows':
- raise RuntimeError('--win-cp-openblas only has effect on Windows')
+ \b**python dev.py --build-dir my-build test -s stats**
- if args.pep8:
- # Lint the source using the configuration in tox.ini.
- os.system("flake8 scipy benchmarks/benchmarks")
- # Lint just the diff since branching off of main using a
- # stricter configuration.
- lint_diff = os.path.join(ROOT_DIR, 'tools', 'lint_diff.py')
- os.system(lint_diff)
- sys.exit(0)
-
- if args.mypy:
- sys.exit(run_mypy(args))
-
- if args.bench_compare:
- args.bench = True
- args.no_build = True # ASV does the building
-
- if args.lcov_html:
- # generate C code coverage output
- runtests.lcov_generate()
- sys.exit(0)
+ """
+ CLI.update_context(ctx, kwargs)
- if args.pythonpath:
- for p in reversed(args.pythonpath.split(os.pathsep)):
- sys.path.insert(0, p)
- if args.gcov:
- runtests.gcov_reset_counters()
+PROJECT_MODULE = "scipy"
+PROJECT_ROOT_FILES = ['scipy', 'LICENSE.txt', 'meson.build']
- if args.debug and args.bench:
- print("*** Benchmarks should not be run against debug version; "
- "remove -g flag ***")
- if not args.no_build:
- site_dir = build_project(args)
+@dataclass
+class Dirs:
+ """
+ root:
+ Directory where src, build config and tools are located
+ (and this file)
+ build:
+ Directory where build output files (i.e. *.o) are saved
+ install:
+ Directory where .so from build and .py from src are put together.
+ site:
+ Directory where the built SciPy version was installed.
+ This is a custom prefix, followed by a relative path matching
+ the one the system would use for the site-packages of the active
+ Python interpreter.
+ """
+ # all paths are absolute
+ root: Path
+ build: Path
+ installed: Path
+ site: Path # <install>/lib/python<version>/site-packages
+
+ def __init__(self, args=None):
+ """:params args: object like Context(build_dir, install_prefix)"""
+ self.root = Path(__file__).parent.absolute()
+ if not args:
+ return
+ self.build = Path(args.build_dir).resolve()
+ if args.install_prefix:
+ self.installed = Path(args.install_prefix).resolve()
+ else:
+ self.installed = self.build.parent / (self.build.stem + "-install")
+ # relative path for site-package with py version
+ # i.e. 'lib/python3.10/site-packages'
+ self.site = self.get_site_packages()
+
+ def add_sys_path(self):
+ """Add site dir to sys.path / PYTHONPATH"""
+ site_dir = str(self.site)
sys.path.insert(0, site_dir)
os.environ['PYTHONPATH'] = \
os.pathsep.join((site_dir, os.environ.get('PYTHONPATH', '')))
- extra_argv = args.args[:]
- if extra_argv and extra_argv[0] == '--':
- extra_argv = extra_argv[1:]
-
- if args.python:
- if extra_argv:
- # Don't use subprocess, since we don't want to include the
- # current path in PYTHONPATH.
- sys.argv = extra_argv
- with open(extra_argv[0], 'r') as f:
- script = f.read()
- sys.modules['__main__'] = new_module('__main__')
- ns = dict(__name__='__main__',
- __file__=extra_argv[0])
- exec(script, ns)
- sys.exit(0)
+ def get_site_packages(self):
+ """
+ Depending on whether we have debian python or not,
+ return dist_packages path or site_packages path.
+ """
+ if 'deb_system' in INSTALL_SCHEMES:
+ # debian patched python in use
+ install_cmd = dist.Distribution().get_command_obj('install')
+ install_cmd.select_scheme('deb_system')
+ install_cmd.finalize_options()
+ plat_path = Path(install_cmd.install_platlib)
else:
- import code
- code.interact()
- sys.exit(0)
-
- if args.ipython:
- import IPython
- IPython.embed(user_ns={})
- sys.exit(0)
-
- if args.shell:
- shell = os.environ.get('SHELL', 'sh')
- print("Spawning a Unix shell...")
- os.execv(shell, [shell] + extra_argv)
- sys.exit(1)
-
- if args.doc:
- cmd = ["make", "-Cdoc", 'PYTHON="{}"'.format(sys.executable)]
- cmd += args.doc
- if args.parallel:
- cmd.append('SPHINXOPTS="-j{}"'.format(args.parallel))
- subprocess.run(cmd, check=True)
- sys.exit(0)
-
- if args.coverage:
- dst_dir = os.path.join(ROOT_DIR, args.build_dir, 'coverage')
- fn = os.path.join(dst_dir, 'coverage_html.js')
- if os.path.isdir(dst_dir) and os.path.isfile(fn):
- shutil.rmtree(dst_dir)
- extra_argv += ['--cov-report=html:' + dst_dir]
-
- if args.refguide_check:
- cmd = [os.path.join(ROOT_DIR, 'tools', 'refguide_check.py'),
- '--doctests']
- if args.verbose:
- cmd += ['-' + 'v'*args.verbose]
- if args.submodule:
- cmd += [args.submodule]
- os.execv(sys.executable, [sys.executable] + cmd)
- sys.exit(0)
-
- test, version, mod_path = get_project_info()
-
- if args.bench:
- # Run ASV
- items = extra_argv
- if args.tests:
- items += args.tests
- if args.submodule:
- items += [args.submodule]
-
- bench_args = []
- for a in items:
- bench_args.extend(['--bench', a])
-
- if not args.bench_compare:
- import scipy
- print("Running benchmarks for Scipy version %s at %s"
- % (version, mod_path))
- cmd = ['asv', 'run', '--dry-run', '--show-stderr',
- '--python=same'] + bench_args
- retval = runtests.run_asv(cmd)
- sys.exit(retval)
- else:
- if len(args.bench_compare) == 1:
- commit_a = args.bench_compare[0]
- commit_b = 'HEAD'
- elif len(args.bench_compare) == 2:
- commit_a, commit_b = args.bench_compare
- else:
- p.error("Too many commits to compare benchmarks for")
-
- # Check for uncommitted files
- if commit_b == 'HEAD':
- r1 = subprocess.call(['git', 'diff-index', '--quiet',
- '--cached', 'HEAD'])
- r2 = subprocess.call(['git', 'diff-files', '--quiet'])
- if r1 != 0 or r2 != 0:
- print("*"*80)
- print("WARNING: you have uncommitted changes --- "
- "these will NOT be benchmarked!")
- print("*"*80)
-
- # Fix commit ids (HEAD is local to current repo)
- p = subprocess.Popen(['git', 'rev-parse', commit_b],
- stdout=subprocess.PIPE)
- out, err = p.communicate()
- commit_b = out.strip()
-
- p = subprocess.Popen(['git', 'rev-parse', commit_a],
- stdout=subprocess.PIPE)
- out, err = p.communicate()
- commit_a = out.strip()
-
- cmd = ['asv', 'continuous', '--show-stderr', '--factor', '1.05',
- commit_a, commit_b] + bench_args
- runtests.run_asv(cmd)
- sys.exit(1)
-
- if args.build_only:
- sys.exit(0)
+ plat_path = Path(get_path('platlib'))
+ return self.installed / plat_path.relative_to(sys.exec_prefix)
- if args.submodule:
- tests = [PROJECT_MODULE + "." + args.submodule]
- elif args.tests:
- tests = args.tests
- else:
- tests = None
- # Run the tests
-
- if not args.no_build:
- test_dir = site_dir
- else:
- test_dir = os.path.join(ROOT_DIR, args.build_dir, 'test')
- if not os.path.isdir(test_dir):
- os.makedirs(test_dir)
-
- shutil.copyfile(os.path.join(ROOT_DIR, '.coveragerc'),
- os.path.join(test_dir, '.coveragerc'))
-
- cwd = os.getcwd()
+@contextlib.contextmanager
+def working_dir(new_dir):
+ current_dir = os.getcwd()
try:
- os.chdir(test_dir)
- print("Running tests for {} version:{}, installed at:{}".format(
- PROJECT_MODULE, version, mod_path))
- result = test(args.mode,
- verbose=args.verbose,
- extra_argv=extra_argv,
- doctests=args.doctests,
- coverage=args.coverage,
- tests=tests,
- parallel=args.parallel)
+ os.chdir(new_dir)
+ yield
finally:
- os.chdir(cwd)
+ os.chdir(current_dir)
- if isinstance(result, bool):
- sys.exit(0 if result else 1)
- elif result.wasSuccessful():
- sys.exit(0)
- else:
- sys.exit(1)
+
+def import_module_from_path(mod_name, mod_path):
+ """Import module with name `mod_name` from file path `mod_path`"""
+ spec = importlib.util.spec_from_file_location(mod_name, mod_path)
+ mod = importlib.util.module_from_spec(spec)
+ spec.loader.exec_module(mod)
+ return mod
-def get_project_info():
+def get_test_runner(project_module):
"""
- Function to import the project module and return its tests, version,
- and path where it is found.
- If the project module is not found, then it tries to find it in the
- development installed path.
+ get Test Runner from locally installed/built project
"""
- try:
- test, version, mod_path = runtests.import_module()
- except ImportError:
- # this may fail when running with --no-build, so try to detect
- # an installed scipy in a subdir inside a repo
- site_dir = get_site_packages()
- print("Trying to find scipy from development installed "
- "path at:", site_dir)
- sys.path.insert(0, site_dir)
- os.environ['PYTHONPATH'] = \
- os.pathsep.join((site_dir, os.environ.get('PYTHONPATH', '')))
- test, version, mod_path = runtests.import_module()
+ __import__(project_module)
+ # scipy._lib._testutils:PytestTester
+ test = sys.modules[project_module].test
+ version = sys.modules[project_module].__version__
+ mod_path = sys.modules[project_module].__file__
+ mod_path = os.path.abspath(os.path.join(os.path.dirname(mod_path)))
return test, version, mod_path
-def setup_build(args, env):
- """
- Setting up meson-build
+############
+
+@cli.cls_cmd('build')
+class Build(Task):
+ """:wrench: Build & install package on path.
+
+ \b
+ ```python
+ Examples:
+
+ $ python dev.py build --asan ;
+ ASAN_OPTIONS=detect_leaks=0:symbolize=1:strict_init_order=true
+ LD_PRELOAD=$(gcc --print-file-name=libasan.so)
+ python dev.py test -v -t
+ ./scipy/ndimage/tests/test_morphology.py -- -s
+ ```
"""
- cmd = ["meson", "setup", args.build_dir, "--prefix", PATH_INSTALLED]
- build_dir = Path(args.build_dir)
- run_dir = os.getcwd()
- if build_dir.exists() and not (build_dir / 'meson-info').exists():
- if list(build_dir.iterdir()):
- raise RuntimeError(
- f"You're using Meson to build in the `{build_dir.absolute()}` directory, "
- "but it looks like that directory is not empty and "
- "was not originally created by Meson. "
- f"Please remove '{build_dir.absolute()}' and try again."
- )
- if os.path.exists(build_dir):
- build_options_file = (build_dir / "meson-info"
- / "intro-buildoptions.json")
+ ctx = CONTEXT
+
+ werror = Option(
+ ['--werror'], default=False, is_flag=True,
+ help="Treat warnings as errors")
+ gcov = Option(
+ ['--gcov'], default=False, is_flag=True,
+ help="enable C code coverage via gcov (requires GCC)."
+ "gcov output goes to build/**/*.gc*")
+ asan = Option(
+ ['--asan'], default=False, is_flag=True,
+ help=("Build and run with AddressSanitizer support. "
+ "Note: the build system doesn't check whether "
+ "the project is already compiled with ASan. "
+ "If not, you need to do a clean build (delete "
+ "build and build-install directories)."))
+ debug = Option(
+ ['--debug', '-d'], default=False, is_flag=True, help="Debug build")
+ parallel = Option(
+ ['--parallel', '-j'], default=None, metavar='N_JOBS',
+ help=("Number of parallel jobs for building. "
+ "This defaults to 2 * n_cpus + 2."))
+ show_build_log = Option(
+ ['--show-build-log'], default=False, is_flag=True,
+ help="Show build output rather than using a log file")
+ win_cp_openblas = Option(
+ ['--win-cp-openblas'], default=False, is_flag=True,
+ help=("If set, and on Windows, copy OpenBLAS lib to install directory "
+ "after meson install. "
+ "Note: this argument may be removed in the future once a "
+ "`site.cfg`-like mechanism to select BLAS/LAPACK libraries is "
+ "implemented for Meson"))
+
+ @classmethod
+ def setup_build(cls, dirs, args):
+ """
+ Setting up meson-build
+ """
+ for fn in PROJECT_ROOT_FILES:
+ if not (dirs.root / fn).exists():
+ print("To build the project, run dev.py in "
+ "git checkout or unpacked source")
+ sys.exit(1)
+
+ env = dict(os.environ)
+ cmd = ["meson", "setup", dirs.build, "--prefix", dirs.installed]
+ build_dir = dirs.build
+ run_dir = Path()
+ if build_dir.exists() and not (build_dir / 'meson-info').exists():
+ if list(build_dir.iterdir()):
+ raise RuntimeError("Can't build into non-empty directory "
+ f"'{build_dir.absolute()}'")
+
+ build_options_file = (
+ build_dir / "meson-info" / "intro-buildoptions.json")
if build_options_file.exists():
with open(build_options_file) as f:
build_options = json.load(f)
@@ -436,226 +447,709 @@ def setup_build(args, env):
if option["name"] == "prefix":
installdir = option["value"]
break
- if installdir != PATH_INSTALLED:
- run_dir = os.path.join(run_dir, build_dir)
- cmd = ["meson", "--reconfigure", "--prefix", PATH_INSTALLED]
+ if installdir != str(dirs.installed):
+ run_dir = build_dir
+ cmd = ["meson", "setup", "--reconfigure",
+ "--prefix", str(dirs.installed)]
else:
return
+ if args.werror:
+ cmd += ["--werror"]
+ if args.gcov:
+ cmd += ['-Db_coverage=true']
+ if args.asan:
+ cmd += ['-Db_sanitize=address,undefined']
+ # Setting up meson build
+ cmd_str = ' '.join([str(p) for p in cmd])
+ cls.console.print(f"{EMOJI.cmd} [cmd] {cmd_str}")
+ ret = subprocess.call(cmd, env=env, cwd=run_dir)
+ if ret == 0:
+ print("Meson build setup OK")
else:
- run_dir = os.path.join(run_dir, build_dir)
- cmd = ["meson", "--reconfigure", "--prefix", PATH_INSTALLED]
-
- if args.werror:
- cmd += ["--werror"]
- if args.gcov:
- cmd += ['-Db_coverage=true']
- # Setting up meson build
- ret = subprocess.call(cmd, env=env, cwd=run_dir)
- if ret == 0:
- print("Meson build setup OK")
- else:
- print("Meson build setup failed! ({0} elapsed)")
- sys.exit(1)
- return
+ print("Meson build setup failed!")
+ sys.exit(1)
+ return env
+
+ @classmethod
+ def build_project(cls, dirs, args, env):
+ """
+ Build a dev version of the project.
+ """
+ cmd = ["ninja", "-C", str(dirs.build)]
+ if args.parallel is not None:
+ cmd += ["-j", str(args.parallel)]
+
+ # Building with ninja-backend
+ cmd_str = ' '.join([str(p) for p in cmd])
+ cls.console.print(f"{EMOJI.cmd} [cmd] {cmd_str}")
+ ret = subprocess.call(cmd, env=env, cwd=dirs.root)
+
+ if ret == 0:
+ print("Build OK")
+ else:
+ print("Build failed!")
+ sys.exit(1)
+ @classmethod
+ def install_project(cls, dirs, args):
+ """
+ Installs the project after building.
+ """
+ if dirs.installed.exists():
+ non_empty = len(os.listdir(dirs.installed))
+ if non_empty and not dirs.site.exists():
+ raise RuntimeError("Can't install in non-empty directory: "
+ f"'{dirs.installed}'")
+ cmd = ["meson", "install", "-C", args.build_dir, "--only-changed"]
+ log_filename = dirs.root / 'meson-install.log'
+ start_time = datetime.datetime.now()
+ cmd_str = ' '.join([str(p) for p in cmd])
+ cls.console.print(f"{EMOJI.cmd} [cmd] {cmd_str}")
+ if args.show_build_log:
+ ret = subprocess.call(cmd, cwd=dirs.root)
+ else:
+ print("Installing, see meson-install.log...")
+ with open(log_filename, 'w') as log:
+ p = subprocess.Popen(cmd, stdout=log, stderr=log,
+ cwd=dirs.root)
+
+ try:
+ # Wait for it to finish, and print something to indicate the
+ # process is alive, but only if the log file has grown (to
+ # allow continuous integration environments to kill a hanging
+ # process if it produces no output)
+ last_blip = time.time()
+ last_log_size = os.stat(log_filename).st_size
+ while p.poll() is None:
+ time.sleep(0.5)
+ if time.time() - last_blip > 60:
+ log_size = os.stat(log_filename).st_size
+ if log_size > last_log_size:
+ elapsed = datetime.datetime.now() - start_time
+ print(" ... installation in progress ({0} "
+ "elapsed)".format(elapsed))
+ last_blip = time.time()
+ last_log_size = log_size
+
+ ret = p.wait()
+ except: # noqa: E722
+ p.terminate()
+ raise
+ elapsed = datetime.datetime.now() - start_time
+
+ if ret != 0:
+ if not args.show_build_log:
+ with open(log_filename, 'r') as f:
+ print(f.read())
+ print(f"Installation failed! ({elapsed} elapsed)")
+ sys.exit(1)
-def install_project(args):
- """
- Installs the project after building.
+ # ignore everything in the install directory.
+ with open(dirs.installed / ".gitignore", "w") as f:
+ f.write("*")
+
+ print("Installation OK")
+ return
+
+ @classmethod
+ def copy_openblas(cls, dirs):
+ """
+        Copies the OpenBLAS DLL to the SciPy install dir, and also overwrites
+        the default `_distributor_init.py` file with the one we use for
+        wheels uploaded to PyPI, so that the DLL gets loaded at run-time.
+
+ Assumes pkg-config is installed and aware of OpenBLAS.
+ """
+ # Get OpenBLAS lib path from pkg-config
+ cmd = ['pkg-config', '--variable', 'libdir', 'openblas']
+ result = subprocess.run(cmd, capture_output=True, text=True)
+ if result.returncode != 0:
+            print(result.stderr)
+ return result.returncode
+
+ openblas_lib_path = Path(result.stdout.strip())
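+        # Sanity check: pkg-config's libdir conventionally ends in 'lib'.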
+        if openblas_lib_path.stem != 'lib':
+ raise RuntimeError(
+ f'Expecting "lib" at end of "{openblas_lib_path}"')
+
+ # Look in bin subdirectory for OpenBLAS binaries.
+ bin_path = openblas_lib_path.parent / 'bin'
+        # Create the output .libs directory in the SciPy install directory.
+ scipy_path = dirs.site / 'scipy'
+ libs_path = scipy_path / '.libs'
+ libs_path.mkdir(exist_ok=True)
+ # Copy DLL files from OpenBLAS install to scipy install .libs subdir.
+ for dll_fn in bin_path.glob('*.dll'):
+ out_fname = libs_path / dll_fn.parts[-1]
+ print(f'Copying {dll_fn} to {out_fname}')
+ out_fname.write_bytes(dll_fn.read_bytes())
+
+        # Write _distributor_init.py to the scipy install dir; this ensures
+        # the .libs directory is on the DLL search path at run-time, so
+        # OpenBLAS gets found.
+ openblas_support = import_module_from_path(
+ 'openblas_support',
+ dirs.root / 'tools' / 'openblas_support.py')
+ openblas_support.make_init(scipy_path)
+ return 0
+
+ @classmethod
+ def run(cls, add_path=False, **kwargs):
+ kwargs.update(cls.ctx.get(kwargs))
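+        # Repackage the flat kwargs dict as a namedtuple so helper methods
+        # can use attribute access (args.werror, args.parallel, ...).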
+ Args = namedtuple('Args', [k for k in kwargs.keys()])
+ args = Args(**kwargs)
+
+ cls.console = Console(theme=console_theme)
+ dirs = Dirs(args)
+ if args.no_build:
+ print("Skipping build")
+ else:
+ env = cls.setup_build(dirs, args)
+ cls.build_project(dirs, args, env)
+ cls.install_project(dirs, args)
+ if args.win_cp_openblas and platform.system() == 'Windows':
+ if cls.copy_openblas(dirs) == 0:
+ print('OpenBLAS copied')
+ else:
+ print("OpenBLAS copy failed!")
+ sys.exit(1)
+
+ # add site to sys.path
+ if add_path:
+ dirs.add_sys_path()
+
+
+@cli.cls_cmd('test')
+class Test(Task):
+ """:wrench: Run tests.
+
+ \b
+ ```python
+ Examples:
+
+ $ python dev.py test -s {SAMPLE_SUBMODULE}
+ $ python dev.py test -t scipy.optimize.tests.test_minimize_constrained
+ $ python dev.py test -s cluster -m full --durations 20
+ $ python dev.py test -s stats -- --tb=line # `--` passes next args to pytest
+ ```
"""
- if os.path.exists(PATH_INSTALLED):
- installdir = get_site_packages()
- non_empty = len(os.listdir(PATH_INSTALLED))
- if non_empty and not os.path.exists(installdir):
- raise RuntimeError("Can't install in non-empty directory: "
- f"'{PATH_INSTALLED}'")
- cmd = ["meson", "install", "-C", args.build_dir]
- log_filename = os.path.join(ROOT_DIR, 'meson-install.log')
- start_time = datetime.datetime.now()
- if args.show_build_log:
- ret = subprocess.call(cmd, cwd=ROOT_DIR)
- else:
- print("Installing, see meson-install.log...")
- with open(log_filename, 'w') as log:
- p = subprocess.Popen(cmd, stdout=log, stderr=log,
- cwd=ROOT_DIR)
+ ctx = CONTEXT
+
+ verbose = Option(
+ ['--verbose', '-v'], default=False, is_flag=True,
+ help="more verbosity")
+ # removed doctests as currently not supported by _lib/_testutils.py
+ # doctests = Option(['--doctests'], default=False)
+ coverage = Option(
+ ['--coverage', '-c'], default=False, is_flag=True,
+ help=("report coverage of project code. "
+ "HTML output goes under build/coverage"))
+ durations = Option(
+ ['--durations', '-d'], default=None, metavar="NUM_TESTS",
+ help="Show timing for the given number of slowest tests"
+ )
+ submodule = Option(
+ ['--submodule', '-s'], default=None, metavar='MODULE_NAME',
+ help="Submodule whose tests to run (cluster, constants, ...)")
+ tests = Option(
+ ['--tests', '-t'], default=None, multiple=True, metavar='TESTS',
+ help='Specify tests to run')
+ mode = Option(
+ ['--mode', '-m'], default='fast', metavar='MODE', show_default=True,
+ help=("'fast', 'full', or something that could be passed to "
+ "`pytest -m` as a marker expression"))
+ parallel = Option(
+ ['--parallel', '-j'], default=1, metavar='N_JOBS',
+ help="Number of parallel jobs for testing"
+ )
+ # Argument can't have `help=`; used to consume all of `-- arg1 arg2 arg3`
+ pytest_args = Argument(
+ ['pytest_args'], nargs=-1, metavar='PYTEST-ARGS', required=False
+ )
+ TASK_META = {
+ 'task_dep': ['build'],
+ }
+
+ @classmethod
+ def scipy_tests(cls, args, pytest_args):
+ dirs = Dirs(args)
+ dirs.add_sys_path()
+ print(f"SciPy from development installed path at: {dirs.site}")
+
+ # FIXME: support pos-args with doit
+ extra_argv = pytest_args[:] if pytest_args else []
+ if extra_argv and extra_argv[0] == '--':
+ extra_argv = extra_argv[1:]
+
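+        # Remove any stale HTML coverage report before generating a new one.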
+ if args.coverage:
+ dst_dir = dirs.root / args.build_dir / 'coverage'
+ fn = dst_dir / 'coverage_html.js'
+ if dst_dir.is_dir() and fn.is_file():
+ shutil.rmtree(dst_dir)
+ extra_argv += ['--cov-report=html:' + str(dst_dir)]
+ shutil.copyfile(dirs.root / '.coveragerc',
+ dirs.site / '.coveragerc')
+
+ if args.durations:
+ extra_argv += ['--durations', args.durations]
+
+ # convert options to test selection
+ if args.submodule:
+ tests = [PROJECT_MODULE + "." + args.submodule]
+ elif args.tests:
+ tests = args.tests
+ else:
+ tests = None
+
+ runner, version, mod_path = get_test_runner(PROJECT_MODULE)
+ # FIXME: changing CWD is not a good practice
+ with working_dir(dirs.site):
+ print("Running tests for {} version:{}, installed at:{}".format(
+ PROJECT_MODULE, version, mod_path))
+ # runner verbosity - convert bool to int
+ verbose = int(args.verbose) + 1
+ result = runner( # scipy._lib._testutils:PytestTester
+ args.mode,
+ verbose=verbose,
+ extra_argv=extra_argv,
+ doctests=False,
+ coverage=args.coverage,
+ tests=tests,
+ parallel=args.parallel)
+ return result
+
+ @classmethod
+ def run(cls, pytest_args, **kwargs):
+ """run unit-tests"""
+ kwargs.update(cls.ctx.get())
+ Args = namedtuple('Args', [k for k in kwargs.keys()])
+ args = Args(**kwargs)
+ return cls.scipy_tests(args, pytest_args)
+
+
+@cli.cls_cmd('bench')
+class Bench(Task):
+ """:wrench: Run benchmarks.
+
+ \b
+ ```python
+ Examples:
+
+ $ python dev.py bench -t integrate.SolveBVP
+ $ python dev.py bench -t linalg.Norm
+ $ python dev.py bench --compare main
+ ```
+ """
+ ctx = CONTEXT
+ TASK_META = {
+ 'task_dep': ['build'],
+ }
+ submodule = Option(
+ ['--submodule', '-s'], default=None, metavar='SUBMODULE',
+ help="Submodule whose tests to run (cluster, constants, ...)")
+ tests = Option(
+ ['--tests', '-t'], default=None, multiple=True,
+ metavar='TESTS', help='Specify tests to run')
+ compare = Option(
+ ['--compare', '-c'], default=None, metavar='COMPARE', multiple=True,
+ help=(
+ "Compare benchmark results of current HEAD to BEFORE. "
+ "Use an additional --bench COMMIT to override HEAD with COMMIT. "
+ "Note that you need to commit your changes first!"))
+
+ @staticmethod
+ def run_asv(dirs, cmd):
+ EXTRA_PATH = ['/usr/lib/ccache', '/usr/lib/f90cache',
+ '/usr/local/lib/ccache', '/usr/local/lib/f90cache']
+ bench_dir = dirs.root / 'benchmarks'
+ sys.path.insert(0, str(bench_dir))
+ # Always use ccache, if installed
+ env = dict(os.environ)
+ env['PATH'] = os.pathsep.join(EXTRA_PATH +
+ env.get('PATH', '').split(os.pathsep))
+ # Control BLAS/LAPACK threads
+ env['OPENBLAS_NUM_THREADS'] = '1'
+ env['MKL_NUM_THREADS'] = '1'
+
+ # Limit memory usage
+ from benchmarks.common import set_mem_rlimit
try:
- # Wait for it to finish, and print something to indicate the
- # process is alive, but only if the log file has grown (to
- # allow continuous integration environments kill a hanging
- # process accurately if it produces no output)
- last_blip = time.time()
- last_log_size = os.stat(log_filename).st_size
- while p.poll() is None:
- time.sleep(0.5)
- if time.time() - last_blip > 60:
- log_size = os.stat(log_filename).st_size
- if log_size > last_log_size:
- elapsed = datetime.datetime.now() - start_time
- print(" ... installation in progress ({0} "
- "elapsed)".format(elapsed))
- last_blip = time.time()
- last_log_size = log_size
-
- ret = p.wait()
- except: # noqa: E722
- p.terminate()
+ set_mem_rlimit()
+ except (ImportError, RuntimeError):
+ pass
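+        # asv may be missing; translate ENOENT into an install hint below.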
+ try:
+ return subprocess.call(cmd, env=env, cwd=bench_dir)
+ except OSError as err:
+ if err.errno == errno.ENOENT:
+ cmd_str = " ".join(cmd)
+ print(f"Error when running '{cmd_str}': {err}\n")
+ print("You need to install Airspeed Velocity "
+ "(https://airspeed-velocity.github.io/asv/)")
+                print("to run SciPy benchmarks")
+ return 1
raise
- elapsed = datetime.datetime.now() - start_time
- if ret != 0:
- if not args.show_build_log:
- with open(log_filename, 'r') as f:
- print(f.read())
- print("Installation failed! ({0} elapsed)".format(elapsed))
- sys.exit(1)
+ @classmethod
+ def scipy_bench(cls, args):
+ dirs = Dirs(args)
+ dirs.add_sys_path()
+ print(f"SciPy from development installed path at: {dirs.site}")
+ with working_dir(dirs.site):
+ runner, version, mod_path = get_test_runner(PROJECT_MODULE)
+ extra_argv = []
+ if args.tests:
+ extra_argv.append(args.tests)
+ if args.submodule:
+ extra_argv.append([args.submodule])
+
+ bench_args = []
+ for a in extra_argv:
+ bench_args.extend(['--bench', ' '.join(str(x) for x in a)])
+ if not args.compare:
+                print("Running benchmarks for SciPy version %s at %s"
+ % (version, mod_path))
+ cmd = ['asv', 'run', '--dry-run', '--show-stderr',
+ '--python=same'] + bench_args
+ retval = cls.run_asv(dirs, cmd)
+ sys.exit(retval)
+ else:
+ if len(args.compare) == 1:
+ commit_a = args.compare[0]
+ commit_b = 'HEAD'
+ elif len(args.compare) == 2:
+ commit_a, commit_b = args.compare
+ else:
+                print("Too many commits to compare benchmarks for")
+                sys.exit(1)
+ # Check for uncommitted files
+ if commit_b == 'HEAD':
+ r1 = subprocess.call(['git', 'diff-index', '--quiet',
+ '--cached', 'HEAD'])
+ r2 = subprocess.call(['git', 'diff-files', '--quiet'])
+ if r1 != 0 or r2 != 0:
+ print("*" * 80)
+ print("WARNING: you have uncommitted changes --- "
+ "these will NOT be benchmarked!")
+ print("*" * 80)
+
+ # Fix commit ids (HEAD is local to current repo)
+ p = subprocess.Popen(['git', 'rev-parse', commit_b],
+ stdout=subprocess.PIPE)
+ out, err = p.communicate()
+ commit_b = out.strip()
+
+ p = subprocess.Popen(['git', 'rev-parse', commit_a],
+ stdout=subprocess.PIPE)
+ out, err = p.communicate()
+ commit_a = out.strip()
+ cmd_compare = [
+ 'asv', 'continuous', '--show-stderr', '--factor', '1.05',
+ commit_a, commit_b
+ ] + bench_args
+            ret = cls.run_asv(dirs, cmd_compare)
+            sys.exit(ret)
+
+ @classmethod
+ def run(cls, **kwargs):
+ """run benchmark"""
+ kwargs.update(cls.ctx.get())
+ Args = namedtuple('Args', [k for k in kwargs.keys()])
+ args = Args(**kwargs)
+ cls.scipy_bench(args)
+
+
+###################
+# linters
+
+@task_params([{'name': 'output_file', 'long': 'output-file', 'default': None,
+ 'help': 'Redirect report to a file'}])
+def task_flake8(output_file):
+ """Run flake8 over the code base and benchmarks."""
+ opts = ''
+ if output_file:
+ opts += f'--output-file={output_file}'
+ return {
+ 'actions': [f"flake8 {opts} scipy benchmarks/benchmarks"],
+ 'doc': 'Lint scipy and benchmarks directory',
+ }
+
+
+def task_pep8diff():
+ # Lint just the diff since branching off of main using a
+ # stricter configuration.
+ return {
+ 'basename': 'pep8-diff',
+ 'actions': [str(Dirs().root / 'tools' / 'lint_diff.py')],
+ 'doc': 'Lint only files modified since last commit (stricter rules)',
+ }
+
+
+def task_unicode_check():
+ return {
+ 'basename': 'unicode-check',
+ 'actions': [str(Dirs().root / 'tools' / 'unicode-check.py')],
+ 'doc': 'Check for disallowed Unicode characters in the SciPy Python '
+ 'and Cython source code.',
+ }
+
+
+@cli.cls_cmd('lint')
+class Lint():
+ """:dash: Run flake8, check PEP 8 compliance on branch diff and check for
+ disallowed Unicode characters."""
+ output_file = Option(
+ ['--output-file'], default=None, help='Redirect report to a file')
+
+ def run(output_file):
+ opts = {'output_file': output_file}
+ run_doit_task({'flake8': opts, 'pep8-diff': {}, 'unicode-check': {}})
+
+
+@cli.cls_cmd('mypy')
+class Mypy(Task):
+ """:wrench: Run mypy on the codebase."""
+ ctx = CONTEXT
+
+ TASK_META = {
+ 'task_dep': ['build'],
+ }
+
+ @classmethod
+ def run(cls, **kwargs):
+ kwargs.update(cls.ctx.get())
+ Args = namedtuple('Args', [k for k in kwargs.keys()])
+ args = Args(**kwargs)
+ dirs = Dirs(args)
- # ignore everything in the install directory.
- with open(Path(PATH_INSTALLED) / ".gitignore", "w") as f:
- f.write("*")
+ try:
+ import mypy.api
+ except ImportError as e:
+ raise RuntimeError(
+ "Mypy not found. Please install it by running "
+ "pip install -r mypy_requirements.txt from the repo root"
+ ) from e
+
+ config = dirs.root / "mypy.ini"
+ check_path = PROJECT_MODULE
+
+ with working_dir(dirs.site):
+ # By default mypy won't color the output since it isn't being
+ # invoked from a tty.
+ os.environ['MYPY_FORCE_COLOR'] = '1'
+ # Change to the site directory to make sure mypy doesn't pick
+ # up any type stubs in the source tree.
+ report, errors, status = mypy.api.run([
+ "--config-file",
+ str(config),
+ check_path,
+ ])
+ print(report, end='')
+ print(errors, end='', file=sys.stderr)
+ return status == 0
+
+
+##########################################
+# DOC
+
+@cli.cls_cmd('doc')
+class Doc(Task):
+ """:wrench: Build documentation.
+
+TARGETS: Sphinx build targets [default: 'html']
- print("Installation OK")
- return
+"""
+ ctx = CONTEXT
+ args = Argument(['args'], nargs=-1, metavar='TARGETS', required=False)
+ list_targets = Option(
+ ['--list-targets', '-t'], default=False, is_flag=True,
+ help='List doc targets',
+ )
+ parallel = Option(
+ ['--parallel', '-j'], default=1, metavar='N_JOBS',
+ help="Number of parallel jobs"
+ )
-def copy_openblas():
- """
- Copies OpenBLAS DLL to the SciPy install dir, and also overwrites the
- default `_distributor_init.py` file with the one we use for wheels uploaded
- to PyPI so that DLL gets loaded.
+ @classmethod
+ def task_meta(cls, list_targets, parallel, args, **kwargs):
+ if list_targets: # list MAKE targets, remove default target
+ task_dep = []
+ targets = ''
+ else:
+ task_dep = ['build']
+ targets = ' '.join(args) if args else 'html'
+
+ kwargs.update(cls.ctx.get())
+ Args = namedtuple('Args', [k for k in kwargs.keys()])
+ build_args = Args(**kwargs)
+ dirs = Dirs(build_args)
+
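+        # Pin the interpreter used by the doc Makefile and forward -j to
+        # Sphinx for a parallel build.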
+ make_params = [f'PYTHON="{sys.executable}"']
+ if parallel:
+ make_params.append(f'SPHINXOPTS="-j{parallel}"')
+
+ return {
+ 'actions': [
+ # move to doc/ so local scipy does not get imported
+ (f'cd doc; env PYTHONPATH="{dirs.site}" '
+ f'make {" ".join(make_params)} {targets}'),
+ ],
+ 'task_dep': task_dep,
+ 'io': {'capture': False},
+ }
+
+
+@cli.cls_cmd('refguide-check')
+class RefguideCheck(Task):
+ """:wrench: Run refguide check."""
+ ctx = CONTEXT
+
+ submodule = Option(
+ ['--submodule', '-s'], default=None, metavar='SUBMODULE',
+ help="Submodule whose tests to run (cluster, constants, ...)")
+ verbose = Option(
+ ['--verbose', '-v'], default=False, is_flag=True, help="verbosity")
+
+ @classmethod
+ def task_meta(cls, **kwargs):
+ kwargs.update(cls.ctx.get())
+ Args = namedtuple('Args', [k for k in kwargs.keys()])
+ args = Args(**kwargs)
+ dirs = Dirs(args)
+
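+        # Run refguide_check.py with the interpreter running dev.py so the
+        # development environment's Python is used.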
+ cmd = [f'{sys.executable}',
+ str(dirs.root / 'tools' / 'refguide_check.py'),
+ '--doctests']
+ if args.verbose:
+ cmd += ['-vvv']
+ if args.submodule:
+ cmd += [args.submodule]
+ cmd_str = ' '.join(cmd)
+ return {
+ 'actions': [f'env PYTHONPATH={dirs.site} {cmd_str}'],
+ 'task_dep': ['build'],
+ 'io': {'capture': False},
+ }
+
+
+##########################################
+# ENVS
+
+@cli.cls_cmd('python')
+class Python():
+ """:wrench: Start a Python shell with PYTHONPATH set."""
+ ctx = CONTEXT
+ pythonpath = Option(
+ ['--pythonpath', '-p'], metavar='PYTHONPATH', default=None,
+ help='Paths to prepend to PYTHONPATH')
+ extra_argv = Argument(
+ ['extra_argv'], nargs=-1, metavar='ARGS', required=False)
+
+ @classmethod
+ def _setup(cls, pythonpath, **kwargs):
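+        # Build (with default options) and add the dev install to sys.path,
+        # then prepend any user-supplied --pythonpath entries ahead of it.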
+ vals = Build.opt_defaults()
+ vals.update(kwargs)
+ Build.run(add_path=True, **vals)
+ if pythonpath:
+ for p in reversed(pythonpath.split(os.pathsep)):
+ sys.path.insert(0, p)
+
+ @classmethod
+ def run(cls, pythonpath, extra_argv=None, **kwargs):
+ cls._setup(pythonpath, **kwargs)
+ if extra_argv:
+ # Don't use subprocess, since we don't want to include the
+ # current path in PYTHONPATH.
+ sys.argv = extra_argv
+ with open(extra_argv[0], 'r') as f:
+ script = f.read()
+ sys.modules['__main__'] = new_module('__main__')
+ ns = dict(__name__='__main__', __file__=extra_argv[0])
+ exec(script, ns)
+ else:
+ import code
+ code.interact()
- Assumes pkg-config is installed and aware of OpenBLAS.
- """
- # Get OpenBLAS lib path from pkg-config
- cmd = ['pkg-config', '--variable', 'libdir', 'openblas']
- result = subprocess.run(cmd, capture_output=True, text=True)
- if result.returncode != 0:
- print(result.stderrr)
- return result.returncode
-
- openblas_lib_path = Path(result.stdout.strip())
- if not openblas_lib_path.stem == 'lib':
- raise RuntimeError(f'Expecting "lib" at end of "{openblas_lib_path}"')
-
- # Look in bin subdirectory for OpenBLAS binaries.
- bin_path = openblas_lib_path.parent / 'bin'
- # Locate, make output .libs directory in Scipy install directory.
- scipy_path = Path(get_site_packages()) / 'scipy'
- libs_path = scipy_path / '.libs'
- libs_path.mkdir(exist_ok=True)
- # Copy DLL files from OpenBLAS install to scipy install .libs subdir.
- for dll_fn in bin_path.glob('*.dll'):
- out_fname = libs_path / dll_fn.parts[-1]
- print(f'Copying {dll_fn} to {out_fname}')
- out_fname.write_bytes(dll_fn.read_bytes())
-
- # Write _distributor_init.py to scipy install dir; this ensures the .libs
- # file is on the DLL search path at run-time, so OpenBLAS gets found
- openblas_support = import_module_from_path(
- 'openblas_support',
- Path(ROOT_DIR) / 'tools' / 'openblas_support.py')
- openblas_support.make_init(scipy_path)
- return 0
-
-
-def get_site_packages():
- """
- Depending on whether we have debian python or not,
- return dist_packages path or site_packages path.
- """
- if 'deb_system' in INSTALL_SCHEMES:
- # Debian patched python in use
- install_cmd = dist.Distribution().get_command_obj('install')
- install_cmd.select_scheme('deb_system')
- install_cmd.finalize_options()
- plat_path = Path(install_cmd.install_platlib)
- else:
- plat_path = Path(get_path('platlib'))
- return str(Path(PATH_INSTALLED) / plat_path.relative_to(sys.exec_prefix))
-
-
-def build_project(args):
- """
- Build a dev version of the project.
-
- Returns
- -------
- site_dir
- Directory where the built SciPy version was installed. This is a custom
- prefix, followed by a relative path matching the one the system would
- use for the site-packages of the active Python interpreter.
- """
- root_ok = [os.path.exists(os.path.join(ROOT_DIR, fn))
- for fn in PROJECT_ROOT_FILES]
- if not all(root_ok):
- print("To build the project, run dev.py in "
- "git checkout or unpacked source")
- sys.exit(1)
- env = dict(os.environ)
+@cli.cls_cmd('ipython')
+class Ipython(Python):
+ """:wrench: Start IPython shell with PYTHONPATH set."""
+ ctx = CONTEXT
+ pythonpath = Python.pythonpath
- setup_build(args, env)
+ @classmethod
+ def run(cls, pythonpath, **kwargs):
+ cls._setup(pythonpath, **kwargs)
+ import IPython
+ IPython.embed(user_ns={})
- cmd = ["ninja", "-C", args.build_dir]
- if args.parallel > 1:
- cmd += ["-j", str(args.parallel)]
- # Building with ninja-backend
- ret = subprocess.call(cmd, env=env, cwd=ROOT_DIR)
+@cli.cls_cmd('shell')
+class Shell(Python):
+ """:wrench: Start Unix shell with PYTHONPATH set."""
+ ctx = CONTEXT
+ pythonpath = Python.pythonpath
+ extra_argv = Python.extra_argv
- if ret == 0:
- print("Build OK")
- else:
- print("Build failed!")
+ @classmethod
+ def run(cls, pythonpath, extra_argv, **kwargs):
+ cls._setup(pythonpath, **kwargs)
+ shell = os.environ.get('SHELL', 'sh')
+ print("Spawning a Unix shell...")
+ os.execv(shell, [shell] + list(extra_argv))
sys.exit(1)
- install_project(args)
- site_dir = get_site_packages()
+@cli.command()
+@click.argument('version_args', nargs=2)
+@click.pass_obj
+def notes(ctx_obj, version_args):
+ """:ledger: Release notes and log generation.
- if args.win_cp_openblas and platform.system() == 'Windows':
- if copy_openblas() == 0:
- print('OpenBLAS copied')
- else:
- print("OpenBLAS copy failed!")
- sys.exit(1)
+ \b
+ ```python
+ Example:
+
+ $ python dev.py notes v1.7.0 v1.8.0
+ ```
+ """
+ if version_args:
+ sys.argv = version_args
+ log_start = sys.argv[0]
+ log_end = sys.argv[1]
+ cmd = f"python tools/write_release_and_log.py {log_start} {log_end}"
+ click.echo(cmd)
+ try:
+ subprocess.run([cmd], check=True, shell=True)
+ except subprocess.CalledProcessError:
+ print('Error caught: Incorrect log start or log end version')
- return site_dir
+@cli.command()
+@click.argument('revision_args', nargs=2)
+@click.pass_obj
+def authors(ctx_obj, revision_args):
+ """:ledger: Generate list of authors who contributed within revision
+ interval.
-def run_mypy(args):
- if args.no_build:
- raise ValueError('Cannot run mypy with --no-build')
+ \b
+ ```python
+ Example:
+ $ python dev.py authors v1.7.0 v1.8.0
+ ```
+ """
+ if revision_args:
+ sys.argv = revision_args
+ start_revision = sys.argv[0]
+ end_revision = sys.argv[1]
+ cmd = f"python tools/authors.py {start_revision}..{end_revision}"
+ click.echo(cmd)
try:
- import mypy.api
- except ImportError as e:
- raise RuntimeError(
- "Mypy not found. Please install it by running "
- "pip install -r mypy_requirements.txt from the repo root"
- ) from e
-
- site_dir = build_project(args)
- config = os.path.join(
- os.path.dirname(os.path.abspath(__file__)),
- "mypy.ini",
- )
- with runtests.working_dir(site_dir):
- # By default mypy won't color the output since it isn't being
- # invoked from a tty.
- os.environ['MYPY_FORCE_COLOR'] = '1'
- # Change to the site directory to make sure mypy doesn't pick
- # up any type stubs in the source tree.
- report, errors, status = mypy.api.run([
- "--config-file",
- config,
- PROJECT_MODULE,
- ])
- print(report, end='')
- print(errors, end='', file=sys.stderr)
- return status
-
-
-if __name__ == "__main__":
- main(argv=sys.argv[1:])
+ subprocess.run([cmd], check=True, shell=True)
+ except subprocess.CalledProcessError:
+ print('Error caught: Incorrect revision start or revision end')
+
+
+if __name__ == '__main__':
+ cli()
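
A note on the option handling that recurs in the `run()` methods above: Click
hands over a flat kwargs dict, which is repackaged as a namedtuple for
attribute-style access. A minimal standalone sketch of that pattern (the
option names `werror` and `parallel` are illustrative only):

```python
from collections import namedtuple

def make_args(**kwargs):
    # Build a namedtuple type on the fly so options read as attributes
    # (args.parallel) rather than dict lookups (kwargs['parallel']).
    Args = namedtuple('Args', list(kwargs.keys()))
    return Args(**kwargs)

args = make_args(werror=False, parallel=2)
print(args.parallel)  # -> 2
```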
diff --git a/do.py b/do.py
deleted file mode 100644
index 7c9940d29106..000000000000
--- a/do.py
+++ /dev/null
@@ -1,1097 +0,0 @@
-#! /usr/bin/env python3
-
-'''
-Developer CLI: building (meson), tests, benchmark, etc.
-
-This file contains tasks definitions for doit (https://pydoit.org).
-And also a CLI interface using click (https://click.palletsprojects.com).
-
-The CLI is ideal for project contributors while,
-doit interface is better suited for authring the development tasks.
-
-REQUIREMENTS:
---------------
-- see environment.yml: doit, pydevtool, click, rich-click
-
-# USAGE:
-
-## 1 - click API
-
-Commands can added using default Click API. i.e.
-
-```
-@cli.command()
-@click.argument('extra_argv', nargs=-1)
-@click.pass_obj
-def python(ctx_obj, extra_argv):
- """Start a Python shell with PYTHONPATH set"""
-```
-
-## 2 - class based Click command definition
-
-`CliGroup` provides an alternative class based API to create Click commands.
-
-Just use the `cls_cmd` decorator. And define a `run()` method
-
-```
-@cli.cls_cmd('test')
-class Test():
- """Run tests"""
-
- @classmethod
- def run(cls):
- print('Running tests...')
-```
-
-- Command may make use a Click.Group context defining a `ctx` class attribute
-- Command options are also define as class attributes
-
-```
-@cli.cls_cmd('test')
-class Test():
- """Run tests"""
- ctx = CONTEXT
-
- verbose = Option(
- ['--verbose', '-v'], default=False, is_flag=True, help="verbosity")
-
- @classmethod
- def run(cls, **kwargs): # kwargs contains options from class and CONTEXT
- print('Running tests...')
-```
-
-## 3 - class based interface can be run as a doit task by subclassing from Task
-
-- Extra doit task metadata can be defined as class attribute TASK_META.
-- `run()` method will be used as python-action by task
-
-```
-@cli.cls_cmd('test')
-class Test(Task): # Task base class, doit will create a task
- """Run tests"""
- ctx = CONTEXT
-
- TASK_META = {
- 'task_dep': ['build'],
- }
-
- @classmethod
- def run(cls, **kwargs):
- pass
-```
-
-## 4 - doit tasks with cmd-action "shell" or dynamic metadata
-
-Define method `task_meta()` instead of `run()`:
-
-```
-@cli.cls_cmd('refguide-check')
-class RefguideCheck(Task):
- @classmethod
- def task_meta(cls, **kwargs):
- return {
-```
-
-'''
-
-import os
-import subprocess
-import sys
-import warnings
-import shutil
-import json
-import datetime
-import time
-import platform
-import importlib.util
-import errno
-import contextlib
-from sysconfig import get_path
-
-# distutils is required to infer meson install path
-# if this needs to be replaced for Python 3.12 support and there's no
-# stdlib alternative, use CmdAction and the hack discussed in gh-16058
-with warnings.catch_warnings():
- warnings.filterwarnings("ignore", category=DeprecationWarning)
- from distutils import dist
- from distutils.command.install import INSTALL_SCHEMES
-
-from pathlib import Path
-from collections import namedtuple
-from types import ModuleType as new_module
-from dataclasses import dataclass
-
-import click
-from click import Option, Argument
-from doit import task_params
-from doit.cmd_base import ModuleTaskLoader
-from doit.reporter import ZeroReporter
-from doit.exceptions import TaskError
-from doit.api import run_tasks
-from pydevtool.cli import UnifiedContext, CliGroup, Task
-from rich.console import Console
-from rich.panel import Panel
-from rich.theme import Theme
-from rich_click import rich_click
-
-DOIT_CONFIG = {
- 'verbosity': 2,
- 'minversion': '0.36.0',
-}
-
-
-console_theme = Theme({
- "cmd": "italic gray50",
-})
-
-
-class EMOJI:
- cmd = ":computer:"
-
-
-rich_click.STYLE_ERRORS_SUGGESTION = "yellow italic"
-rich_click.SHOW_ARGUMENTS = True
-rich_click.GROUP_ARGUMENTS_OPTIONS = False
-rich_click.SHOW_METAVARS_COLUMN = True
-rich_click.USE_MARKDOWN = True
-rich_click.OPTION_GROUPS = {
- "do.py": [
- {
- "name": "Options",
- "options": [
- "--help", "--build-dir", "--no-build", "--install-prefix"],
- },
- ],
-
- "do.py test": [
- {
- "name": "Options",
- "options": ["--help", "--verbose", "--parallel", "--coverage"],
- },
- {
- "name": "Options: test selection",
- "options": ["--submodule", "--tests", "--mode"],
- },
- ],
-}
-rich_click.COMMAND_GROUPS = {
- "do.py": [
- {
- "name": "build & testing",
- "commands": ["build", "test"],
- },
- {
- "name": "static checkers",
- "commands": ["lint", "mypy"],
- },
- {
- "name": "environments",
- "commands": ["shell", "python", "ipython"],
- },
- {
- "name": "documentation",
- "commands": ["doc", "refguide-check"],
- },
- {
- "name": "release",
- "commands": ["notes", "authors"],
- },
- {
- "name": "benchmarking",
- "commands": ["bench"],
- },
- ]
-}
-
-
-class ErrorOnlyReporter(ZeroReporter):
- desc = """Report errors only"""
-
- def runtime_error(self, msg):
- console = Console()
- console.print("[red bold] msg")
-
- def add_failure(self, task, fail_info):
- console = Console()
- if isinstance(fail_info, TaskError):
- console.print(f'[red]Task Error - {task.name}'
- f' => {fail_info.message}')
- if fail_info.traceback:
- console.print(Panel(
- "".join(fail_info.traceback),
- title=f"{task.name}",
- subtitle=fail_info.message,
- border_style="red",
- ))
-
-
-CONTEXT = UnifiedContext({
- 'build_dir': Option(
- ['--build-dir'], metavar='BUILD_DIR',
- default='build', show_default=True,
- help=':wrench: Relative path to the build directory.'),
- 'no_build': Option(
- ["--no-build", "-n"], default=False, is_flag=True,
- help=(":wrench: do not build the project"
- " (note event python only modification require build)")),
- 'install_prefix': Option(
- ['--install-prefix'], default=None, metavar='INSTALL_DIR',
- help=(":wrench: Relative path to the install directory."
-              " Default is <build-dir>-install.")),
-})
-
-
-def run_doit_task(tasks):
- """
- :param tasks: (dict) task_name -> {options}
- """
- loader = ModuleTaskLoader(globals())
- doit_config = {
- 'verbosity': 2,
- 'reporter': ErrorOnlyReporter,
- }
- return run_tasks(loader, tasks, extra_config={'GLOBAL': doit_config})
-
-
-class CLI(CliGroup):
- context = CONTEXT
- run_doit_task = run_doit_task
-
-
-@click.group(cls=CLI)
-@click.pass_context
-def cli(ctx, **kwargs):
- """Developer Tool for SciPy
-
- Commands that require a built/installed instance are marked with :wrench:.
-
-
-
- **python do.py --build-dir my-build test -s stats**
- """
- CLI.update_context(ctx, kwargs)
-
-
-PROJECT_MODULE = "scipy"
-PROJECT_ROOT_FILES = ['scipy', 'LICENSE.txt', 'meson.build']
-
-
-@dataclass
-class Dirs:
- """
- root:
- Directory where scr, build config and tools are located
- (and this file)
- build:
- Directory where build output files (i.e. *.o) are saved
- install:
- Directory where .so from build and .py from src are put together.
- site:
- Directory where the built SciPy version was installed.
- This is a custom prefix, followed by a relative path matching
- the one the system would use for the site-packages of the active
- Python interpreter.
- """
- # all paths are absolute
- root: Path
- build: Path
- installed: Path
- site: Path # /lib/python/site-packages
-
- def __init__(self, args=None):
- """:params args: object like Context(build_dir, install_prefix)"""
- self.root = Path(__file__).parent.absolute()
- if not args:
- return
- self.build = Path(args.build_dir).resolve()
- if args.install_prefix:
- self.installed = Path(args.install_prefix).resolve()
- else:
- self.installed = self.build.parent / (self.build.stem + "-install")
- # relative path for site-package with py version
- # i.e. 'lib/python3.10/site-packages'
- self.site = self.get_site_packages()
-
- def add_sys_path(self):
- """Add site dir to sys.path / PYTHONPATH"""
- site_dir = str(self.site)
- sys.path.insert(0, site_dir)
- os.environ['PYTHONPATH'] = \
- os.pathsep.join((site_dir, os.environ.get('PYTHONPATH', '')))
-
- def get_site_packages(self):
- """
- Depending on whether we have debian python or not,
- return dist_packages path or site_packages path.
- """
- if 'deb_system' in INSTALL_SCHEMES:
- # debian patched python in use
- install_cmd = dist.Distribution().get_command_obj('install')
- install_cmd.select_scheme('deb_system')
- install_cmd.finalize_options()
- plat_path = Path(install_cmd.install_platlib)
- else:
- plat_path = Path(get_path('platlib'))
- return self.installed / plat_path.relative_to(sys.exec_prefix)
-
-
-@contextlib.contextmanager
-def working_dir(new_dir):
- current_dir = os.getcwd()
- try:
- os.chdir(new_dir)
- yield
- finally:
- os.chdir(current_dir)
-
-
-def import_module_from_path(mod_name, mod_path):
- """Import module with name `mod_name` from file path `mod_path`"""
- spec = importlib.util.spec_from_file_location(mod_name, mod_path)
- mod = importlib.util.module_from_spec(spec)
- spec.loader.exec_module(mod)
- return mod
-
-
-def get_test_runner(project_module):
- """
- get Test Runner from locally installed/built project
- """
- __import__(project_module)
- # scipy._lib._testutils:PytestTester
- test = sys.modules[project_module].test
- version = sys.modules[project_module].__version__
- mod_path = sys.modules[project_module].__file__
- mod_path = os.path.abspath(os.path.join(os.path.dirname(mod_path)))
- return test, version, mod_path
-
-
-############
-
-@cli.cls_cmd('build')
-class Build(Task):
- """:wrench: build & install package on path"""
- ctx = CONTEXT
-
- werror = Option(
- ['--werror'], default=False, is_flag=True,
- help="Treat warnings as errors")
- gcov = Option(
- ['--gcov'], default=False, is_flag=True,
- help="enable C code coverage via gcov (requires GCC)."
- "gcov output goes to build/**/*.gc*")
- debug = Option(
- ['--debug', '-d'], default=False, is_flag=True, help="Debug build")
- parallel = Option(
- ['--parallel', '-j'], default=1, metavar='PARALLEL',
- help="Number of parallel jobs for build and testing")
- show_build_log = Option(
- ['--show-build-log'], default=False, is_flag=True,
- help="Show build output rather than using a log file")
- win_cp_openblas = Option(
- ['--win-cp-openblas'], default=False, is_flag=True,
- help=("If set, and on Windows, copy OpenBLAS lib to install directory"
- "after meson install. "
- "Note: this argument may be removed in the future once a "
- "`site.cfg`-like mechanism to select BLAS/LAPACK libraries is"
- "implemented for Meson"))
-
- @classmethod
- def setup_build(cls, dirs, args):
- """
- Setting up meson-build
- """
- for fn in PROJECT_ROOT_FILES:
- if not (dirs.root / fn).exists():
- print("To build the project, run dev.py in "
- "git checkout or unpacked source")
- sys.exit(1)
-
- env = dict(os.environ)
- cmd = ["meson", "setup", dirs.build, "--prefix", dirs.installed]
- build_dir = dirs.build
- run_dir = Path()
- if build_dir.exists() and not (build_dir / 'meson-info').exists():
- if list(build_dir.iterdir()):
- raise RuntimeError("Can't build into non-empty directory "
- f"'{build_dir.absolute()}'")
-
- build_options_file = (
- build_dir / "meson-info" / "intro-buildoptions.json")
- if build_options_file.exists():
- with open(build_options_file) as f:
- build_options = json.load(f)
- installdir = None
- for option in build_options:
- if option["name"] == "prefix":
- installdir = option["value"]
- break
- if installdir != str(dirs.installed):
- run_dir = build_dir
- cmd = ["meson", "--reconfigure",
- "--prefix", str(dirs.installed)]
- else:
- return
- if args.werror:
- cmd += ["--werror"]
- if args.gcov:
- cmd += ['-Db_coverage=true']
- # Setting up meson build
- cmd_str = ' '.join([str(p) for p in cmd])
- cls.console.print(f"{EMOJI.cmd} [cmd] {cmd_str}")
- ret = subprocess.call(cmd, env=env, cwd=run_dir)
- if ret == 0:
- print("Meson build setup OK")
- else:
- print("Meson build setup failed!")
- sys.exit(1)
- return env
-
- @classmethod
- def build_project(cls, dirs, args, env):
- """
- Build a dev version of the project.
- """
- cmd = ["ninja", "-C", str(dirs.build)]
- if args.parallel > 1:
- cmd += ["-j", str(args.parallel)]
-
- # Building with ninja-backend
- cmd_str = ' '.join([str(p) for p in cmd])
- cls.console.print(f"{EMOJI.cmd} [cmd] {cmd_str}")
- ret = subprocess.call(cmd, env=env, cwd=dirs.root)
-
- if ret == 0:
- print("Build OK")
- else:
- print("Build failed!")
- sys.exit(1)
-
- @classmethod
- def install_project(cls, dirs, args):
- """
- Installs the project after building.
- """
- if dirs.installed.exists():
- non_empty = len(os.listdir(dirs.installed))
- if non_empty and not dirs.site.exists():
- raise RuntimeError("Can't install in non-empty directory: "
- f"'{dirs.installed}'")
- cmd = ["meson", "install", "-C", args.build_dir]
- log_filename = dirs.root / 'meson-install.log'
- start_time = datetime.datetime.now()
- cmd_str = ' '.join([str(p) for p in cmd])
- cls.console.print(f"{EMOJI.cmd} [cmd] {cmd_str}")
- if args.show_build_log:
- ret = subprocess.call(cmd, cwd=dirs.root)
- else:
- print("Installing, see meson-install.log...")
- with open(log_filename, 'w') as log:
- p = subprocess.Popen(cmd, stdout=log, stderr=log,
- cwd=dirs.root)
-
- try:
- # Wait for it to finish, and print something to indicate the
- # process is alive, but only if the log file has grown (to
- # allow continuous integration environments kill a hanging
- # process accurately if it produces no output)
- last_blip = time.time()
- last_log_size = os.stat(log_filename).st_size
- while p.poll() is None:
- time.sleep(0.5)
- if time.time() - last_blip > 60:
- log_size = os.stat(log_filename).st_size
- if log_size > last_log_size:
- elapsed = datetime.datetime.now() - start_time
- print(" ... installation in progress ({0} "
- "elapsed)".format(elapsed))
- last_blip = time.time()
- last_log_size = log_size
-
- ret = p.wait()
- except: # noqa: E722
- p.terminate()
- raise
- elapsed = datetime.datetime.now() - start_time
-
- if ret != 0:
- if not args.show_build_log:
- with open(log_filename, 'r') as f:
- print(f.read())
- print(f"Installation failed! ({elapsed} elapsed)")
- sys.exit(1)
-
- # ignore everything in the install directory.
- with open(dirs.installed / ".gitignore", "w") as f:
- f.write("*")
-
- print("Installation OK")
- return
-
- @classmethod
- def copy_openblas(cls, dirs):
- """
- Copies OpenBLAS DLL to the SciPy install dir, and also overwrites the
- default `_distributor_init.py` file with the one
- we use for wheels uploaded to PyPI so that DLL gets loaded.
-
- Assumes pkg-config is installed and aware of OpenBLAS.
- """
- # Get OpenBLAS lib path from pkg-config
- cmd = ['pkg-config', '--variable', 'libdir', 'openblas']
- result = subprocess.run(cmd, capture_output=True, text=True)
- if result.returncode != 0:
- print(result.stderrr)
- return result.returncode
-
- openblas_lib_path = Path(result.stdout.strip())
- if not openblas_lib_path.stem == 'lib':
- raise RuntimeError(
- f'Expecting "lib" at end of "{openblas_lib_path}"')
-
- # Look in bin subdirectory for OpenBLAS binaries.
- bin_path = openblas_lib_path.parent / 'bin'
- # Locate, make output .libs directory in Scipy install directory.
- scipy_path = dirs.site / 'scipy'
- libs_path = scipy_path / '.libs'
- libs_path.mkdir(exist_ok=True)
- # Copy DLL files from OpenBLAS install to scipy install .libs subdir.
- for dll_fn in bin_path.glob('*.dll'):
- out_fname = libs_path / dll_fn.parts[-1]
- print(f'Copying {dll_fn} to {out_fname}')
- out_fname.write_bytes(dll_fn.read_bytes())
-
- # Write _distributor_init.py to scipy install dir;
- # this ensures the .libs file is on the DLL search path at run-time,
- # so OpenBLAS gets found
- openblas_support = import_module_from_path(
- 'openblas_support',
- dirs.root / 'tools' / 'openblas_support.py')
- openblas_support.make_init(scipy_path)
- return 0
-
- @classmethod
- def run(cls, add_path=False, **kwargs):
- kwargs.update(cls.ctx.get(kwargs))
- Args = namedtuple('Args', [k for k in kwargs.keys()])
- args = Args(**kwargs)
-
- cls.console = Console(theme=console_theme)
- dirs = Dirs(args)
- if args.no_build:
- print("Skipping build")
- else:
- env = cls.setup_build(dirs, args)
- cls.build_project(dirs, args, env)
- cls.install_project(dirs, args)
- if args.win_cp_openblas and platform.system() == 'Windows':
- if cls.copy_openblas(dirs) == 0:
- print('OpenBLAS copied')
- else:
- print("OpenBLAS copy failed!")
- sys.exit(1)
-
- # add site to sys.path
- if add_path:
- dirs.add_sys_path()
-
-
-@cli.cls_cmd('test')
-class Test(Task):
- """:wrench: Run tests
-
- Examples:
-
- $ python do.py test -s {SAMPLE_SUBMODULE}
- $ python do.py test -t scipy.optimize.tests.test_minimize_constrained
- $ python do.py test -s stats -- --tb=line
- """
- ctx = CONTEXT
-
- verbose = Option(
- ['--verbose', '-v'], default=False, is_flag=True,
- help="more verbosity")
- # removed doctests as currently not supported by _lib/_testutils.py
- # doctests = Option(['--doctests'], default=False)
- coverage = Option(
- ['--coverage'], default=False, is_flag=True,
- help=("report coverage of project code. "
- "HTML output goes under build/coverage"))
- submodule = Option(
- ['--submodule', '-s'], default=None, metavar='SUBMODULE',
- help="Submodule whose tests to run (cluster, constants, ...)")
- tests = Option(
- ['--tests', '-t'], default=None, multiple=True, metavar='TESTS',
- help='Specify tests to run')
- mode = Option(
- ['--mode', '-m'], default='fast', metavar='MODE', show_default=True,
- help=("'fast', 'full', or something that could be passed to "
- "`pytest -m` as a marker expression"))
- parallel = Option(
- ['--parallel', '-j'], default=1, metavar='PARALLEL',
- help="Number of parallel jobs for testing"
- )
- pytest_args = Argument(
- ['pytest_args'], nargs=-1, metavar='PYTEST-ARGS', required=False)
-
- TASK_META = {
- 'task_dep': ['build'],
- }
-
- @classmethod
- def scipy_tests(cls, args, pytest_args):
- dirs = Dirs(args)
- dirs.add_sys_path()
- print(f"SciPy from development installed path at: {dirs.site}")
-
- # FIXME: support pos-args with doit
- extra_argv = pytest_args[:] if pytest_args else []
- if extra_argv and extra_argv[0] == '--':
- extra_argv = extra_argv[1:]
-
- if args.coverage:
- dst_dir = dirs.root / args.build_dir / 'coverage'
- fn = dst_dir / 'coverage_html.js'
- if dst_dir.is_dir() and fn.is_file():
- shutil.rmtree(dst_dir)
- extra_argv += ['--cov-report=html:' + str(dst_dir)]
- shutil.copyfile(dirs.root / '.coveragerc',
- dirs.site / '.coveragerc')
-
- # convert options to test selection
- if args.submodule:
- tests = [PROJECT_MODULE + "." + args.submodule]
- elif args.tests:
- tests = args.tests
- else:
- tests = None
-
- runner, version, mod_path = get_test_runner(PROJECT_MODULE)
- # FIXME: changing CWD is not a good practice
- with working_dir(dirs.site):
- print("Running tests for {} version:{}, installed at:{}".format(
- PROJECT_MODULE, version, mod_path))
- # runner verbosity - convert bool to int
- verbose = int(args.verbose) + 1
- result = runner( # scipy._lib._testutils:PytestTester
- args.mode,
- verbose=verbose,
- extra_argv=extra_argv,
- doctests=False,
- coverage=args.coverage,
- tests=tests,
- parallel=args.parallel)
- return result
-
- @classmethod
- def run(cls, pytest_args, **kwargs):
- """run unit-tests"""
- kwargs.update(cls.ctx.get())
- Args = namedtuple('Args', [k for k in kwargs.keys()])
- args = Args(**kwargs)
- return cls.scipy_tests(args, pytest_args)
-
-
-@cli.cls_cmd('bench')
-class Bench(Task):
- """:wrench: Run benchmarks
-
- Examples:
-
- $ python do.py bench -t integrate.SolveBVP
- $ python do.py bench -t linalg.Norm
- $ python do.py bench --compare main
-
- """
- ctx = CONTEXT
- TASK_META = {
- 'task_dep': ['build'],
- }
- submodule = Option(
- ['--submodule', '-s'], default=None, metavar='SUBMODULE',
- help="Submodule whose tests to run (cluster, constants, ...)")
- tests = Option(
- ['--tests', '-t'], default=None, multiple=True,
- metavar='TESTS', help='Specify tests to run')
- compare = Option(
- ['--compare', '-c'], default=None, metavar='COMPARE', multiple=True,
- help=(
- "Compare benchmark results of current HEAD to BEFORE. "
- "Use an additional --bench COMMIT to override HEAD with COMMIT. "
- "Note that you need to commit your changes first!"))
-
- @staticmethod
- def run_asv(dirs, cmd):
- EXTRA_PATH = ['/usr/lib/ccache', '/usr/lib/f90cache',
- '/usr/local/lib/ccache', '/usr/local/lib/f90cache']
- bench_dir = dirs.root / 'benchmarks'
- sys.path.insert(0, str(bench_dir))
- # Always use ccache, if installed
- env = dict(os.environ)
- env['PATH'] = os.pathsep.join(EXTRA_PATH +
- env.get('PATH', '').split(os.pathsep))
- # Control BLAS/LAPACK threads
- env['OPENBLAS_NUM_THREADS'] = '1'
- env['MKL_NUM_THREADS'] = '1'
-
- # Limit memory usage
- from benchmarks.common import set_mem_rlimit
- try:
- set_mem_rlimit()
- except (ImportError, RuntimeError):
- pass
- try:
- return subprocess.call(cmd, env=env, cwd=bench_dir)
- except OSError as err:
- if err.errno == errno.ENOENT:
- cmd_str = " ".join(cmd)
- print(f"Error when running '{cmd_str}': {err}\n")
- print("You need to install Airspeed Velocity "
- "(https://airspeed-velocity.github.io/asv/)")
- print("to run Scipy benchmarks")
- return 1
- raise
-
- @classmethod
- def scipy_bench(cls, args):
- dirs = Dirs(args)
- dirs.add_sys_path()
- print(f"SciPy from development installed path at: {dirs.site}")
- with working_dir(dirs.site):
- runner, version, mod_path = get_test_runner(PROJECT_MODULE)
- extra_argv = []
- if args.tests:
- extra_argv.append(args.tests)
- if args.submodule:
- extra_argv.append([args.submodule])
-
- bench_args = []
- for a in extra_argv:
- bench_args.extend(['--bench', ' '.join(str(x) for x in a)])
- if not args.compare:
- print("Running benchmarks for Scipy version %s at %s"
- % (version, mod_path))
- cmd = ['asv', 'run', '--dry-run', '--show-stderr',
- '--python=same', '--quick'] + bench_args
- retval = cls.run_asv(dirs, cmd)
- sys.exit(retval)
- else:
- if len(args.compare) == 1:
- commit_a = args.compare[0]
- commit_b = 'HEAD'
- elif len(args.compare) == 2:
- commit_a, commit_b = args.compare
- else:
- print("Too many commits to compare benchmarks for")
- # Check for uncommitted files
- if commit_b == 'HEAD':
- r1 = subprocess.call(['git', 'diff-index', '--quiet',
- '--cached', 'HEAD'])
- r2 = subprocess.call(['git', 'diff-files', '--quiet'])
- if r1 != 0 or r2 != 0:
- print("*" * 80)
- print("WARNING: you have uncommitted changes --- "
- "these will NOT be benchmarked!")
- print("*" * 80)
-
- # Fix commit ids (HEAD is local to current repo)
- p = subprocess.Popen(['git', 'rev-parse', commit_b],
- stdout=subprocess.PIPE)
- out, err = p.communicate()
- commit_b = out.strip()
-
- p = subprocess.Popen(['git', 'rev-parse', commit_a],
- stdout=subprocess.PIPE)
- out, err = p.communicate()
- commit_a = out.strip()
- cmd_compare = [
- 'asv', 'continuous', '--show-stderr', '--factor', '1.05',
- commit_a, commit_b
- ] + bench_args
- cls.run_asv(dirs, cmd_compare)
- sys.exit(1)
-
- @classmethod
- def run(cls, **kwargs):
- """run benchamark"""
- kwargs.update(cls.ctx.get())
- Args = namedtuple('Args', [k for k in kwargs.keys()])
- args = Args(**kwargs)
- cls.scipy_bench(args)
-
-
-###################
-# linters
-
-@task_params([{'name': 'output_file', 'long': 'output-file', 'default': None,
- 'help': 'Redirect report to a file'}])
-def task_flake8(output_file):
- """Run flake8 over the code base and benchmarks."""
- opts = ''
- if output_file:
- opts += f'--output-file={output_file}'
- return {
- 'actions': [f"flake8 {opts} scipy benchmarks/benchmarks"],
- 'doc': 'Lint scipy and benchmarks directory',
- }
-
-
-def task_pep8diff():
- # Lint just the diff since branching off of main using a
- # stricter configuration.
- return {
- 'basename': 'pep8-diff',
- 'actions': [str(Dirs().root / 'tools' / 'lint_diff.py')],
- 'doc': 'Lint only files modified since last commit (stricker rules)',
- }
-
-
-@cli.cls_cmd('lint')
-class Lint():
- """:dash: run flake8, and check PEP 8 compliance on branch diff."""
- output_file = Option(
- ['--output-file'], default=None, help='Redirect report to a file')
-
- def run(output_file):
- opts = {'output_file': output_file}
- run_doit_task({'flake8': opts, 'pep8-diff': {}})
-
-
-@cli.cls_cmd('mypy')
-class Mypy(Task):
- """:wrench: Run mypy on the codebase"""
- ctx = CONTEXT
-
- TASK_META = {
- 'task_dep': ['build'],
- }
-
- @classmethod
- def run(cls, **kwargs):
- kwargs.update(cls.ctx.get())
- Args = namedtuple('Args', [k for k in kwargs.keys()])
- args = Args(**kwargs)
- dirs = Dirs(args)
-
- try:
- import mypy.api
- except ImportError as e:
- raise RuntimeError(
- "Mypy not found. Please install it by running "
- "pip install -r mypy_requirements.txt from the repo root"
- ) from e
-
- config = dirs.root / "mypy.ini"
- check_path = PROJECT_MODULE
-
- with working_dir(dirs.site):
- # By default mypy won't color the output since it isn't being
- # invoked from a tty.
- os.environ['MYPY_FORCE_COLOR'] = '1'
- # Change to the site directory to make sure mypy doesn't pick
- # up any type stubs in the source tree.
- report, errors, status = mypy.api.run([
- "--config-file",
- str(config),
- check_path,
- ])
- print(report, end='')
- print(errors, end='', file=sys.stderr)
- return status == 0
-
-
-##########################################
-# DOC
-
-@cli.cls_cmd('doc')
-class Doc(Task):
- """:wrench: Build documentation
-
-TARGETS: Sphinx build targets [default: 'html-scipyorg']
-"""
- ctx = CONTEXT
-
- args = Argument(['args'], nargs=-1, metavar='TARGETS', required=False)
- list_targets = Option(
- ['--list-targets', '-t'], default=False, is_flag=True,
- help='List doc targets',
- )
- parallel = Option(
- ['--parallel', '-j'], default=1, metavar='PARALLEL',
- help="Number of parallel jobs"
- )
-
- @classmethod
- def task_meta(cls, list_targets, parallel, args, **kwargs):
- if list_targets: # list MAKE targets, remove default target
- task_dep = []
- targets = ''
- else:
- task_dep = ['build']
- targets = ' '.join(args) if args else 'html-scipyorg'
-
- kwargs.update(cls.ctx.get())
- Args = namedtuple('Args', [k for k in kwargs.keys()])
- build_args = Args(**kwargs)
- dirs = Dirs(build_args)
-
- make_params = [f'PYTHON="{sys.executable}"']
- if parallel:
- make_params.append(f'SPHINXOPTS="-j{parallel}"')
-
- return {
- 'actions': [
- # move to doc/ so local scipy does not get imported
- (f'cd doc; env PYTHONPATH="{dirs.site}" '
- f'make {" ".join(make_params)} {targets}'),
- ],
- 'task_dep': task_dep,
- 'io': {'capture': False},
- }
-
-
-@cli.cls_cmd('refguide-check')
-class RefguideCheck(Task):
- """:wrench: Run refguide check"""
- ctx = CONTEXT
-
- submodule = Option(
- ['--submodule', '-s'], default=None, metavar='SUBMODULE',
- help="Submodule whose tests to run (cluster, constants, ...)")
- verbose = Option(
- ['--verbose', '-v'], default=False, is_flag=True, help="verbosity")
-
- @classmethod
- def task_meta(cls, **kwargs):
- kwargs.update(cls.ctx.get())
- Args = namedtuple('Args', [k for k in kwargs.keys()])
- args = Args(**kwargs)
- dirs = Dirs(args)
-
- cmd = [str(dirs.root / 'tools' / 'refguide_check.py'), '--doctests']
- if args.verbose:
- cmd += ['-vvv']
- if args.submodule:
- cmd += [args.submodule]
- cmd_str = ' '.join(cmd)
- return {
- 'actions': [f'env PYTHONPATH={dirs.site} {cmd_str}'],
- 'task_dep': ['build'],
- 'io': {'capture': False},
- }
-
-
-##########################################
-# ENVS
-
-@cli.cls_cmd('python')
-class Python():
- """:wrench: Start a Python shell with PYTHONPATH set"""
- ctx = CONTEXT
- pythonpath = Option(
- ['--pythonpath', '-p'], metavar='PYTHONPATH', default=None,
- help='Paths to prepend to PYTHONPATH')
- extra_argv = Argument(
- ['extra_argv'], nargs=-1, metavar='ARGS', required=False)
-
- @classmethod
- def _setup(cls, pythonpath, **kwargs):
- vals = Build.opt_defaults()
- vals.update(kwargs)
- Build.run(add_path=True, **vals)
- if pythonpath:
- for p in reversed(pythonpath.split(os.pathsep)):
- sys.path.insert(0, p)
-
- @classmethod
- def run(cls, pythonpath, extra_argv=None, **kwargs):
- cls._setup(pythonpath, **kwargs)
- if extra_argv:
- # Don't use subprocess, since we don't want to include the
- # current path in PYTHONPATH.
- sys.argv = extra_argv
- with open(extra_argv[0], 'r') as f:
- script = f.read()
- sys.modules['__main__'] = new_module('__main__')
- ns = dict(__name__='__main__', __file__=extra_argv[0])
- exec(script, ns)
- else:
- import code
- code.interact()
-
-
-@cli.cls_cmd('ipython')
-class Ipython(Python):
- """:wrench: Start IPython shell with PYTHONPATH set"""
- ctx = CONTEXT
- pythonpath = Python.pythonpath
-
- @classmethod
- def run(cls, pythonpath, **kwargs):
- cls._setup(pythonpath, **kwargs)
- import IPython
- IPython.embed(user_ns={})
-
-
-@cli.cls_cmd('shell')
-class Shell(Python):
- """:wrench: Start Unix shell with PYTHONPATH set"""
- ctx = CONTEXT
- pythonpath = Python.pythonpath
- extra_argv = Python.extra_argv
-
- @classmethod
- def run(cls, pythonpath, extra_argv, **kwargs):
- cls._setup(pythonpath, **kwargs)
- shell = os.environ.get('SHELL', 'sh')
- print("Spawning a Unix shell...")
- os.execv(shell, [shell] + list(extra_argv))
- sys.exit(1)
-
-
-@cli.command()
-@click.argument('version_args', nargs=2)
-@click.pass_obj
-def notes(ctx_obj, version_args):
- """:ledger: Release notes and log generation
-
- Example:
-
- $ python do.py notes v1.7.0 v1.8.0
- """
- if version_args:
- sys.argv = version_args
- log_start = sys.argv[0]
- log_end = sys.argv[1]
- cmd = f"python tools/write_release_and_log.py {log_start} {log_end}"
- click.echo(cmd)
- try:
- subprocess.run([cmd], check=True, shell=True)
- except subprocess.CalledProcessError:
- print('Error caught: Incorrect log start or log end version')
-
-
-@cli.command()
-@click.argument('revision_args', nargs=2)
-@click.pass_obj
-def authors(ctx_obj, revision_args):
- """:ledger: Generate list of authors who contributed within revision interval
-
- Example:
-
- $ python do.py authors v1.7.0 v1.8.0
- """
- if revision_args:
- sys.argv = revision_args
- start_revision = sys.argv[0]
- end_revision = sys.argv[1]
- cmd = f"python tools/authors.py {start_revision}..{end_revision}"
- click.echo(cmd)
- try:
- subprocess.run([cmd], check=True, shell=True)
- except subprocess.CalledProcessError:
- print('Error caught: Incorrect revision start or revision end')
-
-
-if __name__ == '__main__':
- cli()
diff --git a/doc/API.rst.txt b/doc/API.rst.txt
index f0b7b51f4546..be790c82225e 100644
--- a/doc/API.rst.txt
+++ b/doc/API.rst.txt
@@ -95,6 +95,8 @@ change is made.
* `scipy.constants`
+* `scipy.datasets`
+
* `scipy.fft`
* `scipy.fftpack`
@@ -125,6 +127,8 @@ change is made.
* `scipy.optimize`
+ - `scipy.optimize.cython_optimize`
+
* `scipy.signal`
- `scipy.signal.windows`
@@ -155,6 +159,7 @@ change is made.
cluster
constants
+ datasets
fft
fftpack
integrate
@@ -218,6 +223,6 @@ following, a *SciPy module* is defined as a Python package, say
See the existing SciPy submodules for guidance.
-For further details on NumPy distutils, see `NumPy Distutils - User's Guide `_.
+For further details on NumPy distutils, see `NumPy Distutils - User's Guide `_.
.. _NumPy documentation style: https://numpydoc.readthedocs.io/en/latest/format.html
diff --git a/doc/Makefile b/doc/Makefile
index c064b2a51ba6..7dce3e7dd1af 100644
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -20,7 +20,6 @@ ALLSPHINXOPTS = -d build/doctrees $(SPHINXOPTS) source
help:
@echo "Please use \`make ' where is one of"
@echo " html to make standalone HTML files"
- @echo " html-scipyorg to make standalone HTML files with scipy.org theming"
@echo " pickle to make pickle files (usable by e.g. sphinx-web)"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " changes to make an overview over all changed/added/deprecated items"
@@ -29,7 +28,7 @@ help:
@echo " dist to make a distribution-ready tree (installing Scipy in venv)"
@echo " doc-dist to make a distribution-ready tree (assuming Scipy is installed)"
@echo " upload USERNAME=... RELEASE=... to upload built docs to docs.scipy.org"
- @echo " show to show the html-scipyorg output"
+ @echo " show to show the HTML output"
clean:
-rm -rf build/* source/reference/generated
@@ -78,12 +77,12 @@ dist:
doc-dist: VERSIONWARNING=-t versionwarning
-doc-dist: html-scipyorg html
+doc-dist: html
-test -d build/htmlhelp || make htmlhelp-build
-rm -rf build/dist
mkdir -p build/dist
- cp -r build/html-scipyorg/* build/dist
- (cd build/html-scipyorg && zip -9qr ../dist/scipy-html.zip .)
+ cp -r build/html/* build/dist
+ (cd build/html && zip -9qr ../dist/scipy-html.zip .)
chmod ug=rwX,o=rX -R build/dist
find build/dist -type d -print0 | xargs -0r chmod g+s
cd build/dist && tar czf ../dist.tar.gz *
@@ -108,15 +107,7 @@ upload:
html: version-check html-build
html-build:
mkdir -p build/html build/doctrees
- $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html $(FILES)
- @echo
- @echo "Build finished. The HTML pages are in build/html."
-
-html-scipyorg:
- mkdir -p build/html build/doctrees
- $(SPHINXBUILD) -WT --keep-going -t scipyorg $(VERSIONWARNING) -b html $(ALLSPHINXOPTS) build/html-scipyorg $(FILES)
- @echo
- @echo "Build finished. The HTML pages are in build/html-scipyorg."
+ $(SPHINXBUILD) -WT --keep-going $(VERSIONWARNING) -b html $(ALLSPHINXOPTS) build/html $(FILES)
coverage: build version-check
mkdir -p build/coverage build/doctrees
@@ -137,4 +128,4 @@ linkcheck: version-check
"or in build/linkcheck/output.txt."
show:
- @python -c "import webbrowser; webbrowser.open_new_tab('file://$(PWD)/build/html-scipyorg/index.html')"
+ @python -c "import webbrowser; webbrowser.open_new_tab('file://$(PWD)/build/html/index.html')"
diff --git a/doc/README.md b/doc/README.md
index 670bdbacb384..9846637a4497 100644
--- a/doc/README.md
+++ b/doc/README.md
@@ -14,14 +14,7 @@ git submodule update --init
Now to build both SciPy itself and the docs, use:
```
-python3 runtests.py --doc html
-```
-
-Alternatively, if you prefer to build SciPy and the docs separately rather
-than use `runtests.py`:
-```
-python setup.py develop # in the root of the repo
-cd doc && make html-scipyorg
+python dev.py doc
```
In case the SciPy version found by the above command is different from that of the
@@ -34,7 +27,7 @@ This indicates that you're likely picking up the wrong SciPy install, check
with `python -c "import scipy; print(scipy.__file__)"`.
If the build is successful, you can open it in your browser with `make show`
-(which will open `build/html-scipyorg/index.html`).
+(which will open `build/html/index.html`).
## Building documentation for a release
@@ -56,4 +49,4 @@ be redistributed.
- `tutorial` contains all tutorial content.
- `release` contains the release notes. Note that those normally should not be
updated as part of a PR; we keep release notes for the upcoming releases
- on the wiki of the main SciPy repo.
\ No newline at end of file
+ on the wiki of the main SciPy repo.
diff --git a/doc/ROADMAP.rst.txt b/doc/ROADMAP.rst.txt
deleted file mode 100644
index 35ac3f441c65..000000000000
--- a/doc/ROADMAP.rst.txt
+++ /dev/null
@@ -1 +0,0 @@
-The roadmap content has moved. Please see http://scipy.github.io/devdocs/roadmap.html
diff --git a/doc/release/1.10.0-notes.rst b/doc/release/1.10.0-notes.rst
new file mode 100644
index 000000000000..b9c0da1a1e62
--- /dev/null
+++ b/doc/release/1.10.0-notes.rst
@@ -0,0 +1,1211 @@
+==========================
+SciPy 1.10.0 Release Notes
+==========================
+
+.. contents::
+
+SciPy 1.10.0 is the culmination of 6 months of hard work. It contains
+many new features, numerous bug-fixes, improved test coverage and better
+documentation. There have been a number of deprecations and API changes
+in this release, which are documented below. All users are encouraged to
+upgrade to this release, as there are a large number of bug-fixes and
+optimizations. Before upgrading, we recommend that users check that
+their own code does not use deprecated SciPy functionality (to do so,
+run your code with ``python -Wd`` and check for ``DeprecationWarning`` s).
+Our development attention will now shift to bug-fix releases on the
+1.10.x branch, and on adding new features on the main branch.
+
+This release requires Python 3.8+ and NumPy 1.19.5 or greater.
+
+For running on PyPy, PyPy3 6.0+ is required.
+
+
+**************************
+Highlights of this release
+**************************
+
+- A new dedicated datasets submodule (`scipy.datasets`) has been added, and is
+ now preferred over usage of `scipy.misc` for dataset retrieval.
+- A new `scipy.interpolate.make_smoothing_spline` function was added. This
+ function constructs a smoothing cubic spline from noisy data, using the
+ generalized cross-validation (GCV) criterion to find the tradeoff between
+ smoothness and proximity to data points.
+- `scipy.stats` has three new distributions, two new hypothesis tests, three
+ new sample statistics, a class for greater control over calculations
+ involving covariance matrices, and many other enhancements.
+
+************
+New features
+************
+
+`scipy.datasets` introduction
+=============================
+- A new dedicated ``datasets`` submodule has been added. The submodule
+  is meant for datasets that are relevant to other SciPy submodules and
+  their content (tutorials, examples, tests), as well as to contain a curated
+ set of datasets that are of wider interest. As of this release, all
+ the datasets from `scipy.misc` have been added to `scipy.datasets`
+ (and deprecated in `scipy.misc`).
+- The submodule is based on `Pooch <https://www.fatiando.org/pooch/latest/>`__
+  (a new optional dependency for SciPy), a Python package that simplifies
+  fetching data files. In a subsequent release, this move will allow SciPy to
+  trim down the sdist/wheel sizes by decoupling the data files, moving them
+  out of the SciPy repository, hosting them externally, and downloading them
+  only when requested. After a dataset has been downloaded once, the files
+  are cached locally to avoid repeated network access.
+- Added datasets from ``scipy.misc``: `scipy.datasets.face`,
+ `scipy.datasets.ascent`, `scipy.datasets.electrocardiogram`
+- Added download and caching functionality:
+
+  - `scipy.datasets.download_all`: a function to download all the files
+    associated with `scipy.datasets` at once.
+ - `scipy.datasets.clear_cache`: a simple utility function to clear cached dataset
+ files from the file system.
+ - ``scipy/datasets/_download_all.py`` can be run as a standalone script for
+ packaging purposes to avoid any external dependency at build or test time.
+ This can be used by SciPy packagers (e.g., for Linux distros) which may
+ have to adhere to rules that forbid downloading sources from external
+ repositories at package build time.
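+
+A minimal usage sketch (the dataset files are fetched over the network on
+first use and cached locally afterwards)::
+
+    from scipy import datasets
+
+    face = datasets.face()              # RGB image as a NumPy array
+    ecg = datasets.electrocardiogram()  # one-dimensional ECG signal
+    datasets.download_all()             # pre-fetch all datasets at once
+    datasets.clear_cache()              # remove the cached files again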
+
+`scipy.integrate` improvements
+==============================
+- Added parameter ``complex_func`` to `scipy.integrate.quad`, which can be
+  set to ``True`` to integrate a complex integrand.
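+
+A minimal sketch of the new option (the integrand is illustrative)::
+
+    import numpy as np
+    from scipy.integrate import quad
+
+    # integrate exp(1j*x) over [0, pi]; the exact value is 2j
+    res, err = quad(lambda x: np.exp(1j * x), 0, np.pi, complex_func=True)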
+
+
+`scipy.interpolate` improvements
+================================
+- `scipy.interpolate.interpn` now supports tensor-product interpolation methods
+ (``slinear``, ``cubic``, ``quintic`` and ``pchip``)
+- Tensor-product interpolation methods (``slinear``, ``cubic``, ``quintic`` and
+ ``pchip``) in `scipy.interpolate.interpn` and
+ `scipy.interpolate.RegularGridInterpolator` now allow values with trailing
+ dimensions.
+- `scipy.interpolate.RegularGridInterpolator` has a new fast path for
+ ``method="linear"`` with 2D data, and ``RegularGridInterpolator`` is now
+ easier to subclass
+- `scipy.interpolate.interp1d` can now take a single value for non-spline
+  methods.
+- A new ``extrapolate`` argument is available to `scipy.interpolate.BSpline.design_matrix`,
+ allowing extrapolation based on the first and last intervals.
+- A new function `scipy.interpolate.make_smoothing_spline` has been added
+  (see the example after this list). It is an implementation of the
+  generalized cross-validation spline smoothing algorithm. The ``lam=None``
+  (default) mode of this function is a clean-room reimplementation of the
+  classic ``gcvspl.f`` Fortran algorithm for constructing GCV splines.
+- A new ``method="pchip"`` mode was added to
+ `scipy.interpolate.RegularGridInterpolator`. This mode constructs an
+ interpolator using tensor products of C1-continuous monotone splines
+ (essentially, a `scipy.interpolate.PchipInterpolator` instance per
+ dimension).
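+
+A minimal sketch of the new smoothing-spline constructor (the grid and
+noise level are arbitrary choices)::
+
+    import numpy as np
+    from scipy.interpolate import make_smoothing_spline
+
+    rng = np.random.default_rng(1234)
+    x = np.linspace(0, 2 * np.pi, 50)
+    y = np.sin(x) + 0.2 * rng.standard_normal(50)
+
+    spl = make_smoothing_spline(x, y)  # lam=None: pick lambda via GCV
+    y_smooth = spl(x)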
+
+
+
+`scipy.sparse.linalg` improvements
+==================================
+- The spectral 2-norm is now available in `scipy.sparse.linalg.norm` (see
+  the example after this list).
+- The performance of `scipy.sparse.linalg.norm` for the default case (Frobenius
+ norm) has been improved.
+- LAPACK wrappers were added for ``trexc`` and ``trsen``.
+- The `scipy.sparse.linalg.lobpcg` algorithm was rewritten, yielding
+ the following improvements:
+
+  - a simple tunable restart potentially increases the attainable
+    accuracy for edge cases,
+  - internal postprocessing runs one final exact Rayleigh-Ritz method,
+    giving more accurate and orthonormal eigenvectors,
+  - the computed iterate with the smallest max norm of the residual is
+    output, and the history of subsequent iterations is dropped,
+  - the check for ``LinearOperator`` format input was removed, thus
+    allowing a simple function handle or a callable object as input,
+  - common user errors with input data are now handled gracefully, rather
+    than letting the algorithm fail.
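+
+A minimal sketch of the new spectral-norm support mentioned above::
+
+    import numpy as np
+    from scipy.sparse import diags
+    from scipy.sparse.linalg import norm
+
+    A = diags([1.0, 2.0, 3.0])          # 3x3 diagonal sparse matrix
+    assert np.isclose(norm(A, 2), 3.0)  # largest singular value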
+
+
+`scipy.linalg` improvements
+===========================
+- `scipy.linalg.lu_factor` now accepts rectangular arrays instead of being restricted
+ to square arrays.
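+
+A minimal sketch (the shape is arbitrary)::
+
+    import numpy as np
+    from scipy.linalg import lu_factor
+
+    rng = np.random.default_rng(0)
+    lu, piv = lu_factor(rng.standard_normal((5, 3)))  # rectangular input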
+
+
+`scipy.ndimage` improvements
+============================
+- The new `scipy.ndimage.value_indices` function provides a time-efficient
+  method to search for the locations of individual values within an array of
+  image data.
+- A new ``radius`` argument is supported by `scipy.ndimage.gaussian_filter1d` and
+ `scipy.ndimage.gaussian_filter` for adjusting the kernel size of the filter.
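+
+A minimal sketch of the new ``radius`` keyword (the values are arbitrary)::
+
+    import numpy as np
+    from scipy.ndimage import gaussian_filter1d
+
+    x = np.zeros(21)
+    x[10] = 1.0
+    # restrict the kernel to 2*radius + 1 = 9 samples
+    y = gaussian_filter1d(x, sigma=2.0, radius=4)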
+
+
+`scipy.optimize` improvements
+=============================
+- `scipy.optimize.brute` now coerces non-iterable/single-value ``args`` into a
+ tuple.
+- `scipy.optimize.least_squares` and `scipy.optimize.curve_fit` now accept
+  `scipy.optimize.Bounds` for bounds constraints (see the example after
+  this list).
+- Added a tutorial for `scipy.optimize.milp`.
+- Improved the pretty-printing of `scipy.optimize.OptimizeResult` objects.
+- Additional options (``parallel``, ``threads``, ``mip_rel_gap``) can now
+ be passed to `scipy.optimize.linprog` with ``method='highs'``.
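+
+A minimal sketch of passing `scipy.optimize.Bounds` to ``curve_fit`` (the
+model and data are illustrative)::
+
+    import numpy as np
+    from scipy.optimize import Bounds, curve_fit
+
+    def model(x, a, b):
+        return a * np.exp(-b * x)
+
+    xdata = np.linspace(0, 4, 50)
+    ydata = model(xdata, 2.5, 1.3)
+    ydata += 0.05 * np.random.default_rng(2).standard_normal(50)
+
+    popt, pcov = curve_fit(model, xdata, ydata, bounds=Bounds(lb=0, ub=10))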
+
+
+`scipy.signal` improvements
+===========================
+- The new window function `scipy.signal.windows.lanczos` was added to compute a
+ Lanczos window, also known as a sinc window.
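+
+A minimal sketch::
+
+    from scipy.signal.windows import lanczos
+
+    w = lanczos(51)  # symmetric 51-point Lanczos (sinc) window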
+
+
+`scipy.sparse.csgraph` improvements
+===================================
+- The performance of `scipy.sparse.csgraph.dijkstra` has been improved; star
+  graphs in particular see a marked performance improvement.
+
+
+`scipy.special` improvements
+============================
+- The new function `scipy.special.powm1`, a ufunc with signature
+  ``powm1(x, y)``, computes ``x**y - 1``. The function avoids the loss of
+  precision that can result when ``y`` is close to 0 or when ``x`` is close
+  to 1 (see the example after this list).
+- `scipy.special.erfinv` is now more accurate as it leverages the Boost equivalent under
+ the hood.
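+
+A minimal sketch of ``powm1`` versus the naive expression::
+
+    from scipy.special import powm1
+
+    x, y = 1.0002, 2e-7
+    powm1(x, y)  # accurate result, approximately 4.0e-11
+    x**y - 1     # the naive form loses precision to cancellation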
+
+
+`scipy.stats` improvements
+==========================
+- Added `scipy.stats.goodness_of_fit`, a generalized goodness-of-fit test for
+  use with any univariate distribution, any combination of known and unknown
+  parameters, and several choices of test statistic (Kolmogorov-Smirnov,
+  Cramer-von Mises, and Anderson-Darling); see the example after this list.
+- Improved `scipy.stats.bootstrap`: Default method ``'BCa'`` now supports
+ multi-sample statistics. Also, the bootstrap distribution is returned in the
+ result object, and the result object can be passed into the function as
+ parameter ``bootstrap_result`` to add additional resamples or change the
+ confidence interval level and type.
+- Added maximum spacing estimation to `scipy.stats.fit`.
+- Added the Poisson means test ("E-test") as `scipy.stats.poisson_means_test`.
+- Added new sample statistics.
+
+ - Added `scipy.stats.contingency.odds_ratio` to compute both the conditional
+ and unconditional odds ratios and corresponding confidence intervals for
+ 2x2 contingency tables.
+ - Added `scipy.stats.directional_stats` to compute sample statistics of
+ n-dimensional directional data.
+ - Added `scipy.stats.expectile`, which generalizes the expected value in the
+ same way as quantiles are a generalization of the median.
+
+- Added new statistical distributions.
+
+ - Added `scipy.stats.uniform_direction`, a multivariate distribution to
+ sample uniformly from the surface of a hypersphere.
+ - Added `scipy.stats.random_table`, a multivariate distribution to sample
+ uniformly from m x n contingency tables with provided marginals.
+ - Added `scipy.stats.truncpareto`, the truncated Pareto distribution.
+
+- Improved the ``fit`` method of several distributions.
+
+  - `scipy.stats.skewnorm` and `scipy.stats.weibull_min` now use an analytical
+    solution when ``method='mm'``, which also serves as a starting guess to
+    improve the performance of ``method='mle'``.
+ - `scipy.stats.gumbel_r` and `scipy.stats.gumbel_l`: analytical maximum
+ likelihood estimates have been extended to the cases in which location or
+ scale are fixed by the user.
+ - Analytical maximum likelihood estimates have been added for
+ `scipy.stats.powerlaw`.
+
+- Improved random variate sampling of several distributions.
+
+ - Drawing multiple samples from `scipy.stats.matrix_normal`,
+ `scipy.stats.ortho_group`, `scipy.stats.special_ortho_group`, and
+ `scipy.stats.unitary_group` is faster.
+ - The ``rvs`` method of `scipy.stats.vonmises` now wraps to the interval
+ ``[-np.pi, np.pi]``.
+ - Improved the reliability of `scipy.stats.loggamma` ``rvs`` method for small
+ values of the shape parameter.
+
+- Improved the speed and/or accuracy of functions of several statistical
+ distributions.
+
+ - Added `scipy.stats.Covariance` for better speed, accuracy, and user control
+ in multivariate normal calculations.
+  - `scipy.stats.skewnorm` methods ``cdf``, ``sf``, ``ppf``, and ``isf``
+    now use the implementations from Boost, improving speed while
+    maintaining accuracy. The calculation of higher-order moments is also
+    faster and more accurate.
+  - `scipy.stats.invgauss` methods ``ppf`` and ``isf`` now use the
+    implementations from Boost, improving speed and accuracy.
+ - `scipy.stats.invweibull` methods ``sf`` and ``isf`` are more accurate for
+ small probability masses.
+ - `scipy.stats.nct` and `scipy.stats.ncx2` now rely on the implementations
+ from Boost, improving speed and accuracy.
+ - Implemented the ``logpdf`` method of `scipy.stats.vonmises` for reliability
+ in extreme tails.
+ - Implemented the ``isf`` method of `scipy.stats.levy` for speed and
+ accuracy.
+ - Improved the robustness of `scipy.stats.studentized_range` for large ``df``
+ by adding an infinite degree-of-freedom approximation.
+ - Added a parameter ``lower_limit`` to `scipy.stats.multivariate_normal`,
+ allowing the user to change the integration limit from -inf to a desired
+ value.
+ - Improved the robustness of ``entropy`` of `scipy.stats.vonmises` for large
+ concentration values.
+
+- Enhanced `scipy.stats.gaussian_kde`.
+
+ - Added `scipy.stats.gaussian_kde.marginal`, which returns the desired
+ marginal distribution of the original kernel density estimate distribution.
+ - The ``cdf`` method of `scipy.stats.gaussian_kde` now accepts a
+ ``lower_limit`` parameter for integrating the PDF over a rectangular region.
+ - Moved calculations for `scipy.stats.gaussian_kde.logpdf` to Cython,
+ improving speed.
+ - The global interpreter lock is released by the ``pdf`` method of
+ `scipy.stats.gaussian_kde` for improved multithreading performance.
+ - Replaced explicit matrix inversion with Cholesky decomposition for speed
+ and accuracy.
+
+- Enhanced the result objects returned by many `scipy.stats` functions
+
+ - Added a ``confidence_interval`` method to the result object returned by
+ `scipy.stats.ttest_1samp` and `scipy.stats.ttest_rel`.
+ - The `scipy.stats` functions ``combine_pvalues``, ``fisher_exact``,
+ ``chi2_contingency``, ``median_test`` and ``mood`` now return
+ bunch objects rather than plain tuples, allowing attributes to be
+ accessed by name.
+ - Attributes of the result objects returned by ``multiscale_graphcorr``,
+ ``anderson_ksamp``, ``binomtest``, ``crosstab``, ``pointbiserialr``,
+ ``spearmanr``, ``kendalltau``, and ``weightedtau`` have been renamed to
+ ``statistic`` and ``pvalue`` for consistency throughout `scipy.stats`.
+ Old attribute names are still allowed for backward compatibility.
+ - `scipy.stats.anderson` now returns the parameters of the fitted
+ distribution in a `scipy.stats._result_classes.FitResult` object.
+ - The ``plot`` method of `scipy.stats._result_classes.FitResult` now accepts
+ a ``plot_type`` parameter; the options are ``'hist'`` (histogram, default),
+ ``'qq'`` (Q-Q plot), ``'pp'`` (P-P plot), and ``'cdf'`` (empirical CDF
+ plot).
+ - Kolmogorov-Smirnov tests (e.g. `scipy.stats.kstest`) now return the
+ location (argmax) at which the statistic is calculated and the variant
+ of the statistic used.
+
+- Improved the performance of several `scipy.stats` functions.
+
+ - Improved the performance of `scipy.stats.cramervonmises_2samp` and
+ `scipy.stats.ks_2samp` with ``method='exact'``.
+ - Improved the performance of `scipy.stats.siegelslopes`.
+ - Improved the performance of `scipy.stats.mstats.hdquantile_sd`.
+ - Improved the performance of `scipy.stats.binned_statistic_dd` for several
+ NumPy statistics, and binned statistics methods now support complex data.
+
+- Added the ``scramble`` optional argument to `scipy.stats.qmc.LatinHypercube`.
+ It replaces ``centered``, which is now deprecated.
+- Added a parameter ``optimization`` to all `scipy.stats.qmc.QMCEngine`
+ subclasses to improve characteristics of the quasi-random variates.
+- Added tie correction to `scipy.stats.mood`.
+- Added tutorials for resampling methods in `scipy.stats`.
+- `scipy.stats.bootstrap`, `scipy.stats.permutation_test`, and
+ `scipy.stats.monte_carlo_test` now automatically detect whether the provided
+ ``statistic`` is vectorized, so passing the ``vectorized`` argument
+ explicitly is no longer required to take advantage of vectorized statistics.
+- Improved the speed of `scipy.stats.permutation_test` for permutation types
+ ``'samples'`` and ``'pairings'``.
+- Added ``axis``, ``nan_policy``, and masked array support to
+ `scipy.stats.jarque_bera`.
+- Added the ``nan_policy`` optional argument to `scipy.stats.rankdata`.
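+
+As an illustration of the new generalized goodness-of-fit test mentioned
+above, a minimal sketch (the distribution and sample are illustrative)::
+
+    import numpy as np
+    from scipy import stats
+
+    rng = np.random.default_rng(42)
+    data = stats.norm.rvs(size=100, random_state=rng)
+    res = stats.goodness_of_fit(stats.norm, data, statistic='ad',
+                                random_state=rng)
+    res.statistic, res.pvalue  # Anderson-Darling, fitted parameters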
+
+
+*******************
+Deprecated features
+*******************
+- The `scipy.misc` module and all the functions in it are deprecated in v1.10
+  and will be completely removed in SciPy v2.0.0. Users should use the
+  `scipy.datasets` module instead for the dataset functions.
+- `scipy.stats.qmc.LatinHypercube` parameter ``centered`` has been deprecated.
+ It is replaced by the ``scramble`` argument for more consistency with other
+ QMC engines.
+- The `scipy.interpolate.interp2d` class has been deprecated. The docstring
+  of the deprecated routine lists recommended replacements.
+
+********************
+Expired Deprecations
+********************
+- There is an ongoing effort to follow through on long-standing deprecations.
+- The following previously deprecated features are affected:
+
+ - Removed ``cond`` & ``rcond`` kwargs in ``linalg.pinv``
+ - Removed wrappers ``scipy.linalg.blas.{clapack, flapack}``
+ - Removed ``scipy.stats.NumericalInverseHermite`` and removed ``tol`` & ``max_intervals`` kwargs from ``scipy.stats.sampling.NumericalInverseHermite``
+  - Removed ``local_search_options`` kwarg from ``scipy.optimize.dual_annealing``.
+
+
+*************
+Other changes
+*************
+- `scipy.stats.bootstrap`, `scipy.stats.permutation_test`, and
+  `scipy.stats.monte_carlo_test` now automatically detect whether the provided
+  ``statistic`` is vectorized by looking for an ``axis`` parameter in the
+  signature of ``statistic``. If an ``axis`` parameter is present in
+  ``statistic`` but should not be relied on for vectorized calls, users must
+  pass option ``vectorized=False`` explicitly (see the example after this
+  list).
+- `scipy.stats.multivariate_normal` will now raise a ``ValueError`` when the
+ covariance matrix is not positive semidefinite, regardless of which method
+ is called.
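+
+A minimal sketch of the automatic detection described above::
+
+    import numpy as np
+    from scipy import stats
+
+    rng = np.random.default_rng(0)
+    sample = rng.standard_normal(100)
+
+    # np.std has an `axis` parameter, so it is detected as vectorized;
+    # there is no need to pass `vectorized=True` explicitly
+    res = stats.bootstrap((sample,), np.std)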
+
+
+
+*******
+Authors
+*******
+
+* Name (commits)
+* h-vetinari (10)
+* Jelle Aalbers (1)
+* Oriol Abril-Pla (1) +
+* Alan-Hung (1) +
+* Tania Allard (7)
+* Oren Amsalem (1) +
+* Sven Baars (10)
+* Balthasar (1) +
+* Ross Barnowski (1)
+* Christoph Baumgarten (2)
+* Peter Bell (2)
+* Sebastian Berg (1)
+* Aaron Berk (1) +
+* boatwrong (1) +
+* boeleman (1) +
+* Jake Bowhay (50)
+* Matthew Brett (4)
+* Evgeni Burovski (93)
+* Matthias Bussonnier (6)
+* Dominic C (2)
+* Mingbo Cai (1) +
+* James Campbell (2) +
+* CJ Carey (4)
+* cesaregarza (1) +
+* charlie0389 (1) +
+* Hood Chatham (5)
+* Andrew Chin (1) +
+* Daniel Ching (1) +
+* Leo Chow (1) +
+* chris (3) +
+* John Clow (1) +
+* cm7S (1) +
+* cmgodwin (1) +
+* Christopher Cowden (2) +
+* Henry Cuzco (2) +
+* Anirudh Dagar (12)
+* Hans Dembinski (2) +
+* Jaiden di Lanzo (24) +
+* Felipe Dias (1) +
+* Dieter Werthmüller (1)
+* Giuseppe Dilillo (1) +
+* dpoerio (1) +
+* drpeteb (1) +
+* Christopher Dupuis (1) +
+* Jordan Edmunds (1) +
+* Pieter Eendebak (1) +
+* Jérome Eertmans (1) +
+* Fabian Egli (2) +
+* Sebastian Ehlert (2) +
+* Kian Eliasi (1) +
+* Tomohiro Endo (1) +
+* Stefan Endres (1)
+* Zeb Engberg (4) +
+* Jonas Eschle (1) +
+* Thomas J. Fan (9)
+* fiveseven (1) +
+* Neil Flood (1) +
+* Franz Forstmayr (1)
+* Sara Fridovich-Keil (1)
+* David Gilbertson (1) +
+* Ralf Gommers (251)
+* Marco Gorelli (2) +
+* Matt Haberland (387)
+* Andrew Hawryluk (2) +
+* Christoph Hohnerlein (2) +
+* Loïc Houpert (2) +
+* Shamus Husheer (1) +
+* ideasrule (1) +
+* imoiwm (1) +
+* Lakshaya Inani (1) +
+* Joseph T. Iosue (1)
+* iwbc-mzk (1) +
+* Nathan Jacobi (3) +
+* Julien Jerphanion (5)
+* He Jia (1)
+* jmkuebler (1) +
+* Johannes Müller (1) +
+* Vedant Jolly (1) +
+* Juan Luis Cano Rodríguez (2)
+* Justin (1) +
+* jvavrek (1) +
+* jyuv (2)
+* Kai Mühlbauer (1) +
+* Nikita Karetnikov (3) +
+* Reinert Huseby Karlsen (1) +
+* kaspar (2) +
+* Toshiki Kataoka (1)
+* Robert Kern (3)
+* Joshua Klein (1) +
+* Andrew Knyazev (7)
+* Jozsef Kutas (16) +
+* Eric Larson (4)
+* Lechnio (1) +
+* Antony Lee (2)
+* Aditya Limaye (1) +
+* Xingyu Liu (2)
+* Christian Lorentzen (4)
+* Loïc Estève (2)
+* Thibaut Lunet (2) +
+* Peter Lysakovski (1)
+* marianasalamoni (2) +
+* mariprudencio (1) +
+* Paige Martin (1) +
+* Arno Marty (1) +
+* matthewborish (3) +
+* Damon McDougall (1)
+* Nicholas McKibben (22)
+* McLP (1) +
+* mdmahendri (1) +
+* Melissa Weber Mendonça (9)
+* Jarrod Millman (1)
+* Naoto Mizuno (2)
+* Shashaank N (1)
+* Pablo S Naharro (1) +
+* nboudrie (2) +
+* Andrew Nelson (52)
+* Nico Schlömer (1)
+* NiMlr (1) +
+* o-alexandre-felipe (1) +
+* Maureen Ononiwu (1) +
+* Dimitri Papadopoulos (2) +
+* partev (1) +
+* Tirth Patel (10)
+* Paulius Šarka (1) +
+* Josef Perktold (1)
+* Giacomo Petrillo (3) +
+* Matti Picus (1)
+* Rafael Pinto (1) +
+* PKNaveen (1) +
+* Ilhan Polat (6)
+* Akshita Prasanth (2) +
+* Sean Quinn (1)
+* Tyler Reddy (155)
+* Martin Reinecke (1)
+* Ned Richards (1)
+* Marie Roald (1) +
+* Sam Rosen (4) +
+* Pamphile Roy (105)
+* sabonerune (2) +
+* Atsushi Sakai (94)
+* Daniel Schmitz (27)
+* Anna Scholtz (1) +
+* Eli Schwartz (11)
+* serge-sans-paille (2)
+* JEEVANSHI SHARMA (1) +
+* ehsan shirvanian (2) +
+* siddhantwahal (2)
+* Mathieu Dutour Sikiric (1) +
+* Sourav Singh (1)
+* Alexander Soare (1) +
+* Bjørge Solli (2) +
+* Scott Staniewicz (1)
+* Ethan Steinberg (3) +
+* Albert Steppi (3)
+* Thomas Stoeger (1) +
+* Kai Striega (4)
+* Tartopohm (1) +
+* Mamoru TASAKA (2) +
+* Ewout ter Hoeven (5)
+* TianyiQ (1) +
+* Tiger (1) +
+* Will Tirone (1)
+* Ajay Shanker Tripathi (1) +
+* Edgar Andrés Margffoy Tuay (1) +
+* Dmitry Ulyumdzhiev (1) +
+* Hari Vamsi (1) +
+* VitalyChait (1) +
+* Rik Voorhaar (1) +
+* Samuel Wallan (4)
+* Stefan van der Walt (2)
+* Warren Weckesser (145)
+* wei2222 (1) +
+* windows-server-2003 (3) +
+* Marek Wojciechowski (2) +
+* Niels Wouda (1) +
+* WRKampi (1) +
+* Yeonjoo Yoo (1) +
+* Rory Yorke (1)
+* Xiao Yuan (2) +
+* Meekail Zain (2) +
+* Fabio Zanini (1) +
+* Steffen Zeile (1) +
+* Egor Zemlyanoy (19)
+* Gavin Zhang (3) +
+
+A total of 184 people contributed to this release.
+People with a "+" by their names contributed a patch for the first time.
+This list of names is automatically generated, and may not be fully complete.
+
+
+************************
+Issues closed for 1.10.0
+************************
+
+* `#1261 `__: errors in fmin_bfgs and some improvements (Trac #734)
+* `#2167 `__: BivariateSpline errors with kx=ky=1 (Trac #1642)
+* `#2304 `__: funm gives incorrect results for non-diagonalizable inputs (Trac...
+* `#3421 `__: Rename information theory functions?
+* `#3854 `__: KroghInterpolator doesn't pass through points
+* `#4043 `__: scipy.interpolate.interp1d should be able to take a single value
+* `#4555 `__: leastsq should use cholesky not inv for hessian inversion
+* `#4598 `__: von Mises random variate sampling broken for non-zero location...
+* `#4975 `__: Documentation for s in UnivariateSpline is confusing
+* `#6173 `__: scipy.interpolate.lagrange implemented through coefficients
+* `#6688 `__: ENH: optimize.basinhopping: call an acceptance test before local...
+* `#7104 `__: scipy.stats.nct - wrong values in tails
+* `#7268 `__: scipy.sparse.linalg.norm does not implement spectral norm
+* `#7521 `__: scipy.UnivariateSpline smoothing condition documentation inaccuracy
+* `#7857 `__: griddata sensible to size of original grid when it should not
+* `#8376 `__: InterpolatedUnivariateSpline.roots() seems to miss roots sometimes
+* `#9119 `__: documentation issues of functions in scipy.stats.mstats
+* `#9389 `__: Kolmogorov Smirnov 2 samples returning max distance location...
+* `#9440 `__: Unexpected successful optimization with minimize when number...
+* `#9451 `__: Add shgo to optimize benchmarks
+* `#10737 `__: Goodness of fit tests for distributions with unknown parameters
+* `#10911 `__: scipy.optimize.minimize_scalar does not automatically select...
+* `#11026 `__: rv_discrete.interval returning wrong values for alpha = 1
+* `#11053 `__: scipy.stats: Allow specifying inverse-variance matrix to multivariate_normal
+* `#11131 `__: DOC: stats.fisher_exact does not match R functionality for \`oddsratio\`...
+* `#11406 `__: scipy.sparse.linalg.svds (v1.4.1) on singular matrix does not...
+* `#11475 `__: Filter radius as optional argument for gaussian_filter1d/gaussian_filter
+* `#11772 `__: Cache covariance matrix decomposition in frozen multivariate_normal
+* `#11777 `__: non-central chi2 (scipy.stats.ncx2.pdf) gets clipped to zero...
+* `#11790 `__: NaN handling of stats.rankdata
+* `#11860 `__: Occurrence of nan values when using multinomial.pmf from scipy.stats?
+* `#11916 `__: Improve documentation for smoothing in interpolate.UnivariateSpline...
+* `#12041 `__: Spherical mean/variance
+* `#12246 `__: Interpolation 2D with SmoothBivariateSpline
+* `#12621 `__: Scalar minimization functions have no references
+* `#12632 `__: curve_fit algorithm try to transform xdata in an array of floats
+* `#12963 `__: shgo is not correctly passing jac to minimizer
+* `#13021 `__: 2D Interpolation Scaling Issues
+* `#13049 `__: Examples missing import numpy as np?
+* `#13452 `__: Calling \`len()\` on the \`scipy.spatial.transform.rotation.Rotation\`...
+* `#13529 `__: signal.decimate doesn't use sosfilters and sosfiltfilt
+* `#14098 `__: DOC-Update for InterpolatedUnivariateSpline and LSQUnivariateSpline
+* `#14198 `__: better description of solveh_banded limitations
+* `#14348 `__: Extract spline coefficient from splprep: tck
+* `#14386 `__: Let CloughTocher2DInterpolator fit "nearest" for points outside...
+* `#14472 `__: scipy.interpolate.CubicSpline boundary conditions appear to be...
+* `#14533 `__: optimize.shgo gives unexpected TypeError
+* `#14541 `__: Raspberry Pi 4 aarch64: ModuleNotFoundError: No module named...
+* `#14584 `__: scipy.signal.filter_design.zpk2sos doctests fail (values different...
+* `#14809 `__: BUG: scipy.signal.periodogram window parameter
+* `#14853 `__: BUG: sqrtm dtype
+* `#14922 `__: Question: Seemingly unused, non-working script \`isolve/tests/demo_lgres.py\`
+* `#15049 `__: BUG: Visualization of CWT matrix in signal.cwt example code
+* `#15072 `__: BUG: signal.decimate returns NaN with large float32 arrays
+* `#15393 `__: BUG: signal.decimate returns unexpected values with float32 arrays
+* `#15473 `__: ENH: \`skewnorm.cdf\` is very slow. Consider a much more efficient...
+* `#15618 `__: ENH: Generation of random 2D tables with given marginal totals
+* `#15675 `__: ENH: \`multivariate_normal\` should accept eigendecomposition...
+* `#15685 `__: ENH: The exact p-value calculation in \`stats.cramervonmises_2samp\`...
+* `#15733 `__: DEP: remove quiet parameter from fitpack
+* `#15749 `__: DEP: remove tol from \`NumericalInverseHermite\`
+* `#15792 `__: MAINT: There is no unittest and documentation of Improper integral...
+* `#15807 `__: DEP: remove dual_annealing argument 'local_search_options'
+* `#15844 `__: It's not that obvious that \`firls\` requires an even number...
+* `#15883 `__: BUG: stats.bootstrap bca implementation triggers ValueError for...
+* `#15936 `__: Please add citations to the papers for COLAMD
+* `#15996 `__: Symbol hiding when using GNU linker in the Meson build should...
+* `#16148 `__: Documentation in spearmanr
+* `#16235 `__: BUG: Memory leak in function \`Py_FindObjects\` due to new reference...
+* `#16236 `__: BUG: Memory leak in function \`py_filter2d\` due to new reference...
+* `#16251 `__: DEP: Execute deprecation of scipy.linalg.blas.{clapack, flapack}
+* `#16252 `__: DEP: add deprecation warnings to kwargs \`turbo\` / \`eigvals\`...
+* `#16253 `__: DEP: add deprecation warning for kwargs \`nyq\` / \`Hz\` in firwin\*
+* `#16256 `__: DEP: add deprecation warning for binom_test
+* `#16272 `__: BUG: unclear error for invalid bracketing
+* `#16291 `__: BUG: lambertw returns nan's on small values
+* `#16297 `__: DOC: minor release procedure adjustment
+* `#16319 `__: ENH: improved accuracy and orthonormality of output eigenvectors...
+* `#16333 `__: DOC: rvalue description is missing in stats.probplot
+* `#16334 `__: BUG: CLI help is not accessible using light themes
+* `#16338 `__: ENH: Add option to clip out of bounds input values to minimum...
+* `#16342 `__: BUG: IIRdesign function ftype='bessel' not recognized
+* `#16344 `__: ENH: improved \`stats.ortho_group\`
+* `#16364 `__: ENH: stats: return bunches rather than plain tuples
+* `#16380 `__: BUG: RegularGridInterpolator error message is wrong
+* `#16386 `__: TST: sparse/linalg/tests/test_expm_multiply.py::test_expm_multiply_dtype...
+* `#16399 `__: \`test_mio.py::test_recarray\` failure due to dtype handling...
+* `#16413 `__: DOC: rvs method docstrings refer to seed argument instead of...
+* `#16433 `__: ENH: scipy.stats.bootstrap() should do BCa for multivariate statistics...
+* `#16472 `__: handle spline interpolation methods in \`interpn\`
+* `#16476 `__: dev.py does not propagate error codes, thus hides errors on CI
+* `#16490 `__: DOC: err on example for \`scipy.signal.upfirdn\`
+* `#16558 `__: BUG: leaves_color_list incorrect when distance=0
+* `#16580 `__: Typo in scipy/optimize/tests/test_optimize.py, logit instead...
+* `#16582 `__: TST: RegularGridInterpolator tests should be parameterised
+* `#16603 `__: ENH, DOC: Add policy on typo and small docs fixes
+* `#16663 `__: BUG: \`bool(rotation)\` leads to error
+* `#16673 `__: Test failure for \`TestPoisson.test_mindist\` in Azure CI job
+* `#16713 `__: BUG/DOC: spatial: docstrings of \`Rotation\` methods are missing...
+* `#16726 `__: CI: Python 3.11 tests are failing because a dependency is using...
+* `#16741 `__: BUG: DOC: editing docstring example in svds
+* `#16759 `__: DOC: Add 'import numpy as np' to the 'Examples' section of docstrings.
+* `#16763 `__: BUG: numpy version requirement mismatch docs vs setup.py
+* `#16773 `__: BUG: indexing error in scipy.spatial.Voronoi in 3D
+* `#16796 `__: DOC: Method "bisect" for root_scalar lacks correct argument list
+* `#16819 `__: BUG: stats.binned_statistic_2d is ~8x slower when using \`statistic=np.mean\`...
+* `#16833 `__: Runtime performance in BSpline.design_matrix is inferior to BSpline().__call__()
+* `#16892 `__: Add legend to \`rv_histogram\` plot in docs
+* `#16912 `__: MAINT: stats: optimize: Move \`_contains_nan\` function to more...
+* `#16914 `__: BUG: documentation of scipy.stats.truncnorm could be clearer
+* `#17031 `__: BUG: stats: Intermittent failure of the test 'test_plot_iv'
+* `#17033 `__: New CI failures in \`sparse\` with nightly numpy
+* `#17047 `__: BUG: Documentation error in scipy.signal
+* `#17056 `__: Mypy failure in CI for \`numpy/__init__.pyi\` positional-only...
+* `#17065 `__: BUG: minimize(method=’L-BFGS-B’) documentation is contradictory
+* `#17070 `__: Using Meson-built 1.10.0.dev0 nightly wheel in a conda environment...
+* `#17074 `__: BUG: scipy.optimize.linprog does not fulfill integer constraints...
+* `#17078 `__: DOC: "These are not universal functions" difficult to understand...
+* `#17089 `__: ENH: Documentation on test behind p-values of .spearmanr
+* `#17129 `__: DOC: inconsistency in when a new feature was added
+* `#17155 `__: BUG: stats: Bug in XSLOW tests in TestNumericalInverseHermite
+* `#17167 `__: BUG: bernoulli.pmf returns non-zero values with non-integer arguments
+* `#17168 `__: \`test_powm1\` failing in CI on Windows
+* `#17174 `__: MAINT, REL: wheels not uploaded to staging on push to maintenance
+* `#17241 `__: BUG: CubicSpline segfaults when passing empty values for \`y\`with...
+* `#17336 `__: BUG: Meson build unconditionally probes for pythran, despite...
+* `#17375 `__: BUG: resample_poly() freezes with large data and specific samplerate...
+* `#17380 `__: BUG: optimize: using \`integrality\` prevents \`linprog\` from...
+* `#17382 `__: BUG/DOC: optimize: \`minimize\` doc should reflect tnc's deprecation...
+* `#17412 `__: BUG: Meson error:compiler for language "cpp", not specified for...
+* `#17444 `__: BUG: beta.ppf causes segfault
+* `#17468 `__: Weird errors with running the tests \`scipy.stats.tests.test_distributions\`...
+* `#17518 `__: ENH: stats.pearsonr: support complex data
+* `#17523 `__: BUG: \`[source]\` button in the docs sending to the wrong place
+* `#17578 `__: TST, BLD, CI: 1.10.0rc1 wheel build/test failures
+* `#17619 `__: BUG: core dump when calling scipy.optimize.linprog
+* `#17644 `__: BUG: 1.10.0rc2 Windows wheel tests runs all segfault
+* `#17650 `__: BUG: Assertion failed when using HiGHS
+
+************************
+Pull requests for 1.10.0
+************************
+
+* `#9072 `__: ENH: Added rectangular integral to multivariate_normal
+* `#9932 `__: ENH: stats.gaussian_kde: add method that returns marginal distribution
+* `#11712 `__: BUG: trust-constr evaluates function out of bounds
+* `#12211 `__: DOC: Dice similiarity index
+* `#12312 `__: ENH: Accelerate matrix normal sampling using matmul
+* `#12594 `__: BUG: fixed indexing error when using bounds in Powell's method...
+* `#13053 `__: ENH: add MLE for stats.powerlaw.fit
+* `#13265 `__: ENH: Kstest exact performance improvements
+* `#13340 `__: ENH: stats: Add the function odds_ratio.
+* `#13663 `__: ENH: linalg: Add LAPACK wrappers for trexc and trsen.
+* `#13753 `__: DOC: optimize: update Powell docs to reflect API
+* `#13957 `__: ENH: stats.ks_2samp: Pythranize remaining exact p-value calculations
+* `#14248 `__: MAINT:linalg: Make lu_factor accept rectangular arrays
+* `#14317 `__: ENH: Optimize sparse frobenius norm
+* `#14402 `__: DOC: Clarify argument documentation for \`solve\`
+* `#14430 `__: ENH: improve siegelslopes via pythran
+* `#14563 `__: WIP: stats: bins=auto in docstrings
+* `#14579 `__: BENCH: optimize: add DFO CUTEST benchmark
+* `#14638 `__: DOC: added mention of the limitations of Thomas' algorithm
+* `#14840 `__: ENH: Addition of Poisson Means Test (E-test).
+* `#15097 `__: ENH: add radius to gaussian_filter1d and gaussian_filter
+* `#15444 `__: ENH: Infinite df approximation for Studentized Range PDF
+* `#15493 `__: ENH: Convert gaussian_kde logpdf to Cython
+* `#15607 `__: ENH: Add \`scipy.datasets\` submodule
+* `#15709 `__: ENH: improve the computation time of stats.cramervonmises_2samp()
+* `#15770 `__: ENH: stats: replace ncx2 stats distribution with Boost non_central_chi_squared
+* `#15878 `__: DEP: remove local_search_options of dual_annealing
+* `#15892 `__: BUG: stats: use mean behavior for percentileofscore in bootstrap
+* `#15901 `__: DEP: Deprecate scipy.misc in favour of scipy.datasets
+* `#15967 `__: TST/DOC: stats: explain/check 100% interval for discrete distributions
+* `#15972 `__: DOC: length of \`bands\` param. specified in \`firls\`
+* `#16002 `__: ENH: Allow specyfing inverse covariance of a multivariate normal...
+* `#16017 `__: ENH: special: Use boost for a couple ufuncs.
+* `#16069 `__: ENH: add additional MLE for fixed parameters in gumbel_r.fit
+* `#16096 `__: BUG: use SOS filters in decimate for numerical stability
+* `#16109 `__: ENH: add \`optimization\` to \`QMCEngine\`
+* `#16140 `__: ENH: stats: Add \`nan_policy\` optional argument for \`stats.rankdata\`
+* `#16224 `__: Add a \`pchip\` mode to RegularGridInterpolator.
+* `#16227 `__: BUG: special: Fix a couple issues with the 'double-double' code...
+* `#16238 `__: MAINT: stats: support string array for _contains_nan and add...
+* `#16268 `__: DOC: optimize: add marginals/slack example to \`linprog\`
+* `#16294 `__: BUG: linalg: Add precision preservation for \`sqrtm\`
+* `#16298 `__: REL: set version to 1.10.0.dev0
+* `#16299 `__: DEP: Execute deprecation of scipy.linalg.blas.{clapack, flapack}
+* `#16307 `__: DEP: add deprecation warning for binom_test
+* `#16315 `__: DEP: add deprecation warning for kwargs nyq / Hz in firwin
+* `#16317 `__: ENH: stats: add truncated (i.e. upper bounded) Pareto distribution...
+* `#16320 `__: ENH: improved accuracy and orthonormality of output eigenvectors...
+* `#16327 `__: DOC: BLD: remove \`-scipyopt\` from html Make command and build...
+* `#16328 `__: MAINT: retry openblas download in CI
+* `#16332 `__: BLD: ensure we get understandable messages when git submodules...
+* `#16335 `__: BLD: update NumPy to >=1.19.5
+* `#16336 `__: MAINT: forward port git scoping
+* `#16340 `__: DEP: remove tol & max_intervals from NumericalInverseHermite
+* `#16346 `__: DEV: add meson-python to environment.yml
+* `#16351 `__: Added "import numpy as np" statement to filter examples
+* `#16354 `__: DOC: optimize: remove callback doc from the options in \`_minimize_lbfgsb\`...
+* `#16355 `__: DEP: add deprecation warnings to kwargs turbo / eigvals of linalg.eigh
+* `#16356 `__: DOC: add examples to \`signal.medfilt2d\`
+* `#16357 `__: BENCH: Add SHGO and DIRECT to optimization benchmark
+* `#16362 `__: ENH: Provide more information when a value is out of bounds in...
+* `#16367 `__: BUG: unclear error for invalid bracketing
+* `#16371 `__: MAINT: remove last (already safe) usage of \`mktemp\`
+* `#16372 `__: MAINT: rename \`do.py\` to \`dev.py\`
+* `#16373 `__: DOC: added rvalue description in \`stats.probplot\`
+* `#16377 `__: ENH: stats.bootstrap: update warning to mention np.min
+* `#16383 `__: BUG: fix error message of RegularGridInterpolator
+* `#16387 `__: ENH: stats.combine_pvalues: convert output tuple to Bunch
+* `#16388 `__: DEP: deprecate \`stats.kendalltau\` kwarg \`initial_lexsort\`
+* `#16389 `__: DEP: sharpen stats deprecations
+* `#16392 `__: DEP: add warning to \`sparse.gmres\` deprecated kwarg \`restrt\`
+* `#16397 `__: MAINT: fix two refcounting issues in \`ndimage\`
+* `#16398 `__: MAINT: Replace find_common_types
+* `#16406 `__: MAINT: stats.rankdata: change default to nan_policy='propagate'
+* `#16407 `__: ENH: stats.fisher_exact: convert output tuple to Bunch
+* `#16411 `__: MAINT: optimize.brute should coerce non-tuple args to tuple
+* `#16415 `__: DOC: stats: fix seed -> random_state in \`rvs\` docstring
+* `#16423 `__: MAINT: stats: not using nested TypeErrors in _contains_nan
+* `#16424 `__: MAINT: future-proof \`stats.kde\` for changes in numpy casting...
+* `#16425 `__: DOC: Procedure adjustment in file doc/source/dev/core-dev/releasing.rst.inc
+* `#16428 `__: MAINT: fix up \`_sputils.get_index_dtype\` for NEP 50 casting...
+* `#16431 `__: CI: fix Gitpod build after dev.py update to the new CLI
+* `#16432 `__: Docstring fixes in lobpcg.py
+* `#16434 `__: DOC: stats.mstats.sen_seasonal_slopes: add docstring
+* `#16435 `__: ENH: directional mean
+* `#16438 `__: MAINT: remove unused \`DeprecatedImport\`
+* `#16439 `__: ENH: stats.chi2_contingency: convert output tuple to Bunch
+* `#16440 `__: ENH: stats.median_test: convert output tuple to Bunch
+* `#16441 `__: ENH: stats.mood: convert output tuple to Bunch
+* `#16442 `__: MAINT: fix issues with Python scalar related casting behavior...
+* `#16447 `__: BLD: make it easier to build with AddressSanitizer
+* `#16449 `__: ENH: improve scipy.interpolate.RegularGridInterpolator performance
+* `#16450 `__: BUG: Fix CLI Help in light themes
+* `#16454 `__: ENH: stats.bootstrap: return bootstrap distribution
+* `#16455 `__: ENH: stats.bootstrap: add BCa method for multi-sample statistic
+* `#16462 `__: CI: Update Python 3.8-dbg job to ubuntu-20.04
+* `#16463 `__: ENH: stats.jarque_bera: add axis, nan_policy, masked array support
+* `#16470 `__: DOC: stats.spearmanr: add information about p-value calculation
+* `#16471 `__: MAINT: interpolate/RGI: only call \`find_indices\` when needed
+* `#16474