diff --git a/.circleci/config.yml b/.circleci/config.yml index 83caf83010..519f7caf58 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -8,7 +8,7 @@ jobs: # 'machine' executor runs Unit tests ~x1.5 faster, comparing to 'docker' executor # but the fastest is still ~x1.5-2 slower, comparing to Travis machine: true - parallelism: 4 + parallelism: 3 working_directory: ~/st2 steps: - checkout @@ -43,8 +43,8 @@ jobs: # Run st2 Integration tests integration: docker: - - image: circleci/python:3.6 - - image: mongo:4.0 + - image: circleci/python:3.8 + - image: mongo:4.4 - image: rabbitmq:3 working_directory: ~/st2 steps: @@ -57,8 +57,9 @@ jobs: name: Install Mongo Shell command: | set -x - sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 9DA31620334BD75D9DCB49F368818C72E52529D4 - echo "deb http://repo.mongodb.org/apt/debian jessie/mongodb-org/4.0 main" | sudo tee /etc/apt/sources.list.d/mongodb-org-4.0.list + sudo apt-get -qq -y install gnupg curl + curl -fsSL https://www.mongodb.org/static/pgp/server-4.4.asc | sudo gpg --dearmor -o /etc/apt/trusted.gpg.d/mongodb-server-4.4.gpg + echo "deb [ arch=amd64 ] https://repo.mongodb.org/apt/ubuntu focal/mongodb-org/4.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-4.4.list sudo apt-get -qq -y update sudo apt-get -qq -y install mongodb-org-shell - run: @@ -79,8 +80,8 @@ jobs: # Run st2 Lint Checks lint: docker: - - image: circleci/python:3.6 - - image: mongo:4.0 + - image: circleci/python:3.8 + - image: mongo:4.4 - image: rabbitmq:3 working_directory: ~/st2 steps: @@ -107,16 +108,16 @@ jobs: # Build & Test st2 packages packages: - parallelism: 4 + parallelism: 3 # 4CPUs & 8GB RAM CircleCI machine # sadly, it doesn't work with 'setup_remote_docker' resource_class: large docker: # The primary container is an instance of the first list image listed. Your build commands run in this container. 
- - image: circleci/python:3.6 + - image: circleci/python:3.8 working_directory: ~/st2 environment: - - DISTROS: "bionic focal el7 el8" + - DISTROS: "focal el8 el9" - ST2_PACKAGES_REPO: https://github.com/StackStorm/st2-packages - ST2_PACKAGES: "st2" - ST2_CHECKOUT: 0 @@ -124,23 +125,40 @@ jobs: - BASH_ENV: ~/.buildenv steps: - checkout + - run: + name: Install latest Docker Compose V2 + command: | + set -x + export CODENAME=$(source /etc/os-release && echo "$VERSION_CODENAME") + export DISTRO=$(source /etc/os-release && echo "$ID") + export ARCH=$(dpkg --print-architecture) + # get gpg key for download.docker + curl -fsSL https://download.docker.com/linux/${DISTRO}/gpg | sudo gpg --dearmor -o /etc/apt/trusted.gpg.d/download.docker.gpg + # set source list + sudo tee <<<"deb [arch=${ARCH}] https://download.docker.com/linux/${DISTRO} ${CODENAME} stable" /etc/apt/sources.list.d/download.docker.list + # update package list + sudo apt update + # install docker CLI and Docker Compose v2 + sudo apt install docker-ce-cli docker-compose-plugin - setup_remote_docker: reusable: true # default - false exclusive: true # default - true - version: 19.03.14 + version: docker24 - run: name: Docker version command: | set -x docker --version - docker-compose --version + docker compose version - run: name: Download st2-packages repository command: | set -x + PIP_VERSION=$(grep ^PIP_VERSION Makefile) git clone ${ST2_PACKAGES_REPO} ~/st2-packages cd ~/st2-packages git checkout ${CIRCLE_BRANCH} || true + sed -i -e "s/^PIP_VERSION .*$/${PIP_VERSION}/" packages/st2/debian/rules - run: name: Initialize packages Build Environment command: | @@ -167,7 +185,7 @@ jobs: name: Copy st2-packages files to build containers command: | # creating dummy container which will hold a volume with data files - docker create -v /root/st2-packages -v ${ST2_GITDIR} -v /root/build -v /var/log/st2 -v /root/.cache/pip -v /tmp/wheelhouse --name st2-packages-vol alpine:3.4 /bin/true + docker create -v 
/root/st2-packages -v ${ST2_GITDIR} -v /root/build -v /var/log/st2 -v /root/.cache/pip -v /tmp/wheelhouse --name st2-packages-vol alpine:3.12 /bin/true # copy st2-packages data files into this volume docker cp ~/st2-packages st2-packages-vol:/root # copy st2 source files into this volume @@ -222,7 +240,7 @@ jobs: - image: circleci/ruby:2.7 working_directory: /tmp/deploy environment: - - DISTROS: "bionic focal el7 el8" + - DISTROS: "focal el8 el9" steps: - attach_workspace: at: . diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000000..c79b9dd172 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,8 @@ +# See: https://www.mankier.com/5/gitattributes + +# lockfile merge conflicts: do not manually merge. +# The "-merge" makes git leave the current branch's lockfile as-is, like a binary file. +# To resolve the conflict, resolve any conflicts in requirements files, +# and then regenerste the lockfile with (resolve names are 'st2', 'black', etc): +# pants generate-lockfiles --resolve= +/lockfiles/*.lock -merge diff --git a/.github/actions/apt-packages/action.yaml b/.github/actions/apt-packages/action.yaml new file mode 100644 index 0000000000..db0695a9bc --- /dev/null +++ b/.github/actions/apt-packages/action.yaml @@ -0,0 +1,33 @@ +--- +name: Cache and Install APT Dependencies +description: + Light wrapper around the actions/cache action and our script + to maintain the input vars in only one place for all workflows. 
+ +runs: + using: "composite" + steps: + - name: Cache APT Dependencies + id: cache-apt-deps + uses: actions/cache@v4 + with: + path: | + ~/apt_cache + key: ${{ runner.os }}-v8-apt-${{ hashFiles('scripts/github/apt-packages.txt') }} + restore-keys: | + ${{ runner.os }}-v8-apt- + + - name: Install APT Dependencies + shell: bash + env: + CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} + run: | + # install dev dependencies for Python YAML and LDAP packages + # https://github.com/StackStorm/st2-auth-ldap + ./scripts/github/install-apt-packages-use-cache.sh + + - name: Install Mongo Shell + shell: bash + run: | + # Required as part of switch to GHA image ubuntu-22.04 + ./scripts/github/install-mongosh.sh diff --git a/.github/actions/init-pants/action.yaml b/.github/actions/init-pants/action.yaml new file mode 100644 index 0000000000..5e92d3b0f3 --- /dev/null +++ b/.github/actions/init-pants/action.yaml @@ -0,0 +1,38 @@ +--- +name: Initialize Pants and its GHA caches +description: + Light wrapper around the pantsbuild/actions/init-pants action + to maintain the input vars in only one place for all workflows. + +inputs: + gha-cache-key: + description: Qualify all cache keys with this string. Useful for invalidating everything. + required: true + +runs: + using: "composite" + steps: + - name: Initialize Pants and its GHA caches + uses: pantsbuild/actions/init-pants@v10 + # This action adds an env var to make pants use both pants.ci.toml & pants.toml. + # This action also creates 3 GHA caches (1 is optional). + # - `pants-setup` has the bootstrapped pants install + # - `pants-named-caches` has pip/wheel and PEX caches + # - `pants-lmdb-store` has the fine-grained process cache. + # If we ever use a remote cache, then we can drop this. + # Otherwise, we may need an additional workflow or job to delete old caches + # if they are not expiring fast enough, and we hit the GHA 10GB per repo max. 
+ with: + base-branch: master + # To ignore a bad cache, bump the cache* integer. + gha-cache-key: ${{ inputs.gha-cache-key }} + # This hash should include all of our lockfiles so that the pip/pex caches + # get invalidated on any transitive dependency update. + named-caches-hash: ${{ hashFiles('lockfiles/*.lock') }} + # enable the optional lmdb_store cache since we can't rely on remote caching yet. + cache-lmdb-store: 'true' + # Try the experimental support for using GHA cache as a pants remote cache. + experimental-remote-cache-via-gha: 'true' + # install whatever version of python we need for our in-repo pants-plugins + setup-python-for-plugins: 'true' + diff --git a/.github/actions/setup-python/action.yaml b/.github/actions/setup-python/action.yaml new file mode 100644 index 0000000000..551cf6736d --- /dev/null +++ b/.github/actions/setup-python/action.yaml @@ -0,0 +1,34 @@ +--- +name: Install Python and Cache Deps +description: + Light wrapper around the actions/setup-python and actions/cache actions + to maintain the input vars in only one place for all workflows. + +inputs: + python-version: + description: Which version of python to install. + required: true + +runs: + using: "composite" + steps: + - name: 'Set up Python (${{ inputs.python-version }})' + uses: actions/setup-python@v5 + with: + python-version: '${{ inputs.python-version }}' + + - name: Cache Python Dependencies + uses: actions/cache@v4 + with: + path: | + ~/.cache/pip + virtualenv + ~/virtualenv + # TODO: maybe make the virtualenv a partial cache to exclude st2*? 
+ # !virtualenv/lib/python*/site-packages/st2* + # !virtualenv/bin/st2* + key: ${{ runner.os }}-v5-python-${{ inputs.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt', 'lockfiles/*.lock') }} + # Don't use alternative key as if requirements.txt has altered we + # don't want to retrieve previous cache + #restore-keys: | + # ${{ runner.os }}-v5-python-${{ inputs.python }}- diff --git a/.github/workflows/checks.yaml b/.github/workflows/checks.yaml index d7ceec8528..23ae018965 100644 --- a/.github/workflows/checks.yaml +++ b/.github/workflows/checks.yaml @@ -12,9 +12,9 @@ jobs: # See: https://keepachangelog.com/en/1.0.0/ changelog-checker: name: Add CHANGELOG.rst - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v4 - name: Changelog check # https://github.com/marketplace/actions/changelog-checker uses: Zomzog/changelog-checker@v1.2.0 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 3087324189..02a9cdd5b0 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -11,7 +11,7 @@ on: # also version tags, which include bugfix releases (eg: v3.4.0) - v[0-9]+.[0-9]+.[0-9]+ pull_request: - type: [opened, reopened, edited] + types: [opened, reopened, synchronize] branches: # Only for PRs targeting those branches - master @@ -28,7 +28,7 @@ jobs: # same file set which has already passed, etc. 
pre_job: name: Skip Duplicate Jobs Pre Job - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 outputs: should_skip: ${{ steps.skip_check.outputs.should_skip }} steps: @@ -44,8 +44,8 @@ jobs: # NOTE: We always want to run job on master since we run some additional checks there (code # coverage, etc) # if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} - name: '${{ matrix.name }} - Python ${{ matrix.python-version-short }}' - runs-on: ubuntu-20.04 + name: '${{ matrix.make.name }} - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: @@ -53,77 +53,32 @@ jobs: # each job runs subset of tests. # NOTE: We need to use full Python version as part of Python deps cache key otherwise # setup virtualenv step will fail. - include: + python: + - {version-short: '3.8', version: '3.8.18'} + - {version-short: '3.9', version: '3.9.21'} + - {version-short: '3.10', version: '3.10.16'} + - {version-short: '3.11', version: '3.11.11'} + make: - name: 'Lint Checks (black, flake8, etc.)' task: 'ci-checks' - python-version-short: '3.6' - python-version: '3.6.13' - name: 'Compile (pip deps, pylint, etc.)' task: 'ci-compile' - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Lint Checks (black, flake8, etc.)' - task: 'ci-checks' - python-version-short: '3.8' - python-version: '3.8.10' - - name: 'Compile (pip deps, pylint, etc.)' - task: 'ci-compile' - python-version-short: '3.8' - python-version: '3.8.10' - - name: 'Lint Checks (black, flake8, etc.)' - task: 'ci-checks' - python-version-short: '3.9' - python-version: '3.9.14' - - name: 'Compile (pip deps, pylint, etc.)' - task: 'ci-compile' - python-version-short: '3.9' - python-version: '3.9.14' - env: - TASK: '${{ matrix.task }}' + TASK: '${{ matrix.make.task }}' COLUMNS: '120' PYLINT_CONCURRENCY: '6' steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Custom Environment Setup run: | 
./scripts/github/setup-environment.sh - - name: 'Set up Python (${{ matrix.python-version }})' - uses: actions/setup-python@v2 - with: - python-version: '${{ matrix.python-version }}' - - name: Cache Python Dependencies - uses: actions/cache@v2 - with: - path: | - ~/.cache/pip - virtualenv - ~/virtualenv - # TODO: maybe make the virtualenv a partial cache to exclude st2*? - # !virtualenv/lib/python*/site-packages/st2* - # !virtualenv/bin/st2* - key: ${{ runner.os }}-v4-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt') }} - # Don't use alternative key as if requirements.txt has altered we - # don't want to retrieve previous cache - #restore-keys: | - # ${{ runner.os }}-v4-python-${{ matrix.python }}- - - name: Cache APT Dependencies - id: cache-apt-deps - uses: actions/cache@v2 + - name: 'Set up Python (${{ matrix.python.version }}) and Cache Deps' + uses: ./.github/actions/setup-python with: - path: | - ~/apt_cache - key: ${{ runner.os }}-apt-v7-${{ hashFiles('scripts/github/apt-packages.txt') }} - restore-keys: | - ${{ runner.os }}-apt-v7- - - name: Install APT Depedencies - env: - CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} - run: | - # install dev dependencies for Python YAML and LDAP packages - # https://github.com/StackStorm/st2-auth-ldap - ./scripts/github/install-apt-packages-use-cache.sh + python-version: '${{ matrix.python.version }}' + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - name: Install virtualenv run: | ./scripts/github/install-virtualenv.sh @@ -146,22 +101,17 @@ jobs: self-check: needs: pre_job - name: '${{ matrix.name }} - Python ${{ matrix.python-version-short }}' - runs-on: ubuntu-20.04 + # FIXME: dropping the repetition in this name requires changing required checks on GitHub + name: 'Self-check on Python ${{ matrix.python.version-short }} - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: - 
include: - # TODO: Check if we want to fix the errors on Py 3.6 to have it tested as well - #- name: 'Self-check on Python 3.6' - # python-version-short: '3.6' - # python-version: '3.6.13' - - name: 'Self-check on Python 3.8' - python-version-short: '3.8' - python-version: '3.8.14' + python: + - {version-short: '3.8', version: '3.8.18'} services: mongo: - image: mongo:4.4 + image: mongo:7.0 ports: - 27017:27017 @@ -174,6 +124,18 @@ jobs: - 5672:5672/tcp # AMQP standard port - 15672:15672/tcp # Management: HTTP, CLI + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp env: # CI st2.conf (with ST2_CI_USER user instead of stanley) ST2_CONF: 'conf/st2.ci.conf' @@ -188,55 +150,22 @@ jobs: TESTS_TO_SKIP: "tests.test_quickstart_rules tests.test_run_pack_tests_tool" steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Custom Environment Setup run: | ./scripts/github/setup-environment.sh - - name: 'Set up Python (${{ matrix.python-version }})' - uses: actions/setup-python@v2 - with: - python-version: '${{ matrix.python-version }}' - - name: Cache Python Dependencies - uses: actions/cache@v2 + - name: 'Set up Python (${{ matrix.python.version }}) and Cache Deps' + uses: ./.github/actions/setup-python with: - path: | - ~/.cache/pip - virtualenv - ~/virtualenv - # TODO: maybe make the virtualenv a partial cache to exclude st2*? 
- # !virtualenv/lib/python*/site-packages/st2* - # !virtualenv/bin/st2* - key: ${{ runner.os }}-v3-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt') }} - restore-keys: | - ${{ runner.os }}-python-${{ matrix.python }}- - - name: Cache APT Dependencies - id: cache-apt-deps - uses: actions/cache@v2 - with: - path: | - ~/apt_cache - key: ${{ runner.os }}-apt-v5-${{ hashFiles('scripts/github/apt-packages.txt') }} - restore-keys: | - ${{ runner.os }}-apt-v5- - - name: Install APT Depedencies - env: - CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} - run: | - cat /etc/environment - # install dev dependencies for Python YAML and LDAP packages - # https://github.com/StackStorm/st2-auth-ldap - ./scripts/github/install-apt-packages-use-cache.sh + python-version: '${{ matrix.python.version }}' + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - name: Install virtualenv run: | ./scripts/github/install-virtualenv.sh - name: Install requirements run: | ./scripts/ci/install-requirements.sh - - name: Run Redis Service Container - timeout-minutes: 2 - run: | - docker run --rm --detach -p 127.0.0.1:6379:6379/tcp --name redis redis:latest - until [ "$(docker inspect -f {{.State.Running}} redis)" == "true" ]; do sleep 0.1; done - name: Setup Tests run: | # prep a ci-specific dev conf file that uses runner instead of stanley @@ -265,13 +194,21 @@ jobs: run: | script -e -c "make .ci-prepare-integration" && exit 0 - name: Extend the path for upcoming tasks - run: | - echo ${HOME}/work/st2/st2/virtualenv/bin - echo ${HOME}/work/st2/st2/virtualenv/bin >> $GITHUB_PATH + # pants uses PEP 660 editable wheels to add our code to the virtualenv. + # But PEP 660 editable wheels do not include 'scripts'. + # https://peps.python.org/pep-0660/#limitations + # So, we need to include each bin dir in PATH instead of virtualenv/bin. 
+ run: | + for component_bin in ${GITHUB_WORKSPACE}/st2*/bin; do + echo ${component_bin} | tee -a $GITHUB_PATH + done + echo ${GITHUB_WORKSPACE}/virtualenv/bin | tee -a $GITHUB_PATH - name: Create symlinks to find the binaries when running st2 actions + # st2 is actually a console_script entry point, not just a 'script' + # so it IS included in the virtualenv. But, st2-run-pack-tests might not be included. run: | - ln -s ${HOME}/work/st2/st2/virtualenv/bin/st2 /usr/local/bin/st2 - ln -s ${HOME}/work/st2/st2/virtualenv/bin/st2-run-pack-tests /usr/local/bin/st2-run-pack-tests + ln -s ${GITHUB_WORKSPACE}/virtualenv/bin/st2 /usr/local/bin/st2 + ln -s ${GITHUB_WORKSPACE}/st2common/bin/st2-run-pack-tests /usr/local/bin/st2-run-pack-tests - name: Install st2client timeout-minutes: 5 run: | @@ -282,21 +219,19 @@ jobs: env: ST2_CONF: /home/runner/work/st2/st2/conf/st2.ci.conf run: | - sudo -E ST2_AUTH_TOKEN=$(st2 auth testu -p 'testp' -t) PATH=${PATH} virtualenv/bin/st2-self-check + sudo -E ST2_AUTH_TOKEN=$(st2 auth testu -p 'testp' -t) PATH=${PATH} st2common/bin/st2-self-check - name: Compress Service Logs Before upload if: ${{ failure() }} run: | + ./tools/launchdev.sh stop # stop st2 before collecting logs tar cvzpf logs.tar.gz logs/* - name: Upload StackStorm services Logs if: ${{ failure() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: - name: logs + name: logs-py${{ matrix.python.version }} path: logs.tar.gz retention-days: 7 - - name: Stop Redis Service Container - if: "${{ always() }}" - run: docker rm --force redis || true unit-tests: needs: pre_job @@ -304,61 +239,48 @@ jobs: # coverage, etc) # NB: disabled. 
See TODO above pre_job # if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} - name: '${{ matrix.name }} - Python ${{ matrix.python-version-short }}' - runs-on: ubuntu-20.04 + name: '${{ matrix.make.name }} - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: # NOTE: To speed the CI run, we split unit and integration tests into multiple jobs where # each job runs subset of tests. - include: - - name: 'Unit Tests (chunk 1)' - task: 'ci-unit' - nosetests_node_total: 2 - nosetests_node_index: 0 - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Unit Tests (chunk 2)' - task: 'ci-unit' - nosetests_node_total: 2 - nosetests_node_index: 1 - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Unit Tests (chunk 1)' - task: 'ci-unit' - nosetests_node_total: 2 - nosetests_node_index: 0 - python-version-short: '3.8' - python-version: '3.8.10' - - name: 'Unit Tests (chunk 2)' - task: 'ci-unit' - nosetests_node_total: 2 - nosetests_node_index: 1 - python-version-short: '3.8' - python-version: '3.8.10' + python: + - {version-short: '3.8', version: '3.8.18'} + - {version-short: '3.9', version: '3.9.21'} + - {version-short: '3.10', version: '3.10.16'} + - {version-short: '3.11', version: '3.11.11'} + make: - name: 'Unit Tests (chunk 1)' task: 'ci-unit' - nosetests_node_total: 2 - nosetests_node_index: 0 - python-version-short: '3.9' - python-version: '3.9.14' + shard: {k: 0, n: 2} - name: 'Unit Tests (chunk 2)' task: 'ci-unit' - nosetests_node_total: 2 - nosetests_node_index: 1 - python-version-short: '3.9' - python-version: '3.9.14' + shard: {k: 1, n: 2} + # This job is slow so we only run in on a daily basis # - name: 'Micro Benchmarks' # task: 'micro-benchmarks' - # python-version: '3.6.13' - # nosetests_node_total: 1 - # nosetests_node_ index: 0 + # shard: {k: 0, n: 1} services: mongo: - image: mongo:4.4 + image: mongo:7.0 ports: - 27017:27017 + redis: + # 
Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp + rabbitmq: image: rabbitmq:3.8-management @@ -370,10 +292,10 @@ jobs: - 15672:15672/tcp # Management: HTTP, CLI env: - TASK: '${{ matrix.task }}' - PYTHON_VERSION_SHORT: '${{ matrix.python-version-short }}' - NODE_TOTAL: '${{ matrix.nosetests_node_total }}' - NODE_INDEX: '${{ matrix.nosetests_node_index }}' + TASK: '${{ matrix.make.task }}' + PYTHON_VERSION_SHORT: '${{ matrix.python.version-short }}' + NODE_TOTAL: '${{ matrix.make.shard.n }}' + NODE_INDEX: '${{ matrix.make.shard.k }}' # We need to explicitly specify terminal width otherwise some CLI tests fail on container # environments where small terminal size is used. @@ -395,45 +317,16 @@ jobs: PATH: /home/runner/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Custom Environment Setup run: | ./scripts/github/setup-environment.sh - - name: 'Set up Python (${{ matrix.python-version }})' - uses: actions/setup-python@v2 - with: - python-version: '${{ matrix.python-version }}' - - name: Cache Python Dependencies - uses: actions/cache@v2 - with: - path: | - ~/.cache/pip - virtualenv - ~/virtualenv - # TODO: maybe make the virtualenv a partial cache to exclude st2*? 
- # !virtualenv/lib/python*/site-packages/st2* - # !virtualenv/bin/st2* - key: ${{ runner.os }}-v4-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt') }} - # Don't use alternative key as if requirements.txt has altered we - # don't want to retrieve previous cache - #restore-keys: | - # ${{ runner.os }}-v4-python-${{ matrix.python }}- - - name: Cache APT Dependencies - id: cache-apt-deps - uses: actions/cache@v2 + - name: 'Set up Python (${{ matrix.python.version }}) and Cache Deps' + uses: ./.github/actions/setup-python with: - path: | - ~/apt_cache - key: ${{ runner.os }}-apt-v5-${{ hashFiles('scripts/github/apt-packages.txt') }} - restore-keys: | - ${{ runner.os }}-apt-v5- - - name: Install APT Depedencies - env: - CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} - run: | - # install dev dependencies for Python YAML and LDAP packages - # https://github.com/StackStorm/st2-auth-ldap - ./scripts/github/install-apt-packages-use-cache.sh + python-version: '${{ matrix.python.version }}' + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - name: Install virtualenv run: | ./scripts/github/install-virtualenv.sh @@ -489,73 +382,33 @@ jobs: # NOTE: We always want to run job on master since we run some additional checks there (code # coverage, etc) # if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} - name: '${{ matrix.name }} - Python ${{ matrix.python-version-short }}' - runs-on: ubuntu-20.04 + name: '${{ matrix.make.name }} - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: # NOTE: To speed the CI run, we split unit and integration tests into multiple jobs where # each job runs subset of tests. 
- include: + python: + - {version-short: '3.8', version: '3.8.18'} + - {version-short: '3.9', version: '3.9.21'} + - {version-short: '3.10', version: '3.10.16'} + - {version-short: '3.11', version: '3.11.11'} + make: # We run pack tests here since they rely on some integration tests set # up (aka stanley user being present, etc.) - name: 'Pack Tests' task: 'ci-packs-tests' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Integration Tests (chunk 1)' - task: 'ci-integration' - nosetests_node_total: 2 - nosetests_node_index: 0 - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Integration Tests (chunk 2)' - task: 'ci-integration' - nosetests_node_total: 2 - nosetests_node_index: 1 - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Pack Tests' - task: 'ci-packs-tests' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version-short: '3.8' - python-version: '3.8.10' - - name: 'Integration Tests (chunk 1)' - task: 'ci-integration' - nosetests_node_total: 2 - nosetests_node_index: 0 - python-version-short: '3.8' - python-version: '3.8.10' - - name: 'Integration Tests (chunk 2)' - task: 'ci-integration' - nosetests_node_total: 2 - nosetests_node_index: 1 - python-version-short: '3.8' - python-version: '3.8.10' - - name: 'Pack Tests' - task: 'ci-packs-tests' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version-short: '3.9' - python-version: '3.9.14' + shard: {k: 0, n: 1} - name: 'Integration Tests (chunk 1)' task: 'ci-integration' - nosetests_node_total: 2 - nosetests_node_index: 0 - python-version-short: '3.9' - python-version: '3.9.14' + shard: {k: 0, n: 2} - name: 'Integration Tests (chunk 2)' task: 'ci-integration' - nosetests_node_total: 2 - nosetests_node_index: 1 - python-version-short: '3.9' - python-version: '3.9.14' + shard: {k: 1, n: 2} services: mongo: - image: mongo:4.4 + image: mongo:7.0 ports: - 27017:27017 @@ -590,27 +443,24 @@ 
jobs: #- 4369:4369/tcp # epmd # - # Used for the coordination backend for integration tests - # NOTE: To speed things up, we only start redis for integration tests - # where it's needed - # redis: - # # Docker Hub image - # image: redis - # # Set health checks to wait until redis has started - # options: >- - # --name "redis" - # --health-cmd "redis-cli ping" - # --health-interval 10s - # --health-timeout 5s - # --health-retries 5 - # ports: - # - 6379:6379/tcp + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp env: - TASK: '${{ matrix.task }}' - PYTHON_VERSION_SHORT: '${{ matrix.python-version-short }}' - NODE_TOTAL: '${{ matrix.nosetests_node_total }}' - NODE_INDEX: '${{ matrix.nosetests_node_index }}' + TASK: '${{ matrix.make.task }}' + PYTHON_VERSION_SHORT: '${{ matrix.python.version-short }}' + NODE_TOTAL: '${{ matrix.make.shard.n }}' + NODE_INDEX: '${{ matrix.make.shard.k }}' # We need to explicitly specify terminal width otherwise some CLI tests fail on container # environments where small terminal size is used. @@ -633,45 +483,16 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Custom Environment Setup run: | ./scripts/github/setup-environment.sh - - name: 'Set up Python (${{ matrix.python-version }})' - uses: actions/setup-python@v2 - with: - python-version: '${{ matrix.python-version }}' - - name: Cache Python Dependencies - uses: actions/cache@v2 - with: - path: | - ~/.cache/pip - virtualenv - ~/virtualenv - # TODO: maybe make the virtualenv a partial cache to exclude st2*? 
- # !virtualenv/lib/python*/site-packages/st2* - # !virtualenv/bin/st2* - key: ${{ runner.os }}-v4-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt') }} - # Don't use alternative key as if requirements.txt has altered we - # don't want to retrieve previous cache - #restore-keys: | - # ${{ runner.os }}-v4-python-${{ matrix.python }}- - - name: Cache APT Dependencies - id: cache-apt-deps - uses: actions/cache@v2 + - name: 'Set up Python (${{ matrix.python.version }}) and Cache Deps' + uses: ./.github/actions/setup-python with: - path: | - ~/apt_cache - key: ${{ runner.os }}-apt-v5-${{ hashFiles('scripts/github/apt-packages.txt') }} - restore-keys: | - ${{ runner.os }}-apt-v5- - - name: Install APT Depedencies - env: - CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} - run: | - # install dev dependencies for Python YAML and LDAP packages - # https://github.com/StackStorm/st2-auth-ldap - ./scripts/github/install-apt-packages-use-cache.sh + python-version: '${{ matrix.python.version }}' + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - name: Install virtualenv run: | ./scripts/github/install-virtualenv.sh @@ -686,11 +507,6 @@ jobs: cp conf/st2.dev.conf "${ST2_CONF}" ; sed -i -e "s/stanley/${ST2_CI_USER}/" "${ST2_CONF}" sudo -E ./scripts/ci/add-itest-user-key.sh - - name: Run Redis Service Container - timeout-minutes: 2 - run: | - docker run --rm --detach -p 127.0.0.1:6379:6379/tcp --name redis redis:latest - until [ "$(docker inspect -f {{.State.Running}} redis)" == "true" ]; do sleep 0.1; done - name: Permissions Workaround run: | echo "$ST2_CI_REPO_PATH" @@ -707,7 +523,6 @@ jobs: ./scripts/ci/print-versions.sh - name: make - if: "${{ env.TASK == 'ci-integration' }}" #timeout-minutes: 7 # TODO: Use dynamic timeout value based on the branch - for master we # need to use timeout x2 due to coverage overhead @@ -725,26 +540,25 @@ jobs: - name: Compress Service Logs Before upload if: ${{ 
failure() && env.TASK == 'ci-integration' }} run: | + ./tools/launchdev.sh stop # stop st2 before collecting logs tar cvzpf logs.tar.gz logs/* - name: Upload StackStorm services Logs if: ${{ failure() && env.TASK == 'ci-integration' }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: - name: logs + name: logs-py${{ matrix.python.version }}-pytest-${{ matrix.make.shard.k }} path: logs.tar.gz retention-days: 7 - - name: Stop Redis Service Container - if: "${{ always() }}" - run: docker rm --force redis || true slack-notification: name: Slack notification for failed master builds if: always() needs: - lint-checks + - self-check - unit-tests - integration-tests - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: Workflow conclusion # this step creates an environment variable WORKFLOW_CONCLUSION and is the most reliable way to check the status of previous jobs diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml index 5e338f26ed..7ab8b17f8d 100644 --- a/.github/workflows/lint.yaml +++ b/.github/workflows/lint.yaml @@ -13,7 +13,7 @@ on: # also version tags, which include bugfix releases (eg: v3.4.0) - v[0-9]+.[0-9]+.[0-9]+ pull_request: - type: [opened, reopened, edited] + types: [opened, reopened, synchronize] branches: # Only for PRs targeting those branches - master @@ -26,63 +26,63 @@ jobs: # Lint checks which don't depend on any service containes, etc. to be running. lint-checks: name: 'Lint Checks (pants runs: shellcheck, bandit, black, flake8, pylint)' - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 env: COLUMNS: '120' steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: # a test uses a submodule, and pants needs access to it to calculate deps. 
submodules: 'true' - #- name: Cache APT Dependencies - # id: cache-apt-deps - # uses: actions/cache@v2 - # with: - # path: | - # ~/apt_cache - # key: ${{ runner.os }}-apt-v7-${{ hashFiles('scripts/github/apt-packages.txt') }} - # restore-keys: | - # ${{ runner.os }}-apt-v7- - - name: Install APT Depedencies - env: - CACHE_HIT: 'false' # cache doesn't work - #CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} - run: | - # install dev dependencies for Python YAML and LDAP packages - # https://github.com/StackStorm/st2-auth-ldap - ./scripts/github/install-apt-packages-use-cache.sh + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - name: Initialize Pants and its GHA caches - uses: pantsbuild/actions/init-pants@v2 - # This action adds an env var to make pants use both pants.ci.toml & pants.toml. - # This action also creates 3 GHA caches (1 is optional). - # - `pants-setup` has the bootsrapped pants install - # - `pants-named-caches` has pip/wheel and PEX caches - # - `pants-lmdb-store` has the fine-grained process cache. - # If we ever use a remote cache, then we can drop this. - # Otherwise, we may need an additional workflow or job to delete old caches - # if they are not expiring fast enough, and we hit the GHA 10GB per repo max. + uses: ./.github/actions/init-pants with: - base-branch: master # To ignore a bad cache, bump the cache* integer. gha-cache-key: cache0 - # This hash should include all of our lockfiles so that the pip/pex caches - # get invalidated on any transitive dependency update. - named-caches-hash: ${{ hashFiles('requirements.txt') }} - # enable the optional lmdb_store cache since we're not using remote caching. - cache-lmdb-store: 'true' - name: Lint run: | - ./pants lint :: + pants lint :: - name: Upload pants log - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: pants-log-py${{ matrix.python-version }} path: .pants.d/pants.log if: always() # We want the log even on failures. 
+ + set_merge_ok: + name: Set Merge OK (Lint) + if: always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') + needs: + - lint-checks + outputs: + merge_ok: ${{ steps.set_merge_ok.outputs.merge_ok }} + runs-on: ubuntu-latest + steps: + - id: set_merge_ok + run: echo 'merge_ok=true' >> ${GITHUB_OUTPUT} + + merge_ok: + name: Merge OK (Lint) + if: always() + needs: + - set_merge_ok + runs-on: ubuntu-latest + steps: + - run: | + merge_ok="${{ needs.set_merge_ok.outputs.merge_ok }}" + if [[ "${merge_ok}" == "true" ]]; then + echo "Merge OK" + exit 0 + else + echo "Merge NOT OK" + exit 1 + fi diff --git a/.github/workflows/microbenchmarks.yaml b/.github/workflows/microbenchmarks.yaml index 9477b256f8..e150189ec2 100644 --- a/.github/workflows/microbenchmarks.yaml +++ b/.github/workflows/microbenchmarks.yaml @@ -5,13 +5,23 @@ name: Micro Benchmarks on: schedule: - cron: '30 3 * * *' + pull_request: + types: [opened, reopened, synchronize] + branches: + # Only for PRs targeting those branches + - master + - v[0-9]+.[0-9]+ + paths: + # Only for PRs that touch the benchmarks + - .github/workflows/microbenchmarks.yaml + - st2common/benchmarks/** jobs: # Special job which automatically cancels old runs for the same branch, prevents runs for the # same file set which has already passed, etc. 
pre_job: name: Skip Duplicate Jobs Pre Job - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 outputs: should_skip: ${{ steps.skip_check.outputs.should_skip }} steps: @@ -26,35 +36,25 @@ jobs: # NOTE: We always want to run job on master since we run some additional checks there (code # coverage, etc) if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} - name: '${{ matrix.name }} - Python ${{ matrix.python-version-short }}' - runs-on: ubuntu-20.04 + name: '${{ matrix.make.name }} - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: # NOTE: We need to use full Python version as part of Python deps cache key otherwise # setup virtualenv step will fail. - include: + python: + - {version-short: '3.8', version: '3.8.18'} + - {version-short: '3.9', version: '3.9.21'} + - {version-short: '3.10', version: '3.10.16'} + - {version-short: '3.11', version: '3.11.11'} + make: - name: 'Microbenchmarks' task: 'micro-benchmarks' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Microbenchmarks' - task: 'micro-benchmarks' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version-short: '3.8' - python-version: '3.8.10' - - name: 'Microbenchmarks' - task: 'micro-benchmarks' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version-short: '3.9' - python-version: '3.9.14' + shard: {k: 0, n: 1} services: mongo: - image: mongo:4.4 + image: mongo:7.0 ports: - 27017:27017 @@ -68,10 +68,10 @@ jobs: - 15672:15672/tcp # Management: HTTP, CLI env: - TASK: '${{ matrix.task }}' + TASK: '${{ matrix.make.task }}' - NODE_TOTAL: '${{ matrix.nosetests_node_total }}' - NODE_INDEX: '${{ matrix.nosetests_node_index }}' + NODE_TOTAL: '${{ matrix.make.shard.n }}' + NODE_INDEX: '${{ matrix.make.shard.k }}' COLUMNS: '120' ST2_CI: 'true' @@ -80,37 +80,13 @@ jobs: PATH: 
/home/runner/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin steps: - name: Checkout repository - uses: actions/checkout@v2 - - name: 'Set up Python (${{ matrix.python-version }})' - uses: actions/setup-python@v2 + uses: actions/checkout@v4 + - name: 'Set up Python (${{ matrix.python.version }}) and Cache Deps' + uses: ./.github/actions/setup-python with: - python-version: '${{ matrix.python-version }}' - - name: Cache Python Dependencies - uses: actions/cache@v2 - with: - path: | - ~/.cache/pip - virtualenv - ~/virtualenv - key: ${{ runner.os }}-v4-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt') }} - # Don't use alternative key as if requirements.txt has altered we - # don't want to retrieve previous cache - #restore-keys: | - # ${{ runner.os }}-v4-python-${{ matrix.python }}- - - name: Cache APT Dependencies - id: cache-apt-deps - uses: actions/cache@v2 - with: - path: | - ~/apt_cache - key: ${{ runner.os }}-apt-v7-${{ hashFiles('scripts/github/apt-packages.txt') }} - restore-keys: | - ${{ runner.os }}-apt-v7- - - name: Install APT Dependencies - env: - CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} - run: | - ./scripts/github/install-apt-packages-use-cache.sh + python-version: '${{ matrix.python.version }}' + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - name: Install virtualenv run: | ./scripts/github/install-virtualenv.sh @@ -126,9 +102,9 @@ jobs: run: | script -e -c "make ${TASK}" && exit 0 - name: Upload Histograms - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: - name: benchmark_histograms + name: benchmark_histograms-py${{ matrix.python.version }} path: benchmark_histograms/ retention-days: 30 @@ -137,7 +113,7 @@ jobs: if: always() needs: - micro-benchmarks - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: Workflow conclusion # this step creates an environment variable WORKFLOW_CONCLUSION and is the 
most reliable way to check the status of previous jobs diff --git a/.github/workflows/orquesta-integration-tests.yaml b/.github/workflows/orquesta-integration-tests.yaml index b45dd5fb84..591f8a6cb0 100644 --- a/.github/workflows/orquesta-integration-tests.yaml +++ b/.github/workflows/orquesta-integration-tests.yaml @@ -15,7 +15,7 @@ on: # also version tags, which include bugfix releases (eg: v3.4.0) - v[0-9]+.[0-9]+.[0-9]+ pull_request: - type: [opened, reopened, edited] + types: [opened, reopened, synchronize] branches: # Only for PRs targeting those branches - master @@ -32,7 +32,7 @@ jobs: # same file set which has already passed, etc. pre_job: name: Skip Duplicate Jobs Pre Job - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 outputs: should_skip: ${{ steps.skip_check.outputs.should_skip }} steps: @@ -47,35 +47,25 @@ jobs: # NOTE: We always want to run job on master since we run some additional checks there (code # coverage, etc) # if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.ref == 'refs/heads/master' }} - name: '${{ matrix.name }} - Python ${{ matrix.python-version-short }}' - runs-on: ubuntu-20.04 + name: '${{ matrix.make.name }} - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: # NOTE: We need to use full Python version as part of Python deps cache key otherwise # setup virtualenv step will fail. 
- include: + python: + - {version-short: '3.8', version: '3.8.18'} + - {version-short: '3.9', version: '3.9.21'} + - {version-short: '3.10', version: '3.10.16'} + - {version-short: '3.11', version: '3.11.11'} + make: - name: 'Integration Tests (Orquesta)' task: 'ci-orquesta' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version: '3.6.13' - python-version-short: '3.6' - - name: 'Integration Tests (Orquesta)' - task: 'ci-orquesta' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version-short: '3.8' - python-version: '3.8.10' - - name: 'Integration Tests (Orquesta)' - task: 'ci-orquesta' - nosetests_node_total: 1 - nosetests_node_index: 0 - python-version-short: '3.9' - python-version: '3.9.14' + shard: {k: 0, n: 1} services: mongo: - image: mongo:4.4 + image: mongo:7.0 ports: - 27017:27017 @@ -88,26 +78,23 @@ jobs: - 5672:5672/tcp # AMQP standard port - 15672:15672/tcp # Management: HTTP, CLI - # Used for the coordination backend for integration tests - # NOTE: To speed things up, we only start redis for integration tests - # where it's needed - # redis: - # # Docker Hub image - # image: redis - # # Set health checks to wait until redis has started - # options: >- - # --name "redis" - # --health-cmd "redis-cli ping" - # --health-interval 10s - # --health-timeout 5s - # --health-retries 5 - # ports: - # - 6379:6379/tcp + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp env: - TASK: '${{ matrix.task }}' - NODE_TOTAL: '${{ matrix.nosetests_node_total }}' - NODE_INDEX: '${{ matrix.nosetests_node_index }}' + TASK: '${{ matrix.make.task }}' + NODE_TOTAL: '${{ matrix.make.shard.n }}' + NODE_INDEX: '${{ matrix.make.shard.k }}' # We need to explicitly specify terminal width otherwise some CLI tests fail on container # environments where small 
terminal size is used. @@ -129,45 +116,16 @@ jobs: PATH: /home/runner/.local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Custom Environment Setup run: | ./scripts/github/setup-environment.sh - - name: 'Set up Python (${{ matrix.python-version }})' - uses: actions/setup-python@v2 - with: - python-version: '${{ matrix.python-version }}' - - name: Cache Python Dependencies - uses: actions/cache@v2 + - name: 'Set up Python (${{ matrix.python.version }}) and Cache Deps' + uses: ./.github/actions/setup-python with: - path: | - ~/.cache/pip - virtualenv - ~/virtualenv - # TODO: maybe make the virtualenv a partial cache to exclude st2*? - # !virtualenv/lib/python*/site-packages/st2* - # !virtualenv/bin/st2* - key: ${{ runner.os }}-v4-python-${{ matrix.python-version }}-${{ hashFiles('requirements.txt', 'test-requirements.txt') }} - # Don't use alternative key as if requirements.txt has altered we - # don't want to retrieve previous cache - #restore-keys: | - # ${{ runner.os }}-v4-python-${{ matrix.python }}- - - name: Cache APT Dependencies - id: cache-apt-deps - uses: actions/cache@v2 - with: - path: | - ~/apt_cache - key: ${{ runner.os }}-apt-v7-${{ hashFiles('scripts/github/apt-packages.txt') }} - restore-keys: | - ${{ runner.os }}-apt-v7- - - name: Install APT Depedencies - env: - CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} - run: | - # install dev dependencies for Python YAML and LDAP packages - # https://github.com/StackStorm/st2-auth-ldap - ./scripts/github/install-apt-packages-use-cache.sh + python-version: '${{ matrix.python.version }}' + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - name: Install virtualenv run: | ./scripts/github/install-virtualenv.sh @@ -182,11 +140,6 @@ jobs: cp conf/st2.dev.conf "${ST2_CONF}" ; sed -i -e "s/stanley/${ST2_CI_USER}/" "${ST2_CONF}" sudo -E 
./scripts/ci/add-itest-user-key.sh - - name: Run Redis Service Container - timeout-minutes: 2 - run: | - docker run --rm --detach -p 127.0.0.1:6379:6379/tcp --name redis redis:latest - until [ "$(docker inspect -f {{.State.Running}} redis)" == "true" ]; do sleep 0.1; done - name: Permissions Workaround run: | echo "$ST2_CI_REPO_PATH" @@ -195,7 +148,7 @@ jobs: run: | ./scripts/ci/print-versions.sh - name: make - timeout-minutes: 31 + timeout-minutes: 41 env: MAX_ATTEMPTS: 3 RETRY_DELAY: 5 @@ -217,33 +170,25 @@ jobs: set -e echo "Failed after ${MAX_ATTEMPTS} attempts, failing the job." exit 1 - - name: Upload StackStorm services Logs - #if: ${{ failure() }} - uses: actions/upload-artifact@v2 - with: - name: logs - path: logs/ - name: Compress Service Logs Before upload if: ${{ failure() }} run: | + ./tools/launchdev.sh stop # stop st2 before collecting logs tar cvzpf logs.tar.gz logs/* - name: Upload StackStorm services Logs if: ${{ failure() }} - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: - name: logs + name: logs-py${{ matrix.python.version }} path: logs.tar.gz retention-days: 7 - - name: Stop Redis Service Container - if: "${{ always() }}" - run: docker rm --force redis || true slack-notification: name: Slack notification for failed master builds if: always() needs: - integration-tests - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: Workflow conclusion # this step creates an environment variable WORKFLOW_CONCLUSION and is the most reliable way to check the status of previous jobs diff --git a/.github/workflows/pants.yaml b/.github/workflows/pants.yaml index 64fddfbc77..94a60df45e 100644 --- a/.github/workflows/pants.yaml +++ b/.github/workflows/pants.yaml @@ -12,7 +12,7 @@ on: # also version tags, which include bugfix releases (eg: v3.4.0) - v[0-9]+.[0-9]+.[0-9]+ pull_request: - type: [opened, reopened, edited] + types: [opened, reopened, synchronize] branches: # Only for PRs targeting those branches - master @@ -21,40 
+21,26 @@ on: jobs: pants-tailor: name: Make sure pants BUILD files are up-to-date - runs-on: ubuntu-20.04 + runs-on: ubuntu-22.04 steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: # a test uses a submodule, and pants needs access to it to calculate deps. submodules: 'true' - name: Initialize Pants and its GHA caches - uses: pantsbuild/actions/init-pants@v2 - # This action adds an env var to make pants use both pants.ci.toml & pants.toml. - # This action also creates 3 GHA caches (1 is optional). - # - `pants-setup` has the bootsrapped pants install - # - `pants-named-caches` has pip/wheel and PEX caches - # - `pants-lmdb-store` has the fine-grained process cache. - # If we ever use a remote cache, then we can drop this. - # Otherwise, we may need an additional workflow or job to delete old caches - # if they are not expiring fast enough, and we hit the GHA 10GB per repo max. + uses: ./.github/actions/init-pants with: - base-branch: master # To ignore a bad cache, bump the cache* integer. gha-cache-key: cache0-BUILD - # This hash should include all of our lockfiles so that the pip/pex caches - # get invalidated on any transitive dependency update. - named-caches-hash: ${{ hashFiles('requirements.txt') }} - # enable the optional lmdb_store cache since we're not using remote caching. 
- cache-lmdb-store: 'true' - name: Check BUILD files run: | - ./pants tailor --check update-build-files --check :: + pants tailor --check update-build-files --check :: - name: Upload pants log - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: name: pants-log-py${{ matrix.python-version }} path: .pants.d/pants.log diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 172c0cd64d..dd6e0b2e26 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -13,7 +13,7 @@ on: # also version tags, which include bugfix releases (eg: v3.4.0) - v[0-9]+.[0-9]+.[0-9]+ pull_request: - type: [opened, reopened, edited] + types: [opened, reopened, synchronize] branches: # Only for PRs targeting those branches - master @@ -22,29 +22,127 @@ on: # # run every night at midnight # - cron: '0 0 * * *' +env: + COLUMNS: '120' + + # Tell StackStorm that we are indeed in CI mode, using our CI-provider agnostic var. + ST2_CI: 'true' + + # GitHub Actions uses the 'runner' user, so use that instead of stanley. + ST2TESTS_SYSTEM_USER: 'runner' + + # This is the host:port provided by services.redis + ST2TESTS_REDIS_HOST: '127.0.0.1' + ST2TESTS_REDIS_PORT: '6379' + jobs: - test: - name: '${{ matrix.name }} - Python ${{ matrix.python-version-short }}' - runs-on: ubuntu-20.04 + pants-plugins-tests: + name: 'Pants Plugins Tests (pants runs: pytest) - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 + strategy: + fail-fast: false + matrix: + # NOTE: We need to use full Python version as part of Python deps cache key otherwise + # setup virtualenv step will fail. 
+ python: + # Pants itself uses only 3.11 + - {version-short: '3.11', version: '3.11.11'} + + services: + mongo: + image: mongo:7.0 + ports: + - 27017:27017 + + rabbitmq: + image: rabbitmq:3.8-management + options: >- + --name rabbitmq + ports: + - 5671:5671/tcp # AMQP SSL port + - 5672:5672/tcp # AMQP standard port + - 15672:15672/tcp # Management: HTTP, CLI + + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'recursive' + # sadly, the submodule will only have fetch-depth=1, which is what we want + # for st2.git, but not for the submodules. We still want actions/checkout + # to do the initial checkout, however, so that it adds auth for fetching + # in the submodule. + + - name: Fetch repository submodules + run: | + git submodule status + git submodule foreach 'git fetch --all --tags' + git submodule foreach 'git tag' + + - name: 'Set up Python (${{ matrix.python.version }})' + id: python + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python.version }}' + + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages + + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. + gha-cache-key: cache0-py${{ matrix.python.version }} + + - name: Test pants-plugins + run: | + pants test pants-plugins/:: + + - name: Upload pants log + uses: actions/upload-artifact@v4 + with: + name: pants-log-py${{ matrix.python.version }}-pants-plugins-tests + path: .pants.d/pants.log + if: always() # We want the log even on failures. 
+ + unit-tests: + name: 'Unit Tests Shard ${{ matrix.shard.k }}/${{ matrix.shard.n }} (pants runs: pytest) - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 strategy: fail-fast: false matrix: # NOTE: We need to use full Python version as part of Python deps cache key otherwise # setup virtualenv step will fail. - include: - - name: 'Test (pants runs: pytest)' - python-version-short: '3.6' - python-version: '3.6.13' - - name: 'Test (pants runs: pytest)' - python-version-short: '3.8' - python-version: '3.8.10' - - name: 'Test (pants runs: pytest)' - python-version-short: '3.9' - python-version: '3.9.14' + python: + - {version-short: '3.8', version: '3.8.18'} + - {version-short: '3.9', version: '3.9.21'} + - {version-short: '3.10', version: '3.10.16'} + - {version-short: '3.11', version: '3.11.11'} + shard: + # Sharding of tests is handled by pants: + # https://www.pantsbuild.org/stable/docs/using-pants/advanced-target-selection#sharding-the-input-targets + - {k: '0', n: '4'} + - {k: '1', n: '4'} + - {k: '2', n: '4'} + - {k: '3', n: '4'} services: mongo: - image: mongo:4.4 + image: mongo:7.0 ports: - 27017:27017 @@ -70,70 +168,571 @@ jobs: ports: - 6379:6379/tcp - env: - COLUMNS: '120' + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'recursive' + # sadly, the submodule will only have fetch-depth=1, which is what we want + # for st2.git, but not for the submodules. We still want actions/checkout + # to do the initial checkout, however, so that it adds auth for fetching + # in the submodule. 
+ + - name: Fetch repository submodules + run: | + git submodule status + git submodule foreach 'git fetch --all --tags' + git submodule foreach 'git tag' + + - name: 'Set up Python (${{ matrix.python.version }})' + id: python + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python.version }}' + + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages + + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. + gha-cache-key: cache0-py${{ matrix.python.version }} + + - name: Unit Tests + run: > + pants + --python-bootstrap-search-path=[] + --python-bootstrap-search-path=${{ steps.python.outputs.python-path }} + --tag=unit + --test-shard=${{ matrix.shard.k }}/${{ matrix.shard.n }} + test '::' + + - name: Upload pants log + uses: actions/upload-artifact@v4 + with: + name: pants-log-py${{ matrix.python.version }}-unit-tests-shard-${{ matrix.shard.k }}_${{ matrix.shard.n }} + path: .pants.d/pants.log + if: always() # We want the log even on failures. + + pack-tests: + name: 'Pack Tests (pants runs: pytest) - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 + strategy: + fail-fast: false + matrix: + # NOTE: We need to use full Python version as part of Python deps cache key otherwise + # setup virtualenv step will fail. 
+ python: + - {version-short: '3.8', version: '3.8.18'} + - {version-short: '3.9', version: '3.9.21'} + - {version-short: '3.10', version: '3.10.16'} + - {version-short: '3.11', version: '3.11.11'} + + services: + mongo: + image: mongo:7.0 + ports: + - 27017:27017 + + rabbitmq: + image: rabbitmq:3.8-management + options: >- + --name rabbitmq + ports: + - 5671:5671/tcp # AMQP SSL port + - 5672:5672/tcp # AMQP standard port + - 15672:15672/tcp # Management: HTTP, CLI + + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'recursive' + # sadly, the submodule will only have fetch-depth=1, which is what we want + # for st2.git, but not for the submodules. We still want actions/checkout + # to do the initial checkout, however, so that it adds auth for fetching + # in the submodule. + + - name: Fetch repository submodules + run: | + git submodule status + git submodule foreach 'git fetch --all --tags' + git submodule foreach 'git tag' + + - name: 'Set up Python (${{ matrix.python.version }})' + id: python + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python.version }}' + + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages + + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. 
+ gha-cache-key: cache0-py${{ matrix.python.version }} + + - name: Pack Tests + run: > + pants + --python-bootstrap-search-path=[] + --python-bootstrap-search-path=${{ steps.python.outputs.python-path }} + --tag=pack + test '::' + + - name: Upload pants log + uses: actions/upload-artifact@v4 + with: + name: pants-log-py${{ matrix.python.version }}-pack-tests + path: .pants.d/pants.log + if: always() # We want the log even on failures. + + integration-tests: + name: 'Integration Tests (pants runs: pytest) - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 + strategy: + fail-fast: false + matrix: + # NOTE: We need to use full Python version as part of Python deps cache key otherwise + # setup virtualenv step will fail. + python: + - {version-short: '3.8', version: '3.8.18'} + - {version-short: '3.9', version: '3.9.21'} + - {version-short: '3.10', version: '3.10.16'} + - {version-short: '3.11', version: '3.11.11'} + + services: + mongo: + image: mongo:7.0 + ports: + - 27017:27017 + + # In GHA, these services are started first before the code is checked out. + # We use bitnami images to facilitate reconfiguring RabbitMQ during integration tests. + # We rely on custom config and SSL certs that are in the repo. + # Many images require config in env vars (which we can't change during the test job) + # or they require config in entrypoint args (which we can't override for GHA services) + # bitnami builds ways to get config files from mounted volumes. 
+ rabbitmq: + image: bitnami/rabbitmq:3.8 + volumes: + - /home/runner/rabbitmq_conf:/bitnami/conf # RABBITMQ_MOUNTED_CONF_DIR + env: + # tell bitnami/rabbitmq to enable this by default + RABBITMQ_PLUGINS: rabbitmq_management + RABBITMQ_USERNAME: guest + RABBITMQ_PASSWORD: guest + RABBITMQ_LOGS: '-' + # bitnami's default relative limit was too high + RABBITMQ_DISK_FREE_ABSOLUTE_LIMIT: 50MB + + # These are strictly docker options, not entrypoint args (GHA restriction) + options: >- + --name rabbitmq + ports: + # These 6 ports are exposed by bitnami/rabbitmq (see https://www.rabbitmq.com/networking.html#ports) + # host_port:container_port/protocol + - 5671:5671/tcp # AMQP SSL port + - 5672:5672/tcp # AMQP standard port + - 15672:15672/tcp # Management: HTTP, CLI + #- 15671:15671/tcp # Management: SSL port + #- 25672:25672/tcp # inter-node or CLI + #- 4369:4369/tcp # epmd + + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'recursive' + # sadly, the submodule will only have fetch-depth=1, which is what we want + # for st2.git, but not for the submodules. We still want actions/checkout + # to do the initial checkout, however, so that it adds auth for fetching + # in the submodule. 
+ + - name: Fetch repository submodules + run: | + git submodule status + git submodule foreach 'git fetch --all --tags' + git submodule foreach 'git tag' + + - name: 'Set up Python (${{ matrix.python.version }})' + id: python + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python.version }}' + + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages + + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. + gha-cache-key: cache0-py${{ matrix.python.version }} + + # This is only required for st2common/tests/integration/test_rabbitmq_ssl_listener.py + - name: Reconfigure RabbitMQ + # bitnami image allows (see bitnami/rabbitmq readme): + # Here we're copying a rabbitmq.config file which won't do anything. + # We need to switch to custom.conf or advanced.config. + timeout-minutes: 2 # may die if rabbitmq fails to start + run: | + ./scripts/github/configure-rabbitmq.sh + + - name: Integration Tests + run: > + pants + --python-bootstrap-search-path=[] + --python-bootstrap-search-path=${{ steps.python.outputs.python-path }} + --tag=integration + --tag=-st2cluster + test '::' + + - name: Upload pants log + uses: actions/upload-artifact@v4 + with: + name: pants-log-py${{ matrix.python.version }}-integration-tests + path: .pants.d/pants.log + if: always() # We want the log even on failures. + + integration-st2cluster-tests: + name: 'Integration Tests (Full Cluster)- Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 + strategy: + fail-fast: false + matrix: + # NOTE: We need to use full Python version as part of Python deps cache key otherwise + # setup virtualenv step will fail. 
+ python: + - {version-short: '3.8', version: '3.8.18'} + - {version-short: '3.9', version: '3.9.21'} + - {version-short: '3.10', version: '3.10.16'} + - {version-short: '3.11', version: '3.11.11'} + + services: + mongo: + image: mongo:7.0 + ports: + - 27017:27017 + + rabbitmq: + image: rabbitmq:3.8-management + options: >- + --name rabbitmq + ports: + - 5671:5671/tcp # AMQP SSL port + - 5672:5672/tcp # AMQP standard port + - 15672:15672/tcp # Management: HTTP, CLI + + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: # a test uses a submodule, and pants needs access to it to calculate deps. - submodules: 'true' + submodules: 'recursive' + # sadly, the submodule will only have fetch-depth=1, which is what we want + # for st2.git, but not for the submodules. We still want actions/checkout + # to do the initial checkout, however, so that it adds auth for fetching + # in the submodule. 
+ + - name: Fetch repository submodules + run: | + git submodule status + git submodule foreach 'git fetch --all --tags' + git submodule foreach 'git tag' - - name: 'Set up Python (${{ matrix.python-version }})' - uses: actions/setup-python@v2 + - name: 'Set up Python (${{ matrix.python.version }})' + id: python + uses: actions/setup-python@v5 with: - python-version: '${{ matrix.python-version }}' + python-version: '${{ matrix.python.version }}' + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - #- name: Cache APT Dependencies - # id: cache-apt-deps - # uses: actions/cache@v2 - # with: - # path: | - # ~/apt_cache - # key: ${{ runner.os }}-apt-v7-${{ hashFiles('scripts/github/apt-packages.txt') }} - # restore-keys: | - # ${{ runner.os }}-apt-v7- - - name: Install APT Depedencies + - name: Initialize Pants and its GHA caches + uses: ./.github/actions/init-pants + with: + # To ignore a bad cache, bump the cache* integer. + gha-cache-key: cache0-py${{ matrix.python.version }} + + - name: Export virtualenv to run Dev ST2 Cluster + run: > + pants + --python-bootstrap-search-path=[] + --python-bootstrap-search-path=${{ steps.python.outputs.python-path }} + export + --resolve=st2 + + - name: Launch Dev ST2 Cluster env: - CACHE_HIT: 'false' # cache doesn't work - #CACHE_HIT: ${{steps.cache-apt-deps.outputs.cache-hit}} + VIRTUALENV_DIR: ./dist/export/python/virtualenvs/st2/${{ steps.python.outputs.python-version }} + run: | + sudo -E ./scripts/github/prepare-integration.sh + + - name: Integration Tests + env: + ST2_CI_RUN_ORQUESTA_PAUSE_RESUME_TESTS: 'true' + run: > + pants + --python-bootstrap-search-path=[] + --python-bootstrap-search-path=${{ steps.python.outputs.python-path }} + --tag=integration + --tag=st2cluster + test '::' + + - name: Compress Service Logs Before upload + if: failure() run: | - # install dev dependencies for Python YAML and LDAP packages - # https://github.com/StackStorm/st2-auth-ldap - 
./scripts/github/install-apt-packages-use-cache.sh + ./tools/launchdev.sh stop # stop st2 before collecting logs + tar cvzpf logs.tar.gz logs/* + + - name: Upload StackStorm services Logs + if: failure() + uses: actions/upload-artifact@v4 + with: + name: logs-py${{ matrix.python.version }}-st2cluster-integration-tests + path: logs.tar.gz + retention-days: 7 + + - name: Upload pants log + uses: actions/upload-artifact@v4 + with: + name: pants-log-py${{ matrix.python.version }}-st2cluster-integration-tests + path: .pants.d/pants.log + if: always() # We want the log even on failures. + + self-check: + name: 'Self-Check - Python ${{ matrix.python.version-short }}' + runs-on: ubuntu-22.04 + strategy: + fail-fast: false + matrix: + # NOTE: We need to use full Python version as part of Python deps cache key otherwise + # setup virtualenv step will fail. + python: + - {version-short: '3.8', version: '3.8.18'} + - {version-short: '3.9', version: '3.9.21'} + - {version-short: '3.10', version: '3.10.16'} + - {version-short: '3.11', version: '3.11.11'} + + services: + mongo: + image: mongo:7.0 + ports: + - 27017:27017 + + rabbitmq: + image: rabbitmq:3.8-management + options: >- + --name rabbitmq + ports: + - 5671:5671/tcp # AMQP SSL port + - 5672:5672/tcp # AMQP standard port + - 15672:15672/tcp # Management: HTTP, CLI + + redis: + # Docker Hub image + image: redis + # Set health checks to wait until redis has started + options: >- + --name "redis" + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379/tcp + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # a test uses a submodule, and pants needs access to it to calculate deps. + submodules: 'recursive' + # sadly, the submodule will only have fetch-depth=1, which is what we want + # for st2.git, but not for the submodules. 
We still want actions/checkout + # to do the initial checkout, however, so that it adds auth for fetching + # in the submodule. + + - name: Fetch repository submodules + run: | + git submodule status + git submodule foreach 'git fetch --all --tags' + git submodule foreach 'git tag' + + - name: 'Set up Python (${{ matrix.python.version }})' + id: python + uses: actions/setup-python@v5 + with: + python-version: '${{ matrix.python.version }}' + + - name: Cache and Install APT Dependencies + uses: ./.github/actions/apt-packages - name: Initialize Pants and its GHA caches - uses: pantsbuild/actions/init-pants@v2 - # This action adds an env var to make pants use both pants.ci.toml & pants.toml. - # This action also creates 3 GHA caches (1 is optional). - # - `pants-setup` has the bootsrapped pants install - # - `pants-named-caches` has pip/wheel and PEX caches - # - `pants-lmdb-store` has the fine-grained process cache. - # If we ever use a remote cache, then we can drop this. - # Otherwise, we may need an additional workflow or job to delete old caches - # if they are not expiring fast enough, and we hit the GHA 10GB per repo max. - with: - base-branch: master + uses: ./.github/actions/init-pants + with: # To ignore a bad cache, bump the cache* integer. - gha-cache-key: cache0-py${{ matrix.python-version }} - # This hash should include all of our lockfiles so that the pip/pex caches - # get invalidated on any transitive dependency update. - named-caches-hash: ${{ hashFiles('requirements.txt') }} - # enable the optional lmdb_store cache since we're not using remote caching. - cache-lmdb-store: 'true' - - - name: Test - # We do not support running pytest everywhere yet. When we do it will be simply: - # ./pants test :: - # Until then, we need to manually adjust this command line to test what we can. 
+ gha-cache-key: cache0-py${{ matrix.python.version }} + + - name: Export virtualenv to run Dev ST2 Cluster + run: > + pants + --python-bootstrap-search-path=[] + --python-bootstrap-search-path=${{ steps.python.outputs.python-path }} + export + --resolve=st2 + + - name: Add ST2 System User Key + env: + ST2_CI_USER: ${{ env.ST2TESTS_SYSTEM_USER }} + ST2_SYSTEM_USER__USER: ${{ env.ST2TESTS_SYSTEM_USER }} + ST2_SYSTEM_USER__SSH_KEY_FILE: /home/${{ env.ST2TESTS_SYSTEM_USER }}/.ssh/stanley_rsa + run: | + sudo -E ./scripts/ci/add-itest-user-key.sh + + - name: Launch Dev ST2 Cluster + env: + # NOTE: ST2_CONF defaults to ${GITHUB_WORKSPACE}/conf/st2.dev.conf + VIRTUALENV_DIR: ./dist/export/python/virtualenvs/st2/${{ steps.python.outputs.python-version }} + ST2_SYSTEM_USER__USER: ${{ env.ST2TESTS_SYSTEM_USER }} + ST2_SYSTEM_USER__SSH_KEY_FILE: /home/${{ env.ST2TESTS_SYSTEM_USER }}/.ssh/stanley_rsa + run: | + sudo -E ./scripts/github/prepare-integration.sh + + - name: Extend the path for upcoming tasks + # pants uses PEP 660 editable wheels to add our code to the virtualenv. + # But PEP 660 editable wheels do not include 'scripts'. + # https://peps.python.org/pep-0660/#limitations + # So, we need to include each bin dir in PATH instead of virtualenv/bin. + env: + VIRTUALENV_DIR: dist/export/python/virtualenvs/st2/${{ steps.python.outputs.python-version }} + run: | + for component_bin in ${GITHUB_WORKSPACE}/st2*/bin; do + echo ${component_bin} | tee -a $GITHUB_PATH + done + echo ${GITHUB_WORKSPACE}/${VIRTUALENV_DIR}/bin | tee -a $GITHUB_PATH + + - name: Create symlinks to find the binaries when running st2 actions + # st2 is actually a console_script entry point, not just a 'script' + # so it IS included in the virtualenv. But, st2-run-pack-tests might not be included. 
+ env: + VIRTUALENV_DIR: dist/export/python/virtualenvs/st2/${{ steps.python.outputs.python-version }} + run: | + ln -s ${GITHUB_WORKSPACE}/${VIRTUALENV_DIR}/bin/st2 /usr/local/bin/st2 + ln -s ${GITHUB_WORKSPACE}/st2common/bin/st2-run-pack-tests /usr/local/bin/st2-run-pack-tests + + - name: Run st2-self-check + env: + # Space separated list of tests to be skipped if the self-check is running in GitHub Actions + TESTS_TO_SKIP: "tests.test_quickstart_rules tests.test_run_pack_tests_tool" + ST2_SYSTEM_USER__USER: ${{ env.ST2TESTS_SYSTEM_USER }} + ST2_SYSTEM_USER__SSH_KEY_FILE: /home/${{ env.ST2TESTS_SYSTEM_USER }}/.ssh/stanley_rsa + run: > + sudo + -E + ST2_AUTH_TOKEN=$(st2 auth testu -p 'testp' -t) + ST2_CONF=${GITHUB_WORKSPACE}/conf/st2.dev.conf + PATH=${PATH} + st2common/bin/st2-self-check + + - name: Compress Service Logs Before upload + if: failure() run: | - ./pants test pylint_plugins/:: pants-plugins/:: + ./tools/launchdev.sh stop # stop st2 before collecting logs + tar cvzpf logs.tar.gz logs/* + + - name: Upload StackStorm services Logs + if: failure() + uses: actions/upload-artifact@v4 + with: + name: logs-py${{ matrix.python.version }}-self-check + path: logs.tar.gz + retention-days: 7 - name: Upload pants log - uses: actions/upload-artifact@v2 + uses: actions/upload-artifact@v4 with: - name: pants-log-py${{ matrix.python-version }} + name: pants-log-py${{ matrix.python.version }}-self-check path: .pants.d/pants.log if: always() # We want the log even on failures. 
+ + set_merge_ok: + name: Set Merge OK (Tests) + if: always() && !contains(needs.*.result, 'failure') && !contains(needs.*.result, 'cancelled') + needs: + - pants-plugins-tests + - unit-tests + - pack-tests + - integration-tests + - integration-st2cluster-tests + - self-check + outputs: + merge_ok: ${{ steps.set_merge_ok.outputs.merge_ok }} + runs-on: ubuntu-latest + steps: + - id: set_merge_ok + run: echo 'merge_ok=true' >> ${GITHUB_OUTPUT} + + merge_ok: + name: Merge OK (Tests) + if: always() + needs: + - set_merge_ok + runs-on: ubuntu-latest + steps: + - run: | + merge_ok="${{ needs.set_merge_ok.outputs.merge_ok }}" + if [[ "${merge_ok}" == "true" ]]; then + echo "Merge OK" + exit 0 + else + echo "Merge NOT OK" + exit 1 + fi diff --git a/.gitignore b/.gitignore index 090159801f..dc1b6aec20 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ *.py[cod] *.sqlite *.log +*.orig .stamp* # C extensions @@ -71,4 +72,4 @@ benchmark_histograms/ [._]*.sw[a-p]x [._]sw[a-p]x -**/build/lib/** \ No newline at end of file +**/build/lib/** diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bdd3a3aee6..45e5daaf44 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -37,7 +37,7 @@ repos: language: script types: [file, python] - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.5.0 + rev: v5.0.0 hooks: - id: trailing-whitespace exclude: (^conf/|^st2common/st2common/openapi.yaml|^st2client/tests/fixtures|^st2tests/st2tests/fixtures) diff --git a/ADOPTERS.md b/ADOPTERS.md index c8cdd6ea60..50c3834f79 100644 --- a/ADOPTERS.md +++ b/ADOPTERS.md @@ -7,6 +7,7 @@ This is an alphabetical list of known [StackStorm](https://stackstorm.com/) adop * [Adobe](https://www.adobe.com/) - Multinational computer software company. After evaluating both SaltStack and Rundeck, Adobe chose StackStorm towards their journey to self-healing infrastructure. 
As a result, SRE team could resolve thousands of alerts and fix 70% of the outages automatically without human intervention. [[ DevOpsDays Notes ](https://threadreaderapp.com/thread/1098901714567081984.html)] [[ DevOpsCon Talk ](https://devopscon.io/monitoring-traceability-diagnostics/workflow-engines-our-journey-towards-a-self-healing-infrastructure/)] * [Bitovi](https://www.bitovi.com/) - Consulting company, implemented an Automation solution based on StackStorm API with HA capabilities and custom UI for a Fortune top 10 organization. [[ Blog ](https://www.bitovi.com/blog/stackstorm-solves-devops-automation-for-enterprise-client)] [[ Case study ](https://stackstorm.com/case-study-bitovi/)] +* [CERN](https://home.cern) - CERN's Batch team uses StackStorm for Auto-Remediation Workflows for their compute farm, handling AFS storage overloads, and other automation for maintaining the research infrastructure. [[ HEPIX Presentation ](https://codimd.web.cern.ch/p/r6lbybhXy#/1)] [[ CHEP Presentation ](https://indico.jlab.org/event/459/contributions/11638/attachments/9708/14174/chep23_stackstorm.pptx)] * [DMM.com](https://dmm-corp.com/en/) - Large content provider in Japan. StackStorm is used in Operations helping to maintain online services and development at scale. [[ Case study ](https://stackstorm.com/case-study-dmm/)] * [DigitalOcean](https://www.digitalocean.com/about) - DigitalOcean simplifies cloud computing so builders can spend more time creating software that changes the world. Internally, StackStorm is used as a consistent frontend to our numerous operational tools, and it also plays the part of the orchestration and automation engine driving the machine lifecycle of our vast fleet of machines spread across the globe. 
* [Dimension Data](https://www.dimensiondata.com/en/about-us) - Global systems integrator and IT services provider, using StackStorm for Datacenter Orchestration as well as Infrastructure, Networking, Security Automation for their large clients and government projects. [[ Case study ](https://stackstorm.com/case-study-dimension-data/)] diff --git a/BUILD b/BUILD index 179cc680c4..6472bed457 100644 --- a/BUILD +++ b/BUILD @@ -27,6 +27,36 @@ python_requirements( "st2auth/st2auth/backends/constants.py", ] ), + # make sure anything that uses st2-rbac-backend gets its deps + "st2-rbac-backend": dict( + dependencies=[ + # alphabetical order + "st2common/st2common/config.py", + "st2common/st2common/constants/keyvalue.py", + "st2common/st2common/constants/triggers.py", + "st2common/st2common/content/loader.py", + "st2common/st2common/exceptions/db.py", + "st2common/st2common/exceptions/rbac.py", + "st2common/st2common/log.py", + "st2common/st2common/models/api/rbac.py", + "st2common/st2common/models/db/action.py", + "st2common/st2common/models/db/auth.py", + "st2common/st2common/models/db/pack.py", + "st2common/st2common/models/db/rbac.py", + "st2common/st2common/models/db/webhook.py", + "st2common/st2common/models/system/common.py", + "st2common/st2common/persistence/auth.py", + "st2common/st2common/persistence/execution.py", + "st2common/st2common/persistence/rbac.py", + "st2common/st2common/rbac/backends/__init__.py", + "st2common/st2common/rbac/backends/base.py", + "st2common/st2common/rbac/types.py", + "st2common/st2common/script_setup.py", + "st2common/st2common/util/action_db.py", + "st2common/st2common/util/misc.py", + "st2common/st2common/util/uid.py", + ] + ), }, ) @@ -35,6 +65,14 @@ target( dependencies=[ "//:reqs#st2-auth-backend-flat-file", "//:reqs#st2-auth-ldap", + "//:reqs#st2-auth-backend-pam", + ], +) + +target( + name="rbac_backends", + dependencies=[ + "//:reqs#st2-rbac-backend", ], ) @@ -47,3 +85,38 @@ file( name="license", source="LICENSE", ) + 
+shell_sources( + name="root", + skip_shfmt=True, +) + +file( + name="logs_directory", + source="logs/.gitignore", +) + +files( + name="gitmodules", + sources=[ + ".gitmodules", + "**/.git", + ], +) + +shell_command( + name="capture_git_modules", + environment="in_repo_workspace", + command="cp -r .git/modules {chroot}/.git", + tools=["cp"], + # execution_dependencies allows pants to invalidate the output + # of this command if the .gitmodules file changes (for example: + # if a submodule gets updated to a different repo). + # Sadly this does not get invalidated if the submodule commit + # is updated. In our case, that should be rare. To work around + # this, kill the `pantsd` process after updating a submodule. + execution_dependencies=[":gitmodules"], + output_dependencies=[":gitmodules"], + output_directories=[".git/modules"], + workdir="/", +) diff --git a/BUILD.environment b/BUILD.environment new file mode 100644 index 0000000000..f549e53f3e --- /dev/null +++ b/BUILD.environment @@ -0,0 +1,23 @@ +# Everything listed in pants.toml [environments-preview.names] should be defined here. +# Relevant docs: +# - https://www.pantsbuild.org/stable/docs/using-pants/environments +# - https://www.pantsbuild.org/stable/reference/targets/experimental_workspace_environment +# - https://www.pantsbuild.org/stable/reference/targets/local_environment +# - https://www.pantsbuild.org/stable/reference/targets/docker_environment + +# This file MUST NOT use any macros. + +experimental_workspace_environment( + name="in_repo_workspace", + description=( + """ + This allows shell_command and similar to run in the repo, instead of in a sandbox. + Only use this environment for commands or goals that are idempotent. + Ideally, such commands do NOT change anything in the repo. + + If you need to capture output, note that output gets captured from a temporary + sandbox, not from the repo root. 
So, you may need to copy output files into + the sandbox with something like `cp path/to/file {chroot}/path/to/file`. + """ + ), +) diff --git a/BUILD.tools b/BUILD.tools index 0b544e186e..fbb07c6538 100644 --- a/BUILD.tools +++ b/BUILD.tools @@ -1,15 +1,27 @@ # This BUILD file has requirements for most of the tools resolves +python_requirement( + name="crudini-reqs", + resolve="st2", + requirements=[ + "crudini", + ], +) +pex_binary( + name="crudini", + resolve="st2", + dependencies=[":crudini-reqs"], + entry_point="crudini", +) + python_requirement( name="bandit-reqs", resolve="bandit", requirements=[ - "bandit==1.7.0", + # https://github.com/pantsbuild/pants/blob/release_2.23.0rc0/src/python/pants/backend/python/lint/bandit/subsystem.py#L44-L52 + "bandit>=1.7.0,<1.8", "setuptools", - "GitPython==3.1.18", - # bandit needs stevedore which needs importlib-metadata<5 - # see: https://github.com/PyCQA/bandit/pull/952 - "importlib-metadata<5;python_version<'3.8'", + "GitPython>=3.1.24", ], ) @@ -26,9 +38,9 @@ python_requirement( name="flake8-reqs", resolve="flake8", requirements=[ - "flake8==4.0.1", # st2flake8 does not support flake8 v5 + "flake8==7.0.0", # st2flake8 does not support flake8 v5 # license check plugin - "st2flake8==0.1.0", # TODO: remove in favor of regex-lint or preamble + "st2flake8>0.1.0", # TODO: remove in favor of regex-lint or preamble ], ) @@ -37,27 +49,21 @@ python_requirement( python_requirement( name="pytest-reqs", - resolve="pytest", + resolve="st2", requirements=[ "pytest==7.0.1", # copied from https://www.pantsbuild.org/v2.14/docs/reference-pytest#version - "pytest-benchmark[histogram]==3.4.1", - # "pytest-timer[colorama]", - "pytest-icdiff", - "pygments", - # "pytest-timeout", - # "pytest-mock", - "pytest-cov>=2.12,!=2.12.1,<3.1", - "pytest-xdist>=2.5,<3", - ], -) - -python_requirement( - name="setuptools-reqs", - resolve="setuptools", - requirements=[ - # setuptools 59.7 (at least) does not support python 3.6 - 
"setuptools>=50.3.0,<59.0", - "wheel>=0.35.1,<0.38", + "pytest-benchmark[histogram]==3.4.1", # used for st2common/benchmarks + # "pytest-timer[colorama]", # report test timing (--with-timer ala nose-timer) + "pytest-icdiff", # make diff output easier to read + # "pygments", # highlight code in tracebacks (already included in requirements-pants.txt) + # + # other possible plugins + # "pytest-timeout", # time limit on tests + # "pytest-mock", # more convenient mocking + # + # needed by pants + "pytest-cov>=2.12,!=2.12.1,<3.1", # coverage + "pytest-xdist>=2.5,<3", # parallel test runs (pants uses this if [pytest].xdist_enabled) ], ) @@ -65,7 +71,8 @@ python_requirement( name="twine-reqs", resolve="twine", requirements=[ - "twine>=3.7.1,<3.8", + "twine>=3.7.1,<5", "colorama>=0.4.3", + "importlib-metadata<8", ], ) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 7a1bfa4e8e..710369b6c7 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -4,6 +4,123 @@ Changelog in development -------------- +Python 3.6 is no longer supported; Stackstorm requires at least Python 3.8. +This release adds support for Python 3.10 and 3.11, so StackStorm supports python 3.8 - 3.11. + +Newer MongoDB versions are now supported. CI uses MongoDB 7.0. + +Several st2.conf database options have been renamed or deprecated. Most of the options will continue to work using their old name. +However, if you use `[database].ssl_keyfile` and/or `[database].ssl_certfile`, you MUST migrate to `[database].tls_certificate_key_file`. +This new option expects the key and certificate in the same file. 
Use something like the following to create that file from your old files: + +``` +cat path/to/ssl_keyfile path/to/ssl_certfile > path/to/tls_certificate_key_file +``` + +Other options that were renamed under `[database]` are (more details available in `st2.conf.sample`): + +* `ssl` -> `tls` +* `ssl_cert_reqs` -> `tls_allow_invalid_certificates` (opt type change: string -> boolean) +* `ssl_ca_certs` -> `tls_ca_file` +* `ssl_match_hostnames` -> `tls_allow_invalid_hostnames` (meaning is inverted: the new option is the opposite of the old) + +Fixed +~~~~~ +* Fixed #6021 and #5327 by adding max_page_size to api_opts and added limit and offset to list_values() methods of + both action_service and sensor_service +* Fix `packs.get` action. Assumed `master` is primary branch on all packs. #6225 +* Restore Pack integration testing (it was inadvertently skipped) and stop testing against `bionic` and `el7`. #6135 +* Fix Popen.pid typo in st2tests. #6184 +* Bump tooz package to `6.2.0` to fix TLS. #6220 (@jk464) +* Shells via `pywinrm` are initialized with the 65001 codepage to ensure raw string responses are UTF-8. #6034 (@stealthii) + +Changed +~~~~~~~ +* Removed code in all dist_utils.py that was sanitizing the `python_version` environment marker that limited packages in the requirements.txt only being installed on lower python versions. (by @skiedude) +* Bumped `jsonschema` 2.6.0 -> 3.2.0 now that python3.6 is not supported. #6118 +* Bumped many deps based on the lockfiles generated by pants+pex. #6181 #6227 #6200 #6252 #6268 #6329 (by @cognifloyd and @nzlosh) +* Switch to python3's standard lib unittest from unittest2, a backport of python3 unittest features for python2. #6187 (by @nzlosh) +* Drop Python 3.6 testing in CircleCI. #6080 + Contributed by (@philipphomberger Schwarz IT KG) +* Refactor `tools/launchdev.sh` to use `tmux` instead of `screen`. 
#6186 (by @nzlosh and @cognifloyd) +* Updated package build container environment to use py3.8 and mongo4.4 #6129 +* Fix misc DeprecationWarnings to prepare for python 3.10 support. #6188 (by @nzlosh) +* Update st2client deps: editor and prompt-toolkit. #6189 (by @nzlosh) +* Updated dependency oslo.config to prepare for python 3.10 support. #6193 (by @nzlosh) + +* Updated unit tests to use redis for coordination instead of the NoOp driver. This will hopefully make CI more stable. #6245 + Contributed by @FileMagic, @guzzijones, and @cognifloyd + +* Renamed `[database].ssl*` options to support pymongo 4, which we have to update to support newer MongoDB servers. + Please see the note above about migrating to the newer options, especially if you use `[database].ssl_keyfile` + and/or `[database].ssl_certfile`, as those options are ignored in StackStorm 3.9.0. #6250 + Contributed by @cognifloyd + +* Update mongoengine to 0.29 and pymongo to 4.6.3. The pymongo bump (from 3.x to 4.x) is a major update. #6252 + Contributed by @cognifloyd + +* Update CI from testing with mongo 4.4 to testing with MongoDB 7.0. #6246 + Contributed by @guzzijones + +* Relaxed `dnspython` pinning for compatibility with python 3.10 and greater. #6265 + Contributed by @nzlosh + +* Switched tests from `nosetest` to `pytest`. `st2-run-pack-tests` also uses pytest. + So, all pack tests must be runnable by `pytest`, which may require migration. #6291 + Contributed by @nzlosh, @FileMagic, @guzzijones, and @cognifloyd. + +* Migrated github actions from image ubuntu 20.04 with python 3.8.10 to image ubuntu 22.04 with python 3.8.12. #6301 + Contributed by @nzlosh + +Added +~~~~~ +* Continue introducing `pants `_ to improve DX (Developer Experience) + working on StackStorm, improve our security posture, and improve CI reliability thanks in part + to pants' use of PEX lockfiles. This is not a user-facing addition. 
+ #6118 #6141 #6133 #6120 #6181 #6183 #6200 #6237 #6229 #6240 #6241 #6244 #6251 #6253 + #6254 #6258 #6259 #6260 #6269 #6275 #6279 #6278 #6282 #6283 #6273 #6287 #6306 #6307 + #6311 #6314 #6315 #6317 #6319 #6312 #6320 #6321 #6323 #6324 #6325 #6326 #6327 #6328 + #6329 #6330 + Contributed by @cognifloyd +* Build of ST2 EL9 packages #6153 + Contributed by @amanda11 +* Ensure `.pth` files in the st2 virtualenv get loaded by pack virtualenvs. #6183 + Contributed by @cognifloyd +* Allow `st2-rule-tester` to run without a mongo connection if user is testing against local `rule`/`trigger-instance` files. #6208 + Contributed by @jk464 + +* Added a `get_result` method to the `ExecutionResourceManager` Class for st2client + Contributed by @skiedude + +* Added new env var for tests: `ST2TESTS_SYSTEM_USER`. When set, this will override `system_user.user` in st2 conf + so that you can run tests on systems that do not have the `stanley` user. When running tests locally, use the + following to set system user to the current user: `export ST2TESTS_SYSTEM_USER=$(id -un)` #6242 + Contributed by @cognifloyd + +* Added experimental support for setting conf vars via environment variables. All settings in `st2.conf` can be + overriden via enviornment vars in the format: `ST2___