From 28712a87dbcdab199d0996448131172c163e4836 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 20 Jan 2023 06:32:29 +0000 Subject: [PATCH 01/55] Bump cache seed Signed-off-by: Pedro Algarvio From 4831efbf37c335e7fba0b705d323ac877dab99c9 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 20 Jan 2023 06:32:29 +0000 Subject: [PATCH 02/55] Bump cache seed Signed-off-by: Pedro Algarvio From c694db5b677af8b1532f3a9a89a03843d6dc9b67 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 17 Jan 2023 04:50:55 +0000 Subject: [PATCH 03/55] Start building onedir packages Signed-off-by: Pedro Algarvio --- .github/actions/build-onedir-bare/action.yml | 36 ++++++ .github/workflows/build-salt-action.yml | 124 +++++++++++++++++++ .github/workflows/test-action.yml | 15 +++ noxfile.py | 3 + 4 files changed, 178 insertions(+) create mode 100644 .github/actions/build-onedir-bare/action.yml create mode 100644 .github/workflows/build-salt-action.yml diff --git a/.github/actions/build-onedir-bare/action.yml b/.github/actions/build-onedir-bare/action.yml new file mode 100644 index 000000000000..be262f9362df --- /dev/null +++ b/.github/actions/build-onedir-bare/action.yml @@ -0,0 +1,36 @@ +--- +name: build-onedir-bare +description: Build Bare Onedir Package +inputs: + platform: + required: true + type: string + description: The platform to build + arch: + required: true + type: string + description: The platform arch to build + package-name: + required: false + type: string + description: The onedir package name to create + default: salt + +runs: + using: composite + + steps: + + - name: Cache Bare Onedir Package Directory + id: onedir-pkg-cache + uses: actions/cache@v3 + with: + path: artifacts/${{ inputs.package-name }} + key: relenv|${{ env.RELENV_VERSION }}|bare|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{ hashFiles('.relenv/**/*.xz') }} + + - name: Create Onedir Directory + shell: bash + if: steps.onedir-pkg-cache.outputs.cache-hit != 'true' + run: | + python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + python3 -m relenv create --arch=${{ inputs.arch }} artifacts/${{ inputs.package-name }} diff --git a/.github/workflows/build-salt-action.yml b/.github/workflows/build-salt-action.yml new file mode 100644 index 000000000000..28a2e6f03741 --- /dev/null +++ b/.github/workflows/build-salt-action.yml @@ -0,0 +1,124 @@ +--- +name: Build Salt Onedir + +on: + - workflow_call + +jobs: + + linux: + name: Linux + strategy: + fail-fast: false + matrix: + arch: + - x86_64 + - aarch64 + runs-on: + - self-hosted + - linux + - ${{ matrix.arch }} + steps: + - uses: actions/checkout@v3 + - name: Setup Relenv + uses: ./.github/actions/setup-relenv + with: + platform: linux + arch: ${{ matrix.arch }} + - name: Install Salt into Relenv Onedir + uses: ./.github/actions/build-onedir-pkg + with: + platform: linux + arch: ${{ matrix.arch }} + + - name: Set Exit Status + if: always() + run: | + python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + echo "${{ job.status }}" > exitstatus/${{ github.job }}-linux-${{ matrix.arch }} + + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error + + windows: + name: Windows + strategy: + fail-fast: false + matrix: + arch: + - amd64 + runs-on: windows-latest + steps: + - uses: actions/checkout@v3 + - name: Set up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + - name: Setup Relenv + uses: 
./.github/actions/setup-relenv + with: + platform: windows + arch: ${{ matrix.arch }} + - name: Install Salt into Relenv Onedir + uses: ./.github/actions/build-onedir-pkg + with: + platform: windows + arch: ${{ matrix.arch }} + + - name: Set Exit Status + if: always() + run: | + python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + echo "${{ job.status }}" > exitstatus/${{ github.job }}-windows-${{ matrix.arch }} + + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error + + macos: + name: macOS + strategy: + fail-fast: false + matrix: + arch: + - x86_64 + runs-on: macos-12 + steps: + - uses: actions/checkout@v3 + - name: Set up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + - name: Setup Relenv + uses: ./.github/actions/setup-relenv + with: + platform: darwin + arch: ${{ matrix.arch }} + - name: Install Salt into Relenv Onedir + uses: ./.github/actions/build-onedir-pkg + with: + platform: darwin + arch: ${{ matrix.arch }} + + - name: Set Exit Status + if: always() + run: | + python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + echo "${{ job.status }}" > exitstatus/${{ github.job }}-macos-${{ matrix.arch }} + + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml index 9b6e1e0b3ff0..0e31c68185fc 100644 --- a/.github/workflows/test-action.yml +++ b/.github/workflows/test-action.yml @@ -102,6 +102,21 @@ jobs: path: nox.${{ inputs.distro-slug }}.tar.* key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + - name: Download Onedir Tarball as an Artifact + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.package-name }}-${{ inputs.arch }}-${{ inputs.platform }}.tar.xz + path: artifacts/ + + - name: Decompress Onedir Tarball + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + shell: bash + run: | + python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + cd artifacts + tar xvf ${{ inputs.package-name }}-${{ inputs.arch }}-${{ inputs.platform }}.tar.xz + # Skip jobs if nox.*.tar.* is already cached - name: Download Onedir Tarball as an Artifact if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' diff --git a/noxfile.py b/noxfile.py index 24511b067196..72044f1d46a7 100644 --- a/noxfile.py +++ b/noxfile.py @@ -329,6 +329,9 @@ def _install_requirements( if not _upgrade_pip_setuptools_and_wheel(session, onedir=onedir): return False + if onedir and not IS_WINDOWS and not IS_DARWIN and not IS_FREEBSD: + session_run_always(session, "python3", "-m", "relenv", "toolchain", "fetch") + # Install requirements requirements_file = _get_pip_requirements_file( session, transport, requirements_type=requirements_type From a2f4ea76ec43a803503ef7d8c91feb450e14e9f3 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Tue, 17 Jan 2023 11:21:32 -0700 Subject: [PATCH 04/55] Initial setup of package tests (migrated from salt-pkg) --- .pre-commit-config.yaml | 56 + noxfile.py | 22 + pkg/tests/__init__.py | 0 pkg/tests/conftest.py | 341 ++++ pkg/tests/files/check_imports.sls | 53 + pkg/tests/files/check_python.py | 13 + pkg/tests/files/debianbased.sls | 24 + 
pkg/tests/files/redhatbased.sls | 24 + pkg/tests/integration/__init__.py | 0 pkg/tests/integration/test_check_imports.py | 17 + .../integration/test_enabled_disabled.py | 43 + pkg/tests/integration/test_hash.py | 42 + pkg/tests/integration/test_help.py | 13 + pkg/tests/integration/test_pip.py | 125 ++ pkg/tests/integration/test_pip_upgrade.py | 92 ++ pkg/tests/integration/test_pkg.py | 32 + pkg/tests/integration/test_python.py | 31 + pkg/tests/integration/test_salt_api.py | 14 + pkg/tests/integration/test_salt_call.py | 59 + pkg/tests/integration/test_salt_exec.py | 25 + pkg/tests/integration/test_salt_grains.py | 34 + pkg/tests/integration/test_salt_key.py | 7 + pkg/tests/integration/test_salt_minion.py | 19 + pkg/tests/integration/test_salt_output.py | 15 + pkg/tests/integration/test_salt_pillar.py | 6 + pkg/tests/integration/test_salt_state_file.py | 16 + pkg/tests/integration/test_systemd_config.py | 43 + pkg/tests/integration/test_version.py | 110 ++ pkg/tests/support/__init__.py | 0 pkg/tests/support/coverage/sitecustomize.py | 11 + pkg/tests/support/helpers.py | 1404 +++++++++++++++++ pkg/tests/upgrade/test_salt_upgrade.py | 70 + requirements/static/ci/pkgtests.in | 2 + requirements/static/ci/py3.10/pkgtests.txt | 204 +++ requirements/static/ci/py3.7/pkgtests.txt | 219 +++ requirements/static/ci/py3.8/pkgtests.txt | 208 +++ requirements/static/ci/py3.9/pkgtests.txt | 204 +++ 37 files changed, 3598 insertions(+) create mode 100644 pkg/tests/__init__.py create mode 100644 pkg/tests/conftest.py create mode 100644 pkg/tests/files/check_imports.sls create mode 100644 pkg/tests/files/check_python.py create mode 100644 pkg/tests/files/debianbased.sls create mode 100644 pkg/tests/files/redhatbased.sls create mode 100644 pkg/tests/integration/__init__.py create mode 100644 pkg/tests/integration/test_check_imports.py create mode 100644 pkg/tests/integration/test_enabled_disabled.py create mode 100644 pkg/tests/integration/test_hash.py create mode 100644 pkg/tests/integration/test_help.py create mode 100644 pkg/tests/integration/test_pip.py create mode 100644 pkg/tests/integration/test_pip_upgrade.py create mode 100644 pkg/tests/integration/test_pkg.py create mode 100644 pkg/tests/integration/test_python.py create mode 100644 pkg/tests/integration/test_salt_api.py create mode 100644 pkg/tests/integration/test_salt_call.py create mode 100644 pkg/tests/integration/test_salt_exec.py create mode 100644 pkg/tests/integration/test_salt_grains.py create mode 100644 pkg/tests/integration/test_salt_key.py create mode 100644 pkg/tests/integration/test_salt_minion.py create mode 100644 pkg/tests/integration/test_salt_output.py create mode 100644 pkg/tests/integration/test_salt_pillar.py create mode 100644 pkg/tests/integration/test_salt_state_file.py create mode 100644 pkg/tests/integration/test_systemd_config.py create mode 100644 pkg/tests/integration/test_version.py create mode 100644 pkg/tests/support/__init__.py create mode 100644 pkg/tests/support/coverage/sitecustomize.py create mode 100644 pkg/tests/support/helpers.py create mode 100644 pkg/tests/upgrade/test_salt_upgrade.py create mode 100644 requirements/static/ci/pkgtests.in create mode 100644 requirements/static/ci/py3.10/pkgtests.txt create mode 100644 requirements/static/ci/py3.7/pkgtests.txt create mode 100644 requirements/static/ci/py3.8/pkgtests.txt create mode 100644 requirements/static/ci/py3.9/pkgtests.txt diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 896c54b0a062..c919ca2d18fa 100644 --- 
a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1035,6 +1035,62 @@ repos: - requirements/static/ci/invoke.in # <---- Invoke ----------------------------------------------------------------------------------------------------- + # <---- PKG ci requirements----------------------------------------------------------------------------------------- + - id: pip-tools-compile + alias: compile-ci-pkg-3.7-requirements + name: PKG tests CI Py3.7 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkg|common)\.in|py3\.7/pkg\.txt)))$ + pass_filenames: false + args: + - -v + - --py-version=3.7 + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - requirements/static/ci/pkgtests.in + + - id: pip-tools-compile + alias: compile-ci-pkg-3.8-requirements + name: PKG tests CI Py3.8 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkg|common)\.in|py3\.7/pkg\.txt)))$ + pass_filenames: false + args: + - -v + - --py-version=3.8 + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - requirements/static/ci/pkgtests.in + + - id: pip-tools-compile + alias: compile-ci-pkg-3.9-requirements + name: PKG tests CI Py3.9 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkg|common)\.in|py3\.7/pkg\.txt)))$ + pass_filenames: false + args: + - -v + - --py-version=3.9 + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - requirements/static/ci/pkgtests.in + + + - id: pip-tools-compile + alias: compile-ci-pkg-3.10-requirements + name: PKG tests CI Py3.10 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkg|common)\.in|py3\.7/pkg\.txt)))$ + pass_filenames: false + args: + - -v + - --py-version=3.10 + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - requirements/static/ci/pkgtests.in + + + # ----- Tools ----------------------------------------------------------------------------------------------------> - id: pip-tools-compile alias: compile-ci-tools-3.9-requirements diff --git a/noxfile.py b/noxfile.py index 72044f1d46a7..c36542301d52 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1758,3 +1758,25 @@ def build(session): ] session.run("sha256sum", *packages, external=True) session.run("python", "-m", "twine", "check", "dist/*") + + +@nox.session(python=_PYTHON_VERSIONS, name="test-pkgs") +@nox.parametrize("coverage", [False, True]) +def test_pkgs(session, coverage): + """ + pytest pkg tests session + """ + pydir = _get_pydir(session) + # Install requirements + if _upgrade_pip_setuptools_and_wheel(session): + requirements_file = os.path.join( + "requirements", "static", "ci", _get_pydir(session), "pkgtests.txt" + ) + + install_command = ["--progress-bar=off", "-r", requirements_file] + session.install(*install_command, silent=PIP_INSTALL_SILENT) + + cmd_args = [ + "pkg/tests/", + ] + session.posargs + _pytest(session, coverage, cmd_args) diff --git a/pkg/tests/__init__.py b/pkg/tests/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py new file mode 100644 index 000000000000..47d6ebd1acb6 --- /dev/null +++ b/pkg/tests/conftest.py @@ -0,0 +1,341 @@ +import logging +import pathlib +import re +import shutil + +import pytest +from pytestskipmarkers.utils 
import platform +from saltfactories.utils import random_string +from saltfactories.utils.tempfiles import SaltPillarTree, SaltStateTree + +from tests.support.helpers import ( + ARTIFACTS_DIR, + CODE_DIR, + TESTS_DIR, + ApiRequest, + SaltMaster, + SaltPkgInstall, + TestUser, +) + +log = logging.getLogger(__name__) + + +@pytest.fixture(scope="session") +def version(): + """ + get version number from artifact + """ + _version = "" + for artifact in ARTIFACTS_DIR.glob("**/*.*"): + _version = re.search( + r"([0-9].*)(\-[0-9].fc|\-[0-9].el|\+ds|\-[0-9].am|\-[0-9]-[a-z]*-[a-z]*[0-9_]*.(tar.gz|zip|exe|pkg|rpm))", + artifact.name, + ) + if _version: + _version = _version.groups()[0].replace("_", "-").replace("~", "") + break + return _version + + +def pytest_addoption(parser): + """ + register argparse-style options and ini-style config values. + """ + test_selection_group = parser.getgroup("Tests Runtime Selection") + # test_selection_group.addoption( + # "--system-service", + # default=False, + # action="store_true", + # help="Run the daemons as system services", + # ) + test_selection_group.addoption( + "--upgrade", + default=False, + action="store_true", + help="Install previous version and then upgrade then run tests", + ) + test_selection_group.addoption( + "--no-install", + default=False, + action="store_true", + help="Do not install salt and use a previous install Salt package", + ) + test_selection_group.addoption( + "--no-uninstall", + default=False, + action="store_true", + help="Do not uninstall salt packages after test run is complete", + ) + + +@pytest.fixture(scope="session") +def salt_factories_root_dir(request, tmp_path_factory): + root_dir = SaltPkgInstall.salt_factories_root_dir( + request.config.getoption("--system-service") + ) + if root_dir is not None: + yield root_dir + else: + root_dir = tmp_path_factory.mktemp("salt-tests") + try: + yield root_dir + finally: + shutil.rmtree(str(root_dir), ignore_errors=True) + + +@pytest.fixture(scope="session") +def salt_factories_config(salt_factories_root_dir): + return { + "code_dir": CODE_DIR, + "root_dir": salt_factories_root_dir, + "system_install": True, + } + + +@pytest.fixture(scope="session") +def install_salt(request, salt_factories_root_dir): + with SaltPkgInstall( + conf_dir=salt_factories_root_dir / "etc" / "salt", + system_service=request.config.getoption("--system-service"), + upgrade=request.config.getoption("--upgrade"), + no_uninstall=request.config.getoption("--no-uninstall"), + no_install=request.config.getoption("--no-install"), + ) as fixture: + yield fixture + + +@pytest.fixture(scope="session") +def salt_factories(salt_factories, salt_factories_root_dir): + salt_factories.root_dir = salt_factories_root_dir + return salt_factories + + +@pytest.fixture(scope="session") +def state_tree(): + if platform.is_windows(): + file_root = pathlib.Path("C:/salt/srv/salt") + elif platform.is_darwin(): + file_root = pathlib.Path("/opt/srv/salt") + else: + file_root = pathlib.Path("/srv/salt") + envs = { + "base": [ + str(file_root), + str(TESTS_DIR / "files"), + ], + } + tree = SaltStateTree(envs=envs) + test_sls_contents = """ + test_foo: + test.succeed_with_changes: + - name: foo + """ + states_sls_contents = """ + update: + pkg.installed: + - name: bash + salt_dude: + user.present: + - name: dude + - fullname: Salt Dude + """ + win_states_sls_contents = """ + create_empty_file: + file.managed: + - name: C://salt/test/txt + salt_dude: + user.present: + - name: dude + - fullname: Salt Dude + """ + with 
tree.base.temp_file("test.sls", test_sls_contents), tree.base.temp_file( + "states.sls", states_sls_contents + ), tree.base.temp_file("win_states.sls", win_states_sls_contents): + yield tree + + +@pytest.fixture(scope="session") +def pillar_tree(): + """ + Add pillar files + """ + if platform.is_windows(): + pillar_root = pathlib.Path("C:/salt/srv/pillar") + elif platform.is_darwin(): + pillar_root = pathlib.Path("/opt/srv/pillar") + else: + pillar_root = pathlib.Path("/srv/pillar") + pillar_root.mkdir(mode=0o777, parents=True, exist_ok=True) + tree = SaltPillarTree( + envs={ + "base": [ + str(pillar_root), + ] + }, + ) + top_file_contents = """ + base: + '*': + - test + """ + test_file_contents = """ + info: test + """ + with tree.base.temp_file("top.sls", top_file_contents), tree.base.temp_file( + "test.sls", test_file_contents + ): + yield tree + + +@pytest.fixture(scope="module") +def sls(state_tree): + """ + Add an sls file + """ + test_sls_contents = """ + test_foo: + test.succeed_with_changes: + - name: foo + """ + states_sls_contents = """ + update: + pkg.installed: + - name: bash + salt_dude: + user.present: + - name: dude + - fullname: Salt Dude + """ + win_states_sls_contents = """ + create_empty_file: + file.managed: + - name: C://salt/test/txt + salt_dude: + user.present: + - name: dude + - fullname: Salt Dude + """ + with state_tree.base.temp_file( + "tests.sls", test_sls_contents + ), state_tree.base.temp_file( + "states.sls", states_sls_contents + ), state_tree.base.temp_file( + "win_states.sls", win_states_sls_contents + ): + yield + + +@pytest.fixture(scope="session") +def salt_master(salt_factories, install_salt, state_tree, pillar_tree): + """ + Start up a master + """ + start_timeout = None + # Since the daemons are "packaged" with tiamat, the salt plugins provided + # by salt-factories won't be discovered. Provide the required `*_dirs` on + # the configuration so that they can still be used. + config_defaults = { + "engines_dirs": [ + str(salt_factories.get_salt_engines_path()), + ], + "log_handlers_dirs": [ + str(salt_factories.get_salt_log_handlers_path()), + ], + } + config_overrides = { + "timeout": 30, + "file_roots": state_tree.as_dict(), + "pillar_roots": pillar_tree.as_dict(), + "rest_cherrypy": {"port": 8000, "disable_ssl": True}, + "external_auth": {"auto": {"saltdev": [".*"]}}, + } + if (platform.is_windows() or platform.is_darwin()) and install_salt.singlebin: + start_timeout = 240 + # For every minion started we have to accept it's key. + # On windows, using single binary, it has to decompress it and run the command. Too slow. + # So, just in this scenario, use open mode + config_overrides["open_mode"] = True + factory = salt_factories.salt_master_daemon( + random_string("master-"), + defaults=config_defaults, + overrides=config_overrides, + factory_class=SaltMaster, + salt_pkg_install=install_salt, + ) + factory.after_terminate(pytest.helpers.remove_stale_master_key, factory) + with factory.started(start_timeout=start_timeout): + yield factory + + +@pytest.fixture(scope="session") +def salt_minion(salt_master, install_salt): + """ + Start up a minion + """ + start_timeout = None + if (platform.is_windows() or platform.is_darwin()) and install_salt.singlebin: + start_timeout = 240 + minion_id = random_string("minion-") + # Since the daemons are "packaged" with tiamat, the salt plugins provided + # by salt-factories won't be discovered. Provide the required `*_dirs` on + # the configuration so that they can still be used. 
+ config_defaults = { + "engines_dirs": salt_master.config["engines_dirs"].copy(), + "log_handlers_dirs": salt_master.config["log_handlers_dirs"].copy(), + } + config_overrides = { + "id": minion_id, + "file_roots": salt_master.config["file_roots"].copy(), + "pillar_roots": salt_master.config["pillar_roots"].copy(), + } + factory = salt_master.salt_minion_daemon( + minion_id, + overrides=config_overrides, + defaults=config_defaults, + ) + factory.after_terminate( + pytest.helpers.remove_stale_minion_key, salt_master, factory.id + ) + with factory.started(start_timeout=start_timeout): + yield factory + + +@pytest.fixture(scope="module") +def salt_cli(salt_master): + return salt_master.salt_cli() + + +@pytest.fixture(scope="module") +def salt_key_cli(salt_master): + return salt_master.salt_key_cli() + + +@pytest.fixture(scope="module") +def salt_call_cli(salt_minion): + return salt_minion.salt_call_cli() + + +@pytest.fixture(scope="module") +def test_account(salt_call_cli): + with TestUser(salt_call_cli=salt_call_cli) as account: + yield account + + +@pytest.fixture(scope="module") +def salt_api(salt_master, install_salt): + """ + start up and configure salt_api + """ + start_timeout = None + if platform.is_windows() and install_salt.singlebin: + start_timeout = 240 + factory = salt_master.salt_api_daemon() + with factory.started(start_timeout=start_timeout): + yield factory + + +@pytest.fixture(scope="module") +def api_request(test_account, salt_api): + with ApiRequest(salt_api=salt_api, test_account=test_account) as session: + yield session diff --git a/pkg/tests/files/check_imports.sls b/pkg/tests/files/check_imports.sls new file mode 100644 index 000000000000..0dde9d6ad332 --- /dev/null +++ b/pkg/tests/files/check_imports.sls @@ -0,0 +1,53 @@ +#!py +import importlib + +def run(): + config = {} + for test_import in [ + 'templates', 'platform', 'cli', 'executors', 'config', 'wheel', 'netapi', + 'cache', 'proxy', 'transport', 'metaproxy', 'modules', 'tokens', 'matchers', + 'acl', 'auth', 'log', 'engines', 'client', 'returners', 'runners', 'tops', + 'output', 'daemons', 'thorium', 'renderers', 'states', 'cloud', 'roster', + 'beacons', 'pillar', 'spm', 'utils', 'sdb', 'fileserver', 'defaults', + 'ext', 'queues', 'grains', 'serializers' + ]: + try: + import_name = "salt.{}".format(test_import) + importlib.import_module(import_name) + config['test_imports_succeeded'] = { + 'test.succeed_without_changes': [ + { + 'name': import_name + }, + ], + } + except ModuleNotFoundError as err: + config['test_imports_failed'] = { + 'test.fail_without_changes': [ + { + 'name': import_name, + 'comment': "The imports test failed. The error was: {}".format(err) + }, + ], + } + + for stdlib_import in ["telnetlib"]: + try: + importlib.import_module(stdlib_import) + config['stdlib_imports_succeeded'] = { + 'test.succeed_without_changes': [ + { + 'name': stdlib_import + }, + ], + } + except ModuleNotFoundError as err: + config['stdlib_imports_failed'] = { + 'test.fail_without_changes': [ + { + 'name': stdlib_import, + 'comment': "The stdlib imports test failed. 
The error was: {}".format(err) + }, + ], + } + return config diff --git a/pkg/tests/files/check_python.py b/pkg/tests/files/check_python.py new file mode 100644 index 000000000000..f1d46b76df7b --- /dev/null +++ b/pkg/tests/files/check_python.py @@ -0,0 +1,13 @@ +import sys + +import salt.utils.data + +user_arg = sys.argv + +if user_arg[1] == "raise": + raise Exception("test") + +if salt.utils.data.is_true(user_arg[1]): + sys.exit(0) +else: + sys.exit(1) diff --git a/pkg/tests/files/debianbased.sls b/pkg/tests/files/debianbased.sls new file mode 100644 index 000000000000..2d1fb4cb35f5 --- /dev/null +++ b/pkg/tests/files/debianbased.sls @@ -0,0 +1,24 @@ +{% set services_enabled = ['salt-master', 'salt-minion', 'salt-syndic', 'salt-api'] %} +{% set services_disabled = [] %} + +{% for service in services_enabled %} +check_services_enabled_{{ service }}: + service.enabled: + - name: {{ service }} +run_if_changes_{{ service }}: + cmd.run: + - name: failtest service is enabled + - onchanges: + - service: check_services_enabled_{{ service }} +{% endfor %} + +{% for service in services_disabled %} +check_services_disabled_{{ service }}: + service.disabled: + - name: {{ service }} +run_if_changes_{{ service }}: + cmd.run: + - name: failtest service is disabled + - onchanges: + - service: check_services_disabled_{{ service }} +{% endfor %} diff --git a/pkg/tests/files/redhatbased.sls b/pkg/tests/files/redhatbased.sls new file mode 100644 index 000000000000..1ea16e95c9f6 --- /dev/null +++ b/pkg/tests/files/redhatbased.sls @@ -0,0 +1,24 @@ +{% set services_enabled = [] %} +{% set services_disabled = ['salt-master', 'salt-minion', 'salt-syndic', 'salt-api'] %} + +{% for service in services_enabled %} +check_services_enabled_{{ service }}: + service.enabled: + - name: {{ service }} +run_if_changes_{{ service }}: + cmd.run: + - name: failtest service is enabled + - onchanges: + - service: check_services_enabled_{{ service }} +{% endfor %} + +{% for service in services_disabled %} +check_services_disabled_{{ service }}: + service.disabled: + - name: {{ service }} +run_if_changes_{{ service }}: + cmd.run: + - name: failtest service is disabled + - onchanges: + - service: check_services_disabled_{{ service }} +{% endfor %} diff --git a/pkg/tests/integration/__init__.py b/pkg/tests/integration/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pkg/tests/integration/test_check_imports.py b/pkg/tests/integration/test_check_imports.py new file mode 100644 index 000000000000..742b08c39179 --- /dev/null +++ b/pkg/tests/integration/test_check_imports.py @@ -0,0 +1,17 @@ +import logging + +from saltfactories.utils.functional import MultiStateResult + +log = logging.getLogger(__name__) + + +def test_check_imports(salt_cli, salt_minion): + """ + Test imports + """ + ret = salt_cli.run("state.sls", "check_imports", minion_tgt=salt_minion.id) + assert ret.returncode == 0 + assert ret.data + result = MultiStateResult(raw=ret.data) + for state_ret in result: + assert state_ret.result is True diff --git a/pkg/tests/integration/test_enabled_disabled.py b/pkg/tests/integration/test_enabled_disabled.py new file mode 100644 index 000000000000..887da53e1c05 --- /dev/null +++ b/pkg/tests/integration/test_enabled_disabled.py @@ -0,0 +1,43 @@ +import pytest +from saltfactories.utils.functional import MultiStateResult + + +@pytest.mark.skip_on_windows(reason="Linux test only") +def test_services(install_salt, salt_cli, salt_minion): + """ + Check if Services are enabled/disabled + """ + if 
install_salt.compressed: + pytest.skip("Skip test on single binary and onedir package") + + ret = salt_cli.run("grains.get", "os_family", minion_tgt=salt_minion.id) + assert ret.returncode == 0 + assert ret.data + + state_name = desired_state = None + os_family = ret.data + + if os_family == "Debian": + state_name = "debianbased" + desired_state = "enabled" + elif os_family == "RedHat": + state_name = "redhatbased" + desired_state = "disabled" + else: + pytest.fail(f"Don't know how to handle os_family={os_family}") + + ret = salt_cli.run("state.apply", state_name, minion_tgt=salt_minion.id) + assert ret.returncode == 0 + assert ret.data + + expected_in_comment = f"is already {desired_state}, and is in the desired state" + + result = MultiStateResult(raw=ret.data) + for state_ret in result: + assert state_ret.result is True + if "__id__" not in state_ret.full_return: + # This is a state requirement + # For example: + # State was not run because none of the onchanges reqs changed + continue + assert expected_in_comment in state_ret.comment diff --git a/pkg/tests/integration/test_hash.py b/pkg/tests/integration/test_hash.py new file mode 100644 index 000000000000..026246e68088 --- /dev/null +++ b/pkg/tests/integration/test_hash.py @@ -0,0 +1,42 @@ +import hashlib +import logging +import sys + +import pytest + +log = logging.getLogger(__name__) + + +@pytest.mark.usefixtures("version") +def test_hashes(install_salt, salt_cli, salt_minion): + """ + Test the hashes generated for both single binary + and the onedir packages. + """ + if not install_salt.compressed: + pytest.skip("This test requires the single binary or onedir package") + + hashes = install_salt.salt_hashes + pkg = install_salt.pkgs[0] + + with open(pkg, "rb") as fh: + file_bytes = fh.read() + + delimiter = "/" + if sys.platform.startswith("win"): + delimiter = "\\" + + for _hash in hashes.keys(): + hash_file = hashes[_hash]["file"] + found_hash = False + with open(hash_file) as fp: + for line in fp: + if pkg.rsplit(delimiter, 1)[-1] in line: + found_hash = True + assert ( + getattr(hashlib, _hash.lower())(file_bytes).hexdigest() + == line.split()[0] + ) + + if not found_hash: + assert False, f"A {_hash} hash was not found in {hash_file} for pkg {pkg}" diff --git a/pkg/tests/integration/test_help.py b/pkg/tests/integration/test_help.py new file mode 100644 index 000000000000..7379f2e915d2 --- /dev/null +++ b/pkg/tests/integration/test_help.py @@ -0,0 +1,13 @@ +def test_help(install_salt): + """ + Test --help works for all salt cmds + """ + for cmd in install_salt.binary_paths.values(): + if "salt-cloud" in cmd: + assert True + elif "salt-ssh" in cmd: + assert True + else: + ret = install_salt.proc.run(*cmd, "--help") + assert "Usage" in ret.stdout + assert ret.returncode == 0 diff --git a/pkg/tests/integration/test_pip.py b/pkg/tests/integration/test_pip.py new file mode 100644 index 000000000000..e118fcf4a827 --- /dev/null +++ b/pkg/tests/integration/test_pip.py @@ -0,0 +1,125 @@ +import os +import pathlib +import shutil +import subprocess + +import pytest +from pytestskipmarkers.utils import platform + + +@pytest.fixture +def pypath(): + if platform.is_windows(): + return pathlib.Path(os.getenv("LocalAppData"), "salt", "pypath") + return pathlib.Path(f"{os.sep}opt", "saltstack", "salt", "pypath") + + +@pytest.fixture(autouse=True) +def wipe_pypath(pypath): + try: + yield + finally: + # Let's make sure pypath is clean after each test, since it's contents + # are not actually part of the test suite, and they break other test + # 
suite assumptions + for path in pypath.glob("*"): + if path.is_dir(): + shutil.rmtree(path, ignore_errors=True) + else: + path.unlink() + + +def test_pip_install(salt_call_cli): + """ + Test pip.install and ensure + module can use installed library + """ + dep = "PyGithub" + repo = "https://github.com/saltstack/salt.git" + + try: + install = salt_call_cli.run("--local", "pip.install", dep) + assert install.returncode == 0 + + use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) + assert "Authentication information could" in use_lib.stderr + finally: + ret = salt_call_cli.run("--local", "pip.uninstall", dep) + assert ret.returncode == 0 + use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) + assert "The github execution module cannot be loaded" in use_lib.stderr + + +def demote(user_uid, user_gid): + def result(): + os.setgid(user_gid) + os.setuid(user_uid) + + return result + + +@pytest.mark.skip_on_windows(reason="We can't easily demote users on Windows") +def test_pip_non_root(install_salt, test_account, pypath): + # Let's make sure pypath does not exist + shutil.rmtree(pypath) + + assert not pypath.exists() + # We should be able to issue a --help without being root + ret = subprocess.run( + install_salt.binary_paths["salt"] + ["--help"], + preexec_fn=demote(test_account.uid, test_account.gid), + env=test_account.env, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr + assert "Usage" in ret.stdout + assert not pypath.exists() + + # Try to pip install something, should fail + ret = subprocess.run( + install_salt.binary_paths["pip"] + ["install", "pep8"], + preexec_fn=demote(test_account.uid, test_account.gid), + env=test_account.env, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 1, ret.stderr + assert f"The path '{pypath}' does not exist or could not be created." 
in ret.stderr + assert not pypath.exists() + + # Let tiamat-pip create the pypath directory for us + ret = subprocess.run( + install_salt.binary_paths["pip"] + ["install", "-h"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr + + # Now, we should still not be able to install as non-root + ret = subprocess.run( + install_salt.binary_paths["pip"] + ["install", "pep8"], + preexec_fn=demote(test_account.uid, test_account.gid), + env=test_account.env, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode != 0, ret.stderr + + # But we should be able to install as root + ret = subprocess.run( + install_salt.binary_paths["pip"] + ["install", "pep8"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr diff --git a/pkg/tests/integration/test_pip_upgrade.py b/pkg/tests/integration/test_pip_upgrade.py new file mode 100644 index 000000000000..20f6cd08218a --- /dev/null +++ b/pkg/tests/integration/test_pip_upgrade.py @@ -0,0 +1,92 @@ +import logging +import subprocess + +import pytest + +log = logging.getLogger(__name__) + + +def test_pip_install(install_salt, salt_call_cli): + """ + Test pip.install and ensure that a package included in the tiamat build can be upgraded + """ + ret = subprocess.run( + install_salt.binary_paths["salt"] + ["--versions-report"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + check=True, + shell=False, + ) + assert ret.returncode == 0 + + possible_upgrades = [ + "docker-py", + "msgpack", + "pycparser", + "python-gnupg", + "pyyaml", + "pyzmq", + "jinja2", + ] + found_new = False + for dep in possible_upgrades: + get_latest = salt_call_cli.run("--local", "pip.list_all_versions", dep) + if not get_latest.data: + # No information available + continue + dep_version = get_latest.data[-1] + installed_version = None + for line in ret.stdout.splitlines(): + if dep in line.lower(): + installed_version = line.lower().strip().split(":")[-1].strip() + break + else: + pytest.fail(f"Failed to find {dep} in the versions report output") + + if dep_version == installed_version: + log.warning(f"The {dep} dependency is already latest") + else: + found_new = True + break + + if found_new: + try: + install = salt_call_cli.run( + "--local", "pip.install", f"{dep}=={dep_version}" + ) + assert install + log.warning(install) + # The assert is commented out because pip will actually trigger a failure since + # we're breaking the dependency tree, but, for the purpose of this test, we can + # ignore it. 
+ # + # assert install.returncode == 0 + + ret = subprocess.run( + install_salt.binary_paths["salt"] + ["--versions-report"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + check=True, + shell=False, + ) + assert ret.returncode == 0 + for line in ret.stdout.splitlines(): + if dep in line.lower(): + new_version = line.lower().strip().split(":")[-1].strip() + if new_version == installed_version: + pytest.fail( + f"The newly installed version of {dep} does not show in the versions report" + ) + assert new_version == dep_version + break + else: + pytest.fail(f"Failed to find {dep} in the versions report output") + finally: + log.info(f"Uninstalling {dep_version}") + assert salt_call_cli.run( + "--local", "pip.uninstall", f"{dep}=={dep_version}" + ) + else: + pytest.skip("Did not find an upgrade version for any of the dependencies") diff --git a/pkg/tests/integration/test_pkg.py b/pkg/tests/integration/test_pkg.py new file mode 100644 index 000000000000..2913ba6fc784 --- /dev/null +++ b/pkg/tests/integration/test_pkg.py @@ -0,0 +1,32 @@ +import sys + +import pytest + +pytestmark = [ + pytest.mark.skip_unless_on_linux, +] + + +@pytest.fixture(scope="module") +def grains(salt_call_cli): + ret = salt_call_cli.run("--local", "grains.items") + assert ret.data, ret + return ret.data + + +@pytest.fixture(scope="module") +def pkgname(grains): + if sys.platform.startswith("win"): + return "putty" + elif grains["os_family"] == "RedHat": + if grains["os"] == "VMware Photon OS": + return "snoopy" + return "units" + elif grains["os_family"] == "Debian": + return "ifenslave" + return "figlet" + + +def test_pkg_install(salt_call_cli, pkgname): + ret = salt_call_cli.run("--local", "state.single", "pkg.installed", pkgname) + assert ret.returncode == 0 diff --git a/pkg/tests/integration/test_python.py b/pkg/tests/integration/test_python.py new file mode 100644 index 000000000000..878905e54840 --- /dev/null +++ b/pkg/tests/integration/test_python.py @@ -0,0 +1,31 @@ +import subprocess + +import pytest + +from tests.support.helpers import TESTS_DIR + + +@pytest.mark.parametrize("exp_ret,user_arg", [(1, "false"), (0, "true")]) +def test_python_script(install_salt, exp_ret, user_arg): + ret = subprocess.run( + install_salt.binary_paths["salt"] + + ["python", str(TESTS_DIR / "files" / "check_python.py"), user_arg], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + + assert ret.returncode == exp_ret, ret.stderr + + +def test_python_script_exception(install_salt): + ret = subprocess.run( + install_salt.binary_paths["salt"] + + ["python", str(TESTS_DIR / "files" / "check_python.py"), "raise"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert "Exception: test" in ret.stderr diff --git a/pkg/tests/integration/test_salt_api.py b/pkg/tests/integration/test_salt_api.py new file mode 100644 index 000000000000..0c9485038c14 --- /dev/null +++ b/pkg/tests/integration/test_salt_api.py @@ -0,0 +1,14 @@ +def test_salt_api(api_request): + """ + Test running a command against the salt api + """ + ret = api_request.post( + "/run", + data={ + "client": "local", + "tgt": "*", + "fun": "test.arg", + "arg": ["foo", "bar"], + }, + ) + assert ret["args"] == ["foo", "bar"] diff --git a/pkg/tests/integration/test_salt_call.py b/pkg/tests/integration/test_salt_call.py new file mode 100644 index 000000000000..13af02bb394b --- /dev/null +++ b/pkg/tests/integration/test_salt_call.py @@ -0,0 +1,59 @@ +import 
pytest + + +def test_salt_call_local(salt_call_cli): + """ + Test salt-call --local test.ping + """ + ret = salt_call_cli.run("--local", "test.ping") + assert ret.data is True + assert ret.returncode == 0 + + +def test_salt_call(salt_call_cli): + """ + Test salt-call test.ping + """ + ret = salt_call_cli.run("test.ping") + assert ret.data is True + assert ret.returncode == 0 + + +def test_sls(salt_call_cli): + """ + Test calling a sls file + """ + ret = salt_call_cli.run("state.apply", "test") + assert ret.data, ret + sls_ret = ret.data[next(iter(ret.data))] + assert sls_ret["changes"]["testing"]["new"] == "Something pretended to change" + assert ret.returncode == 0 + + +def test_salt_call_local_sys_doc_none(salt_call_cli): + """ + Test salt-call --local sys.doc none + """ + ret = salt_call_cli.run("--local", "sys.doc", "none") + assert not ret.data + assert ret.returncode == 0 + + +def test_salt_call_local_sys_doc_aliasses(salt_call_cli): + """ + Test salt-call --local sys.doc aliasses + """ + ret = salt_call_cli.run("--local", "sys.doc", "aliases.list_aliases") + assert "aliases.list_aliases" in ret.data + assert ret.returncode == 0 + + +@pytest.mark.skip_on_windows() +def test_salt_call_cmd_run_id_runas(salt_call_cli, test_account, caplog): + """ + Test salt-call --local cmd_run id with runas + """ + ret = salt_call_cli.run("--local", "cmd.run", "id", runas=test_account.username) + assert "Environment could not be retrieved for user" not in caplog.text + assert str(test_account.uid) in ret.stdout + assert str(test_account.gid) in ret.stdout diff --git a/pkg/tests/integration/test_salt_exec.py b/pkg/tests/integration/test_salt_exec.py new file mode 100644 index 000000000000..9b7d7fc7f682 --- /dev/null +++ b/pkg/tests/integration/test_salt_exec.py @@ -0,0 +1,25 @@ +from sys import platform + + +def test_salt_cmd_run(salt_cli, salt_minion): + """ + Test salt cmd.run 'ipconfig' or 'ls -lah /' + """ + ret = None + if platform.startswith("win"): + ret = salt_cli.run("cmd.run", "ipconfig", minion_tgt=salt_minion.id) + else: + ret = salt_cli.run("cmd.run", "ls -lah /", minion_tgt=salt_minion.id) + assert ret + assert ret.stdout + + +def test_salt_list_users(salt_cli, salt_minion): + """ + Test salt user.list_users + """ + ret = salt_cli.run("user.list_users", minion_tgt=salt_minion.id) + if platform.startswith("win"): + assert "Administrator" in ret.stdout + else: + assert "root" in ret.stdout diff --git a/pkg/tests/integration/test_salt_grains.py b/pkg/tests/integration/test_salt_grains.py new file mode 100644 index 000000000000..e42dbb1c1c8f --- /dev/null +++ b/pkg/tests/integration/test_salt_grains.py @@ -0,0 +1,34 @@ +def test_grains_items(salt_cli, salt_minion): + """ + Test grains.items + """ + ret = salt_cli.run("grains.items", minion_tgt=salt_minion.id) + assert ret.data, ret + assert "osrelease" in ret.data + + +def test_grains_item_os(salt_cli, salt_minion): + """ + Test grains.item os + """ + ret = salt_cli.run("grains.item", "os", minion_tgt=salt_minion.id) + assert ret.data, ret + assert "os" in ret.data + + +def test_grains_item_pythonversion(salt_cli, salt_minion): + """ + Test grains.item pythonversion + """ + ret = salt_cli.run("grains.item", "pythonversion", minion_tgt=salt_minion.id) + assert ret.data, ret + assert "pythonversion" in ret.data + + +def test_grains_setval_key_val(salt_cli, salt_minion): + """ + Test grains.setval key val + """ + ret = salt_cli.run("grains.setval", "key", "val", minion_tgt=salt_minion.id) + assert ret.data, ret + assert "key" in ret.data diff 
--git a/pkg/tests/integration/test_salt_key.py b/pkg/tests/integration/test_salt_key.py new file mode 100644 index 000000000000..5a2db4cddea1 --- /dev/null +++ b/pkg/tests/integration/test_salt_key.py @@ -0,0 +1,7 @@ +def test_salt_key(salt_key_cli, salt_minion): + """ + Test running salt-key -L + """ + ret = salt_key_cli.run("-L") + assert ret.data + assert salt_minion.id in ret.data["minions"] diff --git a/pkg/tests/integration/test_salt_minion.py b/pkg/tests/integration/test_salt_minion.py new file mode 100644 index 000000000000..1c9e743dad53 --- /dev/null +++ b/pkg/tests/integration/test_salt_minion.py @@ -0,0 +1,19 @@ +def test_salt_minion_ping(salt_cli, salt_minion): + """ + Test running a command against a targeted minion + """ + ret = salt_cli.run("test.ping", minion_tgt=salt_minion.id) + assert ret.returncode == 0 + assert ret.data is True + + +def test_salt_minion_setproctitle(salt_cli, salt_minion): + """ + Test that setproctitle is working + for the running Salt minion + """ + ret = salt_cli.run( + "ps.pgrep", "MinionProcessManager", full=True, minion_tgt=salt_minion.id + ) + assert ret.returncode == 0 + assert ret.data != "" diff --git a/pkg/tests/integration/test_salt_output.py b/pkg/tests/integration/test_salt_output.py new file mode 100644 index 000000000000..953618b2dfb4 --- /dev/null +++ b/pkg/tests/integration/test_salt_output.py @@ -0,0 +1,15 @@ +import pytest + + +@pytest.mark.parametrize("output_fmt", ["yaml", "json"]) +def test_salt_output(salt_cli, salt_minion, output_fmt): + """ + Test --output + """ + ret = salt_cli.run( + f"--output={output_fmt}", "test.fib", "7", minion_tgt=salt_minion.id + ) + if output_fmt == "json": + assert 13 in ret.data + else: + ret.stdout.matcher.fnmatch_lines(["*- 13*"]) diff --git a/pkg/tests/integration/test_salt_pillar.py b/pkg/tests/integration/test_salt_pillar.py new file mode 100644 index 000000000000..43656fce4e50 --- /dev/null +++ b/pkg/tests/integration/test_salt_pillar.py @@ -0,0 +1,6 @@ +def test_salt_pillar(salt_cli, salt_minion): + """ + Test pillar.items + """ + ret = salt_cli.run("pillar.items", minion_tgt=salt_minion.id) + assert "info" in ret.data diff --git a/pkg/tests/integration/test_salt_state_file.py b/pkg/tests/integration/test_salt_state_file.py new file mode 100644 index 000000000000..585167a7e550 --- /dev/null +++ b/pkg/tests/integration/test_salt_state_file.py @@ -0,0 +1,16 @@ +import sys + + +def test_salt_state_file(salt_cli, salt_minion): + """ + Test state file + """ + if sys.platform.startswith("win"): + ret = salt_cli.run("state.apply", "win_states", minion_tgt=salt_minion.id) + else: + ret = salt_cli.run("state.apply", "states", minion_tgt=salt_minion.id) + + assert ret.data, ret + sls_ret = ret.data[next(iter(ret.data))] + assert "changes" in sls_ret + assert "name" in sls_ret diff --git a/pkg/tests/integration/test_systemd_config.py b/pkg/tests/integration/test_systemd_config.py new file mode 100644 index 000000000000..c8f1312526d6 --- /dev/null +++ b/pkg/tests/integration/test_systemd_config.py @@ -0,0 +1,43 @@ +import subprocess + +import pytest + + +@pytest.mark.skip_on_windows(reason="Linux test only") +def test_system_config(salt_cli, salt_minion): + """ + Test system config + """ + get_family = salt_cli.run("grains.get", "os_family", minion_tgt=salt_minion.id) + assert get_family.returncode == 0 + get_finger = salt_cli.run("grains.get", "osfinger", minion_tgt=salt_minion.id) + assert get_finger.returncode == 0 + + if get_family.data == "RedHat": + if get_finger.data in ( + "CentOS Stream-8", + 
"CentOS Linux-8", + "CentOS Stream-9", + "Fedora Linux-36", + ): + ret = subprocess.call( + "systemctl show -p ${config} salt-minion.service", shell=True + ) + assert ret == 0 + else: + ret = subprocess.call( + "systemctl show -p ${config} salt-minion.service", shell=True + ) + assert ret == 1 + + elif "Debian" in get_family.stdout: + if "Debian-9" in get_finger.stdout: + ret = subprocess.call( + "systemctl show -p ${config} salt-minion.service", shell=True + ) + assert ret == 1 + else: + ret = subprocess.call( + "systemctl show -p ${config} salt-minion.service", shell=True + ) + assert ret == 0 diff --git a/pkg/tests/integration/test_version.py b/pkg/tests/integration/test_version.py new file mode 100644 index 000000000000..5f9b6239eaec --- /dev/null +++ b/pkg/tests/integration/test_version.py @@ -0,0 +1,110 @@ +import sys + +import pytest +from pytestskipmarkers.utils import platform + + +def test_salt_version(version, install_salt): + """ + Test version outputed from salt --version + """ + ret = install_salt.proc.run(*install_salt.binary_paths["salt"], "--version") + assert ret.stdout.strip() == f"salt {version}" + + +def test_salt_versions_report_master(install_salt): + """ + Test running --versions-report on master + """ + ret = install_salt.proc.run( + *install_salt.binary_paths["master"], "--versions-report" + ) + ret.stdout.matcher.fnmatch_lines(["*Salt Version:*"]) + if sys.platform == "win32": + ret.stdout.matcher.fnmatch_lines(["*Python: 3.8.16*"]) + else: + ret.stdout.matcher.fnmatch_lines(["*Python: 3.9.16*"]) + + +def test_salt_versions_report_minion(salt_cli, salt_minion): + """ + Test running test.versions_report on minion + """ + ret = salt_cli.run("test.versions_report", minion_tgt=salt_minion.id) + ret.stdout.matcher.fnmatch_lines(["*Salt Version:*"]) + + +@pytest.mark.parametrize( + "binary", ["master", "cloud", "syndic", "minion", "call", "api"] +) +def test_compare_versions(version, binary, install_salt): + """ + Test compare versions + """ + if platform.is_windows() and install_salt.singlebin: + pytest.skip( + "Already tested in `test_salt_version`. No need to repeat " + "for windows single binary installs." + ) + if binary in ["master", "cloud", "syndic"]: + if sys.platform.startswith("win"): + pytest.skip(f"{binary} not installed on windows") + + ret = install_salt.proc.run(*install_salt.binary_paths[binary], "--version") + ret.stdout.matcher.fnmatch_lines([f"*{version}*"]) + + +@pytest.mark.skip_unless_on_darwin() +@pytest.mark.parametrize( + "symlink", + [ + # We can't create a salt symlink because there is a salt directory + # "salt", + "salt-api", + "salt-call", + "salt-cloud", + "salt-cp", + "salt-key", + "salt-master", + "salt-minion", + "salt-proxy", + "salt-run", + "salt-spm", + "salt-ssh", + "salt-syndic", + ], +) +def test_symlinks_created(version, symlink, install_salt): + """ + Test symlinks created + """ + if not install_salt.installer_pkg: + pytest.skip( + "This test is for the installer package only (pkg). It does not " + "apply to the tarball" + ) + ret = install_salt.proc.run(install_salt.bin_dir / symlink, "--version") + ret.stdout.matcher.fnmatch_lines([f"*{version}*"]) + + +def test_compare_pkg_versions_redhat_rc(version, install_salt): + """ + Test compare pkg versions for redhat RC packages. + A tilde should be included in RC Packages and it + should test to be a lower version than a non RC package + of the same version. For example, v3004~rc1 should be + less than v3004. 
+ """ + if install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora"): + pytest.skip("Only tests rpm packages") + + pkg = [x for x in install_salt.pkgs if "rpm" in x] + if not pkg: + pytest.skip("Not testing rpm packages") + pkg = pkg[0].split("/")[-1] + if "rc" not in pkg: + pytest.skip("Not testing an RC package") + assert "~" in pkg + comp_pkg = pkg.split("~")[0] + ret = install_salt.proc.run("rpmdev-vercmp", pkg, comp_pkg) + ret.stdout.matcher.fnmatch_lines([f"{pkg} < {comp_pkg}"]) diff --git a/pkg/tests/support/__init__.py b/pkg/tests/support/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/pkg/tests/support/coverage/sitecustomize.py b/pkg/tests/support/coverage/sitecustomize.py new file mode 100644 index 000000000000..bee2ff80f2f5 --- /dev/null +++ b/pkg/tests/support/coverage/sitecustomize.py @@ -0,0 +1,11 @@ +""" +Python will always try to import sitecustomize. +We use that fact to try and support code coverage for sub-processes +""" + +try: + import coverage + + coverage.process_startup() +except ImportError: + pass diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py new file mode 100644 index 000000000000..cd346d44ce2d --- /dev/null +++ b/pkg/tests/support/helpers.py @@ -0,0 +1,1404 @@ +import atexit +import contextlib +import logging +import os +import pathlib +import pprint +import re +import shutil +import tarfile +import textwrap +import time +from typing import TYPE_CHECKING, Any, Dict, List +from zipfile import ZipFile + +import attr +import distro +import psutil +import pytest +import requests +from pytestshellutils.shell import DaemonImpl, Subprocess +from pytestshellutils.utils.processes import ( + ProcessResult, + _get_cmdline, + terminate_process, +) +from pytestskipmarkers.utils import platform +from saltfactories.bases import SystemdSaltDaemonImpl +from saltfactories.cli import call, key, salt +from saltfactories.daemons import api, master, minion + +try: + import crypt + + HAS_CRYPT = True +except ImportError: + HAS_CRYPT = False +try: + import pwd + + HAS_PWD = True +except ImportError: + HAS_PWD = False + + +TESTS_DIR = pathlib.Path(__file__).resolve().parent.parent +CODE_DIR = TESTS_DIR.parent +ARTIFACTS_DIR = CODE_DIR / "artifacts" + +log = logging.getLogger(__name__) + + +@attr.s(kw_only=True, slots=True) +class SaltPkgInstall: + conf_dir: pathlib.Path = attr.ib() + system_service: bool = attr.ib(default=False) + proc: Subprocess = attr.ib(init=False) + pkgs: List[str] = attr.ib(factory=list) + onedir: bool = attr.ib(default=False) + singlebin: bool = attr.ib(default=False) + compressed: bool = attr.ib(default=False) + hashes: Dict[str, Dict[str, Any]] = attr.ib() + root: pathlib.Path = attr.ib(default=None) + run_root: pathlib.Path = attr.ib(default=None) + ssm_bin: pathlib.Path = attr.ib(default=None) + bin_dir: pathlib.Path = attr.ib(default=None) + # The artifact is an installer (exe, pkg, rpm, deb) + installer_pkg: bool = attr.ib(default=False) + upgrade: bool = attr.ib(default=False) + # install salt or not. 
This allows someone + # to test a currently installed version of salt + no_install: bool = attr.ib(default=False) + no_uninstall: bool = attr.ib(default=False) + + distro_id: str = attr.ib(init=False) + pkg_mngr: str = attr.ib(init=False) + rm_pkg: str = attr.ib(init=False) + salt_pkgs: List[str] = attr.ib(init=False) + binary_paths: List[pathlib.Path] = attr.ib(init=False) + + @proc.default + def _default_proc(self): + return Subprocess() + + @hashes.default + def _default_hashes(self): + return { + "BLAKE2B": {"file": None, "tool": "-blake2b512"}, + "SHA3_512": {"file": None, "tool": "-sha3-512"}, + "SHA512": {"file": None, "tool": "-sha512"}, + } + + @distro_id.default + def _default_distro_id(self): + return distro.id().lower() + + @pkg_mngr.default + def _default_pkg_mngr(self): + if self.distro_id in ("centos", "redhat", "amzn", "fedora"): + return "yum" + elif self.distro_id in ("ubuntu", "debian"): + ret = self.proc.run("apt-get", "update") + self._check_retcode(ret) + return "apt-get" + + @rm_pkg.default + def _default_rm_pkg(self): + if self.distro_id in ("centos", "redhat", "amzn", "fedora"): + return "remove" + elif self.distro_id in ("ubuntu", "debian"): + return "purge" + + @salt_pkgs.default + def _default_salt_pkgs(self): + salt_pkgs = [ + "salt-api", + "salt-syndic", + "salt-ssh", + "salt-master", + "salt-cloud", + "salt-minion", + ] + if self.distro_id in ("centos", "redhat", "amzn", "fedora"): + salt_pkgs.append("salt") + elif self.distro_id in ("ubuntu", "debian"): + salt_pkgs.append("salt-common") + return salt_pkgs + + def __attrs_post_init__(self): + file_ext_re = r"tar\.gz" + if platform.is_darwin(): + file_ext_re = r"tar\.gz|pkg" + if platform.is_windows(): + file_ext_re = "zip|exe" + for f_path in ARTIFACTS_DIR.glob("**/*.*"): + f_path = str(f_path) + if re.search(f"salt-(.*).({file_ext_re})$", f_path): + # Compressed can be zip, tar.gz, exe, or pkg. All others are + # deb and rpm + self.compressed = True + file_ext = os.path.splitext(f_path)[1].strip(".") + if file_ext == "gz": + if f_path.endswith("tar.gz"): + file_ext = "tar.gz" + self.pkgs.append(f_path) + if platform.is_windows(): + self.root = pathlib.Path(os.getenv("LocalAppData")).resolve() + if file_ext == "zip": + with ZipFile(f_path, "r") as zip: + first = zip.infolist()[0] + if first.filename == "salt/ssm.exe": + self.onedir = True + self.bin_dir = self.root / "salt" / "salt" + self.run_root = self.bin_dir / "salt.exe" + self.ssm_bin = self.root / "salt" / "ssm.exe" + elif first.filename == "salt.exe": + self.singlebin = True + self.run_root = self.root / "salt.exe" + self.ssm_bin = self.root / "ssm.exe" + else: + log.error( + "Unexpected archive layout. 
First: %s", + first.filename, + ) + elif file_ext == "exe": + self.onedir = True + self.installer_pkg = True + install_dir = pathlib.Path( + os.getenv("ProgramFiles"), "Salt Project", "Salt" + ).resolve() + self.bin_dir = install_dir / "bin" + self.run_root = self.bin_dir / "salt.exe" + self.ssm_bin = self.bin_dir / "ssm.exe" + else: + log.error("Unexpected file extension: %s", file_ext) + else: + if platform.is_darwin(): + self.root = pathlib.Path(os.sep, "opt") + else: + self.root = pathlib.Path(os.sep, "usr", "local", "bin") + + if file_ext == "pkg": + self.onedir = True + self.installer_pkg = True + self.bin_dir = self.root / "salt" / "bin" + self.run_root = self.bin_dir / "run" + elif file_ext == "tar.gz": + with tarfile.open(f_path) as tar: + # The first item will be called salt + first = next(iter(tar.getmembers())) + if first.name == "salt" and first.isdir(): + self.onedir = True + self.bin_dir = self.root / "salt" / "run" + self.run_root = self.bin_dir / "run" + elif first.name == "salt" and first.isfile(): + self.singlebin = True + self.run_root = self.root / "salt" + else: + log.error( + "Unexpected archive layout. First: %s (isdir: %s, isfile: %s)", + first.name, + first.isdir(), + first.isfile(), + ) + else: + log.error("Unexpected file extension: %s", file_ext) + + if re.search(r"salt(.*)(x86_64|all|amd64|aarch64)\.(rpm|deb)$", f_path): + self.installer_pkg = True + self.pkgs.append(f_path) + + if not self.pkgs: + pytest.fail("Could not find Salt Artifacts") + + if not self.compressed: + self.binary_paths = { + "salt": ["salt"], + "api": ["salt-api"], + "call": ["salt-call"], + "cloud": ["salt-cloud"], + "cp": ["salt-cp"], + "key": ["salt-key"], + "master": ["salt-master"], + "minion": ["salt-minion"], + "proxy": ["salt-proxy"], + "run": ["salt-run"], + "ssh": ["salt-ssh"], + "syndic": ["salt-syndic"], + "spm": ["spm"], + "pip": ["salt-pip"], + } + else: + self.binary_paths = { + "salt": [str(self.run_root)], + "api": [str(self.run_root), "api"], + "call": [str(self.run_root), "call"], + "cloud": [str(self.run_root), "cloud"], + "cp": [str(self.run_root), "cp"], + "key": [str(self.run_root), "key"], + "master": [str(self.run_root), "master"], + "minion": [str(self.run_root), "minion"], + "proxy": [str(self.run_root), "proxy"], + "run": [str(self.run_root), "run"], + "ssh": [str(self.run_root), "ssh"], + "syndic": [str(self.run_root), "syndic"], + "spm": [str(self.run_root), "spm"], + "pip": [str(self.run_root), "pip"], + } + + @staticmethod + def salt_factories_root_dir(system_service: bool = False) -> pathlib.Path: + if system_service is False: + return None + if platform.is_windows(): + return pathlib.Path("C:/salt") + if platform.is_darwin(): + return pathlib.Path("/opt/salt") + return pathlib.Path("/") + + def _check_retcode(self, ret): + """ + helper function ot check subprocess.run + returncode equals 0, if not raise assertionerror + """ + if ret.returncode != 0: + log.error(ret) + assert ret.returncode == 0 + return True + + @property + def salt_hashes(self): + for _hash in self.hashes.keys(): + for fpath in ARTIFACTS_DIR.glob(f"**/*{_hash}*"): + fpath = str(fpath) + if re.search(f"{_hash}", fpath): + self.hashes[_hash]["file"] = fpath + + return self.hashes + + def _install_ssm_service(self): + # Register the services + # run_root and ssm_bin are configured in helper.py to point to the + # correct binary location + log.debug("Installing master service") + ret = self.proc.run( + str(self.ssm_bin), + "install", + "salt-master", + str(self.run_root), + "master", + "-c", 
+ str(self.conf_dir), + ) + self._check_retcode(ret) + log.debug("Installing minion service") + ret = self.proc.run( + str(self.ssm_bin), + "install", + "salt-minion", + str(self.run_root), + "minion", + "-c", + str(self.conf_dir), + ) + self._check_retcode(ret) + log.debug("Installing api service") + ret = self.proc.run( + str(self.ssm_bin), + "install", + "salt-api", + str(self.run_root), + "api", + "-c", + str(self.conf_dir), + ) + self._check_retcode(ret) + + def _install_compressed(self, upgrade=False): + pkg = self.pkgs[0] + log.info("Installing %s", pkg) + if platform.is_windows(): + if pkg.endswith("zip"): + # Extract the files + log.debug("Extracting zip file") + with ZipFile(pkg, "r") as zip: + zip.extractall(path=self.root) + elif pkg.endswith("exe"): + # Install the package + log.debug("Installing: %s", str(pkg)) + if upgrade: + ret = self.proc.run(str(pkg), "/S") + else: + ret = self.proc.run(str(pkg), "/start-minion=0", "/S") + self._check_retcode(ret) + # Remove the service installed by the installer + log.debug("Removing installed salt-minion service") + self.proc.run( + str(self.ssm_bin), + "remove", + "salt-minion", + "confirm", + ) + else: + log.error("Unknown package type: %s", pkg) + if self.system_service: + self._install_ssm_service() + elif platform.is_darwin(): + if pkg.endswith("pkg"): + daemons_dir = pathlib.Path(os.sep, "Library", "LaunchDaemons") + service_name = "com.saltstack.salt.minion" + plist_file = daemons_dir / f"{service_name}.plist" + log.debug("Installing: %s", str(pkg)) + ret = self.proc.run("installer", "-pkg", str(pkg), "-target", "/") + self._check_retcode(ret) + # Stop the service installed by the installer + self.proc.run( + "launchctl", + "disable", + f"system/{service_name}", + ) + self.proc.run("launchctl", "bootout", "system", str(plist_file)) + else: + log.debug("Extracting tarball into %s", self.root) + with tarfile.open(pkg) as tar: # , "r:gz") + tar.extractall(path=str(self.root)) + else: + log.debug("Extracting tarball into %s", self.root) + with tarfile.open(pkg) as tar: # , "r:gz") + tar.extractall(path=str(self.root)) + + def _install_pkgs(self, upgrade=False): + if upgrade: + log.info("Installing packages:\n%s", pprint.pformat(self.pkgs)) + if self.distro_id in ("ubuntu", "debian"): + # --allow-downgrades and yum's downgrade is a workaround since + # dpkg/yum is seeing 3005 version as a greater version than our nightly builds. + # Also this helps work around the situation when the Salt + # branch has not been updated with code so the versions might + # be the same and you can still install and test the new + # package. + ret = self.proc.run( + self.pkg_mngr, "upgrade", "-y", "--allow-downgrades", *self.pkgs + ) + else: + ret = self.proc.run(self.pkg_mngr, "upgrade", "-y", *self.pkgs) + if ( + ret.returncode != 0 + or "does not update installed package" in ret.stdout + or "cannot update it" in ret.stderr + ): + log.info( + "The new packages version is not returning as new. 
Attempting to downgrade" + ) + ret = self.proc.run(self.pkg_mngr, "downgrade", "-y", *self.pkgs) + if ret.returncode != 0: + log.error("Could not install the packages") + return False + else: + log.info("Installing packages:\n%s", pprint.pformat(self.pkgs)) + ret = self.proc.run(self.pkg_mngr, "install", "-y", *self.pkgs) + log.info(ret) + self._check_retcode(ret) + + def install(self, upgrade=False): + if self.compressed: + self._install_compressed(upgrade=upgrade) + else: + self._install_pkgs(upgrade=upgrade) + + def install_previous(self): + """ + Install previous version. This is used for + upgrade tests. + """ + major_ver = "3005" + min_ver = f"{major_ver}" + os_name, version, code_name = distro.linux_distribution() + if os_name: + os_name = os_name.split()[0].lower() + if os_name == "centos" or os_name == "fedora": + os_name = "redhat" + # TODO: When tiamat is considered production we need to update these + # TODO: paths to the tiamat paths instead of the old package paths. + if os_name.lower() in ["redhat", "centos", "amazon", "fedora"]: + for fp in pathlib.Path("/etc", "yum.repos.d").glob("epel*"): + fp.unlink() + ret = self.proc.run( + "rpm", + "--import", + f"https://repo.saltproject.io/salt/py3/{os_name}/{version}/x86_64/{major_ver}/SALTSTACK-GPG-KEY.pub", + ) + self._check_retcode(ret) + ret = self.proc.run( + "curl", + "-fsSL", + f"https://repo.saltproject.io/salt/py3/{os_name}/{version}/x86_64/{major_ver}.repo", + "-o", + f"/etc/yum.repos.d/salt-{os_name}.repo", + ) + self._check_retcode(ret) + ret = self.proc.run(self.pkg_mngr, "clean", "expire-cache") + self._check_retcode(ret) + ret = self.proc.run( + self.pkg_mngr, + "install", + *self.salt_pkgs, + "-y", + ) + self._check_retcode(ret) + + elif os_name.lower() in ["debian", "ubuntu"]: + ret = self.proc.run(self.pkg_mngr, "install", "curl", "-y") + self._check_retcode(ret) + ret = self.proc.run(self.pkg_mngr, "install", "apt-transport-https", "-y") + self._check_retcode(ret) + ret = self.proc.run( + "curl", + "-fsSL", + "-o", + "/usr/share/keyrings/salt-archive-keyring.gpg", + f"https://repo.saltproject.io/salt/py3/{os_name}/{version}/amd64/{major_ver}/salt-archive-keyring.gpg", + ) + self._check_retcode(ret) + with open( + pathlib.Path("/etc", "apt", "sources.list.d", "salt.list"), "w" + ) as fp: + fp.write( + "deb [signed-by=/usr/share/keyrings/salt-archive-keyring.gpg arch=amd64] " + f"https://repo.saltproject.io/salt/py3/{os_name}/{version}/amd64/{major_ver} {code_name} main" + ) + ret = self.proc.run(self.pkg_mngr, "update") + self._check_retcode(ret) + ret = self.proc.run( + self.pkg_mngr, + "install", + *self.salt_pkgs, + "-y", + ) + self._check_retcode(ret) + + elif platform.is_windows(): + win_pkg = f"salt-{min_ver}-1-windows-amd64.exe" + win_pkg_url = ( + f"https://repo.saltproject.io/salt/py3/windows/{major_ver}/{win_pkg}" + ) + pkg_path = pathlib.Path(r"C:\TEMP", win_pkg) + pkg_path.parent.mkdir(exist_ok=True) + ret = requests.get(win_pkg_url) + with open(pkg_path, "wb") as fp: + fp.write(ret.content) + ret = self.proc.run(pkg_path, "/start-minion=0", "/S") + self._check_retcode(ret) + log.debug("Removing installed salt-minion service") + self.proc.run( + "cmd", "/c", str(self.ssm_bin), "remove", "salt-minion", "confirm" + ) + + if self.system_service: + self._install_system_service() + + self.onedir = True + self.installer_pkg = True + install_dir = pathlib.Path( + os.getenv("ProgramFiles"), "Salt Project", "Salt" + ).resolve() + self.bin_dir = install_dir / "bin" + self.run_root = self.bin_dir / "salt.exe" + 
self.ssm_bin = self.bin_dir / "ssm.exe" + + def _uninstall_compressed(self): + if platform.is_windows(): + if self.system_service: + # Uninstall the services + log.debug("Uninstalling master service") + self.proc.run( + str(self.ssm_bin), + "stop", + "salt-master", + ) + self.proc.run( + str(self.ssm_bin), + "remove", + "salt-master", + "confirm", + ) + log.debug("Uninstalling minion service") + self.proc.run( + str(self.ssm_bin), + "stop", + "salt-minion", + ) + self.proc.run( + str(self.ssm_bin), + "remove", + "salt-minion", + "confirm", + ) + log.debug("Uninstalling api service") + self.proc.run( + str(self.ssm_bin), + "stop", + "salt-api", + ) + self.proc.run( + str(self.ssm_bin), + "remove", + "salt-api", + "confirm", + ) + log.debug("Removing the Salt Service Manager") + if self.ssm_bin: + try: + self.ssm_bin.unlink() + except PermissionError: + atexit.register(self.ssm_bin.unlink) + if platform.is_darwin(): + # From here: https://stackoverflow.com/a/46118276/4581998 + daemons_dir = pathlib.Path(os.sep, "Library", "LaunchDaemons") + for service in ("minion", "master", "api", "syndic"): + service_name = f"com.saltstack.salt.{service}" + plist_file = daemons_dir / f"{service_name}.plist" + # Stop the services + self.proc.run("launchctl", "disable", f"system/{service_name}") + self.proc.run("launchctl", "bootout", "system", str(plist_file)) + + # Remove Symlink to salt-config + if os.path.exists("/usr/local/sbin/salt-config"): + os.unlink("/usr/local/sbin/salt-config") + + # Remove supporting files + self.proc.run( + "pkgutil", + "--only-files", + "--files", + "com.saltstack.salt", + "|", + "grep", + "-v", + "opt", + "|", + "tr", + "'\n'", + "' '", + "|", + "xargs", + "-0", + "rm", + "-f", + ) + + # Remove directories + if os.path.exists("/etc/salt"): + shutil.rmtree("/etc/salt") + + # Remove path + if os.path.exists("/etc/paths.d/salt"): + os.remove("/etc/paths.d/salt") + + # Remove receipt + self.proc.run("pkgutil", "--forget", "com.saltstack.salt") + + if self.singlebin: + log.debug("Deleting the salt binary: %s", self.run_root) + if self.run_root: + try: + self.run_root.unlink() + except PermissionError: + atexit.register(self.run_root.unlink) + else: + log.debug("Deleting the onedir directory: %s", self.root / "salt") + shutil.rmtree(str(self.root / "salt")) + + def _uninstall_pkgs(self): + log.debug("Un-Installing packages:\n%s", pprint.pformat(self.salt_pkgs)) + ret = self.proc.run(self.pkg_mngr, self.rm_pkg, "-y", *self.salt_pkgs) + self._check_retcode(ret) + + def uninstall(self): + if self.compressed: + self._uninstall_compressed() + else: + self._uninstall_pkgs() + + def assert_uninstalled(self): + """ + Assert that the paths in /opt/saltstack/ were correctly + removed or not removed + """ + return + if platform.is_windows(): + # I'm not sure where the /opt/saltstack path is coming from + # This is the path we're using to test windows + opt_path = pathlib.Path(os.getenv("LocalAppData"), "salt", "pypath") + else: + opt_path = pathlib.Path(os.sep, "opt", "saltstack", "salt", "pypath") + if not opt_path.exists(): + if platform.is_windows(): + assert not opt_path.parent.exists() + else: + assert not opt_path.parent.parent.exists() + else: + opt_path_contents = list(opt_path.rglob("*")) + if not opt_path_contents: + pytest.fail( + f"The path '{opt_path}' exists but there are no files in it." 
+ ) + else: + for path in list(opt_path_contents): + if path.name in (".installs.json", "__pycache__"): + opt_path_contents.remove(path) + if opt_path_contents: + pytest.fail( + "The test left some files behind: {}".format( + ", ".join([str(p) for p in opt_path_contents]) + ) + ) + + def write_launchd_conf(self, service): + service_name = f"com.saltstack.salt.{service}" + ret = self.proc.run("launchctl", "list", service_name) + # 113 means it couldn't find a service with that name + if ret.returncode == 113: + daemons_dir = pathlib.Path(os.sep, "Library", "LaunchDaemons") + plist_file = daemons_dir / f"{service_name}.plist" + # Make sure we're using this plist file + if plist_file.exists(): + log.warning("Removing existing plist file for service: %s", service) + plist_file.unlink() + + log.debug("Creating plist file for service: %s", service) + contents = textwrap.dedent( + f"""\ + + + + + Label + {service_name} + RunAtLoad + + KeepAlive + + ProgramArguments + + {self.run_root} + {service} + -c + {self.conf_dir} + + SoftResourceLimits + + NumberOfFiles + 100000 + + HardResourceLimits + + NumberOfFiles + 100000 + + + + """ + ) + plist_file.write_text(contents, encoding="utf-8") + contents = plist_file.read_text() + log.debug("Created '%s'. Contents:\n%s", plist_file, contents) + + # Delete the plist file upon completion + atexit.register(plist_file.unlink) + + def write_systemd_conf(self, service, binary): + ret = self.proc.run("systemctl", "daemon-reload") + self._check_retcode(ret) + ret = self.proc.run("systemctl", "status", service) + if ret.returncode in (3, 4): + log.warning( + "No systemd unit file was found for service %s. Creating one.", service + ) + contents = textwrap.dedent( + """\ + [Unit] + Description={service} + + [Service] + KillMode=process + Type=notify + NotifyAccess=all + LimitNOFILE=8192 + ExecStart={tgt} -c {conf_dir} + + [Install] + WantedBy=multi-user.target + """ + ) + if isinstance(binary, list) and len(binary) == 1: + binary = shutil.which(binary[0]) or binary[0] + elif isinstance(binary, list): + binary = " ".join(binary) + unit_path = pathlib.Path( + os.sep, "etc", "systemd", "system", f"{service}.service" + ) + contents = contents.format( + service=service, tgt=binary, conf_dir=self.conf_dir + ) + log.info("Created '%s'. 
Contents:\n%s", unit_path, contents) + unit_path.write_text(contents, encoding="utf-8") + ret = self.proc.run("systemctl", "daemon-reload") + atexit.register(unit_path.unlink) + self._check_retcode(ret) + + def __enter__(self): + if not self.no_install: + if self.upgrade: + self.install_previous() + else: + self.install() + return self + + def __exit__(self, *_): + if not self.no_uninstall: + self.uninstall() + self.assert_uninstalled() + + +class PkgSystemdSaltDaemonImpl(SystemdSaltDaemonImpl): + def get_service_name(self): + if self._service_name is None: + self._service_name = self.factory.script_name + return self._service_name + + +@attr.s(kw_only=True) +class PkgLaunchdSaltDaemonImpl(PkgSystemdSaltDaemonImpl): + + plist_file = attr.ib() + + @plist_file.default + def _default_plist_file(self): + daemons_dir = pathlib.Path(os.sep, "Library", "LaunchDaemons") + return daemons_dir / f"{self.get_service_name()}.plist" + + def get_service_name(self): + if self._service_name is None: + service_name = super().get_service_name() + if "-" in service_name: + service_name = service_name.split("-")[-1] + self._service_name = f"com.saltstack.salt.{service_name}" + return self._service_name + + def cmdline(self, *args): # pylint: disable=arguments-differ + """ + Construct a list of arguments to use when starting the subprocess. + + :param str args: + Additional arguments to use when starting the subprocess + + """ + if args: # pragma: no cover + log.debug( + "%s.run() is ignoring the passed in arguments: %r", + self.__class__.__name__, + args, + ) + self._internal_run( + "launchctl", + "enable", + f"system/{self.get_service_name()}", + ) + return ( + "launchctl", + "bootstrap", + "system", + str(self.plist_file), + ) + + def is_running(self): + """ + Returns true if the sub-process is alive. + """ + if self._process is None: + ret = self._internal_run("launchctl", "list", self.get_service_name()) + if ret.stdout == "": + return False + + if "PID" not in ret.stdout: + return False + + pid = None + # PID in a line that looks like this + # "PID" = 445; + for line in ret.stdout.splitlines(): + if "PID" in line: + pid = line.rstrip(";").split(" = ")[1] + + if pid is None: + return False + + self._process = psutil.Process(int(pid)) + + return self._process.is_running() + + def _terminate(self): + """ + This method actually terminates the started daemon. 
+ """ + # We completely override the parent class method because we're not using + # the self._terminal property, it's a launchd service + if self._process is None: # pragma: no cover + if TYPE_CHECKING: + # Make mypy happy + assert self._terminal_result + return ( + self._terminal_result + ) # pylint: disable=access-member-before-definition + + atexit.unregister(self.terminate) + log.info("Stopping %s", self.factory) + pid = self.pid + # Collect any child processes information before terminating the process + with contextlib.suppress(psutil.NoSuchProcess): + for child in psutil.Process(pid).children(recursive=True): + if ( + child not in self._children + ): # pylint: disable=access-member-before-definition + self._children.append( + child + ) # pylint: disable=access-member-before-definition + + if self._process.is_running(): # pragma: no cover + cmdline = _get_cmdline(self._process) + else: + cmdline = [] + + # Disable the service + self._internal_run( + "launchctl", + "disable", + f"system/{self.get_service_name()}", + ) + # Unload the service + self._internal_run("launchctl", "bootout", "system", str(self.plist_file)) + + if self._process.is_running(): # pragma: no cover + try: + self._process.wait() + except psutil.TimeoutExpired: + self._process.terminate() + try: + self._process.wait() + except psutil.TimeoutExpired: + pass + + exitcode = self._process.wait() or 0 + + # Dereference the internal _process attribute + self._process = None + # Lets log and kill any child processes left behind, including the main subprocess + # if it failed to properly stop + terminate_process( + pid=pid, + kill_children=True, + children=self._children, # pylint: disable=access-member-before-definition + slow_stop=self.factory.slow_stop, + ) + + if self._terminal_stdout is not None: + self._terminal_stdout.close() # pylint: disable=access-member-before-definition + if self._terminal_stderr is not None: + self._terminal_stderr.close() # pylint: disable=access-member-before-definition + stdout = stderr = "" + try: + self._terminal_result = ProcessResult( + returncode=exitcode, stdout=stdout, stderr=stderr, cmdline=cmdline + ) + log.info("%s %s", self.factory.__class__.__name__, self._terminal_result) + return self._terminal_result + finally: + self._terminal = None + self._terminal_stdout = None + self._terminal_stderr = None + self._terminal_timeout = None + self._children = [] + + +@attr.s(kw_only=True) +class PkgSsmSaltDaemonImpl(PkgSystemdSaltDaemonImpl): + def cmdline(self, *args): # pylint: disable=arguments-differ + """ + Construct a list of arguments to use when starting the subprocess. + + :param str args: + Additional arguments to use when starting the subprocess + + """ + if args: # pragma: no cover + log.debug( + "%s.run() is ignoring the passed in arguments: %r", + self.__class__.__name__, + args, + ) + return ( + str(self.factory.salt_pkg_install.ssm_bin), + "start", + self.get_service_name(), + ) + + def is_running(self): + """ + Returns true if the sub-process is alive. 
+ """ + if self._process is None: + n = 1 + while True: + if self._process is not None: + break + time.sleep(1) + ret = self._internal_run( + str(self.factory.salt_pkg_install.ssm_bin), + "processes", + self.get_service_name(), + ) + log.warning(ret) + if not ret.stdout or (ret.stdout and not ret.stdout.strip()): + if n >= 120: + return False + n += 1 + continue + for line in ret.stdout.splitlines(): + log.warning("Line: %s", line) + if not line.strip(): + continue + mainpid = line.strip().split()[0] + self._process = psutil.Process(int(mainpid)) + break + return self._process.is_running() + + def _terminate(self): + """ + This method actually terminates the started daemon. + """ + # We completely override the parent class method because we're not using the + # self._terminal property, it's a systemd service + if self._process is None: # pragma: no cover + if TYPE_CHECKING: + # Make mypy happy + assert self._terminal_result + return ( + self._terminal_result + ) # pylint: disable=access-member-before-definition + + atexit.unregister(self.terminate) + log.info("Stopping %s", self.factory) + pid = self.pid + # Collect any child processes information before terminating the process + with contextlib.suppress(psutil.NoSuchProcess): + for child in psutil.Process(pid).children(recursive=True): + if ( + child not in self._children + ): # pylint: disable=access-member-before-definition + self._children.append( + child + ) # pylint: disable=access-member-before-definition + + if self._process.is_running(): # pragma: no cover + cmdline = _get_cmdline(self._process) + else: + cmdline = [] + + # Tell ssm to stop the service + try: + self._internal_run( + str(self.factory.salt_pkg_install.ssm_bin), + "stop", + self.get_service_name(), + ) + except FileNotFoundError: + pass + + if self._process.is_running(): # pragma: no cover + try: + self._process.wait() + except psutil.TimeoutExpired: + self._process.terminate() + try: + self._process.wait() + except psutil.TimeoutExpired: + pass + + exitcode = self._process.wait() or 0 + + # Dereference the internal _process attribute + self._process = None + # Lets log and kill any child processes left behind, including the main subprocess + # if it failed to properly stop + terminate_process( + pid=pid, + kill_children=True, + children=self._children, # pylint: disable=access-member-before-definition + slow_stop=self.factory.slow_stop, + ) + + if self._terminal_stdout is not None: + self._terminal_stdout.close() # pylint: disable=access-member-before-definition + if self._terminal_stderr is not None: + self._terminal_stderr.close() # pylint: disable=access-member-before-definition + stdout = stderr = "" + try: + self._terminal_result = ProcessResult( + returncode=exitcode, stdout=stdout, stderr=stderr, cmdline=cmdline + ) + log.info("%s %s", self.factory.__class__.__name__, self._terminal_result) + return self._terminal_result + finally: + self._terminal = None + self._terminal_stdout = None + self._terminal_stderr = None + self._terminal_timeout = None + self._children = [] + + +@attr.s(kw_only=True) +class PkgMixin: + salt_pkg_install: SaltPkgInstall = attr.ib() + + def get_script_path(self): + if self.salt_pkg_install.compressed: + return str(self.salt_pkg_install.run_root) + return super().get_script_path() + + def get_base_script_args(self): + base_script_args = [] + if self.salt_pkg_install.compressed: + if self.script_name == "spm": + base_script_args.append(self.script_name) + elif self.script_name != "salt": + 
base_script_args.append(self.script_name.split("salt-")[-1]) + base_script_args.extend(super().get_base_script_args()) + return base_script_args + + def cmdline(self, *args, **kwargs): + _cmdline = super().cmdline(*args, **kwargs) + if self.salt_pkg_install.compressed is False: + return _cmdline + if _cmdline[0] == self.python_executable: + _cmdline.pop(0) + return _cmdline + + +@attr.s(kw_only=True) +class DaemonPkgMixin(PkgMixin): + def __attrs_post_init__(self): + if not platform.is_windows() and self.salt_pkg_install.system_service: + if platform.is_darwin(): + self.write_launchd_conf() + else: + self.write_systemd_conf() + + def get_service_name(self): + return self.script_name + + def write_launchd_conf(self): + raise NotImplementedError + + def write_systemd_conf(self): + raise NotImplementedError + + +@attr.s(kw_only=True) +class SaltMaster(DaemonPkgMixin, master.SaltMaster): + """ + Subclassed just to tweak the binary paths if needed and factory classes. + """ + + def __attrs_post_init__(self): + self.script_name = "salt-master" + master.SaltMaster.__attrs_post_init__(self) + DaemonPkgMixin.__attrs_post_init__(self) + + def _get_impl_class(self): + if self.system_install and self.salt_pkg_install.system_service: + if platform.is_windows(): + return PkgSsmSaltDaemonImpl + if platform.is_darwin(): + return PkgLaunchdSaltDaemonImpl + return PkgSystemdSaltDaemonImpl + return DaemonImpl + + def write_launchd_conf(self): + self.salt_pkg_install.write_launchd_conf("master") + + def write_systemd_conf(self): + self.salt_pkg_install.write_systemd_conf( + "salt-master", self.salt_pkg_install.binary_paths["master"] + ) + + def salt_minion_daemon(self, minion_id, **kwargs): + return super().salt_minion_daemon( + minion_id, + factory_class=SaltMinion, + salt_pkg_install=self.salt_pkg_install, + **kwargs, + ) + + def salt_api_daemon(self, **kwargs): + return super().salt_api_daemon( + factory_class=SaltApi, salt_pkg_install=self.salt_pkg_install, **kwargs + ) + + def salt_key_cli(self, **factory_class_kwargs): + return super().salt_key_cli( + factory_class=SaltKey, + salt_pkg_install=self.salt_pkg_install, + **factory_class_kwargs, + ) + + def salt_cli(self, **factory_class_kwargs): + return super().salt_cli( + factory_class=SaltCli, + salt_pkg_install=self.salt_pkg_install, + **factory_class_kwargs, + ) + + +@attr.s(kw_only=True, slots=True) +class SaltMinion(DaemonPkgMixin, minion.SaltMinion): + """ + Subclassed just to tweak the binary paths if needed and factory classes. + """ + + def __attrs_post_init__(self): + self.script_name = "salt-minion" + minion.SaltMinion.__attrs_post_init__(self) + DaemonPkgMixin.__attrs_post_init__(self) + + def _get_impl_class(self): + if self.system_install and self.salt_pkg_install.system_service: + if platform.is_windows(): + return PkgSsmSaltDaemonImpl + if platform.is_darwin(): + return PkgLaunchdSaltDaemonImpl + return PkgSystemdSaltDaemonImpl + return DaemonImpl + + def write_launchd_conf(self): + self.salt_pkg_install.write_launchd_conf("minion") + + def write_systemd_conf(self): + self.salt_pkg_install.write_systemd_conf( + "salt-minion", self.salt_pkg_install.binary_paths["minion"] + ) + + def salt_call_cli(self, **factory_class_kwargs): + return super().salt_call_cli( + factory_class=SaltCall, + salt_pkg_install=self.salt_pkg_install, + **factory_class_kwargs, + ) + + +@attr.s(kw_only=True, slots=True) +class SaltApi(DaemonPkgMixin, api.SaltApi): + """ + Subclassed just to tweak the binary paths if needed. 
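+ When running as a system service, DaemonPkgMixin also writes the
+ matching launchd/systemd unit for salt-api.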
+ """ + + def __attrs_post_init__(self): + self.script_name = "salt-api" + api.SaltApi.__attrs_post_init__(self) + DaemonPkgMixin.__attrs_post_init__(self) + + def _get_impl_class(self): + if self.system_install and self.salt_pkg_install.system_service: + if platform.is_windows(): + return PkgSsmSaltDaemonImpl + if platform.is_darwin(): + return PkgLaunchdSaltDaemonImpl + return PkgSystemdSaltDaemonImpl + return DaemonImpl + + def write_launchd_conf(self): + self.salt_pkg_install.write_launchd_conf("api") + + def write_systemd_conf(self): + self.salt_pkg_install.write_systemd_conf( + "salt-api", + self.salt_pkg_install.binary_paths["api"], + ) + + +@attr.s(kw_only=True, slots=True) +class SaltCall(PkgMixin, call.SaltCall): + """ + Subclassed just to tweak the binary paths if needed. + """ + + def __attrs_post_init__(self): + call.SaltCall.__attrs_post_init__(self) + self.script_name = "salt-call" + + +@attr.s(kw_only=True, slots=True) +class SaltCli(PkgMixin, salt.SaltCli): + """ + Subclassed just to tweak the binary paths if needed. + """ + + def __attrs_post_init__(self): + self.script_name = "salt" + salt.SaltCli.__attrs_post_init__(self) + + +@attr.s(kw_only=True, slots=True) +class SaltKey(PkgMixin, key.SaltKey): + """ + Subclassed just to tweak the binary paths if needed. + """ + + def __attrs_post_init__(self): + self.script_name = "salt-key" + key.SaltKey.__attrs_post_init__(self) + + +@attr.s(kw_only=True, slots=True) +class TestUser: + """ + Add a test user + """ + + salt_call_cli = attr.ib() + + username = attr.ib(default="saltdev") + # Must follow Windows Password Complexity requirements + password = attr.ib(default="P@ssW0rd") + _pw_record = attr.ib(init=False, repr=False, default=None) + + def salt_call_local(self, *args): + ret = self.salt_call_cli.run("--local", *args) + if ret.returncode != 0: + log.error(ret) + assert ret.returncode == 0 + return ret.data + + def add_user(self): + log.debug("Adding system account %r", self.username) + if platform.is_windows(): + self.salt_call_local("user.add", self.username, self.password) + else: + self.salt_call_local("user.add", self.username) + hash_passwd = crypt.crypt(self.password, crypt.mksalt(crypt.METHOD_SHA512)) + self.salt_call_local("shadow.set_password", self.username, hash_passwd) + assert self.username in self.salt_call_local("user.list_users") + + def remove_user(self): + log.debug("Removing system account %r", self.username) + if platform.is_windows(): + self.salt_call_local( + "user.delete", self.username, "purge=True", "force=True" + ) + else: + self.salt_call_local("user.delete", self.username, "remove=True") + + @property + def pw_record(self): + if self._pw_record is None and HAS_PWD: + self._pw_record = pwd.getpwnam(self.username) + return self._pw_record + + @property + def uid(self): + if HAS_PWD: + return self.pw_record.pw_uid + return None + + @property + def gid(self): + if HAS_PWD: + return self.pw_record.pw_gid + return None + + @property + def env(self): + environ = os.environ.copy() + environ["LOGNAME"] = environ["USER"] = self.username + environ["HOME"] = self.pw_record.pw_dir + return environ + + def __enter__(self): + self.add_user() + return self + + def __exit__(self, *_): + self.remove_user() + + +@attr.s(kw_only=True, slots=True) +class ApiRequest: + salt_api: SaltApi = attr.ib(repr=False) + test_account: TestUser = attr.ib(repr=False) + session: requests.Session = attr.ib(init=False, repr=False) + api_uri: str = attr.ib(init=False) + auth_data: Dict[str, str] = attr.ib(init=False) + + 
@session.default + def _default_session(self): + return requests.Session() + + @api_uri.default + def _default_api_uri(self): + return f"http://localhost:{self.salt_api.config['rest_cherrypy']['port']}" + + @auth_data.default + def _default_auth_data(self): + return { + "username": self.test_account.username, + "password": self.test_account.password, + "eauth": "auto", + "out": "json", + } + + def post(self, url, data): + post_data = dict(**self.auth_data, **data) + resp = self.session.post(f"{self.api_uri}/run", data=post_data).json() + minion = next(iter(resp["return"][0])) + return resp["return"][0][minion] + + def __enter__(self): + self.session.__enter__() + return self + + def __exit__(self, *args): + self.session.__exit__(*args) + + +@pytest.helpers.register +def remove_stale_minion_key(master, minion_id): + key_path = os.path.join(master.config["pki_dir"], "minions", minion_id) + if os.path.exists(key_path): + os.unlink(key_path) + else: + log.debug("The minion(id=%r) key was not found at %s", minion_id, key_path) + + +@pytest.helpers.register +def remove_stale_master_key(master): + keys_path = os.path.join(master.config["pki_dir"], "master") + for key_name in ("master.pem", "master.pub"): + key_path = os.path.join(keys_path, key_name) + if os.path.exists(key_path): + os.unlink(key_path) + else: + log.debug( + "The master(id=%r) %s key was not found at %s", + master.id, + key_name, + key_path, + ) + key_path = os.path.join(master.config["pki_dir"], "minion", "minion_master.pub") + if os.path.exists(key_path): + os.unlink(key_path) + else: + log.debug( + "The master(id=%r) minion_master.pub key was not found at %s", + master.id, + key_path, + ) diff --git a/pkg/tests/upgrade/test_salt_upgrade.py b/pkg/tests/upgrade/test_salt_upgrade.py new file mode 100644 index 000000000000..eb802e734273 --- /dev/null +++ b/pkg/tests/upgrade/test_salt_upgrade.py @@ -0,0 +1,70 @@ +import pytest + + +@pytest.mark.skip_on_windows( + reason="Salt Master scripts not included in old windows packages" +) +def test_salt_upgrade(salt_call_cli, salt_minion, install_salt): + """ + Test upgrade of Salt + """ + if not install_salt.upgrade: + pytest.skip("Not testing an upgrade, do not run") + # verify previous install version is setup correctly and works + ret = salt_call_cli.run("test.ping") + assert ret.returncode == 0 + assert ret.data + + # test pip install before an upgrade + dep = "PyGithub" + repo = "https://github.com/saltstack/salt.git" + install = salt_call_cli.run("--local", "pip.install", dep) + assert install.returncode == 0 + use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) + assert "Authentication information could" in use_lib.stderr + # upgrade Salt from previous version and test + install_salt.install(upgrade=True) + ret = salt_call_cli.run("test.ping") + assert ret.returncode == 0 + assert ret.data + + # test pip install after an upgrade + use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) + assert "Authentication information could" in use_lib.stderr + + +@pytest.mark.skip_unless_on_windows() +def test_salt_upgrade_windows_1(install_salt, salt_call_cli): + """ + Test upgrade of Salt on windows + """ + if not install_salt.upgrade: + pytest.skip("Not testing an upgrade, do not run") + # verify previous install version is setup correctly and works + ret = salt_call_cli.run("--local", "test.ping") + assert ret.data is True + assert ret.returncode == 0 + # test pip install before an upgrade + dep = "PyGithub" + repo = "https://github.com/saltstack/salt.git" + 
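+ # pip-install PyGithub, then call github.get_repo_info; the expected
+ # "Authentication information could" error shows the module (and the
+ # freshly installed dependency) import fine under the packaged Salt.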
install = salt_call_cli.run("--local", "pip.install", dep) + assert install.returncode == 0 + use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) + assert "Authentication information could" in use_lib.stderr + + +@pytest.mark.skip_unless_on_windows() +def test_salt_upgrade_windows_2(salt_call_cli, salt_minion, install_salt): + """ + Test upgrade of Salt on windows + """ + if install_salt.no_uninstall: + pytest.skip("Not testing an upgrade, do not run") + # upgrade Salt from previous version and test + install_salt.install(upgrade=True) + ret = salt_call_cli.run("test.ping") + assert ret.returncode == 0 + assert ret.data + repo = "https://github.com/saltstack/salt.git" + use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) + assert "Authentication information could" in use_lib.stderr diff --git a/requirements/static/ci/pkgtests.in b/requirements/static/ci/pkgtests.in new file mode 100644 index 000000000000..96daaa071754 --- /dev/null +++ b/requirements/static/ci/pkgtests.in @@ -0,0 +1,2 @@ +pytest-pudb +cherrypy diff --git a/requirements/static/ci/py3.10/pkgtests.txt b/requirements/static/ci/py3.10/pkgtests.txt new file mode 100644 index 000000000000..eefe2630ccbd --- /dev/null +++ b/requirements/static/ci/py3.10/pkgtests.txt @@ -0,0 +1,204 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.10/pkgtests.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt +# +attrs==22.2.0 + # via + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +autocommand==2.2.2 + # via jaraco.text +certifi==2022.12.7 + # via requests +charset-normalizer==3.0.1 + # via requests +cheroot==9.0.0 + # via cherrypy +cherrypy==18.8.0 + # via + # -r requirements/base.txt + # -r requirements/static/ci/pkgtests.in +contextvars==2.4 + # via -r requirements/base.txt +distlib==0.3.6 + # via virtualenv +distro==1.8.0 + # via + # -r requirements/base.txt + # pytest-skip-markers +docker==6.0.1 + # via pytest-salt-factories +exceptiongroup==1.1.0 + # via pytest +filelock==3.9.0 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +idna==3.4 + # via requests +immutables==0.19 + # via contextvars +inflect==6.0.2 + # via jaraco.text +iniconfig==2.0.0 + # via pytest +jaraco.classes==3.2.3 + # via jaraco.collections +jaraco.collections==3.8.0 + # via cherrypy +jaraco.context==4.2.0 + # via jaraco.text +jaraco.functools==3.5.2 + # via + # cheroot + # jaraco.text +jaraco.text==3.11.0 + # via jaraco.collections +jedi==0.18.2 + # via pudb +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.3 + # via -r requirements/base.txt +markupsafe==2.1.1 + # via + # -r requirements/base.txt + # jinja2 + # werkzeug +mock==5.0.1 + # via -r requirements/pytest.txt +more-itertools==9.0.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools + # jaraco.text +msgpack==1.0.4 + # via + # -r requirements/base.txt + # pytest-salt-factories +packaging==23.0 + # via + # -r requirements/base.txt + # docker + # pudb + # pytest +parso==0.8.3 + # via jedi +platformdirs==2.6.2 + # via virtualenv +pluggy==1.0.0 + # via pytest +portend==3.1.0 + # via cherrypy +psutil==5.9.4 + # via + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pudb==2022.1.3 + # via pytest-pudb +pycryptodomex==3.16.0 
+ # via -r requirements/crypto.txt +pydantic==1.10.4 + # via inflect +pygments==2.14.0 + # via pudb +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.12.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.6 + # via -r requirements/pytest.txt +pytest-pudb==0.7.0 + # via -r requirements/static/ci/pkgtests.in +pytest-salt-factories[docker]==1.0.0rc23 ; sys_platform != "win32" + # via -r requirements/pytest.txt +pytest-shell-utilities==1.7.0 + # via pytest-salt-factories +pytest-skip-markers==1.4.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.9.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-tempdir==2019.10.12 + # via + # -r requirements/pytest.txt + # pytest-salt-factories +pytest-timeout==2.1.0 + # via -r requirements/pytest.txt +pytest==7.2.1 ; python_version > "3.6" + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-pudb + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-tempdir + # pytest-timeout +pytz==2022.7.1 + # via tempora +pyyaml==6.0 + # via + # -r requirements/base.txt + # pytest-salt-factories +pyzmq==25.0.0 ; python_version >= "3.9" + # via + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.28.2 + # via + # -r requirements/base.txt + # docker +six==1.16.0 + # via cheroot +tempora==5.2.0 + # via portend +tomli==2.0.1 + # via pytest +typing-extensions==4.4.0 + # via + # pydantic + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.14 + # via + # docker + # requests +urwid-readline==0.13 + # via pudb +urwid==2.1.2 + # via + # pudb + # urwid-readline +virtualenv==20.17.1 + # via pytest-salt-factories +websocket-client==1.4.2 + # via docker +werkzeug==2.2.2 + # via pytest-httpserver +zc.lockfile==2.0 + # via cherrypy + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.7/pkgtests.txt b/requirements/static/ci/py3.7/pkgtests.txt new file mode 100644 index 000000000000..814089fe42a8 --- /dev/null +++ b/requirements/static/ci/py3.7/pkgtests.txt @@ -0,0 +1,219 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.7/pkgtests.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt +# +attrs==22.2.0 + # via + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +autocommand==2.2.2 + # via jaraco.text +certifi==2022.12.7 + # via requests +charset-normalizer==3.0.1 + # via requests +cheroot==9.0.0 + # via cherrypy +cherrypy==18.8.0 + # via + # -r requirements/base.txt + # -r requirements/static/ci/pkgtests.in +contextvars==2.4 + # via -r requirements/base.txt +distlib==0.3.6 + # via virtualenv +distro==1.8.0 + # via + # -r requirements/base.txt + # pytest-skip-markers +docker==6.0.1 + # via pytest-salt-factories +exceptiongroup==1.1.0 + # via pytest +filelock==3.9.0 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +idna==3.4 + # via requests +immutables==0.19 + # via contextvars +importlib-metadata==6.0.0 + # via + # cheroot + # pluggy + # pytest + # virtualenv +importlib-resources==5.10.2 + # 
via jaraco.text +inflect==6.0.2 + # via jaraco.text +iniconfig==2.0.0 + # via pytest +jaraco.classes==3.2.3 + # via jaraco.collections +jaraco.collections==3.8.0 + # via cherrypy +jaraco.context==4.2.0 + # via jaraco.text +jaraco.functools==3.5.2 + # via + # cheroot + # jaraco.text +jaraco.text==3.11.0 + # via jaraco.collections +jedi==0.18.2 + # via pudb +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.3 + # via -r requirements/base.txt +markupsafe==2.1.1 + # via + # -r requirements/base.txt + # jinja2 + # werkzeug +mock==5.0.1 + # via -r requirements/pytest.txt +more-itertools==9.0.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools + # jaraco.text +msgpack==1.0.4 + # via + # -r requirements/base.txt + # pytest-salt-factories +packaging==23.0 + # via + # -r requirements/base.txt + # docker + # pudb + # pytest +parso==0.8.3 + # via jedi +platformdirs==2.6.2 + # via virtualenv +pluggy==1.0.0 + # via pytest +portend==3.1.0 + # via cherrypy +psutil==5.9.4 + # via + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pudb==2022.1.3 + # via pytest-pudb +pycryptodomex==3.16.0 + # via -r requirements/crypto.txt +pydantic==1.10.4 + # via inflect +pygments==2.14.0 + # via pudb +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.12.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.6 + # via -r requirements/pytest.txt +pytest-pudb==0.7.0 + # via -r requirements/static/ci/pkgtests.in +pytest-salt-factories[docker]==1.0.0rc23 ; sys_platform != "win32" + # via -r requirements/pytest.txt +pytest-shell-utilities==1.7.0 + # via pytest-salt-factories +pytest-skip-markers==1.4.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.9.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-tempdir==2019.10.12 + # via + # -r requirements/pytest.txt + # pytest-salt-factories +pytest-timeout==2.1.0 + # via -r requirements/pytest.txt +pytest==7.2.1 ; python_version > "3.6" + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-pudb + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-tempdir + # pytest-timeout +pytz==2022.7.1 + # via tempora +pyyaml==6.0 + # via + # -r requirements/base.txt + # pytest-salt-factories +pyzmq==25.0.0 ; python_version < "3.9" + # via + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.28.2 + # via + # -r requirements/base.txt + # docker +six==1.16.0 + # via cheroot +tempora==5.2.0 + # via portend +tomli==2.0.1 + # via pytest +typing-extensions==4.4.0 + # via + # immutables + # importlib-metadata + # platformdirs + # pydantic + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.14 + # via + # docker + # requests +urwid-readline==0.13 + # via pudb +urwid==2.1.2 + # via + # pudb + # urwid-readline +virtualenv==20.17.1 + # via pytest-salt-factories +websocket-client==1.4.2 + # via docker +werkzeug==2.2.2 + # via pytest-httpserver +zc.lockfile==2.0 + # via cherrypy +zipp==3.11.0 + # via + # importlib-metadata + # importlib-resources + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git 
a/requirements/static/ci/py3.8/pkgtests.txt b/requirements/static/ci/py3.8/pkgtests.txt new file mode 100644 index 000000000000..8f2a1d87ec27 --- /dev/null +++ b/requirements/static/ci/py3.8/pkgtests.txt @@ -0,0 +1,208 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.8/pkgtests.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt +# +attrs==22.2.0 + # via + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +autocommand==2.2.2 + # via jaraco.text +certifi==2022.12.7 + # via requests +charset-normalizer==3.0.1 + # via requests +cheroot==9.0.0 + # via cherrypy +cherrypy==18.8.0 + # via + # -r requirements/base.txt + # -r requirements/static/ci/pkgtests.in +contextvars==2.4 + # via -r requirements/base.txt +distlib==0.3.6 + # via virtualenv +distro==1.8.0 + # via + # -r requirements/base.txt + # pytest-skip-markers +docker==6.0.1 + # via pytest-salt-factories +exceptiongroup==1.1.0 + # via pytest +filelock==3.9.0 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +idna==3.4 + # via requests +immutables==0.19 + # via contextvars +importlib-resources==5.10.2 + # via jaraco.text +inflect==6.0.2 + # via jaraco.text +iniconfig==2.0.0 + # via pytest +jaraco.classes==3.2.3 + # via jaraco.collections +jaraco.collections==3.8.0 + # via cherrypy +jaraco.context==4.2.0 + # via jaraco.text +jaraco.functools==3.5.2 + # via + # cheroot + # jaraco.text +jaraco.text==3.11.0 + # via jaraco.collections +jedi==0.18.2 + # via pudb +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.3 + # via -r requirements/base.txt +markupsafe==2.1.1 + # via + # -r requirements/base.txt + # jinja2 + # werkzeug +mock==5.0.1 + # via -r requirements/pytest.txt +more-itertools==9.0.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools + # jaraco.text +msgpack==1.0.4 + # via + # -r requirements/base.txt + # pytest-salt-factories +packaging==23.0 + # via + # -r requirements/base.txt + # docker + # pudb + # pytest +parso==0.8.3 + # via jedi +platformdirs==2.6.2 + # via virtualenv +pluggy==1.0.0 + # via pytest +portend==3.1.0 + # via cherrypy +psutil==5.9.4 + # via + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pudb==2022.1.3 + # via pytest-pudb +pycryptodomex==3.16.0 + # via -r requirements/crypto.txt +pydantic==1.10.4 + # via inflect +pygments==2.14.0 + # via pudb +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.12.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.6 + # via -r requirements/pytest.txt +pytest-pudb==0.7.0 + # via -r requirements/static/ci/pkgtests.in +pytest-salt-factories[docker]==1.0.0rc23 ; sys_platform != "win32" + # via -r requirements/pytest.txt +pytest-shell-utilities==1.7.0 + # via pytest-salt-factories +pytest-skip-markers==1.4.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.9.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-tempdir==2019.10.12 + # via + # -r requirements/pytest.txt + # pytest-salt-factories +pytest-timeout==2.1.0 + # via -r requirements/pytest.txt +pytest==7.2.1 ; python_version > "3.6" + # via 
+ # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-pudb + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-tempdir + # pytest-timeout +pytz==2022.7.1 + # via tempora +pyyaml==6.0 + # via + # -r requirements/base.txt + # pytest-salt-factories +pyzmq==25.0.0 ; python_version < "3.9" + # via + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.28.2 + # via + # -r requirements/base.txt + # docker +six==1.16.0 + # via cheroot +tempora==5.2.0 + # via portend +tomli==2.0.1 + # via pytest +typing-extensions==4.4.0 + # via + # pydantic + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.14 + # via + # docker + # requests +urwid-readline==0.13 + # via pudb +urwid==2.1.2 + # via + # pudb + # urwid-readline +virtualenv==20.17.1 + # via pytest-salt-factories +websocket-client==1.4.2 + # via docker +werkzeug==2.2.2 + # via pytest-httpserver +zc.lockfile==2.0 + # via cherrypy +zipp==3.11.0 + # via importlib-resources + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.9/pkgtests.txt b/requirements/static/ci/py3.9/pkgtests.txt new file mode 100644 index 000000000000..f80941506715 --- /dev/null +++ b/requirements/static/ci/py3.9/pkgtests.txt @@ -0,0 +1,204 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.9/pkgtests.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt +# +attrs==22.2.0 + # via + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +autocommand==2.2.2 + # via jaraco.text +certifi==2022.12.7 + # via requests +charset-normalizer==3.0.1 + # via requests +cheroot==9.0.0 + # via cherrypy +cherrypy==18.8.0 + # via + # -r requirements/base.txt + # -r requirements/static/ci/pkgtests.in +contextvars==2.4 + # via -r requirements/base.txt +distlib==0.3.6 + # via virtualenv +distro==1.8.0 + # via + # -r requirements/base.txt + # pytest-skip-markers +docker==6.0.1 + # via pytest-salt-factories +exceptiongroup==1.1.0 + # via pytest +filelock==3.9.0 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +idna==3.4 + # via requests +immutables==0.19 + # via contextvars +inflect==6.0.2 + # via jaraco.text +iniconfig==2.0.0 + # via pytest +jaraco.classes==3.2.3 + # via jaraco.collections +jaraco.collections==3.8.0 + # via cherrypy +jaraco.context==4.2.0 + # via jaraco.text +jaraco.functools==3.5.2 + # via + # cheroot + # jaraco.text +jaraco.text==3.11.0 + # via jaraco.collections +jedi==0.18.2 + # via pudb +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.3 + # via -r requirements/base.txt +markupsafe==2.1.1 + # via + # -r requirements/base.txt + # jinja2 + # werkzeug +mock==5.0.1 + # via -r requirements/pytest.txt +more-itertools==9.0.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools + # jaraco.text +msgpack==1.0.4 + # via + # -r requirements/base.txt + # pytest-salt-factories +packaging==23.0 + # via + # -r requirements/base.txt + # docker + # pudb + # pytest +parso==0.8.3 + # via jedi +platformdirs==2.6.2 + # via virtualenv +pluggy==1.0.0 + # via pytest +portend==3.1.0 + # via cherrypy +psutil==5.9.4 + # via + # -r requirements/base.txt + # pytest-salt-factories 
+ # pytest-shell-utilities + # pytest-system-statistics +pudb==2022.1.3 + # via pytest-pudb +pycryptodomex==3.16.0 + # via -r requirements/crypto.txt +pydantic==1.10.4 + # via inflect +pygments==2.14.0 + # via pudb +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.12.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.6 + # via -r requirements/pytest.txt +pytest-pudb==0.7.0 + # via -r requirements/static/ci/pkgtests.in +pytest-salt-factories[docker]==1.0.0rc23 ; sys_platform != "win32" + # via -r requirements/pytest.txt +pytest-shell-utilities==1.7.0 + # via pytest-salt-factories +pytest-skip-markers==1.4.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.9.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-tempdir==2019.10.12 + # via + # -r requirements/pytest.txt + # pytest-salt-factories +pytest-timeout==2.1.0 + # via -r requirements/pytest.txt +pytest==7.2.1 ; python_version > "3.6" + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-pudb + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-tempdir + # pytest-timeout +pytz==2022.7.1 + # via tempora +pyyaml==6.0 + # via + # -r requirements/base.txt + # pytest-salt-factories +pyzmq==25.0.0 ; python_version >= "3.9" + # via + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.28.2 + # via + # -r requirements/base.txt + # docker +six==1.16.0 + # via cheroot +tempora==5.2.0 + # via portend +tomli==2.0.1 + # via pytest +typing-extensions==4.4.0 + # via + # pydantic + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.14 + # via + # docker + # requests +urwid-readline==0.13 + # via pudb +urwid==2.1.2 + # via + # pudb + # urwid-readline +virtualenv==20.17.1 + # via pytest-salt-factories +websocket-client==1.4.2 + # via docker +werkzeug==2.2.2 + # via pytest-httpserver +zc.lockfile==2.0 + # via cherrypy + +# The following packages are considered to be unsafe in a requirements file: +# setuptools From 91ad0aee2815203bbe1ad526e54d3449aea811e3 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Tue, 17 Jan 2023 13:14:25 -0700 Subject: [PATCH 05/55] Use and older version of pytest-salt-factories for pkg tests --- .gitignore | 1 + .pre-commit-config.yaml | 14 ++--- pkg/tests/conftest.py | 12 ++-- requirements/static/ci/pkgtests.in | 2 +- requirements/static/ci/py3.10/pkgtests.txt | 69 +++------------------- requirements/static/ci/py3.7/pkgtests.txt | 69 +++------------------- requirements/static/ci/py3.8/pkgtests.txt | 69 +++------------------- requirements/static/ci/py3.9/pkgtests.txt | 69 +++------------------- 8 files changed, 45 insertions(+), 260 deletions(-) diff --git a/.gitignore b/.gitignore index b5ec74906444..f4076ae84be1 100644 --- a/.gitignore +++ b/.gitignore @@ -117,6 +117,7 @@ kitchen.local.yml .bundle/ Gemfile.lock /artifacts/ +/pkg/artifacts/ requirements/static/*/py*/*.log # Vim's default session file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c919ca2d18fa..bc10eca43182 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1039,56 +1039,52 @@ repos: - id: pip-tools-compile alias: compile-ci-pkg-3.7-requirements name: PKG tests CI Py3.7 Requirements - files: 
^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkg|common)\.in|py3\.7/pkg\.txt)))$ + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkgtests|common)\.in|py3\.7/pkgtests\.in)))$ pass_filenames: false args: - -v - --py-version=3.7 - --include=requirements/base.txt - --include=requirements/zeromq.txt - - --include=requirements/pytest.txt - requirements/static/ci/pkgtests.in - id: pip-tools-compile alias: compile-ci-pkg-3.8-requirements name: PKG tests CI Py3.8 Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkg|common)\.in|py3\.7/pkg\.txt)))$ + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkgtests|common)\.in|py3\.8/pkgtests\.in)))$ pass_filenames: false args: - -v - --py-version=3.8 - --include=requirements/base.txt - --include=requirements/zeromq.txt - - --include=requirements/pytest.txt - requirements/static/ci/pkgtests.in - id: pip-tools-compile alias: compile-ci-pkg-3.9-requirements name: PKG tests CI Py3.9 Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkg|common)\.in|py3\.7/pkg\.txt)))$ + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkgtests|common)\.in|py3\.9/pkgtests\.in)))$ pass_filenames: false args: - -v - --py-version=3.9 - --include=requirements/base.txt - --include=requirements/zeromq.txt - - --include=requirements/pytest.txt - requirements/static/ci/pkgtests.in - id: pip-tools-compile alias: compile-ci-pkg-3.10-requirements name: PKG tests CI Py3.10 Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkg|common)\.in|py3\.7/pkg\.txt)))$ + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkgtests|common)\.in|py3\.10/pkgtests\.in)))$ pass_filenames: false args: - -v - --py-version=3.10 - --include=requirements/base.txt - --include=requirements/zeromq.txt - - --include=requirements/pytest.txt - requirements/static/ci/pkgtests.in - + # <---- PKG ci requirements----------------------------------------------------------------------------------------- # ----- Tools ----------------------------------------------------------------------------------------------------> diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index 47d6ebd1acb6..921e5ad9e6d2 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -43,12 +43,12 @@ def pytest_addoption(parser): register argparse-style options and ini-style config values. 
""" test_selection_group = parser.getgroup("Tests Runtime Selection") - # test_selection_group.addoption( - # "--system-service", - # default=False, - # action="store_true", - # help="Run the daemons as system services", - # ) + test_selection_group.addoption( + "--system-service", + default=False, + action="store_true", + help="Run the daemons as system services", + ) test_selection_group.addoption( "--upgrade", default=False, diff --git a/requirements/static/ci/pkgtests.in b/requirements/static/ci/pkgtests.in index 96daaa071754..e40f7d075e23 100644 --- a/requirements/static/ci/pkgtests.in +++ b/requirements/static/ci/pkgtests.in @@ -1,2 +1,2 @@ -pytest-pudb cherrypy +pytest-salt-factories==1.0.0rc17 diff --git a/requirements/static/ci/py3.10/pkgtests.txt b/requirements/static/ci/py3.10/pkgtests.txt index eefe2630ccbd..17d2fdd1f91b 100644 --- a/requirements/static/ci/py3.10/pkgtests.txt +++ b/requirements/static/ci/py3.10/pkgtests.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.10/pkgtests.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt +# pip-compile --output-file=requirements/static/ci/py3.10/pkgtests.txt requirements/base.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt # attrs==22.2.0 # via @@ -20,9 +20,7 @@ charset-normalizer==3.0.1 cheroot==9.0.0 # via cherrypy cherrypy==18.8.0 - # via - # -r requirements/base.txt - # -r requirements/static/ci/pkgtests.in + # via -r requirements/static/ci/pkgtests.in contextvars==2.4 # via -r requirements/base.txt distlib==0.3.6 @@ -31,14 +29,10 @@ distro==1.8.0 # via # -r requirements/base.txt # pytest-skip-markers -docker==6.0.1 - # via pytest-salt-factories exceptiongroup==1.1.0 # via pytest filelock==3.9.0 # via virtualenv -flaky==3.7.0 - # via -r requirements/pytest.txt idna==3.4 # via requests immutables==0.19 @@ -59,8 +53,6 @@ jaraco.functools==3.5.2 # jaraco.text jaraco.text==3.11.0 # via jaraco.collections -jedi==0.18.2 - # via pudb jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 @@ -71,9 +63,6 @@ markupsafe==2.1.1 # via # -r requirements/base.txt # jinja2 - # werkzeug -mock==5.0.1 - # via -r requirements/pytest.txt more-itertools==9.0.0 # via # cheroot @@ -88,11 +77,7 @@ msgpack==1.0.4 packaging==23.0 # via # -r requirements/base.txt - # docker - # pudb # pytest -parso==0.8.3 - # via jedi platformdirs==2.6.2 # via virtualenv pluggy==1.0.0 @@ -105,27 +90,16 @@ psutil==5.9.4 # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pudb==2022.1.3 - # via pytest-pudb pycryptodomex==3.16.0 # via -r requirements/crypto.txt pydantic==1.10.4 # via inflect -pygments==2.14.0 - # via pudb -pytest-custom-exit-code==0.3.0 - # via -r requirements/pytest.txt pytest-helpers-namespace==2021.12.29 # via - # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.6 - # via -r requirements/pytest.txt -pytest-pudb==0.7.0 +pytest-salt-factories==1.0.0rc17 # via -r requirements/static/ci/pkgtests.in -pytest-salt-factories[docker]==1.0.0rc23 ; sys_platform != "win32" - # via -r requirements/pytest.txt pytest-shell-utilities==1.7.0 # via pytest-salt-factories pytest-skip-markers==1.4.0 @@ -133,43 +107,28 @@ pytest-skip-markers==1.4.0 # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pytest-subtests==0.9.0 - # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories 
pytest-tempdir==2019.10.12 + # via pytest-salt-factories +pytest==7.2.1 # via - # -r requirements/pytest.txt - # pytest-salt-factories -pytest-timeout==2.1.0 - # via -r requirements/pytest.txt -pytest==7.2.1 ; python_version > "3.6" - # via - # -r requirements/pytest.txt - # pytest-custom-exit-code # pytest-helpers-namespace - # pytest-pudb # pytest-salt-factories # pytest-shell-utilities # pytest-skip-markers - # pytest-subtests # pytest-system-statistics # pytest-tempdir - # pytest-timeout pytz==2022.7.1 # via tempora pyyaml==6.0 - # via - # -r requirements/base.txt - # pytest-salt-factories + # via -r requirements/base.txt pyzmq==25.0.0 ; python_version >= "3.9" # via # -r requirements/zeromq.txt # pytest-salt-factories requests==2.28.2 - # via - # -r requirements/base.txt - # docker + # via -r requirements/base.txt six==1.16.0 # via cheroot tempora==5.2.0 @@ -182,21 +141,9 @@ typing-extensions==4.4.0 # pytest-shell-utilities # pytest-system-statistics urllib3==1.26.14 - # via - # docker - # requests -urwid-readline==0.13 - # via pudb -urwid==2.1.2 - # via - # pudb - # urwid-readline + # via requests virtualenv==20.17.1 # via pytest-salt-factories -websocket-client==1.4.2 - # via docker -werkzeug==2.2.2 - # via pytest-httpserver zc.lockfile==2.0 # via cherrypy diff --git a/requirements/static/ci/py3.7/pkgtests.txt b/requirements/static/ci/py3.7/pkgtests.txt index 814089fe42a8..60daeb146ae9 100644 --- a/requirements/static/ci/py3.7/pkgtests.txt +++ b/requirements/static/ci/py3.7/pkgtests.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.7/pkgtests.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt +# pip-compile --output-file=requirements/static/ci/py3.7/pkgtests.txt requirements/base.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt # attrs==22.2.0 # via @@ -20,9 +20,7 @@ charset-normalizer==3.0.1 cheroot==9.0.0 # via cherrypy cherrypy==18.8.0 - # via - # -r requirements/base.txt - # -r requirements/static/ci/pkgtests.in + # via -r requirements/static/ci/pkgtests.in contextvars==2.4 # via -r requirements/base.txt distlib==0.3.6 @@ -31,14 +29,10 @@ distro==1.8.0 # via # -r requirements/base.txt # pytest-skip-markers -docker==6.0.1 - # via pytest-salt-factories exceptiongroup==1.1.0 # via pytest filelock==3.9.0 # via virtualenv -flaky==3.7.0 - # via -r requirements/pytest.txt idna==3.4 # via requests immutables==0.19 @@ -67,8 +61,6 @@ jaraco.functools==3.5.2 # jaraco.text jaraco.text==3.11.0 # via jaraco.collections -jedi==0.18.2 - # via pudb jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 @@ -79,9 +71,6 @@ markupsafe==2.1.1 # via # -r requirements/base.txt # jinja2 - # werkzeug -mock==5.0.1 - # via -r requirements/pytest.txt more-itertools==9.0.0 # via # cheroot @@ -96,11 +85,7 @@ msgpack==1.0.4 packaging==23.0 # via # -r requirements/base.txt - # docker - # pudb # pytest -parso==0.8.3 - # via jedi platformdirs==2.6.2 # via virtualenv pluggy==1.0.0 @@ -113,27 +98,16 @@ psutil==5.9.4 # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pudb==2022.1.3 - # via pytest-pudb pycryptodomex==3.16.0 # via -r requirements/crypto.txt pydantic==1.10.4 # via inflect -pygments==2.14.0 - # via pudb -pytest-custom-exit-code==0.3.0 - # via -r requirements/pytest.txt pytest-helpers-namespace==2021.12.29 # via - # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities 
-pytest-httpserver==1.0.6 - # via -r requirements/pytest.txt -pytest-pudb==0.7.0 +pytest-salt-factories==1.0.0rc17 # via -r requirements/static/ci/pkgtests.in -pytest-salt-factories[docker]==1.0.0rc23 ; sys_platform != "win32" - # via -r requirements/pytest.txt pytest-shell-utilities==1.7.0 # via pytest-salt-factories pytest-skip-markers==1.4.0 @@ -141,43 +115,28 @@ pytest-skip-markers==1.4.0 # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pytest-subtests==0.9.0 - # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-tempdir==2019.10.12 + # via pytest-salt-factories +pytest==7.2.1 # via - # -r requirements/pytest.txt - # pytest-salt-factories -pytest-timeout==2.1.0 - # via -r requirements/pytest.txt -pytest==7.2.1 ; python_version > "3.6" - # via - # -r requirements/pytest.txt - # pytest-custom-exit-code # pytest-helpers-namespace - # pytest-pudb # pytest-salt-factories # pytest-shell-utilities # pytest-skip-markers - # pytest-subtests # pytest-system-statistics # pytest-tempdir - # pytest-timeout pytz==2022.7.1 # via tempora pyyaml==6.0 - # via - # -r requirements/base.txt - # pytest-salt-factories + # via -r requirements/base.txt pyzmq==25.0.0 ; python_version < "3.9" # via # -r requirements/zeromq.txt # pytest-salt-factories requests==2.28.2 - # via - # -r requirements/base.txt - # docker + # via -r requirements/base.txt six==1.16.0 # via cheroot tempora==5.2.0 @@ -193,21 +152,9 @@ typing-extensions==4.4.0 # pytest-shell-utilities # pytest-system-statistics urllib3==1.26.14 - # via - # docker - # requests -urwid-readline==0.13 - # via pudb -urwid==2.1.2 - # via - # pudb - # urwid-readline + # via requests virtualenv==20.17.1 # via pytest-salt-factories -websocket-client==1.4.2 - # via docker -werkzeug==2.2.2 - # via pytest-httpserver zc.lockfile==2.0 # via cherrypy zipp==3.11.0 diff --git a/requirements/static/ci/py3.8/pkgtests.txt b/requirements/static/ci/py3.8/pkgtests.txt index 8f2a1d87ec27..afe9e74796f9 100644 --- a/requirements/static/ci/py3.8/pkgtests.txt +++ b/requirements/static/ci/py3.8/pkgtests.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.8/pkgtests.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt +# pip-compile --output-file=requirements/static/ci/py3.8/pkgtests.txt requirements/base.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt # attrs==22.2.0 # via @@ -20,9 +20,7 @@ charset-normalizer==3.0.1 cheroot==9.0.0 # via cherrypy cherrypy==18.8.0 - # via - # -r requirements/base.txt - # -r requirements/static/ci/pkgtests.in + # via -r requirements/static/ci/pkgtests.in contextvars==2.4 # via -r requirements/base.txt distlib==0.3.6 @@ -31,14 +29,10 @@ distro==1.8.0 # via # -r requirements/base.txt # pytest-skip-markers -docker==6.0.1 - # via pytest-salt-factories exceptiongroup==1.1.0 # via pytest filelock==3.9.0 # via virtualenv -flaky==3.7.0 - # via -r requirements/pytest.txt idna==3.4 # via requests immutables==0.19 @@ -61,8 +55,6 @@ jaraco.functools==3.5.2 # jaraco.text jaraco.text==3.11.0 # via jaraco.collections -jedi==0.18.2 - # via pudb jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 @@ -73,9 +65,6 @@ markupsafe==2.1.1 # via # -r requirements/base.txt # jinja2 - # werkzeug -mock==5.0.1 - # via -r requirements/pytest.txt more-itertools==9.0.0 # via # cheroot @@ -90,11 +79,7 @@ msgpack==1.0.4 packaging==23.0 # via # -r 
requirements/base.txt - # docker - # pudb # pytest -parso==0.8.3 - # via jedi platformdirs==2.6.2 # via virtualenv pluggy==1.0.0 @@ -107,27 +92,16 @@ psutil==5.9.4 # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pudb==2022.1.3 - # via pytest-pudb pycryptodomex==3.16.0 # via -r requirements/crypto.txt pydantic==1.10.4 # via inflect -pygments==2.14.0 - # via pudb -pytest-custom-exit-code==0.3.0 - # via -r requirements/pytest.txt pytest-helpers-namespace==2021.12.29 # via - # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.6 - # via -r requirements/pytest.txt -pytest-pudb==0.7.0 +pytest-salt-factories==1.0.0rc17 # via -r requirements/static/ci/pkgtests.in -pytest-salt-factories[docker]==1.0.0rc23 ; sys_platform != "win32" - # via -r requirements/pytest.txt pytest-shell-utilities==1.7.0 # via pytest-salt-factories pytest-skip-markers==1.4.0 @@ -135,43 +109,28 @@ pytest-skip-markers==1.4.0 # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pytest-subtests==0.9.0 - # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-tempdir==2019.10.12 + # via pytest-salt-factories +pytest==7.2.1 # via - # -r requirements/pytest.txt - # pytest-salt-factories -pytest-timeout==2.1.0 - # via -r requirements/pytest.txt -pytest==7.2.1 ; python_version > "3.6" - # via - # -r requirements/pytest.txt - # pytest-custom-exit-code # pytest-helpers-namespace - # pytest-pudb # pytest-salt-factories # pytest-shell-utilities # pytest-skip-markers - # pytest-subtests # pytest-system-statistics # pytest-tempdir - # pytest-timeout pytz==2022.7.1 # via tempora pyyaml==6.0 - # via - # -r requirements/base.txt - # pytest-salt-factories + # via -r requirements/base.txt pyzmq==25.0.0 ; python_version < "3.9" # via # -r requirements/zeromq.txt # pytest-salt-factories requests==2.28.2 - # via - # -r requirements/base.txt - # docker + # via -r requirements/base.txt six==1.16.0 # via cheroot tempora==5.2.0 @@ -184,21 +143,9 @@ typing-extensions==4.4.0 # pytest-shell-utilities # pytest-system-statistics urllib3==1.26.14 - # via - # docker - # requests -urwid-readline==0.13 - # via pudb -urwid==2.1.2 - # via - # pudb - # urwid-readline + # via requests virtualenv==20.17.1 # via pytest-salt-factories -websocket-client==1.4.2 - # via docker -werkzeug==2.2.2 - # via pytest-httpserver zc.lockfile==2.0 # via cherrypy zipp==3.11.0 diff --git a/requirements/static/ci/py3.9/pkgtests.txt b/requirements/static/ci/py3.9/pkgtests.txt index f80941506715..dee0f80444ac 100644 --- a/requirements/static/ci/py3.9/pkgtests.txt +++ b/requirements/static/ci/py3.9/pkgtests.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/pkgtests.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt +# pip-compile --output-file=requirements/static/ci/py3.9/pkgtests.txt requirements/base.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt # attrs==22.2.0 # via @@ -20,9 +20,7 @@ charset-normalizer==3.0.1 cheroot==9.0.0 # via cherrypy cherrypy==18.8.0 - # via - # -r requirements/base.txt - # -r requirements/static/ci/pkgtests.in + # via -r requirements/static/ci/pkgtests.in contextvars==2.4 # via -r requirements/base.txt distlib==0.3.6 @@ -31,14 +29,10 @@ distro==1.8.0 # via # -r requirements/base.txt # pytest-skip-markers -docker==6.0.1 - # via pytest-salt-factories 
exceptiongroup==1.1.0 # via pytest filelock==3.9.0 # via virtualenv -flaky==3.7.0 - # via -r requirements/pytest.txt idna==3.4 # via requests immutables==0.19 @@ -59,8 +53,6 @@ jaraco.functools==3.5.2 # jaraco.text jaraco.text==3.11.0 # via jaraco.collections -jedi==0.18.2 - # via pudb jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 @@ -71,9 +63,6 @@ markupsafe==2.1.1 # via # -r requirements/base.txt # jinja2 - # werkzeug -mock==5.0.1 - # via -r requirements/pytest.txt more-itertools==9.0.0 # via # cheroot @@ -88,11 +77,7 @@ msgpack==1.0.4 packaging==23.0 # via # -r requirements/base.txt - # docker - # pudb # pytest -parso==0.8.3 - # via jedi platformdirs==2.6.2 # via virtualenv pluggy==1.0.0 @@ -105,27 +90,16 @@ psutil==5.9.4 # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pudb==2022.1.3 - # via pytest-pudb pycryptodomex==3.16.0 # via -r requirements/crypto.txt pydantic==1.10.4 # via inflect -pygments==2.14.0 - # via pudb -pytest-custom-exit-code==0.3.0 - # via -r requirements/pytest.txt pytest-helpers-namespace==2021.12.29 # via - # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.6 - # via -r requirements/pytest.txt -pytest-pudb==0.7.0 +pytest-salt-factories==1.0.0rc17 # via -r requirements/static/ci/pkgtests.in -pytest-salt-factories[docker]==1.0.0rc23 ; sys_platform != "win32" - # via -r requirements/pytest.txt pytest-shell-utilities==1.7.0 # via pytest-salt-factories pytest-skip-markers==1.4.0 @@ -133,43 +107,28 @@ pytest-skip-markers==1.4.0 # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pytest-subtests==0.9.0 - # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-tempdir==2019.10.12 + # via pytest-salt-factories +pytest==7.2.1 # via - # -r requirements/pytest.txt - # pytest-salt-factories -pytest-timeout==2.1.0 - # via -r requirements/pytest.txt -pytest==7.2.1 ; python_version > "3.6" - # via - # -r requirements/pytest.txt - # pytest-custom-exit-code # pytest-helpers-namespace - # pytest-pudb # pytest-salt-factories # pytest-shell-utilities # pytest-skip-markers - # pytest-subtests # pytest-system-statistics # pytest-tempdir - # pytest-timeout pytz==2022.7.1 # via tempora pyyaml==6.0 - # via - # -r requirements/base.txt - # pytest-salt-factories + # via -r requirements/base.txt pyzmq==25.0.0 ; python_version >= "3.9" # via # -r requirements/zeromq.txt # pytest-salt-factories requests==2.28.2 - # via - # -r requirements/base.txt - # docker + # via -r requirements/base.txt six==1.16.0 # via cheroot tempora==5.2.0 @@ -182,21 +141,9 @@ typing-extensions==4.4.0 # pytest-shell-utilities # pytest-system-statistics urllib3==1.26.14 - # via - # docker - # requests -urwid-readline==0.13 - # via pudb -urwid==2.1.2 - # via - # pudb - # urwid-readline + # via requests virtualenv==20.17.1 # via pytest-salt-factories -websocket-client==1.4.2 - # via docker -werkzeug==2.2.2 - # via pytest-httpserver zc.lockfile==2.0 # via cherrypy From 0b1406788e762362a89fe08705cfb9480a108470 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Wed, 18 Jan 2023 12:12:37 -0700 Subject: [PATCH 06/55] Fix tests and enable local salt-api client --- pkg/tests/conftest.py | 1 + pkg/tests/files/redhatbased.sls | 10 ------- pkg/tests/integration/test_help.py | 9 ++++-- pkg/tests/integration/test_pip.py | 45 +++++++++++++++++----------- pkg/tests/integration/test_python.py | 8 ++--- pkg/tests/support/helpers.py | 25 +++++++++++----- 6 files changed, 55 
insertions(+), 43 deletions(-) diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index 921e5ad9e6d2..e468c9b60842 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -248,6 +248,7 @@ def salt_master(salt_factories, install_salt, state_tree, pillar_tree): "file_roots": state_tree.as_dict(), "pillar_roots": pillar_tree.as_dict(), "rest_cherrypy": {"port": 8000, "disable_ssl": True}, + "netapi_enable_clients": ["local"], "external_auth": {"auto": {"saltdev": [".*"]}}, } if (platform.is_windows() or platform.is_darwin()) and install_salt.singlebin: diff --git a/pkg/tests/files/redhatbased.sls b/pkg/tests/files/redhatbased.sls index 1ea16e95c9f6..cffa8a6146aa 100644 --- a/pkg/tests/files/redhatbased.sls +++ b/pkg/tests/files/redhatbased.sls @@ -5,20 +5,10 @@ check_services_enabled_{{ service }}: service.enabled: - name: {{ service }} -run_if_changes_{{ service }}: - cmd.run: - - name: failtest service is enabled - - onchanges: - - service: check_services_enabled_{{ service }} {% endfor %} {% for service in services_disabled %} check_services_disabled_{{ service }}: service.disabled: - name: {{ service }} -run_if_changes_{{ service }}: - cmd.run: - - name: failtest service is disabled - - onchanges: - - service: check_services_disabled_{{ service }} {% endfor %} diff --git a/pkg/tests/integration/test_help.py b/pkg/tests/integration/test_help.py index 7379f2e915d2..bb9ae24a0210 100644 --- a/pkg/tests/integration/test_help.py +++ b/pkg/tests/integration/test_help.py @@ -3,10 +3,13 @@ def test_help(install_salt): Test --help works for all salt cmds """ for cmd in install_salt.binary_paths.values(): - if "salt-cloud" in cmd: - assert True - elif "salt-ssh" in cmd: + # TODO: add back salt-cloud and salt-ssh when its fixed + cmd = [str(x) for x in cmd] + if any(x in ["salt-cloud", "salt-ssh"] for x in cmd): assert True + elif "python" in cmd[0]: + ret = install_salt.proc.run(*cmd, "--version") + assert "Python" in ret.stdout else: ret = install_salt.proc.run(*cmd, "--help") assert "Usage" in ret.stdout diff --git a/pkg/tests/integration/test_pip.py b/pkg/tests/integration/test_pip.py index e118fcf4a827..1837dcb6652a 100644 --- a/pkg/tests/integration/test_pip.py +++ b/pkg/tests/integration/test_pip.py @@ -10,23 +10,23 @@ @pytest.fixture def pypath(): if platform.is_windows(): - return pathlib.Path(os.getenv("LocalAppData"), "salt", "pypath") - return pathlib.Path(f"{os.sep}opt", "saltstack", "salt", "pypath") + return pathlib.Path(os.getenv("LocalAppData"), "salt", "bin") + return pathlib.Path(f"{os.sep}opt", "saltstack", "salt", "bin") @pytest.fixture(autouse=True) -def wipe_pypath(pypath): +def wipe_pydeps(pypath, install_salt): try: yield finally: - # Let's make sure pypath is clean after each test, since it's contents - # are not actually part of the test suite, and they break other test - # suite assumptions - for path in pypath.glob("*"): - if path.is_dir(): - shutil.rmtree(path, ignore_errors=True) - else: - path.unlink() + for dep in ["pep8", "PyGithub"]: + subprocess.run( + install_salt.binary_paths["pip"] + ["uninstall", dep], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) def test_pip_install(salt_call_cli): @@ -60,10 +60,17 @@ def result(): @pytest.mark.skip_on_windows(reason="We can't easily demote users on Windows") def test_pip_non_root(install_salt, test_account, pypath): - # Let's make sure pypath does not exist - shutil.rmtree(pypath) + check_path = pypath / "pep8" + # Lets make sure pep8 is not currently 
installed + subprocess.run( + install_salt.binary_paths["pip"] + ["uninstall", "pep8"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) - assert not pypath.exists() + assert not check_path.exists() # We should be able to issue a --help without being root ret = subprocess.run( install_salt.binary_paths["salt"] + ["--help"], @@ -76,7 +83,7 @@ def test_pip_non_root(install_salt, test_account, pypath): ) assert ret.returncode == 0, ret.stderr assert "Usage" in ret.stdout - assert not pypath.exists() + assert not check_path.exists() # Try to pip install something, should fail ret = subprocess.run( @@ -89,8 +96,8 @@ def test_pip_non_root(install_salt, test_account, pypath): universal_newlines=True, ) assert ret.returncode == 1, ret.stderr - assert f"The path '{pypath}' does not exist or could not be created." in ret.stderr - assert not pypath.exists() + assert "Could not install packages due to an OSError" in ret.stderr + assert not check_path.exists() # Let tiamat-pip create the pypath directory for us ret = subprocess.run( @@ -113,7 +120,6 @@ def test_pip_non_root(install_salt, test_account, pypath): universal_newlines=True, ) assert ret.returncode != 0, ret.stderr - # But we should be able to install as root ret = subprocess.run( install_salt.binary_paths["pip"] + ["install", "pep8"], @@ -122,4 +128,7 @@ def test_pip_non_root(install_salt, test_account, pypath): check=False, universal_newlines=True, ) + + assert check_path.exists() + assert ret.returncode == 0, ret.stderr diff --git a/pkg/tests/integration/test_python.py b/pkg/tests/integration/test_python.py index 878905e54840..e6ed5c2c34f7 100644 --- a/pkg/tests/integration/test_python.py +++ b/pkg/tests/integration/test_python.py @@ -8,8 +8,8 @@ @pytest.mark.parametrize("exp_ret,user_arg", [(1, "false"), (0, "true")]) def test_python_script(install_salt, exp_ret, user_arg): ret = subprocess.run( - install_salt.binary_paths["salt"] - + ["python", str(TESTS_DIR / "files" / "check_python.py"), user_arg], + install_salt.binary_paths["python"] + + [str(TESTS_DIR / "files" / "check_python.py"), user_arg], stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False, @@ -21,8 +21,8 @@ def test_python_script(install_salt, exp_ret, user_arg): def test_python_script_exception(install_salt): ret = subprocess.run( - install_salt.binary_paths["salt"] - + ["python", str(TESTS_DIR / "files" / "check_python.py"), "raise"], + install_salt.binary_paths["python"] + + [str(TESTS_DIR / "files" / "check_python.py"), "raise"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False, diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index cd346d44ce2d..fbffe0ea17e9 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -75,6 +75,7 @@ class SaltPkgInstall: pkg_mngr: str = attr.ib(init=False) rm_pkg: str = attr.ib(init=False) salt_pkgs: List[str] = attr.ib(init=False) + install_dir: pathlib.Path = attr.ib(init=False) binary_paths: List[pathlib.Path] = attr.ib(init=False) @proc.default @@ -125,6 +126,19 @@ def _default_salt_pkgs(self): salt_pkgs.append("salt-common") return salt_pkgs + @install_dir.default + def _default_install_dir(self): + if platform.is_windows(): + install_dir = pathlib.Path( + os.getenv("ProgramFiles"), "Salt Project", "Salt" + ).resolve() + elif platform.is_darwin(): + # TODO: Add mac install dir path + install_dir = "" + else: + install_dir = pathlib.Path("/opt", "saltstack", "salt") + return install_dir + def __attrs_post_init__(self): 
file_ext_re = r"tar\.gz" if platform.is_darwin(): @@ -164,10 +178,7 @@ def __attrs_post_init__(self): elif file_ext == "exe": self.onedir = True self.installer_pkg = True - install_dir = pathlib.Path( - os.getenv("ProgramFiles"), "Salt Project", "Salt" - ).resolve() - self.bin_dir = install_dir / "bin" + self.bin_dir = self.install_dir / "bin" self.run_root = self.bin_dir / "salt.exe" self.ssm_bin = self.bin_dir / "ssm.exe" else: @@ -227,6 +238,7 @@ def __attrs_post_init__(self): "syndic": ["salt-syndic"], "spm": ["spm"], "pip": ["salt-pip"], + "python": [self.install_dir / "bin" / "python3"], } else: self.binary_paths = { @@ -499,10 +511,7 @@ def install_previous(self): self.onedir = True self.installer_pkg = True - install_dir = pathlib.Path( - os.getenv("ProgramFiles"), "Salt Project", "Salt" - ).resolve() - self.bin_dir = install_dir / "bin" + self.bin_dir = self.install_dir / "bin" self.run_root = self.bin_dir / "salt.exe" self.ssm_bin = self.bin_dir / "ssm.exe" From 42f5847c9f20023005dbb3d22bcbf567c92d3d29 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 18 Jan 2023 20:14:05 -0500 Subject: [PATCH 07/55] fix test_version packages tests --- noxfile.py | 4 +--- pkg/tests/conftest.py | 2 +- pkg/tests/integration/test_version.py | 15 +++++++++++++-- 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/noxfile.py b/noxfile.py index c36542301d52..47cb997c7af9 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1776,7 +1776,5 @@ def test_pkgs(session, coverage): install_command = ["--progress-bar=off", "-r", requirements_file] session.install(*install_command, silent=PIP_INSTALL_SILENT) - cmd_args = [ - "pkg/tests/", - ] + session.posargs + cmd_args = ["pkg/tests/"] + session.posargs _pytest(session, coverage, cmd_args) diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index e468c9b60842..c00a287cb3e6 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -29,7 +29,7 @@ def version(): _version = "" for artifact in ARTIFACTS_DIR.glob("**/*.*"): _version = re.search( - r"([0-9].*)(\-[0-9].fc|\-[0-9].el|\+ds|\-[0-9].am|\-[0-9]-[a-z]*-[a-z]*[0-9_]*.(tar.gz|zip|exe|pkg|rpm))", + r"([0-9].*)(\-[0-9].fc|\-[0-9].el|\+ds|\-[0-9].am|(\-[0-9]-[a-z]*-[a-z]*[0-9_]*.|\-[0-9]*.*)(tar.gz|zip|exe|pkg|rpm))", artifact.name, ) if _version: diff --git a/pkg/tests/integration/test_version.py b/pkg/tests/integration/test_version.py index 5f9b6239eaec..f319261f9161 100644 --- a/pkg/tests/integration/test_version.py +++ b/pkg/tests/integration/test_version.py @@ -1,3 +1,5 @@ +import pathlib +import subprocess import sys import pytest @@ -21,9 +23,18 @@ def test_salt_versions_report_master(install_salt): ) ret.stdout.matcher.fnmatch_lines(["*Salt Version:*"]) if sys.platform == "win32": - ret.stdout.matcher.fnmatch_lines(["*Python: 3.8.16*"]) + python_executable = pathlib.Path( + r"C:\Program Files\Salt Project\Salt\Scripts\python.exe" + ) else: - ret.stdout.matcher.fnmatch_lines(["*Python: 3.9.16*"]) + python_executable = pathlib.Path("/opt/saltstack/salt/bin/python3") + py_version = subprocess.run( + [str(python_executable), "--version"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ).stdout + py_version = py_version.decode().strip().replace(" ", ": ") + ret.stdout.matcher.fnmatch_lines([f"*{py_version}*"]) def test_salt_versions_report_minion(salt_cli, salt_minion): From b4006c36173f7bf716db625c6d9b5e9c0d191d59 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 18 Jan 2023 13:04:40 -0500 Subject: [PATCH 08/55] add py3.6 requirements for centos --- .pre-commit-config.yaml | 12 
++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bc10eca43182..1e21397fe7ab 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1036,6 +1036,18 @@ repos: # <---- Invoke ----------------------------------------------------------------------------------------------------- # <---- PKG ci requirements----------------------------------------------------------------------------------------- + - id: pip-tools-compile + alias: compile-ci-pkg-3.6-requirements + name: PKG tests CI Py3.6 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkgtests|common)\.in|py3\.6/pkgtests\.in)))$ + pass_filenames: false + args: + - -v + - --py-version=3.6 + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - requirements/static/ci/pkgtests.in + - id: pip-tools-compile alias: compile-ci-pkg-3.7-requirements name: PKG tests CI Py3.7 Requirements From 9690715a92cb5aa7f4a8d61609fc8939184b720f Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 18 Jan 2023 19:38:06 -0500 Subject: [PATCH 09/55] add py 3.6 requirements for centos package tests --- requirements/static/ci/py3.6/pkgtests.txt | 162 ++++++++++++++++++++++ 1 file changed, 162 insertions(+) create mode 100644 requirements/static/ci/py3.6/pkgtests.txt diff --git a/requirements/static/ci/py3.6/pkgtests.txt b/requirements/static/ci/py3.6/pkgtests.txt new file mode 100644 index 000000000000..a1cd3f7ad0b5 --- /dev/null +++ b/requirements/static/ci/py3.6/pkgtests.txt @@ -0,0 +1,162 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.6/pkgtests.txt requirements/base.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt +# +attrs==22.2.0 + # via + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +certifi==2022.12.7 + # via requests +charset-normalizer==2.0.12 + # via requests +cheroot==9.0.0 + # via cherrypy +cherrypy==18.8.0 + # via -r requirements/static/ci/pkgtests.in +contextvars==2.4 + # via -r requirements/base.txt +distlib==0.3.6 + # via virtualenv +distro==1.8.0 + # via + # -r requirements/base.txt + # pytest-skip-markers +filelock==3.4.1 + # via virtualenv +idna==3.4 + # via requests +immutables==0.19 + # via contextvars +importlib-metadata==4.8.3 + # via + # cheroot + # pluggy + # pytest + # virtualenv +importlib-resources==5.4.0 + # via + # jaraco.text + # virtualenv +iniconfig==1.1.1 + # via pytest +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.4.0 + # via cherrypy +jaraco.context==4.1.1 + # via jaraco.text +jaraco.functools==3.4.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.7.0 + # via jaraco.collections +jinja2==3.0.3 + # via -r requirements/base.txt +jmespath==0.10.0 + # via -r requirements/base.txt +looseversion==1.0.3 + # via -r requirements/base.txt +markupsafe==2.0.1 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==8.14.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.4 + # via + # -r requirements/base.txt + # pytest-salt-factories +packaging==21.3 + # via + # -r requirements/base.txt + # pytest +platformdirs==2.4.0 + # via virtualenv +pluggy==1.0.0 + # via pytest +portend==3.0.0 + # via cherrypy +psutil==5.9.4 + # via + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +py==1.11.0 + # via pytest 
+pycryptodomex==3.16.0 + # via -r requirements/crypto.txt +pyparsing==3.0.9 + # via packaging +pytest-helpers-namespace==2021.12.29 + # via + # pytest-salt-factories + # pytest-shell-utilities +pytest-salt-factories==1.0.0rc17 + # via -r requirements/static/ci/pkgtests.in +pytest-shell-utilities==1.7.0 + # via pytest-salt-factories +pytest-skip-markers==1.3.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-tempdir==2019.10.12 + # via pytest-salt-factories +pytest==7.0.1 + # via + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics + # pytest-tempdir +pytz==2022.7.1 + # via tempora +pyyaml==6.0 + # via -r requirements/base.txt +pyzmq==25.0.0 ; python_version < "3.9" + # via + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.27.1 + # via -r requirements/base.txt +six==1.16.0 + # via cheroot +tempora==4.1.2 + # via portend +tomli==1.2.3 + # via pytest +typing-extensions==4.1.1 + # via + # immutables + # importlib-metadata + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.14 + # via requests +virtualenv==20.17.1 + # via pytest-salt-factories +zc.lockfile==2.0 + # via cherrypy +zipp==3.6.0 + # via + # importlib-metadata + # importlib-resources + +# The following packages are considered to be unsafe in a requirements file: +# setuptools From af248d802cfc88806137d86e0d4b714ec67a1746 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Fri, 20 Jan 2023 10:00:36 -0700 Subject: [PATCH 10/55] fix merge wart --- .github/actions/build-onedir-bare/action.yml | 36 ------ .github/workflows/build-salt-action.yml | 124 ------------------- .github/workflows/test-action.yml | 15 --- noxfile.py | 3 - 4 files changed, 178 deletions(-) delete mode 100644 .github/actions/build-onedir-bare/action.yml delete mode 100644 .github/workflows/build-salt-action.yml diff --git a/.github/actions/build-onedir-bare/action.yml b/.github/actions/build-onedir-bare/action.yml deleted file mode 100644 index be262f9362df..000000000000 --- a/.github/actions/build-onedir-bare/action.yml +++ /dev/null @@ -1,36 +0,0 @@ ---- -name: build-onedir-bare -description: Build Bare Onedir Package -inputs: - platform: - required: true - type: string - description: The platform to build - arch: - required: true - type: string - description: The platform arch to build - package-name: - required: false - type: string - description: The onedir package name to create - default: salt - -runs: - using: composite - - steps: - - - name: Cache Bare Onedir Package Directory - id: onedir-pkg-cache - uses: actions/cache@v3 - with: - path: artifacts/${{ inputs.package-name }} - key: relenv|${{ env.RELENV_VERSION }}|bare|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{ hashFiles('.relenv/**/*.xz') }} - - - name: Create Onedir Directory - shell: bash - if: steps.onedir-pkg-cache.outputs.cache-hit != 'true' - run: | - python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - python3 -m relenv create --arch=${{ inputs.arch }} artifacts/${{ inputs.package-name }} diff --git a/.github/workflows/build-salt-action.yml b/.github/workflows/build-salt-action.yml deleted file mode 100644 index 28a2e6f03741..000000000000 --- a/.github/workflows/build-salt-action.yml +++ /dev/null @@ -1,124 +0,0 @@ ---- -name: Build Salt Onedir - -on: - - workflow_call - -jobs: - - linux: - name: Linux - strategy: - fail-fast: 
false - matrix: - arch: - - x86_64 - - aarch64 - runs-on: - - self-hosted - - linux - - ${{ matrix.arch }} - steps: - - uses: actions/checkout@v3 - - name: Setup Relenv - uses: ./.github/actions/setup-relenv - with: - platform: linux - arch: ${{ matrix.arch }} - - name: Install Salt into Relenv Onedir - uses: ./.github/actions/build-onedir-pkg - with: - platform: linux - arch: ${{ matrix.arch }} - - - name: Set Exit Status - if: always() - run: | - python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" - echo "${{ job.status }}" > exitstatus/${{ github.job }}-linux-${{ matrix.arch }} - - - name: Upload Exit Status - if: always() - uses: actions/upload-artifact@v3 - with: - name: exitstatus - path: exitstatus - if-no-files-found: error - - windows: - name: Windows - strategy: - fail-fast: false - matrix: - arch: - - amd64 - runs-on: windows-latest - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.10 - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - name: Setup Relenv - uses: ./.github/actions/setup-relenv - with: - platform: windows - arch: ${{ matrix.arch }} - - name: Install Salt into Relenv Onedir - uses: ./.github/actions/build-onedir-pkg - with: - platform: windows - arch: ${{ matrix.arch }} - - - name: Set Exit Status - if: always() - run: | - python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" - echo "${{ job.status }}" > exitstatus/${{ github.job }}-windows-${{ matrix.arch }} - - - name: Upload Exit Status - if: always() - uses: actions/upload-artifact@v3 - with: - name: exitstatus - path: exitstatus - if-no-files-found: error - - macos: - name: macOS - strategy: - fail-fast: false - matrix: - arch: - - x86_64 - runs-on: macos-12 - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.10 - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - name: Setup Relenv - uses: ./.github/actions/setup-relenv - with: - platform: darwin - arch: ${{ matrix.arch }} - - name: Install Salt into Relenv Onedir - uses: ./.github/actions/build-onedir-pkg - with: - platform: darwin - arch: ${{ matrix.arch }} - - - name: Set Exit Status - if: always() - run: | - python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" - echo "${{ job.status }}" > exitstatus/${{ github.job }}-macos-${{ matrix.arch }} - - - name: Upload Exit Status - if: always() - uses: actions/upload-artifact@v3 - with: - name: exitstatus - path: exitstatus - if-no-files-found: error diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml index 0e31c68185fc..9b6e1e0b3ff0 100644 --- a/.github/workflows/test-action.yml +++ b/.github/workflows/test-action.yml @@ -102,21 +102,6 @@ jobs: path: nox.${{ inputs.distro-slug }}.tar.* key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - - name: Download Onedir Tarball as an Artifact - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.package-name }}-${{ inputs.arch }}-${{ inputs.platform }}.tar.xz - path: artifacts/ - - - name: Decompress Onedir Tarball - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - shell: bash - run: | - python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.arch }}-${{ inputs.platform }}.tar.xz - # Skip jobs if nox.*.tar.* is already cached - name: Download Onedir 
Tarball as an Artifact if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' diff --git a/noxfile.py b/noxfile.py index 47cb997c7af9..eab9303231c3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -329,9 +329,6 @@ def _install_requirements( if not _upgrade_pip_setuptools_and_wheel(session, onedir=onedir): return False - if onedir and not IS_WINDOWS and not IS_DARWIN and not IS_FREEBSD: - session_run_always(session, "python3", "-m", "relenv", "toolchain", "fetch") - # Install requirements requirements_file = _get_pip_requirements_file( session, transport, requirements_type=requirements_type From 89585e3025430d0afcfdacd0eb93a4513e2ed557 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Mon, 23 Jan 2023 08:24:35 -0700 Subject: [PATCH 11/55] Add upgrade tests --- noxfile.py | 35 ++++++++++++++++++++++++++++++ pkg/tests/conftest.py | 6 +++++ pkg/tests/integration/test_help.py | 4 +--- pkg/tests/support/helpers.py | 13 +++++++---- 4 files changed, 51 insertions(+), 7 deletions(-) diff --git a/noxfile.py b/noxfile.py index eab9303231c3..8c5867a812f3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -17,6 +17,8 @@ import tarfile import tempfile +import nox.command + # fmt: off if __name__ == "__main__": sys.stderr.write( @@ -1775,3 +1777,36 @@ def test_pkgs(session, coverage): cmd_args = ["pkg/tests/"] + session.posargs _pytest(session, coverage, cmd_args) + + +@nox.session(python=_PYTHON_VERSIONS, name="test-upgrade-pkgs") +@nox.parametrize("coverage", [False, True]) +@nox.parametrize("classic", [False, True]) +def test_upgrade_pkgs(session, coverage, classic): + """ + pytest pkg upgrade tests session + """ + pydir = _get_pydir(session) + # Install requirements + if _upgrade_pip_setuptools_and_wheel(session): + requirements_file = os.path.join( + "requirements", "static", "ci", _get_pydir(session), "pkgtests.txt" + ) + + install_command = ["--progress-bar=off", "-r", requirements_file] + session.install(*install_command, silent=PIP_INSTALL_SILENT) + + cmd_args = [ + "pkg/tests/upgrade/test_salt_upgrade.py::test_salt_upgrade", + "--upgrade", + "--no-uninstall", + ] + session.posargs + if classic: + cmd_args = cmd_args + ["--classic"] + try: + _pytest(session, coverage, cmd_args) + except nox.command.CommandFailed: + sys.exit(0) + + cmd_args = ["pkg/tests/", "--no-install"] + session.posargs + _pytest(session, coverage, cmd_args) diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index c00a287cb3e6..4e14e05bb395 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -67,6 +67,12 @@ def pytest_addoption(parser): action="store_true", help="Do not uninstall salt packages after test run is complete", ) + test_selection_group.addoption( + "--classic", + default=False, + action="store_true", + help="Test an upgrade from the classic packages.", + ) @pytest.fixture(scope="session") diff --git a/pkg/tests/integration/test_help.py b/pkg/tests/integration/test_help.py index bb9ae24a0210..2f701c624943 100644 --- a/pkg/tests/integration/test_help.py +++ b/pkg/tests/integration/test_help.py @@ -5,9 +5,7 @@ def test_help(install_salt): for cmd in install_salt.binary_paths.values(): # TODO: add back salt-cloud and salt-ssh when its fixed cmd = [str(x) for x in cmd] - if any(x in ["salt-cloud", "salt-ssh"] for x in cmd): - assert True - elif "python" in cmd[0]: + if "python" in cmd[0]: ret = install_salt.proc.run(*cmd, "--version") assert "Python" in ret.stdout else: diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index fbffe0ea17e9..f867770f5968 100644 --- 
a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -77,6 +77,7 @@ class SaltPkgInstall: salt_pkgs: List[str] = attr.ib(init=False) install_dir: pathlib.Path = attr.ib(init=False) binary_paths: List[pathlib.Path] = attr.ib(init=False) + classic: bool = attr.ib(default=False) @proc.default def _default_proc(self): @@ -430,21 +431,25 @@ def install_previous(self): os_name = os_name.split()[0].lower() if os_name == "centos" or os_name == "fedora": os_name = "redhat" - # TODO: When tiamat is considered production we need to update these - # TODO: paths to the tiamat paths instead of the old package paths. if os_name.lower() in ["redhat", "centos", "amazon", "fedora"]: for fp in pathlib.Path("/etc", "yum.repos.d").glob("epel*"): fp.unlink() + gpg_key = "SALTSTACK-GPG-KEY.pub" + if version == "9": + gpg_key = "SALTSTACK-GPG-KEY2.pub" + root_url = "salt/py3/" + if self.classic: + root_url = "py3/" ret = self.proc.run( "rpm", "--import", - f"https://repo.saltproject.io/salt/py3/{os_name}/{version}/x86_64/{major_ver}/SALTSTACK-GPG-KEY.pub", + f"https://repo.saltproject.io/{root_url}{os_name}/{version}/x86_64/{major_ver}/{gpg_key}", ) self._check_retcode(ret) ret = self.proc.run( "curl", "-fsSL", - f"https://repo.saltproject.io/salt/py3/{os_name}/{version}/x86_64/{major_ver}.repo", + f"https://repo.saltproject.io/{root_url}{os_name}/{version}/x86_64/{major_ver}.repo", "-o", f"/etc/yum.repos.d/salt-{os_name}.repo", ) From 1a7ecc84dc5b5f647fe0d201f6a60c52ef356bc3 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 24 Jan 2023 13:39:34 -0500 Subject: [PATCH 12/55] Compress the RPM packages into a tarball with the version in its name --- .github/workflows/build-rpm-packages.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-rpm-packages.yml b/.github/workflows/build-rpm-packages.yml index 4b1ec05ddcbc..d3cf7a07c769 100644 --- a/.github/workflows/build-rpm-packages.yml +++ b/.github/workflows/build-rpm-packages.yml @@ -61,7 +61,7 @@ jobs: echo "${{ inputs.salt-version }}" > salt/_version.txt rpmbuild -bb --define="_salt_src $(pwd)" "$(pwd)/pkg/rpm/salt.spec" - - name: Upload RPMs + - name: Upload RPMs Archive uses: actions/upload-artifact@v3 with: name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-pkgs From 4b0762c22aa7b36afb503f210924961255c28740 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 24 Jan 2023 13:40:35 -0500 Subject: [PATCH 13/55] First test of the new GH actions package testing pipeline --- .github/workflows/ci.yml | 15 + .github/workflows/test-packages-action.yml | 430 +++++++++++++++++++++ 2 files changed, 445 insertions(+) create mode 100644 .github/workflows/test-packages-action.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3bd27c52252d..cc5ec4900fdf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -276,6 +276,21 @@ jobs: self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }} + centos-7-pkg-tests: + name: CentOS 7 Package Tests + if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-ci + - build-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centos-7 + nox-session: test-pkgs + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + package-type: rpm + windows-2016: name: Windows 2016 if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml new file mode 100644 index 000000000000..d04310afd66f --- /dev/null +++ b/.github/workflows/test-packages-action.yml @@ -0,0 +1,430 @@ +name: Test Artifact + +on: + workflow_call: + inputs: + distro-slug: + required: true + type: string + description: The OS slug to run tests against + nox-session: + required: true + type: string + description: The nox session to run + platform: + required: true + type: string + description: The platform being tested + arch: + required: true + type: string + description: The platform arch being tested + package-type: + required: true + type: string + description: The platform arch being tested + salt-version: + type: string + required: true + description: The Salt version of the packages to install and test + + +env: + NOX_VERSION: "2022.8.7" + COLUMNS: 160 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + +jobs: + + # dependencies: + # name: Setup Test Dependencies + # needs: + # - generate-matrix + # runs-on: + # - self-hosted + # - linux + # - bastion + # timeout-minutes: 90 + # strategy: + # fail-fast: false + # matrix: + # include: ${{ fromJSON(needs.generate-matrix.outputs.transport-matrix-include) }} + # steps: + # - name: Checkout Source Code + # uses: actions/checkout@v3 + + # - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + # id: nox-dependencies-cache + # uses: actions/cache@v3 + # with: + # path: nox.${{ inputs.distro-slug }}.tar.* + # key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + + # # Skip jobs if nox.*.tar.* is already cached + # - name: Download Onedir Tarball as an Artifact + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # uses: actions/download-artifact@v3 + # with: + # name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + # path: artifacts/ + + # - name: Decompress Onedir Tarball + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # shell: bash + # run: | + # python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + # cd artifacts + # tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + + # - name: PyPi Proxy + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt + + # - name: Setup Python Tools Scripts + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # uses: ./.github/actions/setup-python-tools-scripts + + # - name: Define Nox Session + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # if [ "${{ matrix.transport }}" != "tcp" ]; then + # echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" + # else + # echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" + # fi + + # - name: Start VM + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # id: spin-up-vm + # run: | + # tools --timestamps vm create --retries=2 ${{ inputs.distro-slug }} + + # - name: List Free Space + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools 
--timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true + + # - name: Upload Checkout To VM + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools --timestamps vm rsync ${{ inputs.distro-slug }} + + # - name: Install Dependencies + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools --timestamps vm install-dependencies --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} + + # - name: Cleanup .nox Directory + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }} + + # - name: Compress .nox Directory + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }} + + # - name: Download Compressed .nox Directory + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools --timestamps vm download-dependencies ${{ inputs.distro-slug }} + + # - name: Destroy VM + # if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools --timestamps vm destroy ${{ inputs.distro-slug }} + + # - name: Set Exit Status + # if: always() + # run: | + # python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + # echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-deps + + # - name: Upload Exit Status + # if: always() + # uses: actions/upload-artifact@v3 + # with: + # name: exitstatus + # path: exitstatus + # if-no-files-found: error + + test: + name: Test + runs-on: + - self-hosted + - linux + - bastion + timeout-minutes: 120 # 2 Hours - More than this and something is wrong + # needs: + # - dependencies + # - generate-matrix + strategy: + fail-fast: false + # matrix: + # include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }} + + steps: + - name: Checkout Source Code + uses: actions/checkout@v3 + + # - name: Setup Salt Version + # run: | + # echo "${{ inputs.salt-version }}" > salt/_version.txt + + - name: Download Packages + uses: actions/download-artifact@v3 + with: + name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-rpms + path: pkg/artifacts/ + + # - name: Decompress Packages Archive + # shell: bash + # run: | + # python3 -c "import os; os.makedirs('pkg/artifacts', exist_ok=True)" + # cd pkg/artifacts + + # - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + # uses: actions/cache@v3 + # with: + # path: nox.${{ inputs.distro-slug }}.tar.* + # key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + + - name: PyPi Proxy + run: | + sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + + # - name: Define Nox Session + # run: | + # if [ "${{ matrix.transport }}" != "tcp" ]; then + # echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" + # else + # echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" + # fi + + # - name: Download testrun-changed-files.txt + # if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} + # uses: actions/download-artifact@v3 + # with: + # name: testrun-changed-files.txt + + - name: Start VM + id: spin-up-vm + env: + TESTS_CHUNK: 
${{ matrix.tests-chunk }} + run: | + tools --timestamps vm create --retries=2 ${{ inputs.distro-slug }} + + - name: List Free Space + run: | + tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true + + - name: Upload Checkout To VM + run: | + tools --timestamps vm rsync ${{ inputs.distro-slug }} + + # - name: Decompress .nox Directory + # run: | + # tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }} + + # - name: Show System Info & Test Plan + # run: | + # tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ + # --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} \ + # ${{ matrix.tests-chunk }} + + # - name: Run Slow/Changed Tests + # id: run-slow-changed-tests + # if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} + # run: | + # tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + # --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ + # ${{ matrix.tests-chunk }} -- --run-slow --suppress-no-test-exit-code \ + # --from-filenames=testrun-changed-files.txt + + # - name: Run Fast Tests + # id: run-fast-tests + # if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} + # run: | + # tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + # --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ + # ${{ matrix.tests-chunk }} + + - name: Run Full Tests + id: run-full-tests + if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }} + run: | + tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test \ + --nox-session=test-pkgs --rerun-failures ${{ inputs.distro-slug }} -- --run-slow + # tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + # --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ + # ${{ matrix.tests-chunk }} -- --run-slow + + # - name: Combine Coverage Reports + # if: always() && steps.spin-up-vm.outcome == 'success' + # run: | + # tools --timestamps vm combine-coverage ${{ inputs.distro-slug }} + + # - name: Download Test Run Artifacts + # id: download-artifacts-from-vm + # if: always() && steps.spin-up-vm.outcome == 'success' + # run: | + # tools --timestamps vm download-artifacts ${{ inputs.distro-slug }} + # # Delete the salt onedir, we won't need it anymore and it will prevent + # # from it showing in the tree command below + # rm -rf artifacts/salt* + # tree -a artifacts + # mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }} + # echo "COVERAGE_FILE=artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}" >> GITHUB_ENV + + - name: Destroy VM + if: always() + run: | + tools --timestamps vm destroy ${{ inputs.distro-slug }} || true + + # - name: Upload Test Run Artifacts + # if: always() && steps.download-artifacts-from-vm.outcome == 'success' + # uses: actions/upload-artifact@v3 + # with: + # name: testrun-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} + # path: | + # artifacts + # !artifacts/salt/* + # !artifacts/salt-*.tar.* + + # - name: Install Nox + # if: always() && steps.download-artifacts-from-vm.outcome == 'success' + # env: + # PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + # PIP_EXTRA_INDEX_URL: https://pypi.org/simple + # run: | + # python3 -m pip install 
'nox==${{ env.NOX_VERSION }}' + + # - name: Report Salt Code Coverage(${{ matrix.tests-chunk }}) + # if: always() && steps.download-artifacts-from-vm.outcome == 'success' + # continue-on-error: true + # run: | + # nox --force-color -e report-coverage -- salt + + # - name: Report Tests Code Coverage(${{ matrix.tests-chunk }}) + # if: always() && steps.download-artifacts-from-vm.outcome == 'success' + # continue-on-error: true + # run: | + # nox --force-color -e report-coverage -- tests + + # - name: Report Combined Code Coverage(${{ matrix.tests-chunk }}) + # if: always() && steps.download-artifacts-from-vm.outcome == 'success' + # continue-on-error: true + # run: | + # nox --force-color -e report-coverage + + # - name: Set Exit Status + # if: always() + # run: | + # python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + # echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-${{ matrix.tests-chunk }}-tests + + # - name: Upload Exit Status + # if: always() + # uses: actions/upload-artifact@v3 + # with: + # name: exitstatus + # path: exitstatus + # if-no-files-found: error + + # report: + # name: Reports for ${{ inputs.distro-slug }}(${{ matrix.transport }}) + # runs-on: + # - self-hosted + # - linux + # - x86_64 + # if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' + # needs: + # - test + # - generate-matrix + # strategy: + # fail-fast: false + # matrix: + # include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }} + + # steps: + # - name: Checkout Source Code + # uses: actions/checkout@v3 + + # - name: Define Nox Session + # run: | + # if [ "${{ matrix.transport }}" != "tcp" ]; then + # echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" + # else + # echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" + # fi + + # - name: Download Test Run Artifacts + # id: download-test-run-artifacts + # uses: actions/download-artifact@v3 + # with: + # name: testrun-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} + # path: artifacts + + # - name: Show Test Run Artifacts + # if: always() && steps.download-test-run-artifacts.outcome == 'success' + # run: | + # tree -a artifacts + + # - name: Upload Code Coverage DB + # if: always() && steps.download-test-run-artifacts.outcome == 'success' + # uses: actions/upload-artifact@v3 + # with: + # name: code-coverage + # path: artifacts/coverage + + # - name: Set up Python 3.9 + # uses: actions/setup-python@v4 + # with: + # python-version: "3.9" + + # - name: Install Nox + # run: | + # python3 -m pip install 'nox==${{ env.NOX_VERSION }}' + + # - name: Report Salt Code Coverage + # continue-on-error: true + # run: | + # nox --force-color -e report-coverage -- salt + + # - name: Report Tests Code Coverage + # continue-on-error: true + # run: | + # nox --force-color -e report-coverage -- tests + + # - name: Report Combined Code Coverage + # continue-on-error: true + # run: | + # nox --force-color -e report-coverage + + # - name: Publish Test Report + # uses: mikepenz/action-junit-report@v3 + # # always run even if the previous steps fails + # if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success' + # with: + # check_name: Overall Test Results(${{ inputs.distro-slug }}) + # report_paths: 'artifacts/xml-unittests-output/*.xml' + # annotate_only: true + + # - name: Set Exit Status + # if: always() + # run: | + # python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + # echo 
"${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-report + + # - name: Upload Exit Status + # if: always() + # uses: actions/upload-artifact@v3 + # with: + # name: exitstatus + # path: exitstatus + # if-no-files-found: error From 80769b16d26d74197b6fdc974a7e9bd0839eb82b Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 24 Jan 2023 15:07:18 -0500 Subject: [PATCH 14/55] Supply the correct artifact name for the rpm archive --- .github/workflows/test-packages-action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index d04310afd66f..ddb3d093858d 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -176,7 +176,7 @@ jobs: - name: Download Packages uses: actions/download-artifact@v3 with: - name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-rpms + name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpms.tar.gz path: pkg/artifacts/ # - name: Decompress Packages Archive From 0eb79a4ddf0b2230774f9091616adc453e074147 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 24 Jan 2023 16:33:33 -0500 Subject: [PATCH 15/55] it's inputs not matrix --- .github/workflows/test-packages-action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index ddb3d093858d..1faafa963986 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -176,7 +176,7 @@ jobs: - name: Download Packages uses: actions/download-artifact@v3 with: - name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpms.tar.gz + name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-rpms.tar.gz path: pkg/artifacts/ # - name: Decompress Packages Archive From 1d8cd16103e1b2545778262730a4f0802227f8ca Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 24 Jan 2023 17:45:48 -0500 Subject: [PATCH 16/55] Run the package tests always, for now at least --- .github/workflows/test-packages-action.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 1faafa963986..89d04ef3fb5b 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -254,12 +254,10 @@ jobs: # --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ # ${{ matrix.tests-chunk }} - - name: Run Full Tests - id: run-full-tests - if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }} + - name: Run Package Tests run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test \ - --nox-session=test-pkgs --rerun-failures ${{ inputs.distro-slug }} -- --run-slow + --nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- --run-slow # tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ # --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ # ${{ matrix.tests-chunk }} -- --run-slow From 852857c9d560fd2f2eeb7150afa8c19b2b37e6fc Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 24 Jan 2023 18:58:04 -0500 Subject: [PATCH 17/55] Remove invalid --run-slow for package test command --- .github/workflows/test-packages-action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 89d04ef3fb5b..be01c607a0fb 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -257,7 +257,7 @@ jobs: - name: Run Package Tests run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test \ - --nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- --run-slow + --nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} # tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ # --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ # ${{ matrix.tests-chunk }} -- --run-slow From b7e61fc1dfc68799014daa9c32823a7ab63a1f9c Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Wed, 25 Jan 2023 09:51:53 -0700 Subject: [PATCH 18/55] Stop services if on debian --- pkg/tests/support/helpers.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index f867770f5968..fd3f8e0faa8f 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -418,6 +418,26 @@ def install(self, upgrade=False): self._install_compressed(upgrade=upgrade) else: self._install_pkgs(upgrade=upgrade) + if self.distro_id in ("ubuntu", "debian"): + self.stop_services() + + def stop_services(self): + """ + Debian distros automatically start the services + We want to ensure our tests start with the config + settings we have set. This will also verify the expected + services are up and running. + """ + for service in ["salt-syndic", "salt-master", "salt-minion"]: + check_run = self.proc.run("systemctl", "status", service) + if check_run.returncode != 0: + # The system was not started automatically and we + # are expecting it to be on install + log.debug("The service %s was not started on install.", service) + return False + stop_service = self.proc.run("systemctl", "stop", service) + self._check_retcode(stop_service) + return True def install_previous(self): """ From f449f340cf529d9d81dbe3d8108d65ec4ea87431 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 25 Jan 2023 15:32:26 -0500 Subject: [PATCH 19/55] include the package artifacts when we rsync --- tools/vm.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tools/vm.py b/tools/vm.py index 8e1eed6cac50..b93e99748848 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -990,6 +990,8 @@ def upload_checkout(self, verbose=True): "artifacts/", "--include", "artifacts/salt", + "--include", + "pkg/artifacts/*", # But we also want to exclude all other entries under artifacts/ "--exclude", "artifacts/*", From d83ac559fd86b7cab6f52a115553d8595db4da30 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 25 Jan 2023 16:34:59 -0500 Subject: [PATCH 20/55] decompress the packages tarball first --- .github/workflows/test-packages-action.yml | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index be01c607a0fb..27773c19d212 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -179,11 +179,14 @@ jobs: name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-rpms.tar.gz path: pkg/artifacts/ - # - name: Decompress Packages Archive - # shell: bash - # run: | - # python3 -c "import os; os.makedirs('pkg/artifacts', exist_ok=True)" - # cd 
pkg/artifacts + - name: Decompress Packages Archive + shell: bash + run: | + python3 -c "import os; os.makedirs('pkg/artifacts', exist_ok=True)" + cd pkg/artifacts + tree . + tar xzvf salt-${{ inputs.salt-version }}-${{ inputs.arch }}-rpms.tar.gz + tree . # - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} # uses: actions/cache@v3 From f6816a1edcf90c36efc08be0276b5ce7e41f9d37 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 25 Jan 2023 18:21:42 -0500 Subject: [PATCH 21/55] try with the old way of uploading artifacts --- .github/workflows/build-rpm-packages.yml | 2 +- .github/workflows/test-packages-action.yml | 56 ++-------------------- 2 files changed, 5 insertions(+), 53 deletions(-) diff --git a/.github/workflows/build-rpm-packages.yml b/.github/workflows/build-rpm-packages.yml index d3cf7a07c769..4b1ec05ddcbc 100644 --- a/.github/workflows/build-rpm-packages.yml +++ b/.github/workflows/build-rpm-packages.yml @@ -61,7 +61,7 @@ jobs: echo "${{ inputs.salt-version }}" > salt/_version.txt rpmbuild -bb --define="_salt_src $(pwd)" "$(pwd)/pkg/rpm/salt.spec" - - name: Upload RPMs Archive + - name: Upload RPMs uses: actions/upload-artifact@v3 with: name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-pkgs diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 27773c19d212..b7d9c9cf102a 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -169,24 +169,19 @@ jobs: - name: Checkout Source Code uses: actions/checkout@v3 - # - name: Setup Salt Version - # run: | - # echo "${{ inputs.salt-version }}" > salt/_version.txt - - name: Download Packages uses: actions/download-artifact@v3 with: - name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-rpms.tar.gz + name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-rpm-packages path: pkg/artifacts/ - - name: Decompress Packages Archive - shell: bash + - name: List package artifacts run: | - python3 -c "import os; os.makedirs('pkg/artifacts', exist_ok=True)" cd pkg/artifacts tree . - tar xzvf salt-${{ inputs.salt-version }}-${{ inputs.arch }}-rpms.tar.gz + mv salt-${{ inputs.salt-version }}-${{ inputs.arch }}-rpm-packages/* . tree . 
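+          # the mv above empties this directory; remove it so only the packages remain under pkg/artifacts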
+ rm -rf salt-${{ inputs.salt-version }}-${{ inputs.arch }}-rpm-packages # - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} # uses: actions/cache@v3 @@ -240,23 +235,6 @@ jobs: # --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} \ # ${{ matrix.tests-chunk }} - # - name: Run Slow/Changed Tests - # id: run-slow-changed-tests - # if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} - # run: | - # tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - # --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ - # ${{ matrix.tests-chunk }} -- --run-slow --suppress-no-test-exit-code \ - # --from-filenames=testrun-changed-files.txt - - # - name: Run Fast Tests - # id: run-fast-tests - # if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} - # run: | - # tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - # --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ - # ${{ matrix.tests-chunk }} - - name: Run Package Tests run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test \ @@ -297,32 +275,6 @@ jobs: # !artifacts/salt/* # !artifacts/salt-*.tar.* - # - name: Install Nox - # if: always() && steps.download-artifacts-from-vm.outcome == 'success' - # env: - # PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - # PIP_EXTRA_INDEX_URL: https://pypi.org/simple - # run: | - # python3 -m pip install 'nox==${{ env.NOX_VERSION }}' - - # - name: Report Salt Code Coverage(${{ matrix.tests-chunk }}) - # if: always() && steps.download-artifacts-from-vm.outcome == 'success' - # continue-on-error: true - # run: | - # nox --force-color -e report-coverage -- salt - - # - name: Report Tests Code Coverage(${{ matrix.tests-chunk }}) - # if: always() && steps.download-artifacts-from-vm.outcome == 'success' - # continue-on-error: true - # run: | - # nox --force-color -e report-coverage -- tests - - # - name: Report Combined Code Coverage(${{ matrix.tests-chunk }}) - # if: always() && steps.download-artifacts-from-vm.outcome == 'success' - # continue-on-error: true - # run: | - # nox --force-color -e report-coverage - # - name: Set Exit Status # if: always() # run: | From ae18ce255731ca80563a145d31d8817d3da56128 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 25 Jan 2023 19:33:37 -0500 Subject: [PATCH 22/55] Create a matrix of nox sessions and realize that the artifacts were being downloaded correctly all along --- .github/workflows/ci.yml | 1 - .github/workflows/test-packages-action.yml | 30 ++++++---------------- 2 files changed, 8 insertions(+), 23 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cc5ec4900fdf..c244d931107b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -285,7 +285,6 @@ jobs: uses: ./.github/workflows/test-packages-action.yml with: distro-slug: centos-7 - nox-session: test-pkgs platform: linux arch: x86_64 salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index b7d9c9cf102a..b140093bac53 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -7,10 +7,6 @@ on: required: true type: string description: The OS slug to run tests against - nox-session: - required: true - type: string - description: The nox session to 
run platform: required: true type: string @@ -19,7 +15,7 @@ on: required: true type: string description: The platform arch being tested - package-type: + pkg-type: required: true type: string description: The platform arch being tested @@ -162,8 +158,10 @@ jobs: # - generate-matrix strategy: fail-fast: false - # matrix: - # include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }} + matrix: + nox-session: + - test-pkgs + - test-upgrade-pkgs steps: - name: Checkout Source Code @@ -172,16 +170,12 @@ jobs: - name: Download Packages uses: actions/download-artifact@v3 with: - name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-rpm-packages + name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }}-packages path: pkg/artifacts/ - name: List package artifacts run: | - cd pkg/artifacts - tree . - mv salt-${{ inputs.salt-version }}-${{ inputs.arch }}-rpm-packages/* . - tree . - rm -rf salt-${{ inputs.salt-version }}-${{ inputs.arch }}-rpm-packages + tree pkg/artifacts # - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} # uses: actions/cache@v3 @@ -196,14 +190,6 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts - # - name: Define Nox Session - # run: | - # if [ "${{ matrix.transport }}" != "tcp" ]; then - # echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" - # else - # echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" - # fi - # - name: Download testrun-changed-files.txt # if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} # uses: actions/download-artifact@v3 @@ -238,7 +224,7 @@ jobs: - name: Run Package Tests run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test \ - --nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} + --nox-session=${{ matrix.nox-session }} --rerun-failures ${{ inputs.distro-slug }} # tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ # --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ # ${{ matrix.tests-chunk }} -- --run-slow From eb280fb7b1a09740b0cbeea0a8d456a43779f9c8 Mon Sep 17 00:00:00 2001 From: Caleb Beard <53276404+MKLeb@users.noreply.github.com> Date: Wed, 25 Jan 2023 19:50:58 -0500 Subject: [PATCH 23/55] make sure we give pkg-type as an input --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c244d931107b..0b7478302da9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -288,7 +288,7 @@ jobs: platform: linux arch: x86_64 salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" - package-type: rpm + pkg-type: rpm windows-2016: name: Windows 2016 From dbea473622d0e7009538ff08a0f67cdfd61a8aaa Mon Sep 17 00:00:00 2001 From: Caleb Beard <53276404+MKLeb@users.noreply.github.com> Date: Wed, 25 Jan 2023 20:55:28 -0500 Subject: [PATCH 24/55] install rpmdevtools first --- .github/workflows/test-packages-action.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index b140093bac53..7d4e3e3fb6b2 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -173,8 +173,9 @@ jobs: name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }}-packages path: pkg/artifacts/ - - name: List 
package artifacts + - name: List Packages run: | + yum install rpmdevtools -y tree pkg/artifacts # - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} From 35ac16fa36717efc675edbd6aa9a63fd9bd6bc93 Mon Sep 17 00:00:00 2001 From: Caleb Beard <53276404+MKLeb@users.noreply.github.com> Date: Wed, 25 Jan 2023 21:30:55 -0500 Subject: [PATCH 25/55] Actually install rpmdevtools on the right machine this time --- .github/workflows/test-packages-action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 7d4e3e3fb6b2..3aa8d1bb6121 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -175,7 +175,6 @@ jobs: - name: List Packages run: | - yum install rpmdevtools -y tree pkg/artifacts # - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} @@ -224,6 +223,7 @@ jobs: - name: Run Package Tests run: | + tools --timestamps vm ssh ${{ inputs.distro-slug }} -- yum install rpmdevtools -y || true tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test \ --nox-session=${{ matrix.nox-session }} --rerun-failures ${{ inputs.distro-slug }} # tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ From 7b3cde8b527bb3fc69a967ab9489019428b5928c Mon Sep 17 00:00:00 2001 From: Caleb Beard <53276404+MKLeb@users.noreply.github.com> Date: Wed, 25 Jan 2023 22:17:24 -0500 Subject: [PATCH 26/55] install rpmdevtools as sudo --- .github/workflows/test-packages-action.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 3aa8d1bb6121..9d622ab4e286 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -223,7 +223,7 @@ jobs: - name: Run Package Tests run: | - tools --timestamps vm ssh ${{ inputs.distro-slug }} -- yum install rpmdevtools -y || true + tools --timestamps vm ssh --sudo ${{ inputs.distro-slug }} -- yum install rpmdevtools -y || true tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test \ --nox-session=${{ matrix.nox-session }} --rerun-failures ${{ inputs.distro-slug }} # tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ From e01e8d68ad71715895f7b5c108aef5a051705591 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 26 Jan 2023 12:04:15 -0500 Subject: [PATCH 27/55] Try running the package tests on centosstream 8 and 9 --- .github/workflows/ci.yml | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0b7478302da9..398a124128a2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -290,6 +290,35 @@ jobs: salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" pkg-type: rpm + centosstream-8-pkg-tests: + name: CentOS 8 Stream Package Tests + if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-ci + - build-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-8 + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + pkg-type: rpm + + centosstream-9-pkg-tests: + name: CentOS 9 Stream Package Tests + if: ${{ 
fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-ci + - build-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9 + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + pkg-type: rpm + + windows-2016: name: Windows 2016 if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} From 5608c56be33e01f22980fb9067e773f4b15e8b54 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 26 Jan 2023 13:44:48 -0500 Subject: [PATCH 28/55] Build RPMs on centos 7 and add fedora 36 test --- .github/workflows/build-rpm-packages.yml | 2 +- .github/workflows/ci.yml | 14 ++++++++++++++ noxfile.py | 12 +++++------- 3 files changed, 20 insertions(+), 8 deletions(-) diff --git a/.github/workflows/build-rpm-packages.yml b/.github/workflows/build-rpm-packages.yml index 4b1ec05ddcbc..c476555ba589 100644 --- a/.github/workflows/build-rpm-packages.yml +++ b/.github/workflows/build-rpm-packages.yml @@ -27,7 +27,7 @@ jobs: - aarch64 container: - image: ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9 + image: centos:7 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 398a124128a2..0061dca18766 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -319,6 +319,20 @@ jobs: pkg-type: rpm + fedora-36-pkg-tests: + name: Fedora 36 Package Tests + if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-ci + - build-salt-onedir + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: fedora-36 + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + pkg-type: rpm + windows-2016: name: Windows 2016 if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} diff --git a/noxfile.py b/noxfile.py index 8c5867a812f3..c0ff3991cdc6 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1760,8 +1760,7 @@ def build(session): @nox.session(python=_PYTHON_VERSIONS, name="test-pkgs") -@nox.parametrize("coverage", [False, True]) -def test_pkgs(session, coverage): +def test_pkgs(session): """ pytest pkg tests session """ @@ -1776,13 +1775,12 @@ def test_pkgs(session, coverage): session.install(*install_command, silent=PIP_INSTALL_SILENT) cmd_args = ["pkg/tests/"] + session.posargs - _pytest(session, coverage, cmd_args) + _pytest(session, False, cmd_args) @nox.session(python=_PYTHON_VERSIONS, name="test-upgrade-pkgs") -@nox.parametrize("coverage", [False, True]) @nox.parametrize("classic", [False, True]) -def test_upgrade_pkgs(session, coverage, classic): +def test_upgrade_pkgs(session, classic): """ pytest pkg upgrade tests session """ @@ -1804,9 +1802,9 @@ def test_upgrade_pkgs(session, coverage, classic): if classic: cmd_args = cmd_args + ["--classic"] try: - _pytest(session, coverage, cmd_args) + _pytest(session, False, cmd_args) except nox.command.CommandFailed: sys.exit(0) cmd_args = ["pkg/tests/", "--no-install"] + session.posargs - _pytest(session, coverage, cmd_args) + _pytest(session, False, cmd_args) From c335552824db57996541ea1bc739cb86f60a39c5 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 26 Jan 2023 13:47:07 -0500 Subject: [PATCH 29/55] don't install rpmdevtools anymore --- .github/workflows/test-packages-action.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 9d622ab4e286..0e68b5afa720 100644 --- 
a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -223,7 +223,6 @@ jobs: - name: Run Package Tests run: | - tools --timestamps vm ssh --sudo ${{ inputs.distro-slug }} -- yum install rpmdevtools -y || true tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test \ --nox-session=${{ matrix.nox-session }} --rerun-failures ${{ inputs.distro-slug }} # tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ From 4d5b71e63b89373dad7fe20268d0905f090f5e57 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Thu, 26 Jan 2023 08:57:56 -0700 Subject: [PATCH 30/55] Do not use SLS file for enabled/disabled pkg test --- pkg/tests/files/debianbased.sls | 24 ------------ pkg/tests/files/redhatbased.sls | 14 ------- .../integration/test_enabled_disabled.py | 37 +++++-------------- 3 files changed, 10 insertions(+), 65 deletions(-) delete mode 100644 pkg/tests/files/debianbased.sls delete mode 100644 pkg/tests/files/redhatbased.sls diff --git a/pkg/tests/files/debianbased.sls b/pkg/tests/files/debianbased.sls deleted file mode 100644 index 2d1fb4cb35f5..000000000000 --- a/pkg/tests/files/debianbased.sls +++ /dev/null @@ -1,24 +0,0 @@ -{% set services_enabled = ['salt-master', 'salt-minion', 'salt-syndic', 'salt-api'] %} -{% set services_disabled = [] %} - -{% for service in services_enabled %} -check_services_enabled_{{ service }}: - service.enabled: - - name: {{ service }} -run_if_changes_{{ service }}: - cmd.run: - - name: failtest service is enabled - - onchanges: - - service: check_services_enabled_{{ service }} -{% endfor %} - -{% for service in services_disabled %} -check_services_disabled_{{ service }}: - service.disabled: - - name: {{ service }} -run_if_changes_{{ service }}: - cmd.run: - - name: failtest service is disabled - - onchanges: - - service: check_services_disabled_{{ service }} -{% endfor %} diff --git a/pkg/tests/files/redhatbased.sls b/pkg/tests/files/redhatbased.sls deleted file mode 100644 index cffa8a6146aa..000000000000 --- a/pkg/tests/files/redhatbased.sls +++ /dev/null @@ -1,14 +0,0 @@ -{% set services_enabled = [] %} -{% set services_disabled = ['salt-master', 'salt-minion', 'salt-syndic', 'salt-api'] %} - -{% for service in services_enabled %} -check_services_enabled_{{ service }}: - service.enabled: - - name: {{ service }} -{% endfor %} - -{% for service in services_disabled %} -check_services_disabled_{{ service }}: - service.disabled: - - name: {{ service }} -{% endfor %} diff --git a/pkg/tests/integration/test_enabled_disabled.py b/pkg/tests/integration/test_enabled_disabled.py index 887da53e1c05..6257766e2d54 100644 --- a/pkg/tests/integration/test_enabled_disabled.py +++ b/pkg/tests/integration/test_enabled_disabled.py @@ -10,34 +10,17 @@ def test_services(install_salt, salt_cli, salt_minion): if install_salt.compressed: pytest.skip("Skip test on single binary and onedir package") - ret = salt_cli.run("grains.get", "os_family", minion_tgt=salt_minion.id) - assert ret.returncode == 0 - assert ret.data - - state_name = desired_state = None - os_family = ret.data - - if os_family == "Debian": - state_name = "debianbased" - desired_state = "enabled" - elif os_family == "RedHat": - state_name = "redhatbased" - desired_state = "disabled" + if install_salt.distro_id in ("ubuntu", "debian"): + services_enabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"] + services_disabled = [] + elif install_salt.distro_id in ("centos", "redhat", "amzn", "fedora"): 
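+        # RPM-based distros are expected to ship the salt services disabled on install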
+ services_enabled = [] + services_disabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"] else: pytest.fail(f"Don't know how to handle os_family={os_family}") - ret = salt_cli.run("state.apply", state_name, minion_tgt=salt_minion.id) - assert ret.returncode == 0 - assert ret.data - - expected_in_comment = f"is already {desired_state}, and is in the desired state" + for service in services_enabled: + assert salt_cli.run("service.enabled") - result = MultiStateResult(raw=ret.data) - for state_ret in result: - assert state_ret.result is True - if "__id__" not in state_ret.full_return: - # This is a state requirement - # For example: - # State was not run because none of the onchanges reqs changed - continue - assert expected_in_comment in state_ret.comment + for service in services_disabled: + assert salt_cli.run("service.disabled") From 8751e8602a42ea9f288905add00c5cdb2d7df186 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Wed, 25 Jan 2023 17:21:05 -0800 Subject: [PATCH 31/55] Changes for running pkg tests on Mac. --- pkg/tests/conftest.py | 17 +++++++++-- pkg/tests/integration/test_hash.py | 42 --------------------------- pkg/tests/integration/test_pip.py | 5 +++- pkg/tests/integration/test_version.py | 8 +++-- pkg/tests/support/helpers.py | 39 +++++++++++-------------- 5 files changed, 41 insertions(+), 70 deletions(-) delete mode 100644 pkg/tests/integration/test_hash.py diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index 4e14e05bb395..2b3dfe85f976 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -33,7 +33,12 @@ def version(): artifact.name, ) if _version: - _version = _version.groups()[0].replace("_", "-").replace("~", "") + _version = ( + _version.groups()[0] + .replace("_", "-") + .replace("~", "") + .replace("-py3-x86-64", "") + ) break return _version @@ -83,7 +88,11 @@ def salt_factories_root_dir(request, tmp_path_factory): if root_dir is not None: yield root_dir else: - root_dir = tmp_path_factory.mktemp("salt-tests") + if platform.is_darwin(): + root_dir = pathlib.Path("/tmp/salt-tests-tmpdir") + root_dir.mkdir(mode=0o777, parents=True, exist_ok=True) + else: + root_dir = tmp_path_factory.mktemp("salt-tests") try: yield root_dir finally: @@ -249,6 +258,8 @@ def salt_master(salt_factories, install_salt, state_tree, pillar_tree): str(salt_factories.get_salt_log_handlers_path()), ], } + if platform.is_darwin(): + config_defaults["enable_fqdns_grains"] = False config_overrides = { "timeout": 30, "file_roots": state_tree.as_dict(), @@ -291,6 +302,8 @@ def salt_minion(salt_master, install_salt): "engines_dirs": salt_master.config["engines_dirs"].copy(), "log_handlers_dirs": salt_master.config["log_handlers_dirs"].copy(), } + if platform.is_darwin(): + config_defaults["enable_fqdns_grains"] = False config_overrides = { "id": minion_id, "file_roots": salt_master.config["file_roots"].copy(), diff --git a/pkg/tests/integration/test_hash.py b/pkg/tests/integration/test_hash.py deleted file mode 100644 index 026246e68088..000000000000 --- a/pkg/tests/integration/test_hash.py +++ /dev/null @@ -1,42 +0,0 @@ -import hashlib -import logging -import sys - -import pytest - -log = logging.getLogger(__name__) - - -@pytest.mark.usefixtures("version") -def test_hashes(install_salt, salt_cli, salt_minion): - """ - Test the hashes generated for both single binary - and the onedir packages. 
- """ - if not install_salt.compressed: - pytest.skip("This test requires the single binary or onedir package") - - hashes = install_salt.salt_hashes - pkg = install_salt.pkgs[0] - - with open(pkg, "rb") as fh: - file_bytes = fh.read() - - delimiter = "/" - if sys.platform.startswith("win"): - delimiter = "\\" - - for _hash in hashes.keys(): - hash_file = hashes[_hash]["file"] - found_hash = False - with open(hash_file) as fp: - for line in fp: - if pkg.rsplit(delimiter, 1)[-1] in line: - found_hash = True - assert ( - getattr(hashlib, _hash.lower())(file_bytes).hexdigest() - == line.split()[0] - ) - - if not found_hash: - assert False, f"A {_hash} hash was not found in {hash_file} for pkg {pkg}" diff --git a/pkg/tests/integration/test_pip.py b/pkg/tests/integration/test_pip.py index 1837dcb6652a..b72477370314 100644 --- a/pkg/tests/integration/test_pip.py +++ b/pkg/tests/integration/test_pip.py @@ -11,7 +11,10 @@ def pypath(): if platform.is_windows(): return pathlib.Path(os.getenv("LocalAppData"), "salt", "bin") - return pathlib.Path(f"{os.sep}opt", "saltstack", "salt", "bin") + elif platform.is_darwin(): + return pathlib.Path(f"{os.sep}opt", "salt", "bin") + else: + return pathlib.Path(f"{os.sep}opt", "saltstack", "salt", "bin") @pytest.fixture(autouse=True) diff --git a/pkg/tests/integration/test_version.py b/pkg/tests/integration/test_version.py index f319261f9161..aef2b0158f3b 100644 --- a/pkg/tests/integration/test_version.py +++ b/pkg/tests/integration/test_version.py @@ -26,6 +26,8 @@ def test_salt_versions_report_master(install_salt): python_executable = pathlib.Path( r"C:\Program Files\Salt Project\Salt\Scripts\python.exe" ) + elif sys.platform == "darwin": + python_executable = pathlib.Path("/opt/salt/bin/python3") else: python_executable = pathlib.Path("/opt/saltstack/salt/bin/python3") py_version = subprocess.run( @@ -70,7 +72,7 @@ def test_compare_versions(version, binary, install_salt): "symlink", [ # We can't create a salt symlink because there is a salt directory - # "salt", + "salt", "salt-api", "salt-call", "salt-cloud", @@ -80,7 +82,7 @@ def test_compare_versions(version, binary, install_salt): "salt-minion", "salt-proxy", "salt-run", - "salt-spm", + "spm", "salt-ssh", "salt-syndic", ], @@ -94,7 +96,7 @@ def test_symlinks_created(version, symlink, install_salt): "This test is for the installer package only (pkg). 
It does not " "apply to the tarball" ) - ret = install_salt.proc.run(install_salt.bin_dir / symlink, "--version") + ret = install_salt.proc.run(pathlib.Path("/usr/local/sbin") / symlink, "--version") ret.stdout.matcher.fnmatch_lines([f"*{version}*"]) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index fd3f8e0faa8f..0b54651be9be 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -135,7 +135,7 @@ def _default_install_dir(self): ).resolve() elif platform.is_darwin(): # TODO: Add mac install dir path - install_dir = "" + install_dir = pathlib.Path("/opt", "salt") else: install_dir = pathlib.Path("/opt", "saltstack", "salt") return install_dir @@ -194,7 +194,6 @@ def __attrs_post_init__(self): self.onedir = True self.installer_pkg = True self.bin_dir = self.root / "salt" / "bin" - self.run_root = self.bin_dir / "run" elif file_ext == "tar.gz": with tarfile.open(f_path) as tar: # The first item will be called salt @@ -243,20 +242,21 @@ def __attrs_post_init__(self): } else: self.binary_paths = { - "salt": [str(self.run_root)], - "api": [str(self.run_root), "api"], - "call": [str(self.run_root), "call"], - "cloud": [str(self.run_root), "cloud"], - "cp": [str(self.run_root), "cp"], - "key": [str(self.run_root), "key"], - "master": [str(self.run_root), "master"], - "minion": [str(self.run_root), "minion"], - "proxy": [str(self.run_root), "proxy"], - "run": [str(self.run_root), "run"], - "ssh": [str(self.run_root), "ssh"], - "syndic": [str(self.run_root), "syndic"], - "spm": [str(self.run_root), "spm"], - "pip": [str(self.run_root), "pip"], + "salt": [self.install_dir / "salt"], + "api": [self.install_dir / "salt-api"], + "call": [self.install_dir / "salt-call"], + "cloud": [self.install_dir / "salt-cloud"], + "cp": [self.install_dir / "salt-cp"], + "key": [self.install_dir / "salt-key"], + "master": [self.install_dir / "salt-master"], + "minion": [self.install_dir / "salt-minion"], + "proxy": [self.install_dir / "salt-proxy"], + "run": [self.install_dir / "salt-run"], + "ssh": [self.install_dir / "salt-ssh"], + "syndic": [self.install_dir / "salt-syndic"], + "spm": [self.install_dir / "spm"], + "pip": [self.install_dir / "salt-pip"], + "python": [self.install_dir / "bin" / "python3"], } @staticmethod @@ -1097,16 +1097,11 @@ class PkgMixin: def get_script_path(self): if self.salt_pkg_install.compressed: - return str(self.salt_pkg_install.run_root) + return str(self.salt_pkg_install.install_dir / self.script_name) return super().get_script_path() def get_base_script_args(self): base_script_args = [] - if self.salt_pkg_install.compressed: - if self.script_name == "spm": - base_script_args.append(self.script_name) - elif self.script_name != "salt": - base_script_args.append(self.script_name.split("salt-")[-1]) base_script_args.extend(super().get_base_script_args()) return base_script_args From b66d4f2df94949517afe6026e5c754a994382fc2 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 26 Jan 2023 09:22:35 -0700 Subject: [PATCH 32/55] Update regex to allow for Debian family architectures and tarballs correctly --- pkg/tests/conftest.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index 2b3dfe85f976..3283b6cd73eb 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -29,7 +29,7 @@ def version(): _version = "" for artifact in ARTIFACTS_DIR.glob("**/*.*"): _version = re.search( - 
r"([0-9].*)(\-[0-9].fc|\-[0-9].el|\+ds|\-[0-9].am|(\-[0-9]-[a-z]*-[a-z]*[0-9_]*.|\-[0-9]*.*)(tar.gz|zip|exe|pkg|rpm))", + r"([0-9].*)(\-[0-9].fc|\-[0-9].el|\+ds|\_all|\_any|\_amd64|\_arm64|\-[0-9].am|(\-[0-9]-[a-z]*-[a-z]*[0-9_]*.|\-[0-9]*.*)(tar.gz|tar.xz|zip|exe|pkg|rpm|deb))", artifact.name, ) if _version: @@ -37,8 +37,8 @@ def version(): _version.groups()[0] .replace("_", "-") .replace("~", "") - .replace("-py3-x86-64", "") ) + _version = _version.split("-")[0] break return _version From dbb001a4c0738d0bbae895cd1760fb634458f70c Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 26 Jan 2023 15:11:05 -0500 Subject: [PATCH 33/55] Add the rest of the linux package tests to the pipeline --- .github/workflows/ci.yml | 127 ++++++++++++++++++++- .github/workflows/test-packages-action.yml | 4 +- 2 files changed, 124 insertions(+), 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0061dca18766..61f5f330d590 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -276,6 +276,23 @@ jobs: self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }} +# <-------------------------------- PACKAGE TESTS --------------------------------> +# TODO: Extract these out later + + amazonlinux-2-pkg-tests: + name: Amazon Linux 2 Package Tests + if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-ci + - build-salt-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2 + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + pkg-type: rpm + centos-7-pkg-tests: name: CentOS 7 Package Tests if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} @@ -318,20 +335,120 @@ jobs: salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" pkg-type: rpm + debian-10-pkg-tests: + name: Debian 10 Package Tests + if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-ci + - build-salt-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-10 + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + pkg-type: deb - fedora-36-pkg-tests: - name: Fedora 36 Package Tests + debian-11-pkg-tests: + name: Debian 11 Package Tests if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} needs: - prepare-ci - - build-salt-onedir + - build-salt-pkgs uses: ./.github/workflows/test-packages-action.yml with: - distro-slug: fedora-36 + distro-slug: debian-11 platform: linux arch: x86_64 salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" - pkg-type: rpm + pkg-type: deb + + debian-11-arm64-pkg-tests: + name: Debian 11 Arm64 Package Tests + if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-ci + - build-salt-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-11-arm64 + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + pkg-type: deb + + + ubuntu-1804-pkg-tests: + name: Ubuntu 18.04 Package Tests + if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-ci + - build-salt-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: ubuntu-18.04 + platform: linux + arch: x86_64 + salt-version: "${{ 
needs.prepare-ci.outputs.salt-version }}" + pkg-type: deb + + ubuntu-2004-pkg-tests: + name: Ubuntu 20.04 Package Tests + if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-ci + - build-salt-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: ubuntu-20.04 + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + pkg-type: deb + + ubuntu-2004-arm64-pkg-tests: + name: Ubuntu 20.04 Arm64 Package Tests + if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-ci + - build-salt-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: ubuntu-20.04-arm64 + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + pkg-type: deb + + ubuntu-2204-pkg-tests: + name: Ubuntu 22.04 Package Tests + if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-ci + - build-salt-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: ubuntu-22.04 + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + pkg-type: deb + + ubuntu-2204-arm64-pkg-tests: + name: Ubuntu 22.04 Arm64 Package Tests + if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-ci + - build-salt-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: ubuntu-22.04-arm64 + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + pkg-type: deb + +# <-------------------------------- PACKAGE TESTS --------------------------------> windows-2016: name: Windows 2016 diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 0e68b5afa720..74010ed814fa 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -160,8 +160,8 @@ jobs: fail-fast: false matrix: nox-session: - - test-pkgs - - test-upgrade-pkgs + - test-pkgs-3 + - test-upgrade-pkgs-3 steps: - name: Checkout Source Code From 1d9e732f80dbde76b4211118c7f70e6802af8fdc Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 26 Jan 2023 15:19:26 -0500 Subject: [PATCH 34/55] build-pkgs not build-salt-pkgs --- .github/workflows/ci.yml | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 61f5f330d590..8f77c54a5961 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -284,7 +284,7 @@ jobs: if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} needs: - prepare-ci - - build-salt-pkgs + - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: amazonlinux-2 @@ -340,7 +340,7 @@ jobs: if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} needs: - prepare-ci - - build-salt-pkgs + - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: debian-10 @@ -354,7 +354,7 @@ jobs: if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} needs: - prepare-ci - - build-salt-pkgs + - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: debian-11 @@ -368,7 +368,7 @@ jobs: if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} needs: - prepare-ci - - build-salt-pkgs + - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: 
debian-11-arm64 @@ -383,7 +383,7 @@ jobs: if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} needs: - prepare-ci - - build-salt-pkgs + - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-18.04 @@ -397,7 +397,7 @@ jobs: if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} needs: - prepare-ci - - build-salt-pkgs + - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-20.04 @@ -411,7 +411,7 @@ jobs: if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} needs: - prepare-ci - - build-salt-pkgs + - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-20.04-arm64 @@ -425,7 +425,7 @@ jobs: if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} needs: - prepare-ci - - build-salt-pkgs + - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-22.04 @@ -439,7 +439,7 @@ jobs: if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} needs: - prepare-ci - - build-salt-pkgs + - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-22.04-arm64 From 8f420648881f6284fca6bb603a48eb38d440de66 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 26 Jan 2023 17:09:59 -0500 Subject: [PATCH 35/55] Standardize package bucket naming and implement v1 of the macos package tests --- .github/workflows/ci.yml | 14 + .github/workflows/test-packages-action.yml | 10 +- .../workflows/test-packages-macos-action.yml | 364 ++++++++++++++++++ 3 files changed, 379 insertions(+), 9 deletions(-) create mode 100644 .github/workflows/test-packages-macos-action.yml diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8f77c54a5961..c7f9dcacf312 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -448,6 +448,20 @@ jobs: salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" pkg-type: deb + macos-12: + name: macOS 12 Package Tests + if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['github-hosted-runners'] }} + needs: + - prepare-ci + - build-pkgs + uses: ./.github/workflows/test-packages-action-macos.yml + with: + distro-slug: macos-12 + platform: darwin + arch: x86_64 + salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + pkg-type: macos + # <-------------------------------- PACKAGE TESTS --------------------------------> windows-2016: diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 74010ed814fa..46fdf504cd07 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -170,7 +170,7 @@ jobs: - name: Download Packages uses: actions/download-artifact@v3 with: - name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }}-packages + name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }} path: pkg/artifacts/ - name: List Packages @@ -225,14 +225,6 @@ jobs: run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test \ --nox-session=${{ matrix.nox-session }} --rerun-failures ${{ inputs.distro-slug }} - # tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - # --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ inputs.distro-slug }} \ - # ${{ matrix.tests-chunk }} -- --run-slow - - # - name: Combine Coverage Reports - # if: always() && steps.spin-up-vm.outcome == 'success' - # run: | - # 
tools --timestamps vm combine-coverage ${{ inputs.distro-slug }} # - name: Download Test Run Artifacts # id: download-artifacts-from-vm diff --git a/.github/workflows/test-packages-macos-action.yml b/.github/workflows/test-packages-macos-action.yml new file mode 100644 index 000000000000..bec641b7149b --- /dev/null +++ b/.github/workflows/test-packages-macos-action.yml @@ -0,0 +1,364 @@ +name: Test Artifact + +on: + workflow_call: + inputs: + distro-slug: + required: true + type: string + description: The OS slug to run tests against + platform: + required: true + type: string + description: The platform being tested + arch: + required: true + type: string + description: The platform arch being tested + pkg-type: + required: true + type: string + description: The platform arch being tested + salt-version: + type: string + required: true + description: The Salt version of the packages to install and test + python-version: + required: false + type: string + description: The python version to run tests with + default: "3.9" + + +env: + NOX_VERSION: "2022.8.7" + COLUMNS: 160 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + +jobs: + + # dependencies: + # name: Setup Test Dependencies + # needs: + # - generate-matrix + # runs-on: + # - self-hosted + # - linux + # - bastion + # timeout-minutes: 90 + # strategy: + # fail-fast: false + # matrix: + # include: ${{ fromJSON(needs.generate-matrix.outputs.transport-matrix-include) }} + # steps: + # - name: Checkout Source Code + # uses: actions/checkout@v3 + + # - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + # id: nox-dependencies-cache + # uses: actions/cache@v3 + # with: + # path: nox.${{ inputs.distro-slug }}.tar.* + # key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + + # # Skip jobs if nox.*.tar.* is already cached + # - name: Download Onedir Tarball as an Artifact + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # uses: actions/download-artifact@v3 + # with: + # name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + # path: artifacts/ + + # - name: Decompress Onedir Tarball + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # shell: bash + # run: | + # python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + # cd artifacts + # tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + + # - name: PyPi Proxy + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt + + # - name: Setup Python Tools Scripts + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # uses: ./.github/actions/setup-python-tools-scripts + + # - name: Define Nox Session + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # if [ "${{ matrix.transport }}" != "tcp" ]; then + # echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" + # else + # echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" + # fi + + # - name: Start VM + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # id: spin-up-vm + # run: | + # tools --timestamps vm create --retries=2 ${{ inputs.distro-slug }} + + # - name: 
List Free Space + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true + + # - name: Upload Checkout To VM + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools --timestamps vm rsync ${{ inputs.distro-slug }} + + # - name: Install Dependencies + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools --timestamps vm install-dependencies --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} + + # - name: Cleanup .nox Directory + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }} + + # - name: Compress .nox Directory + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }} + + # - name: Download Compressed .nox Directory + # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools --timestamps vm download-dependencies ${{ inputs.distro-slug }} + + # - name: Destroy VM + # if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true' + # run: | + # tools --timestamps vm destroy ${{ inputs.distro-slug }} + + # - name: Set Exit Status + # if: always() + # run: | + # python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + # echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-deps + + # - name: Upload Exit Status + # if: always() + # uses: actions/upload-artifact@v3 + # with: + # name: exitstatus + # path: exitstatus + # if-no-files-found: error + + test: + name: Test + runs-on: ${{ inputs.distro-slug }} + timeout-minutes: 120 # 2 Hours - More than this and something is wrong + # needs: + # - dependencies + # - generate-matrix + strategy: + fail-fast: false + matrix: + nox-session: + - test-pkgs-3 + - test-upgrade-pkgs-3 + + steps: + - name: Checkout Source Code + uses: actions/checkout@v3 + + - name: Download Packages + uses: actions/download-artifact@v3 + with: + name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }} + path: pkg/artifacts/ + + - name: Install System Dependencies + run: | + brew install tree + + - name: List Packages + run: | + tree pkg/artifacts + + - name: Set up Python ${{ inputs.python-version }} + uses: actions/setup-python@v4 + with: + python-version: "${{ inputs.python-version }}" + + - name: Install Nox + run: | + python3 -m pip install 'nox==${{ env.NOX_VERSION }}' + + # - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + # uses: actions/cache@v3 + # with: + # path: nox.${{ inputs.distro-slug }}.tar.* + # key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + + - name: PyPi Proxy + run: | + sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt + + # - name: Download testrun-changed-files.txt + # if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} + # uses: actions/download-artifact@v3 + # with: + # name: testrun-changed-files.txt + + # - name: Decompress .nox Directory + # run: | + # tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }} + + # - name: Show System Info & Test Plan + # run: | + # tools --timestamps 
--timeout-secs=1800 vm testplan --skip-requirements-install \ + # --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} \ + # ${{ matrix.tests-chunk }} + + - name: Run Package Tests + env: + SKIP_REQUIREMENTS_INSTALL: "1" + PRINT_TEST_SELECTION: "0" + PRINT_TEST_PLAN_ONLY: "0" + PRINT_SYSTEM_INFO: "0" + RERUN_FAILURES: "1" + GITHUB_ACTIONS_PIPELINE: "1" + SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" + run: | + sudo -E nox -e ${{ matrix.nox-session }} + + - name: Fix file ownership + run: | + sudo chown -R "$(id -un)" . + + # - name: Download Test Run Artifacts + # id: download-artifacts-from-vm + # if: always() && steps.spin-up-vm.outcome == 'success' + # run: | + # tools --timestamps vm download-artifacts ${{ inputs.distro-slug }} + # # Delete the salt onedir, we won't need it anymore and it will prevent + # # from it showing in the tree command below + # rm -rf artifacts/salt* + # tree -a artifacts + # mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }} + # echo "COVERAGE_FILE=artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}" >> GITHUB_ENV + + # - name: Upload Test Run Artifacts + # if: always() && steps.download-artifacts-from-vm.outcome == 'success' + # uses: actions/upload-artifact@v3 + # with: + # name: testrun-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} + # path: | + # artifacts + # !artifacts/salt/* + # !artifacts/salt-*.tar.* + + # - name: Set Exit Status + # if: always() + # run: | + # python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + # echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-${{ matrix.tests-chunk }}-tests + + # - name: Upload Exit Status + # if: always() + # uses: actions/upload-artifact@v3 + # with: + # name: exitstatus + # path: exitstatus + # if-no-files-found: error + + # report: + # name: Reports for ${{ inputs.distro-slug }}(${{ matrix.transport }}) + # runs-on: + # - self-hosted + # - linux + # - x86_64 + # if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' + # needs: + # - test + # - generate-matrix + # strategy: + # fail-fast: false + # matrix: + # include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }} + + # steps: + # - name: Checkout Source Code + # uses: actions/checkout@v3 + + # - name: Define Nox Session + # run: | + # if [ "${{ matrix.transport }}" != "tcp" ]; then + # echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" + # else + # echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" + # fi + + # - name: Download Test Run Artifacts + # id: download-test-run-artifacts + # uses: actions/download-artifact@v3 + # with: + # name: testrun-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} + # path: artifacts + + # - name: Show Test Run Artifacts + # if: always() && steps.download-test-run-artifacts.outcome == 'success' + # run: | + # tree -a artifacts + + # - name: Upload Code Coverage DB + # if: always() && steps.download-test-run-artifacts.outcome == 'success' + # uses: actions/upload-artifact@v3 + # with: + # name: code-coverage + # path: artifacts/coverage + + # - name: Set up Python 3.9 + # uses: actions/setup-python@v4 + # with: + # python-version: "3.9" + + # - name: Install Nox + # run: | + # python3 -m pip install 'nox==${{ env.NOX_VERSION }}' + + # - name: Report Salt Code Coverage + # continue-on-error: true + # run: | + # nox --force-color -e 
report-coverage -- salt + + # - name: Report Tests Code Coverage + # continue-on-error: true + # run: | + # nox --force-color -e report-coverage -- tests + + # - name: Report Combined Code Coverage + # continue-on-error: true + # run: | + # nox --force-color -e report-coverage + + # - name: Publish Test Report + # uses: mikepenz/action-junit-report@v3 + # # always run even if the previous steps fails + # if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success' + # with: + # check_name: Overall Test Results(${{ inputs.distro-slug }}) + # report_paths: 'artifacts/xml-unittests-output/*.xml' + # annotate_only: true + + # - name: Set Exit Status + # if: always() + # run: | + # python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + # echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-report + + # - name: Upload Exit Status + # if: always() + # uses: actions/upload-artifact@v3 + # with: + # name: exitstatus + # path: exitstatus + # if-no-files-found: error From 8de753f0c4b05126bacd388891ed24277c1a1fa5 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 26 Jan 2023 17:12:19 -0500 Subject: [PATCH 36/55] Give the mac package tests a unique job name --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c7f9dcacf312..9082b32faa5d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -448,7 +448,7 @@ jobs: salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" pkg-type: deb - macos-12: + macos-12-pkg-tests: name: macOS 12 Package Tests if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['github-hosted-runners'] }} needs: From e5ecdd8154514a469f4bacafd1aa2ca08941c12d Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 26 Jan 2023 17:14:00 -0500 Subject: [PATCH 37/55] Give the mac package test file the correct name --- ...t-packages-macos-action.yml => test-packages-action-macos.yml} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/workflows/{test-packages-macos-action.yml => test-packages-action-macos.yml} (100%) diff --git a/.github/workflows/test-packages-macos-action.yml b/.github/workflows/test-packages-action-macos.yml similarity index 100% rename from .github/workflows/test-packages-macos-action.yml rename to .github/workflows/test-packages-action-macos.yml From 6b1788000d6a9833fcf3676bb94c4bd833f6d082 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Fri, 27 Jan 2023 11:57:56 -0500 Subject: [PATCH 38/55] Fix macos pypi proxy logic --- .github/workflows/test-packages-action-macos.yml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index bec641b7149b..d27c729b8275 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -189,6 +189,9 @@ jobs: python-version: "${{ inputs.python-version }}" - name: Install Nox + env: + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple run: | python3 -m pip install 'nox==${{ env.NOX_VERSION }}' @@ -198,10 +201,6 @@ jobs: # path: nox.${{ inputs.distro-slug }}.tar.* # key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - - name: PyPi Proxy - run: | - sed -i 
'7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt - # - name: Download testrun-changed-files.txt # if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} # uses: actions/download-artifact@v3 @@ -227,6 +226,8 @@ jobs: RERUN_FAILURES: "1" GITHUB_ACTIONS_PIPELINE: "1" SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple run: | sudo -E nox -e ${{ matrix.nox-session }} From 41444fdbfc6a3d0c9e38ff37e3bed94c8bb25411 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Thu, 26 Jan 2023 15:33:22 -0800 Subject: [PATCH 39/55] Updates to allow upgrade tests to work on Mac. --- pkg/tests/support/helpers.py | 92 ++++++++++++++++++++------ pkg/tests/upgrade/test_salt_upgrade.py | 4 ++ 2 files changed, 77 insertions(+), 19 deletions(-) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index 0b54651be9be..80742d108a2c 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -241,23 +241,43 @@ def __attrs_post_init__(self): "python": [self.install_dir / "bin" / "python3"], } else: - self.binary_paths = { - "salt": [self.install_dir / "salt"], - "api": [self.install_dir / "salt-api"], - "call": [self.install_dir / "salt-call"], - "cloud": [self.install_dir / "salt-cloud"], - "cp": [self.install_dir / "salt-cp"], - "key": [self.install_dir / "salt-key"], - "master": [self.install_dir / "salt-master"], - "minion": [self.install_dir / "salt-minion"], - "proxy": [self.install_dir / "salt-proxy"], - "run": [self.install_dir / "salt-run"], - "ssh": [self.install_dir / "salt-ssh"], - "syndic": [self.install_dir / "salt-syndic"], - "spm": [self.install_dir / "spm"], - "pip": [self.install_dir / "salt-pip"], - "python": [self.install_dir / "bin" / "python3"], - } + if self.salt_pkg_install.run_root and os.path.exists( + self.salt_pkg_install.run_root + ): + self.binary_paths = { + "salt": [str(self.run_root)], + "api": [str(self.run_root), "api"], + "call": [str(self.run_root), "call"], + "cloud": [str(self.run_root), "cloud"], + "cp": [str(self.run_root), "cp"], + "key": [str(self.run_root), "key"], + "master": [str(self.run_root), "master"], + "minion": [str(self.run_root), "minion"], + "proxy": [str(self.run_root), "proxy"], + "run": [str(self.run_root), "run"], + "ssh": [str(self.run_root), "ssh"], + "syndic": [str(self.run_root), "syndic"], + "spm": [str(self.run_root), "spm"], + "pip": [str(self.run_root), "pip"], + } + else: + self.binary_paths = { + "salt": [self.install_dir / "salt"], + "api": [self.install_dir / "salt-api"], + "call": [self.install_dir / "salt-call"], + "cloud": [self.install_dir / "salt-cloud"], + "cp": [self.install_dir / "salt-cp"], + "key": [self.install_dir / "salt-key"], + "master": [self.install_dir / "salt-master"], + "minion": [self.install_dir / "salt-minion"], + "proxy": [self.install_dir / "salt-proxy"], + "run": [self.install_dir / "salt-run"], + "ssh": [self.install_dir / "salt-ssh"], + "syndic": [self.install_dir / "salt-syndic"], + "spm": [self.install_dir / "spm"], + "pip": [self.install_dir / "salt-pip"], + "python": [self.install_dir / "bin" / "python3"], + } @staticmethod def salt_factories_root_dir(system_service: bool = False) -> pathlib.Path: @@ -444,7 +464,10 @@ def install_previous(self): Install previous version. This is used for upgrade tests. 
""" - major_ver = "3005" + if platform.is_darwin(): + major_ver = "3005-1" + else: + major_ver = "3005" min_ver = f"{major_ver}" os_name, version, code_name = distro.linux_distribution() if os_name: @@ -540,6 +563,24 @@ def install_previous(self): self.run_root = self.bin_dir / "salt.exe" self.ssm_bin = self.bin_dir / "ssm.exe" + elif platform.is_darwin(): + mac_pkg = f"salt-{min_ver}-macos-x86_64.pkg" + mac_pkg_url = ( + f"https://repo.saltproject.io/salt/py3/macos/{major_ver}/{mac_pkg}" + ) + mac_pkg_path = f"/tmp/{mac_pkg}" + ret = self.proc.run( + "curl", + "-fsSL", + "-o", + f"/tmp/{mac_pkg}", + f"{mac_pkg_url}", + ) + self._check_retcode(ret) + + ret = self.proc.run("installer", "-pkg", mac_pkg_path, "-target", "/") + self._check_retcode(ret) + def _uninstall_compressed(self): if platform.is_windows(): if self.system_service: @@ -1097,11 +1138,24 @@ class PkgMixin: def get_script_path(self): if self.salt_pkg_install.compressed: - return str(self.salt_pkg_install.install_dir / self.script_name) + if self.salt_pkg_install.run_root and os.path.exists( + self.salt_pkg_install.run_root + ): + return str(self.salt_pkg_install.run_root) + else: + return str(self.salt_pkg_install.install_dir / self.script_name) return super().get_script_path() def get_base_script_args(self): base_script_args = [] + if self.salt_pkg_install.run_root and os.path.exists( + self.salt_pkg_install.run_root + ): + if self.salt_pkg_install.compressed: + if self.script_name == "spm": + base_script_args.append(self.script_name) + elif self.script_name != "salt": + base_script_args.append(self.script_name.split("salt-")[-1]) base_script_args.extend(super().get_base_script_args()) return base_script_args diff --git a/pkg/tests/upgrade/test_salt_upgrade.py b/pkg/tests/upgrade/test_salt_upgrade.py index eb802e734273..6e0b3821550c 100644 --- a/pkg/tests/upgrade/test_salt_upgrade.py +++ b/pkg/tests/upgrade/test_salt_upgrade.py @@ -28,6 +28,10 @@ def test_salt_upgrade(salt_call_cli, salt_minion, install_salt): assert ret.returncode == 0 assert ret.data + # install dep following upgrade + install = salt_call_cli.run("--local", "pip.install", dep) + assert install.returncode == 0 + # test pip install after an upgrade use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) assert "Authentication information could" in use_lib.stderr From 13af5abe4e98fc5f72530f396586d87552154cb4 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Fri, 27 Jan 2023 07:38:53 -0800 Subject: [PATCH 40/55] add a note about the pip install workaround --- pkg/tests/upgrade/test_salt_upgrade.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pkg/tests/upgrade/test_salt_upgrade.py b/pkg/tests/upgrade/test_salt_upgrade.py index 6e0b3821550c..c5c3771e71ed 100644 --- a/pkg/tests/upgrade/test_salt_upgrade.py +++ b/pkg/tests/upgrade/test_salt_upgrade.py @@ -29,6 +29,8 @@ def test_salt_upgrade(salt_call_cli, salt_minion, install_salt): assert ret.data # install dep following upgrade + # TODO: Remove this once we figure out how to + # preserve things installed via PIP between upgrades. 
install = salt_call_cli.run("--local", "pip.install", dep) assert install.returncode == 0 From 088c559af560a271c7786d911eefbc26e392c7e4 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Fri, 27 Jan 2023 16:10:55 -0500 Subject: [PATCH 41/55] prepare-ci is called prepare-workflow now --- .github/workflows/build-rpm-packages.yml | 2 +- .github/workflows/ci.yml | 78 +++++++++---------- .../workflows/test-packages-action-macos.yml | 1 - 3 files changed, 40 insertions(+), 41 deletions(-) diff --git a/.github/workflows/build-rpm-packages.yml b/.github/workflows/build-rpm-packages.yml index c476555ba589..f8145457582d 100644 --- a/.github/workflows/build-rpm-packages.yml +++ b/.github/workflows/build-rpm-packages.yml @@ -27,7 +27,7 @@ jobs: - aarch64 container: - image: centos:7 + image: ghcr.io/saltstack/salt-ci-containers/packaging:centos-7 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 9082b32faa5d..8cdf27283ce5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -281,185 +281,185 @@ jobs: amazonlinux-2-pkg-tests: name: Amazon Linux 2 Package Tests - if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} needs: - - prepare-ci + - prepare-workflow - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: amazonlinux-2 platform: linux arch: x86_64 - salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm centos-7-pkg-tests: name: CentOS 7 Package Tests - if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} needs: - - prepare-ci + - prepare-workflow - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: centos-7 platform: linux arch: x86_64 - salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm centosstream-8-pkg-tests: name: CentOS 8 Stream Package Tests - if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} needs: - - prepare-ci + - prepare-workflow - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: centosstream-8 platform: linux arch: x86_64 - salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm centosstream-9-pkg-tests: name: CentOS 9 Stream Package Tests - if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} needs: - - prepare-ci + - prepare-workflow - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: centosstream-9 platform: linux arch: x86_64 - salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm debian-10-pkg-tests: name: Debian 10 Package Tests - if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} needs: - - prepare-ci + - prepare-workflow - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: debian-10 platform: linux 
arch: x86_64 - salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb debian-11-pkg-tests: name: Debian 11 Package Tests - if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} needs: - - prepare-ci + - prepare-workflow - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: debian-11 platform: linux arch: x86_64 - salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb debian-11-arm64-pkg-tests: name: Debian 11 Arm64 Package Tests - if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} needs: - - prepare-ci + - prepare-workflow - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: debian-11-arm64 platform: linux arch: aarch64 - salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb ubuntu-1804-pkg-tests: name: Ubuntu 18.04 Package Tests - if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} needs: - - prepare-ci + - prepare-workflow - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-18.04 platform: linux arch: x86_64 - salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Tests - if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} needs: - - prepare-ci + - prepare-workflow - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-20.04 platform: linux arch: x86_64 - salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb ubuntu-2004-arm64-pkg-tests: name: Ubuntu 20.04 Arm64 Package Tests - if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} needs: - - prepare-ci + - prepare-workflow - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-20.04-arm64 platform: linux arch: aarch64 - salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb ubuntu-2204-pkg-tests: name: Ubuntu 22.04 Package Tests - if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} needs: - - prepare-ci + - prepare-workflow - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-22.04 platform: linux arch: x86_64 - salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb ubuntu-2204-arm64-pkg-tests: name: Ubuntu 22.04 Arm64 Package Tests - if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['self-hosted-runners'] }} + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} needs: - - prepare-ci + - prepare-workflow - build-pkgs uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-22.04-arm64 platform: linux arch: aarch64 - salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb macos-12-pkg-tests: name: macOS 12 Package Tests - if: ${{ fromJSON(needs.prepare-ci.outputs.jobs)['github-hosted-runners'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['github-hosted-runners'] }} needs: - - prepare-ci + - prepare-workflow - build-pkgs uses: ./.github/workflows/test-packages-action-macos.yml with: distro-slug: macos-12 platform: darwin arch: x86_64 - salt-version: "${{ needs.prepare-ci.outputs.salt-version }}" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: macos # <-------------------------------- PACKAGE TESTS --------------------------------> diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index d27c729b8275..a358e05e7da6 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -219,7 +219,6 @@ jobs: - name: Run Package Tests env: - SKIP_REQUIREMENTS_INSTALL: "1" PRINT_TEST_SELECTION: "0" PRINT_TEST_PLAN_ONLY: "0" PRINT_SYSTEM_INFO: "0" From 7ce44f256d7d0008a23edbee1bd5c53bab6aeec1 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Fri, 27 Jan 2023 16:17:01 -0500 Subject: [PATCH 42/55] Add windows NSIS and MSI test runs --- .github/workflows/ci.yml | 83 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 83 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8cdf27283ce5..4010033abf25 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -462,6 +462,89 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: macos + windows-2016-nsis-pkg-tests: + name: Windows 2016 NSIS Package Tests + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-workflow + - build-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: windows-2016 + platform: windows + arch: amd64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: NSIS + + windows-2016-msi-pkg-tests: + name: Windows 2016 MSI Package Tests + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-workflow + - build-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: windows-2016 + platform: windows + arch: amd64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: MSI + + windows-2019-nsis-pkg-tests: + name: Windows 2019 NSIS Package Tests + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-workflow + - build-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: windows-2019 + platform: windows + arch: amd64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: NSIS + + windows-2019-msi-pkg-tests: + name: Windows 2019 MSI Package Tests + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-workflow + - build-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: windows-2019 + platform: windows + arch: amd64 + 
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: MSI + + windows-2022-nsis-pkg-tests: + name: Windows 2022 NSIS Package Tests + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-workflow + - build-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: windows-2022 + platform: windows + arch: amd64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: NSIS + + windows-2022-msi-pkg-tests: + name: Windows 2022 MSI Package Tests + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['self-hosted-runners'] }} + needs: + - prepare-workflow + - build-pkgs + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: windows-2022 + platform: windows + arch: amd64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: MSI # <-------------------------------- PACKAGE TESTS --------------------------------> windows-2016: From 25c94ae092f2499849ffcb48b061ed56c459f3d2 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Fri, 27 Jan 2023 16:54:59 -0500 Subject: [PATCH 43/55] Enable nox dependency caching for the package tests --- .github/workflows/test-packages-action.yml | 323 ++++++++++----------- tools/ci.py | 23 ++ 2 files changed, 183 insertions(+), 163 deletions(-) diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 46fdf504cd07..39bb65b79633 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -23,6 +23,11 @@ on: type: string required: true description: The Salt version of the packages to install and test + package-name: + required: false + type: string + description: The onedir package name to use + default: salt env: @@ -30,121 +35,131 @@ env: COLUMNS: 160 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple jobs: - # dependencies: - # name: Setup Test Dependencies - # needs: - # - generate-matrix - # runs-on: - # - self-hosted - # - linux - # - bastion - # timeout-minutes: 90 - # strategy: - # fail-fast: false - # matrix: - # include: ${{ fromJSON(needs.generate-matrix.outputs.transport-matrix-include) }} - # steps: - # - name: Checkout Source Code - # uses: actions/checkout@v3 + generate-matrix: + name: Generate Package Test Matrix + runs-on: + - self-hosted + - linux + - x86_64 + outputs: + pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }} + steps: + - name: Checkout Source Code + uses: actions/checkout@v3 - # - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} - # id: nox-dependencies-cache - # uses: actions/cache@v3 - # with: - # path: nox.${{ inputs.distro-slug }}.tar.* - # key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts - # # Skip jobs if nox.*.tar.* is already cached - # - name: Download Onedir Tarball as an Artifact - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # uses: actions/download-artifact@v3 - # with: - # name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - # path: artifacts/ + - name: Generate Package Test Matrix + id: 
generate-pkg-matrix + run: | + PKG_MATRIX=$(tools ci pkg-matrix ${{ inputs.distro-slug }}) + echo "$PKG_MATRIX" + echo "matrix=$PKG_MATRIX" >> "$GITHUB_OUTPUT" + + dependencies: + name: Setup Test Dependencies + needs: + - generate-matrix + runs-on: + - self-hosted + - linux + - bastion + timeout-minutes: 90 + strategy: + fail-fast: false + matrix: + include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }} + steps: + - name: Checkout Source Code + uses: actions/checkout@v3 - # - name: Decompress Onedir Tarball - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # shell: bash - # run: | - # python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - # cd artifacts - # tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ matrix.nox-session }} + id: nox-dependencies-cache + uses: actions/cache@v3 + with: + path: nox.${{ inputs.distro-slug }}.tar.* + key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ matrix.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - # - name: PyPi Proxy - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt + # Skip jobs if nox.*.tar.* is already cached + - name: Download Onedir Tarball as an Artifact + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + path: artifacts/ - # - name: Setup Python Tools Scripts - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # uses: ./.github/actions/setup-python-tools-scripts + - name: Decompress Onedir Tarball + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + shell: bash + run: | + python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + cd artifacts + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - # - name: Define Nox Session - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # if [ "${{ matrix.transport }}" != "tcp" ]; then - # echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" - # else - # echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" - # fi + - name: Setup Python Tools Scripts + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + uses: ./.github/actions/setup-python-tools-scripts - # - name: Start VM - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # id: spin-up-vm - # run: | - # tools --timestamps vm create --retries=2 ${{ inputs.distro-slug }} + - name: Start VM + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + id: spin-up-vm + run: | + tools --timestamps vm create --retries=2 ${{ inputs.distro-slug }} - # - name: List Free Space - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true + - name: List Free Space + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true - # - name: Upload Checkout To VM - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: 
| - # tools --timestamps vm rsync ${{ inputs.distro-slug }} + - name: Upload Checkout To VM + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm rsync ${{ inputs.distro-slug }} - # - name: Install Dependencies - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # tools --timestamps vm install-dependencies --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} + - name: Install Dependencies + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm install-dependencies --nox-session=${{ matrix.nox-session }} ${{ inputs.distro-slug }} - # - name: Cleanup .nox Directory - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }} + - name: Cleanup .nox Directory + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }} - # - name: Compress .nox Directory - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }} + - name: Compress .nox Directory + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }} - # - name: Download Compressed .nox Directory - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # tools --timestamps vm download-dependencies ${{ inputs.distro-slug }} + - name: Download Compressed .nox Directory + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm download-dependencies ${{ inputs.distro-slug }} - # - name: Destroy VM - # if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # tools --timestamps vm destroy ${{ inputs.distro-slug }} + - name: Destroy VM + if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm destroy ${{ inputs.distro-slug }} - # - name: Set Exit Status - # if: always() - # run: | - # python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" - # echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-deps + - name: Set Exit Status + if: always() + run: | + python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ matrix.nox-session }}-deps - # - name: Upload Exit Status - # if: always() - # uses: actions/upload-artifact@v3 - # with: - # name: exitstatus - # path: exitstatus - # if-no-files-found: error + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error test: name: Test @@ -153,15 +168,12 @@ jobs: - linux - bastion timeout-minutes: 120 # 2 Hours - More than this and something is wrong - # needs: - # - dependencies - # - generate-matrix + needs: + - dependencies strategy: fail-fast: false matrix: - nox-session: - - test-pkgs-3 - - test-upgrade-pkgs-3 + include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }} steps: - name: Checkout Source Code @@ -170,36 +182,24 @@ jobs: - name: Download Packages uses: actions/download-artifact@v3 with: - name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }} + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type 
}} path: pkg/artifacts/ - name: List Packages run: | tree pkg/artifacts - # - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} - # uses: actions/cache@v3 - # with: - # path: nox.${{ inputs.distro-slug }}.tar.* - # key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - - - name: PyPi Proxy - run: | - sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt + - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ matrix.nox-session }} + uses: actions/cache@v3 + with: + path: nox.${{ inputs.distro-slug }}.tar.* + key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ matrix.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts - # - name: Download testrun-changed-files.txt - # if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} - # uses: actions/download-artifact@v3 - # with: - # name: testrun-changed-files.txt - - name: Start VM id: spin-up-vm - env: - TESTS_CHUNK: ${{ matrix.tests-chunk }} run: | tools --timestamps vm create --retries=2 ${{ inputs.distro-slug }} @@ -211,61 +211,58 @@ jobs: run: | tools --timestamps vm rsync ${{ inputs.distro-slug }} - # - name: Decompress .nox Directory - # run: | - # tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }} + - name: Decompress .nox Directory + run: | + tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }} - # - name: Show System Info & Test Plan - # run: | - # tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ - # --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} \ - # ${{ matrix.tests-chunk }} + - name: Show System Info & Test Plan + run: | + tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ + --nox-session=${{ matrix.nox-session }} ${{ inputs.distro-slug }} - name: Run Package Tests run: | - tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test \ + tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install\ --nox-session=${{ matrix.nox-session }} --rerun-failures ${{ inputs.distro-slug }} - # - name: Download Test Run Artifacts - # id: download-artifacts-from-vm - # if: always() && steps.spin-up-vm.outcome == 'success' - # run: | - # tools --timestamps vm download-artifacts ${{ inputs.distro-slug }} - # # Delete the salt onedir, we won't need it anymore and it will prevent - # # from it showing in the tree command below - # rm -rf artifacts/salt* - # tree -a artifacts - # mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }} - # echo "COVERAGE_FILE=artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}" >> GITHUB_ENV + - name: Download Test Run Artifacts + id: download-artifacts-from-vm + if: always() && steps.spin-up-vm.outcome == 'success' + run: | + tools --timestamps vm download-artifacts ${{ inputs.distro-slug }} + # Delete the salt onedir, we won't need it anymore and it will prevent + # from it showing in the tree command below + rm -rf artifacts/salt* + tree -a artifacts - name: Destroy VM if: 
always() run: | tools --timestamps vm destroy ${{ inputs.distro-slug }} || true - # - name: Upload Test Run Artifacts - # if: always() && steps.download-artifacts-from-vm.outcome == 'success' - # uses: actions/upload-artifact@v3 - # with: - # name: testrun-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} - # path: | - # artifacts - # !artifacts/salt/* - # !artifacts/salt-*.tar.* - - # - name: Set Exit Status - # if: always() - # run: | - # python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" - # echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-${{ matrix.tests-chunk }}-tests - - # - name: Upload Exit Status - # if: always() - # uses: actions/upload-artifact@v3 - # with: - # name: exitstatus - # path: exitstatus - # if-no-files-found: error + - name: Upload Test Run Artifacts + if: always() && steps.download-artifacts-from-vm.outcome == 'success' + uses: actions/upload-artifact@v3 + with: + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.nox-session }} + path: | + artifacts + !artifacts/salt/* + !artifacts/salt-*.tar.* + + - name: Set Exit Status + if: always() + run: | + python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ matrix.nox-session }}-tests + + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error # report: # name: Reports for ${{ inputs.distro-slug }}(${{ matrix.transport }}) diff --git a/tools/ci.py b/tools/ci.py index b3c9e07e98ab..b492b6d225c7 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -383,3 +383,26 @@ def transport_matrix(ctx: Context, distro_slug: str): _matrix.append({"transport": transport}) print(json.dumps(_matrix)) ctx.exit(0) + + +@ci.command( + name="pkg-matrix", + arguments={ + "distro_slug": { + "help": "The distribution slug to generate the matrix for", + }, + }, +) +def pkg_matrix(ctx: Context, distro_slug: str): + """ + Generate the test matrix. 
+ """ + _matrix = [] + for sess in ( + "test-pkgs-3", + "test-upgrade-pkgs-3(classic=False)", + "test-upgrade-pkgs-3(classic=False)", + ): + _matrix.append({"nox-session": sess}) + print(json.dumps(_matrix)) + ctx.exit(0) From b2988a2358a09679ae139fe5c1f7dcbe9684d65e Mon Sep 17 00:00:00 2001 From: MKLeb Date: Fri, 27 Jan 2023 18:42:23 -0500 Subject: [PATCH 44/55] Try producing architecture dependent debian packages --- pkg/debian/control | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/pkg/debian/control b/pkg/debian/control index 5a0c0c75ecec..88c1d1025ff2 100644 --- a/pkg/debian/control +++ b/pkg/debian/control @@ -15,7 +15,7 @@ Vcs-Git: git://github.com/saltstack/salt.git Package: salt-common -Architecture: all +Architecture: amd64 arm64 Depends: ${misc:Depends} Suggests: ifupdown Recommends: lsb-release @@ -41,7 +41,7 @@ Description: shared libraries that salt requires for all packages Package: salt-master -Architecture: all +Architecture: amd64 arm64 Depends: salt-common (= ${source:Version}), ${misc:Depends} Description: remote manager to administer servers via salt @@ -65,7 +65,7 @@ Description: remote manager to administer servers via salt Package: salt-minion -Architecture: all +Architecture: amd64 arm64 Depends: bsdmainutils, dctrl-tools, salt-common (= ${source:Version}), @@ -92,7 +92,7 @@ Description: client package for salt, the distributed remote execution system Package: salt-syndic -Architecture: all +Architecture: amd64 arm64 Depends: salt-master (= ${source:Version}), ${misc:Depends} Description: master-of-masters for salt, the distributed remote execution system @@ -117,7 +117,7 @@ Description: master-of-masters for salt, the distributed remote execution system Package: salt-ssh -Architecture: all +Architecture: amd64 arm64 Depends: salt-common (= ${source:Version}), openssh-client, ${misc:Depends} @@ -145,7 +145,7 @@ Description: remote manager to administer servers via Salt SSH Package: salt-cloud -Architecture: all +Architecture: amd64 arm64 Depends: salt-common (= ${source:Version}), ${misc:Depends} Description: public cloud VM management system @@ -154,7 +154,7 @@ Description: public cloud VM management system Package: salt-api -Architecture: all +Architecture: amd64 arm64 Depends: salt-master, ${misc:Depends} Description: Generic, modular network access system From 44450de44a7cab61f9a4ee1c277787a0c33994e0 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Mon, 30 Jan 2023 08:04:54 -0700 Subject: [PATCH 45/55] Migrate links to salt-common for deb pkgs --- pkg/debian/salt-common.install | 5 +++++ pkg/debian/salt-common.links | 1 + pkg/debian/salt-master.install | 4 ---- pkg/debian/salt-minion.install | 1 - pkg/debian/salt-minion.links | 1 - 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pkg/debian/salt-common.install b/pkg/debian/salt-common.install index 1a624246301e..4b612bd3aa6c 100644 --- a/pkg/debian/salt-common.install +++ b/pkg/debian/salt-common.install @@ -7,3 +7,8 @@ pkg/common/fish-completions/salt-call.fish /usr/share/fish/vendor_completions.d pkg/common/fish-completions/salt-syndic.fish /usr/share/fish/vendor_completions.d pkg/common/fish-completions/salt_common.fish /usr/share/fish/vendor_completions.d pkg/common/salt.bash /usr/share/bash-completions/completions/salt-common.bash +pkg/common/fish-completions/salt-minion.fish /usr/share/fish/vendor_completions.d +pkg/common/fish-completions/salt-key.fish /usr/share/fish/vendor_completions.d +pkg/common/fish-completions/salt-master.fish 
/usr/share/fish/vendor_completions.d +pkg/common/fish-completions/salt-run.fish /usr/share/fish/vendor_completions.d +pkg/common/fish-completions/salt.fish /usr/share/fish/vendor_completions.d diff --git a/pkg/debian/salt-common.links b/pkg/debian/salt-common.links index f1a5039416b1..a1f03163f767 100644 --- a/pkg/debian/salt-common.links +++ b/pkg/debian/salt-common.links @@ -1,2 +1,3 @@ opt/saltstack/salt/spm /usr/bin/spm opt/saltstack/salt/salt-pip /usr/bin/salt-pip +opt/saltstack/salt/salt-call /usr/bin/salt-call diff --git a/pkg/debian/salt-master.install b/pkg/debian/salt-master.install index 0ef6940970f9..1dc8a04ef55e 100644 --- a/pkg/debian/salt-master.install +++ b/pkg/debian/salt-master.install @@ -1,6 +1,2 @@ conf/master /etc/salt pkg/common/salt-master.service /lib/systemd/system -pkg/common/fish-completions/salt-master.fish /usr/share/fish/vendor_completions.d -pkg/common/fish-completions/salt-key.fish /usr/share/fish/vendor_completions.d -pkg/common/fish-completions/salt.fish /usr/share/fish/vendor_completions.d -pkg/common/fish-completions/salt-run.fish /usr/share/fish/vendor_completions.d diff --git a/pkg/debian/salt-minion.install b/pkg/debian/salt-minion.install index c6fc5d5e8c7f..4fc4633bda82 100644 --- a/pkg/debian/salt-minion.install +++ b/pkg/debian/salt-minion.install @@ -1,3 +1,2 @@ conf/minion /etc/salt pkg/common/salt-minion.service /lib/systemd/system -pkg/common/fish-completions/salt-minion.fish /usr/share/fish/vendor_completions.d diff --git a/pkg/debian/salt-minion.links b/pkg/debian/salt-minion.links index 9d9b990f53bc..9dae19eb1d3a 100644 --- a/pkg/debian/salt-minion.links +++ b/pkg/debian/salt-minion.links @@ -1,3 +1,2 @@ opt/saltstack/salt/salt-minion /usr/bin/salt-minion opt/saltstack/salt/salt-proxy /usr/bin/salt-proxy -opt/saltstack/salt/salt-call /usr/bin/salt-call From 4a8e268a9469f3499057bd5a21a5f4d7cc8ca890 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Mon, 30 Jan 2023 08:02:45 -0700 Subject: [PATCH 46/55] Add classic url's for all OS's for package tests --- pkg/tests/support/helpers.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index 80742d108a2c..6a6947711a6a 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -474,15 +474,16 @@ def install_previous(self): os_name = os_name.split()[0].lower() if os_name == "centos" or os_name == "fedora": os_name = "redhat" + root_url = "salt/py3/" + if self.classic: + root_url = "py3/" + if os_name.lower() in ["redhat", "centos", "amazon", "fedora"]: for fp in pathlib.Path("/etc", "yum.repos.d").glob("epel*"): fp.unlink() gpg_key = "SALTSTACK-GPG-KEY.pub" if version == "9": gpg_key = "SALTSTACK-GPG-KEY2.pub" - root_url = "salt/py3/" - if self.classic: - root_url = "py3/" ret = self.proc.run( "rpm", "--import", @@ -517,7 +518,7 @@ def install_previous(self): "-fsSL", "-o", "/usr/share/keyrings/salt-archive-keyring.gpg", - f"https://repo.saltproject.io/salt/py3/{os_name}/{version}/amd64/{major_ver}/salt-archive-keyring.gpg", + f"https://repo.saltproject.io/{root_url}{os_name}/{version}/amd64/{major_ver}/salt-archive-keyring.gpg", ) self._check_retcode(ret) with open( @@ -525,7 +526,7 @@ def install_previous(self): ) as fp: fp.write( "deb [signed-by=/usr/share/keyrings/salt-archive-keyring.gpg arch=amd64] " - f"https://repo.saltproject.io/salt/py3/{os_name}/{version}/amd64/{major_ver} {code_name} main" + f"https://repo.saltproject.io/{root_url}{os_name}/{version}/amd64/{major_ver} 
{code_name} main" ) ret = self.proc.run(self.pkg_mngr, "update") self._check_retcode(ret) @@ -542,6 +543,10 @@ def install_previous(self): win_pkg_url = ( f"https://repo.saltproject.io/salt/py3/windows/{major_ver}/{win_pkg}" ) + + if self.classic: + win_pkg = f"Salt-Minion-{min_ver}-1-Py3-AMD64-Setup.exe" + win_pkg_url = f"https://repo.saltproject.io/windows/{win_pkg}" pkg_path = pathlib.Path(r"C:\TEMP", win_pkg) pkg_path.parent.mkdir(exist_ok=True) ret = requests.get(win_pkg_url) @@ -568,6 +573,9 @@ def install_previous(self): mac_pkg_url = ( f"https://repo.saltproject.io/salt/py3/macos/{major_ver}/{mac_pkg}" ) + if self.classic: + mac_pkg = f"salt-{min_ver}-1-py3-x86_64.pkg" + mac_pkg_url = f"https://repo.saltproject.io/osx/{mac_pkg}" mac_pkg_path = f"/tmp/{mac_pkg}" ret = self.proc.run( "curl", From 5feb3094b77914e10698006c924dc77230a9d6c4 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Mon, 30 Jan 2023 12:14:28 -0700 Subject: [PATCH 47/55] Add ability to pass in --prev-version= to test an upgrade from that version --- pkg/tests/conftest.py | 13 +++++--- pkg/tests/support/helpers.py | 62 ++++++++++++++++++++++++++++++++++-- 2 files changed, 68 insertions(+), 7 deletions(-) diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index 3283b6cd73eb..50f30c01caad 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -33,11 +33,7 @@ def version(): artifact.name, ) if _version: - _version = ( - _version.groups()[0] - .replace("_", "-") - .replace("~", "") - ) + _version = _version.groups()[0].replace("_", "-").replace("~", "") _version = _version.split("-")[0] break return _version @@ -78,6 +74,11 @@ def pytest_addoption(parser): action="store_true", help="Test an upgrade from the classic packages.", ) + test_selection_group.addoption( + "--prev-version", + action="store", + help="Test an upgrade from the version specified.", + ) @pytest.fixture(scope="session") @@ -116,6 +117,8 @@ def install_salt(request, salt_factories_root_dir): upgrade=request.config.getoption("--upgrade"), no_uninstall=request.config.getoption("--no-uninstall"), no_install=request.config.getoption("--no-install"), + classic=request.config.getoption("--classic"), + prev_version=request.config.getoption("--prev-version"), ) as fixture: yield fixture diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index 6a6947711a6a..e44790f42888 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -14,6 +14,7 @@ import attr import distro +import packaging import psutil import pytest import requests @@ -78,6 +79,11 @@ class SaltPkgInstall: install_dir: pathlib.Path = attr.ib(init=False) binary_paths: List[pathlib.Path] = attr.ib(init=False) classic: bool = attr.ib(default=False) + prev_version: str = attr.ib() + pkg_version: str = attr.ib(default="1") + repo_data: str = attr.ib(init=False) + major: str = attr.ib(init=False) + minor: str = attr.ib(init=False) @proc.default def _default_proc(self): @@ -140,7 +146,59 @@ def _default_install_dir(self): install_dir = pathlib.Path("/opt", "saltstack", "salt") return install_dir + @repo_data.default + def _default_repo_data(self): + """ + Query to see the published Salt artifacts + from repo.json + """ + url = "https://repo.saltproject.io/salt/onedir/repo.json" + ret = requests.get(url) + data = ret.json() + return data + + def relenv(self, version): + """ + Detects if we are using relenv + onedir build + """ + relenv = False + if packaging.version.parse(version) >= packaging.version.parse("3006.0"): + relenv = True + return 
relenv + + def get_version(self): + """ + Return the version information + needed to install a previous version + of Salt. + """ + prev_version = self.prev_version + pkg_version = None + if not prev_version: + # We did not pass in a version, lets detect the latest + # version information of a Salt artifact. + latest = list(self.repo_data["latest"].keys())[0] + version = self.repo_data["latest"][latest]["version"] + if "-" in version: + prev_version, pkg_version = version.split("-") + else: + prev_version, pkg_version = version, None + else: + # We passed in a version, but lets check if the pkg_version + # is defined. Relenv pkgs do not define a pkg build number + if "-" not in prev_version and not self.relenv(version=prev_version): + pkg_numbers = [x for x in self.repo_data.keys() if prev_version in x] + pkg_version = 1 + for number in pkg_numbers: + number = int(number.split("-")[1]) + if number > pkg_version: + pkg_version = number + major, minor = prev_version.split(".") + return major, minor, prev_version, pkg_version + def __attrs_post_init__(self): + self.major, self.minor, self.prev_version, self.pkg_version = self.get_version() file_ext_re = r"tar\.gz" if platform.is_darwin(): file_ext_re = r"tar\.gz|pkg" @@ -465,9 +523,9 @@ def install_previous(self): upgrade tests. """ if platform.is_darwin(): - major_ver = "3005-1" + major_ver = f"{self.major}-{self.pkg_version}" else: - major_ver = "3005" + major_ver = self.major min_ver = f"{major_ver}" os_name, version, code_name = distro.linux_distribution() if os_name: From 2891ce6d7bb1bbce87b7880138bcce431587667b Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 31 Jan 2023 15:56:05 -0500 Subject: [PATCH 48/55] Use quotes when the nox session has parentheses --- tools/ci.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/ci.py b/tools/ci.py index b492b6d225c7..50d16696a4be 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -400,8 +400,8 @@ def pkg_matrix(ctx: Context, distro_slug: str): _matrix = [] for sess in ( "test-pkgs-3", - "test-upgrade-pkgs-3(classic=False)", - "test-upgrade-pkgs-3(classic=False)", + "'test-upgrade-pkgs-3(classic=False)'", + "'test-upgrade-pkgs-3(classic=True)'", ): _matrix.append({"nox-session": sess}) print(json.dumps(_matrix)) From fdc646364784e4ee6026cce0cbef70574cff3bc8 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 31 Jan 2023 18:00:21 -0500 Subject: [PATCH 49/55] Also depend on generate-matrix for the package tests --- .github/workflows/test-packages-action.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 39bb65b79633..5e84ecc4009e 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -169,6 +169,7 @@ jobs: - bastion timeout-minutes: 120 # 2 Hours - More than this and something is wrong needs: + - generate-matrix - dependencies strategy: fail-fast: false From 26600763e0368c08672676057ae1ae34b6dad76b Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 1 Feb 2023 07:09:21 -0500 Subject: [PATCH 50/55] Give test-packages-action.yml the cache seed --- .github/workflows/ci.yml | 19 +++++++++++++++++++ .github/workflows/test-packages-action.yml | 4 ++++ 2 files changed, 23 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4010033abf25..f79b749b9351 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -292,6 +292,7 @@ jobs: arch: x86_64 salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} centos-7-pkg-tests: name: CentOS 7 Package Tests @@ -306,6 +307,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} centosstream-8-pkg-tests: name: CentOS 8 Stream Package Tests @@ -320,6 +322,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} centosstream-9-pkg-tests: name: CentOS 9 Stream Package Tests @@ -334,6 +337,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} debian-10-pkg-tests: name: Debian 10 Package Tests @@ -348,6 +352,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} debian-11-pkg-tests: name: Debian 11 Package Tests @@ -362,6 +367,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} debian-11-arm64-pkg-tests: name: Debian 11 Arm64 Package Tests @@ -376,6 +382,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} ubuntu-1804-pkg-tests: @@ -391,6 +398,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Tests @@ -405,6 +413,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} ubuntu-2004-arm64-pkg-tests: name: Ubuntu 20.04 Arm64 Package Tests @@ -419,6 +428,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} ubuntu-2204-pkg-tests: name: Ubuntu 22.04 Package Tests @@ -433,6 +443,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} ubuntu-2204-arm64-pkg-tests: name: Ubuntu 22.04 Arm64 Package Tests @@ -447,6 +458,7 @@ jobs: arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} macos-12-pkg-tests: name: macOS 12 Package Tests @@ -461,6 +473,7 @@ jobs: arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: macos + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} windows-2016-nsis-pkg-tests: name: Windows 2016 NSIS Package Tests @@ -475,6 +488,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} windows-2016-msi-pkg-tests: name: Windows 2016 MSI Package Tests @@ -489,6 +503,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} windows-2019-nsis-pkg-tests: name: Windows 2019 NSIS Package Tests @@ 
-503,6 +518,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} windows-2019-msi-pkg-tests: name: Windows 2019 MSI Package Tests @@ -517,6 +533,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} windows-2022-nsis-pkg-tests: name: Windows 2022 NSIS Package Tests @@ -531,6 +548,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} windows-2022-msi-pkg-tests: name: Windows 2022 MSI Package Tests @@ -545,6 +563,7 @@ jobs: arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} # <-------------------------------- PACKAGE TESTS --------------------------------> windows-2016: diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 5e84ecc4009e..1e41b9e08eaa 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -23,6 +23,10 @@ on: type: string required: true description: The Salt version of the packages to install and test + cache-seed: + required: true + type: string + description: Seed used to invalidate caches package-name: required: false type: string From c0a7fa1f69881f96dfbaadbd5aa27a5a717d3adc Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 1 Feb 2023 07:18:37 -0500 Subject: [PATCH 51/55] MacOS also needs the cache seed --- .github/workflows/test-packages-action-macos.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index a358e05e7da6..fdadbf20ef96 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -23,6 +23,10 @@ on: type: string required: true description: The Salt version of the packages to install and test + cache-seed: + required: true + type: string + description: Seed used to invalidate caches python-version: required: false type: string From 07e85cca13c0615810503ae873b2523e536a61bb Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 1 Feb 2023 11:23:04 -0500 Subject: [PATCH 52/55] Fix package capture regex to catch arm64 debs --- pkg/tests/support/helpers.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index e44790f42888..11941599cbd5 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -273,7 +273,9 @@ def __attrs_post_init__(self): else: log.error("Unexpected file extension: %s", file_ext) - if re.search(r"salt(.*)(x86_64|all|amd64|aarch64)\.(rpm|deb)$", f_path): + if re.search( + r"salt(.*)(x86_64|all|amd64|aarch64|arm64)\.(rpm|deb)$", f_path + ): self.installer_pkg = True self.pkgs.append(f_path) From d1d000d0089f4ce8b3b67e7afeee335893eea141 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 1 Feb 2023 12:09:24 -0500 Subject: [PATCH 53/55] Add tests reports for the package tests --- .github/workflows/test-packages-action.yml | 145 ++++++++------------- 1 file changed, 53 insertions(+), 92 deletions(-) diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 1e41b9e08eaa..7a107570eaec 100644 --- 
a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -269,95 +269,56 @@ jobs: path: exitstatus if-no-files-found: error - # report: - # name: Reports for ${{ inputs.distro-slug }}(${{ matrix.transport }}) - # runs-on: - # - self-hosted - # - linux - # - x86_64 - # if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' - # needs: - # - test - # - generate-matrix - # strategy: - # fail-fast: false - # matrix: - # include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }} - - # steps: - # - name: Checkout Source Code - # uses: actions/checkout@v3 - - # - name: Define Nox Session - # run: | - # if [ "${{ matrix.transport }}" != "tcp" ]; then - # echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" - # else - # echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" - # fi - - # - name: Download Test Run Artifacts - # id: download-test-run-artifacts - # uses: actions/download-artifact@v3 - # with: - # name: testrun-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} - # path: artifacts - - # - name: Show Test Run Artifacts - # if: always() && steps.download-test-run-artifacts.outcome == 'success' - # run: | - # tree -a artifacts - - # - name: Upload Code Coverage DB - # if: always() && steps.download-test-run-artifacts.outcome == 'success' - # uses: actions/upload-artifact@v3 - # with: - # name: code-coverage - # path: artifacts/coverage - - # - name: Set up Python 3.9 - # uses: actions/setup-python@v4 - # with: - # python-version: "3.9" - - # - name: Install Nox - # run: | - # python3 -m pip install 'nox==${{ env.NOX_VERSION }}' - - # - name: Report Salt Code Coverage - # continue-on-error: true - # run: | - # nox --force-color -e report-coverage -- salt - - # - name: Report Tests Code Coverage - # continue-on-error: true - # run: | - # nox --force-color -e report-coverage -- tests - - # - name: Report Combined Code Coverage - # continue-on-error: true - # run: | - # nox --force-color -e report-coverage - - # - name: Publish Test Report - # uses: mikepenz/action-junit-report@v3 - # # always run even if the previous steps fails - # if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success' - # with: - # check_name: Overall Test Results(${{ inputs.distro-slug }}) - # report_paths: 'artifacts/xml-unittests-output/*.xml' - # annotate_only: true - - # - name: Set Exit Status - # if: always() - # run: | - # python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" - # echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-report - - # - name: Upload Exit Status - # if: always() - # uses: actions/upload-artifact@v3 - # with: - # name: exitstatus - # path: exitstatus - # if-no-files-found: error + report: + name: Reports for ${{ inputs.distro-slug }}(${{ matrix.nox-session }}) + runs-on: + - self-hosted + - linux + - x86_64 + if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' + needs: + - test + - generate-matrix + strategy: + fail-fast: false + matrix: + include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }} + + steps: + - name: Checkout Source Code + uses: actions/checkout@v3 + + - name: Download Test Run Artifacts + id: download-test-run-artifacts + uses: actions/download-artifact@v3 + with: + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.nox-session }} + path: artifacts + + - name: Show Test Run Artifacts + if: always() && 
steps.download-test-run-artifacts.outcome == 'success' + run: | + tree -a artifacts + + - name: Publish Test Report + uses: mikepenz/action-junit-report@v3 + # always run even if the previous steps fails + if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success' + with: + check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.nox-session }}) + report_paths: 'artifacts/xml-unittests-output/*.xml' + annotate_only: true + + - name: Set Exit Status + if: always() + run: | + python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-report + + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error From c7b42cbaff5529a2a6b4f70bb815961a89f67e79 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 1 Feb 2023 12:52:41 -0500 Subject: [PATCH 54/55] Complete macOS package test pipeline to be analogous to linux and windows --- .../workflows/test-packages-action-macos.yml | 499 ++++++++---------- .github/workflows/test-packages-action.yml | 2 +- 2 files changed, 230 insertions(+), 271 deletions(-) diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index fdadbf20ef96..749577494089 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -32,6 +32,11 @@ on: type: string description: The python version to run tests with default: "3.9" + package-name: + required: false + type: string + description: The onedir package name to use + default: salt env: @@ -39,135 +44,130 @@ env: COLUMNS: 160 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple jobs: - # dependencies: - # name: Setup Test Dependencies - # needs: - # - generate-matrix - # runs-on: - # - self-hosted - # - linux - # - bastion - # timeout-minutes: 90 - # strategy: - # fail-fast: false - # matrix: - # include: ${{ fromJSON(needs.generate-matrix.outputs.transport-matrix-include) }} - # steps: - # - name: Checkout Source Code - # uses: actions/checkout@v3 - - # - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} - # id: nox-dependencies-cache - # uses: actions/cache@v3 - # with: - # path: nox.${{ inputs.distro-slug }}.tar.* - # key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - - # # Skip jobs if nox.*.tar.* is already cached - # - name: Download Onedir Tarball as an Artifact - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # uses: actions/download-artifact@v3 - # with: - # name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - # path: artifacts/ - - # - name: Decompress Onedir Tarball - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # shell: bash - # run: | - # python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - # cd artifacts - # tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - - # - name: PyPi Proxy - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # sed -i 
'7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt - - # - name: Setup Python Tools Scripts - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # uses: ./.github/actions/setup-python-tools-scripts - - # - name: Define Nox Session - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # if [ "${{ matrix.transport }}" != "tcp" ]; then - # echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" - # else - # echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" - # fi - - # - name: Start VM - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # id: spin-up-vm - # run: | - # tools --timestamps vm create --retries=2 ${{ inputs.distro-slug }} - - # - name: List Free Space - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true - - # - name: Upload Checkout To VM - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # tools --timestamps vm rsync ${{ inputs.distro-slug }} - - # - name: Install Dependencies - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # tools --timestamps vm install-dependencies --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} - - # - name: Cleanup .nox Directory - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }} - - # - name: Compress .nox Directory - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }} - - # - name: Download Compressed .nox Directory - # if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # tools --timestamps vm download-dependencies ${{ inputs.distro-slug }} - - # - name: Destroy VM - # if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true' - # run: | - # tools --timestamps vm destroy ${{ inputs.distro-slug }} - - # - name: Set Exit Status - # if: always() - # run: | - # python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" - # echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-deps - - # - name: Upload Exit Status - # if: always() - # uses: actions/upload-artifact@v3 - # with: - # name: exitstatus - # path: exitstatus - # if-no-files-found: error + generate-matrix: + name: Generate Package Test Matrix + runs-on: ubuntu-latest + outputs: + pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }} + steps: + - name: Checkout Source Code + uses: actions/checkout@v3 + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + + - name: Generate Package Test Matrix + id: generate-pkg-matrix + run: | + PKG_MATRIX=$(tools ci pkg-matrix ${{ inputs.distro-slug }}) + echo "$PKG_MATRIX" + echo "matrix=$PKG_MATRIX" >> "$GITHUB_OUTPUT" + + dependencies: + name: Setup Test Dependencies + needs: + - generate-matrix + runs-on: ${{ inputs.distro-slug }} + timeout-minutes: 90 + strategy: + fail-fast: false + matrix: + include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }} + steps: + - name: Checkout Source Code + uses: actions/checkout@v3 + + - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ matrix.nox-session }} + id: nox-dependencies-cache + uses: actions/cache@v3 + with: + path: nox.${{ inputs.distro-slug 
}}.tar.* + key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ matrix.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + + # Skip jobs if nox.*.tar.* is already cached + - name: Download Onedir Tarball as an Artifact + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + path: artifacts/ + + - name: Decompress Onedir Tarball + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + shell: bash + run: | + python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + cd artifacts + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + + - name: Set up Python ${{ inputs.python-version }} + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + uses: actions/setup-python@v4 + with: + python-version: "${{ inputs.python-version }}" + + - name: Install System Dependencies + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + brew install openssl@3 + + - name: Install Nox + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + python3 -m pip install 'nox==${{ env.NOX_VERSION }}' + + - name: Install Dependencies + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + env: + PRINT_TEST_SELECTION: "0" + PRINT_SYSTEM_INFO: "0" + run: | + export PYCURL_SSL_LIBRARY=openssl + export LDFLAGS="-L/usr/local/opt/openssl@3/lib" + export CPPFLAGS="-I/usr/local/opt/openssl@3/include" + export PKG_CONFIG_PATH="/usr/local/opt/openssl@3/lib/pkgconfig" + nox --install-only -e ${{ matrix.nox-session }} + + - name: Cleanup .nox Directory + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + nox -e "pre-archive-cleanup(pkg=False)" + + - name: Compress .nox Directory + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + nox -e compress-dependencies -- ${{ inputs.distro-slug }} + + - name: Set Exit Status + if: always() + run: | + python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ matrix.nox-session }}-deps + + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error test: name: Test runs-on: ${{ inputs.distro-slug }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong - # needs: - # - dependencies - # - generate-matrix + needs: + - dependencies + - generate-matrix strategy: fail-fast: false matrix: - nox-session: - - test-pkgs-3 - - test-upgrade-pkgs-3 + include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }} steps: - name: Checkout Source Code @@ -193,44 +193,39 @@ jobs: python-version: "${{ inputs.python-version }}" - name: Install Nox - env: - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple run: | python3 -m pip install 'nox==${{ env.NOX_VERSION }}' - # - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} - # uses: actions/cache@v3 - # with: - # path: nox.${{ inputs.distro-slug }}.tar.* - # key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - - # - 
name: Download testrun-changed-files.txt - # if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} - # uses: actions/download-artifact@v3 - # with: - # name: testrun-changed-files.txt + - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ matrix.nox-session }} + uses: actions/cache@v3 + with: + path: nox.${{ inputs.distro-slug }}.tar.* + key: ${{ inputs.cache-seed }}|testrun-deps|${{ inputs.distro-slug }}|${{ matrix.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - # - name: Decompress .nox Directory - # run: | - # tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }} + - name: Decompress .nox Directory + run: | + nox -e decompress-dependencies -- ${{ inputs.distro-slug }} - # - name: Show System Info & Test Plan - # run: | - # tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ - # --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} \ - # ${{ matrix.tests-chunk }} + - name: Show System Info & Test Plan + env: + SKIP_REQUIREMENTS_INSTALL: "1" + PRINT_TEST_SELECTION: "1" + PRINT_TEST_PLAN_ONLY: "1" + PRINT_SYSTEM_INFO: "1" + GITHUB_ACTIONS_PIPELINE: "1" + SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" + run: | + sudo -E nox -e ${{ matrix.nox-session }} - name: Run Package Tests env: + SKIP_REQUIREMENTS_INSTALL: "1" PRINT_TEST_SELECTION: "0" PRINT_TEST_PLAN_ONLY: "0" PRINT_SYSTEM_INFO: "0" RERUN_FAILURES: "1" GITHUB_ACTIONS_PIPELINE: "1" SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple run: | sudo -E nox -e ${{ matrix.nox-session }} @@ -238,131 +233,95 @@ jobs: run: | sudo chown -R "$(id -un)" . - # - name: Download Test Run Artifacts - # id: download-artifacts-from-vm - # if: always() && steps.spin-up-vm.outcome == 'success' - # run: | - # tools --timestamps vm download-artifacts ${{ inputs.distro-slug }} - # # Delete the salt onedir, we won't need it anymore and it will prevent - # # from it showing in the tree command below - # rm -rf artifacts/salt* - # tree -a artifacts - # mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }} - # echo "COVERAGE_FILE=artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}" >> GITHUB_ENV - - # - name: Upload Test Run Artifacts - # if: always() && steps.download-artifacts-from-vm.outcome == 'success' - # uses: actions/upload-artifact@v3 - # with: - # name: testrun-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} - # path: | - # artifacts - # !artifacts/salt/* - # !artifacts/salt-*.tar.* - - # - name: Set Exit Status - # if: always() - # run: | - # python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" - # echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-${{ matrix.tests-chunk }}-tests - - # - name: Upload Exit Status - # if: always() - # uses: actions/upload-artifact@v3 - # with: - # name: exitstatus - # path: exitstatus - # if-no-files-found: error - - # report: - # name: Reports for ${{ inputs.distro-slug }}(${{ matrix.transport }}) - # runs-on: - # - self-hosted - # - linux - # - x86_64 - # if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' - # needs: - # - test - # - generate-matrix - # strategy: - # fail-fast: false - # matrix: - # include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }} - - # 
steps: - # - name: Checkout Source Code - # uses: actions/checkout@v3 - - # - name: Define Nox Session - # run: | - # if [ "${{ matrix.transport }}" != "tcp" ]; then - # echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" - # else - # echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" - # fi - - # - name: Download Test Run Artifacts - # id: download-test-run-artifacts - # uses: actions/download-artifact@v3 - # with: - # name: testrun-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} - # path: artifacts - - # - name: Show Test Run Artifacts - # if: always() && steps.download-test-run-artifacts.outcome == 'success' - # run: | - # tree -a artifacts - - # - name: Upload Code Coverage DB - # if: always() && steps.download-test-run-artifacts.outcome == 'success' - # uses: actions/upload-artifact@v3 - # with: - # name: code-coverage - # path: artifacts/coverage - - # - name: Set up Python 3.9 - # uses: actions/setup-python@v4 - # with: - # python-version: "3.9" - - # - name: Install Nox - # run: | - # python3 -m pip install 'nox==${{ env.NOX_VERSION }}' - - # - name: Report Salt Code Coverage - # continue-on-error: true - # run: | - # nox --force-color -e report-coverage -- salt - - # - name: Report Tests Code Coverage - # continue-on-error: true - # run: | - # nox --force-color -e report-coverage -- tests - - # - name: Report Combined Code Coverage - # continue-on-error: true - # run: | - # nox --force-color -e report-coverage - - # - name: Publish Test Report - # uses: mikepenz/action-junit-report@v3 - # # always run even if the previous steps fails - # if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success' - # with: - # check_name: Overall Test Results(${{ inputs.distro-slug }}) - # report_paths: 'artifacts/xml-unittests-output/*.xml' - # annotate_only: true - - # - name: Set Exit Status - # if: always() - # run: | - # python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" - # echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-report - - # - name: Upload Exit Status - # if: always() - # uses: actions/upload-artifact@v3 - # with: - # name: exitstatus - # path: exitstatus - # if-no-files-found: error + - name: Prepare Test Run Artifacts + id: download-artifacts-from-vm + if: always() && job.status != 'cancelled' + run: | + # Delete the salt onedir, we won't need it anymore and it will prevent + # from it showing in the tree command below + rm -rf artifacts/salt* + tree -a artifacts + + - name: Upload Test Run Artifacts + if: always() && job.status != 'cancelled' + uses: actions/upload-artifact@v3 + with: + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.nox-session }} + path: | + artifacts + !artifacts/salt/* + !artifacts/salt-*.tar.* + + - name: Set Exit Status + if: always() + run: | + python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ matrix.nox-session }}-tests + + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error + + report: + name: Reports for ${{ inputs.distro-slug }}(${{ matrix.nox-session }}) + runs-on: ubuntu-latest + if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' + needs: + - test + - generate-matrix + strategy: + fail-fast: false + matrix: + include: ${{ 
fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }} + + steps: + - name: Checkout Source Code + uses: actions/checkout@v3 + + - name: Download Test Run Artifacts + id: download-test-run-artifacts + uses: actions/download-artifact@v3 + with: + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.nox-session }} + path: artifacts + + - name: Show Test Run Artifacts + if: always() && steps.download-test-run-artifacts.outcome == 'success' + run: | + tree -a artifacts + + - name: Set up Python 3.9 + uses: actions/setup-python@v4 + with: + python-version: "3.9" + + - name: Install Nox + run: | + python3 -m pip install 'nox==${{ env.NOX_VERSION }}' + + - name: Publish Test Report + uses: mikepenz/action-junit-report@v3 + # always run even if the previous steps fails + if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success' + with: + check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.nox-session }}) + report_paths: 'artifacts/xml-unittests-output/*.xml' + annotate_only: true + + - name: Set Exit Status + if: always() + run: | + python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" + echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ matrix.nox-session }}-report + + - name: Upload Exit Status + if: always() + uses: actions/upload-artifact@v3 + with: + name: exitstatus + path: exitstatus + if-no-files-found: error diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 7a107570eaec..6aeb27f159c9 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -313,7 +313,7 @@ jobs: if: always() run: | python3 -c "import os; os.makedirs('exitstatus', exist_ok=True)" - echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }}-report + echo "${{ job.status }}" > exitstatus/${{ github.job }}-${{ inputs.distro-slug }}-${{ matrix.nox-session }}-report - name: Upload Exit Status if: always() From 64a2ce161041690b96b02978a315ace62b1ff082 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 1 Feb 2023 15:06:47 -0500 Subject: [PATCH 55/55] Remove trailing pytest session in test-upgrade-pkgs --- noxfile.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/noxfile.py b/noxfile.py index c0ff3991cdc6..9b4ca937a3ff 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1805,6 +1805,3 @@ def test_upgrade_pkgs(session, classic): _pytest(session, False, cmd_args) except nox.command.CommandFailed: sys.exit(0) - - cmd_args = ["pkg/tests/", "--no-install"] + session.posargs - _pytest(session, False, cmd_args)
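
Editor's note on PATCH 52/55 ("Fix package capture regex to catch arm64 debs"): the added arm64 alternative is what lets Debian arm64 packages be recognised as installer packages alongside the existing x86_64/all/amd64/aarch64 cases. A minimal sketch of the match behaviour, using the exact pattern from pkg/tests/support/helpers.py; the file names below are hypothetical and only for illustration:

    import re

    # Pattern from pkg/tests/support/helpers.py after PATCH 52/55.
    PKG_RE = re.compile(r"salt(.*)(x86_64|all|amd64|aarch64|arm64)\.(rpm|deb)$")

    # Hypothetical artifact paths, for illustration only.
    assert PKG_RE.search("artifacts/pkg/salt_3006.0_arm64.deb")      # newly matched
    assert PKG_RE.search("artifacts/pkg/salt-3006.0-1.x86_64.rpm")   # matched before and after
    assert not PKG_RE.search("artifacts/pkg/salt-3006.0.tar.gz")     # non-package files are skipped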
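
Editor's note on PATCH 53/55 and PATCH 54/55: the generate-matrix jobs feed fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) from "tools ci pkg-matrix <distro-slug>". The tool's exact output is not shown in this series; as an assumption for illustration only, the downstream dependencies/test/report jobs appear to consume a JSON list of include entries keyed by "nox-session", replacing the hard-coded test-pkgs-3 / test-upgrade-pkgs-3 list the macOS workflow previously carried. A rough sketch of the shape the workflow expects:

    import json

    # Assumed shape only: each include entry must at least provide the
    # "nox-session" key referenced as ${{ matrix.nox-session }} in the
    # dependencies, test and report jobs.
    pkg_matrix_include = [
        {"nox-session": "test-pkgs-3"},
        {"nox-session": "test-upgrade-pkgs-3"},
    ]

    # The workflow reads this output through fromJSON(), so whatever
    # "tools ci pkg-matrix" emits must be valid JSON on a single line.
    print(json.dumps(pkg_matrix_include))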