diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000..69ad7c2 --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,123 @@ +{ + "name": "Build", + "on": { + "push": { + "branches-ignore": "gh-pages", + "tags-ignore": "*", + }, + "pull_request": null, + }, + + "jobs": { + "linux": { + "runs-on": "ubuntu-latest", + "strategy": { + "fail-fast": false, + "matrix": { + "name": ["debian-stable", "debian-heimdal", "centos-8", + "fedora-latest"], + "include": [ + { + "name": "debian-stable", + "distro": "debian:stable", + }, + { + "name": "debian-heimdal", + "distro": "debian:stable", + "krb5_ver": "heimdal", + }, + { + "name": "centos-8", + "distro": "centos:8", + }, + { + "name": "fedora-latest", + "distro": "fedora:latest", + "flake": "yes", + }, + ], + }, + }, + "steps": [ + { + "name": "Check out code", + "uses": "actions/checkout@v2", + }, + { + "name": "Build and test gssapi", + "run": "./ci/run-on-linux.sh ./ci/build.sh", + "env": { + "DISTRO": "${{ matrix.distro }}", + "KRB5_VER": "${{ matrix.krb5_ver }}", + "FLAKE": "${{ matrix.flake }}", + }, + }, + ], + }, + + "windows": { + "runs-on": "windows-latest", + "strategy": { + "fail-fast": false, + "matrix": { + "name": [ + "win-py-3.9", + "win-py-3.8", + "win-py-3.7", + "win-py-3.6", + ], + "include": [ + { + "name": "win-py-3.9", + "pyenv": "3.9", + }, + { + "name": "win-py-3.8", + "pyenv": "3.8", + }, + { + "name": "win-py-3.7", + "pyenv": "3.7", + }, + { + "name": "win-py-3.6", + "pyenv": "3.6", + }, + ], + }, + }, + "steps": [ + { + "name": "Check out code", + "uses": "actions/checkout@v2", + }, + { + "name": "Install the right python", + "uses": "actions/setup-python@v2", + "with": { "python-version": "${{ matrix.pyenv }}" }, + }, + { + "name": "Build and test gssapi", + "shell": "bash", + "run": "./ci/build.sh", + "env": { "OS_NAME": "windows" }, + }, + ], + }, + + "macos-heimdal": { + "runs-on": "macos-latest", + "steps": [ + { + "name": "Check out code", + "uses": "actions/checkout@v2", + }, + { + "name": "Build and test gssapi", + "run": "./ci/build.sh", + "env": { "KRB5_VER": "heimdal" }, + }, + ], + }, + }, +} diff --git a/.github/workflows/deploy-docs.yml b/.github/workflows/deploy-docs.yml new file mode 100644 index 0000000..df17aba --- /dev/null +++ b/.github/workflows/deploy-docs.yml @@ -0,0 +1,30 @@ +{ + "name": "Deploy docs", + "on": { "push": { "branches": "main" }}, + "jobs": { + "update-pages": { + "runs-on": "ubuntu-latest", + "steps": [ + { + "name": "Check out code", + "uses": "actions/checkout@v2", + }, + { + "name": "Build docs", + "env": { "DISTRO": "fedora:latest" }, + "run": "./ci/run-on-linux.sh ./ci/before-docs-deploy.sh", + }, + { + "name": "Deploy latest docs", + "uses": "JamesIves/github-pages-deploy-action@3.7.1", + "with": { + "GITHUB_TOKEN": "${{ secrets.GITHUB_TOKEN }}", + "BRANCH": "gh-pages", + "FOLDER": "ci_docs_build/html", + "TARGET_FOLDER": "latest", + }, + }, + ], + }, + }, +} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..600a665 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,131 @@ +{ + "name": "Release", + "on": { "push": { "tags": "v*" }}, + "jobs": { + "release-linux": { + "runs-on": "ubuntu-latest", + "steps": [ + { + "name": "Check out code", + "uses": "actions/checkout@v2", + }, + { + "name": "Set things up", + "env": { "DISTRO": "fedora:latest" }, + "run": "./ci/run-on-linux.sh ./ci/before-deploy.sh", + }, + { + "name": "Deploy to PyPI", + "uses": 
"pypa/gh-action-pypi-publish@v1.1.0", + "with": { + "user": "rharwood", + "password": "${{ secrets.pypi_password }}", + }, + }, + { + "name": "Deploy stable docs", + "uses": "JamesIves/github-pages-deploy-action@3.7.1", + "with": { + "GITHUB_TOKEN": "${{ secrets.GITHUB_TOKEN }}", + "BRANCH": "gh-pages", + "FOLDER": "ci_docs_build/html", + "TARGET_FOLDER": "stable", + }, + }, + { + "name": "Create release", + "uses": "actions/create-release@v1", + "id": "cr", + "env": { "GITHUB_TOKEN": "${{ secrets.GITHUB_TOKEN }}" }, + "with": { + "tag_name": "${{ github.ref }}", + "release_name": "${{ github.ref }}", + }, + }, + { + "id": "tarball", + "run": "echo \"::set-output name=tarball::`ls tag_build/*.tar.gz | awk -F/ '{print $2}'`\"" + }, + { + "id": "checksum", + "run": "echo \"::set-output name=checksum::`ls tag_build/*.sha512sum | awk -F/ '{print $2}'`\"" + }, + { + "name": "Upload release tarball", + "uses": "actions/upload-release-asset@v1", + "env": { "GITHUB_TOKEN": "${{ secrets.GITHUB_TOKEN }}" }, + "with": { + "upload_url": "${{ steps.cr.outputs.upload_url }}", + "asset_path": "tag_build/${{ steps.tarball.outputs.tarball }}", + "asset_name": "${{ steps.tarball.outputs.tarball }}", + "asset_content_type": "application/octet-stream", + }, + }, + { + "name": "Upload release checksum", + "uses": "actions/upload-release-asset@v1", + "env": { "GITHUB_TOKEN": "${{ secrets.GITHUB_TOKEN }}" }, + "with": { + "upload_url": "${{ steps.cr.outputs.upload_url }}", + "asset_path": "tag_build/${{ steps.checksum.outputs.checksum }}", + "asset_name": "${{ steps.checksum.outputs.checksum }}", + "asset_content_type": "text/plain", + }, + }, + ], + }, + + "release-windows": { + "runs-on": "windows-latest", + "strategy": { + "matrix": { + "name": [ + "win-wheel-3.9", + "win-wheel-3.8", + "win-wheel-3.7", + "win-wheel-3.6", + ], + "include": [ + { + "name": "win-wheel-3.9", + "pyenv": "3.9", + }, + { + "name": "win-wheel-3.8", + "pyenv": "3.8", + }, + { + "name": "win-wheel-3.7", + "pyenv": "3.7", + }, + { + "name": "win-wheel-3.6", + "pyenv": "3.6", + }, + ], + }, + }, + "steps": [ + { + "name": "Check out code", + "uses": "actions/checkout@v2", + }, + { + "name": "Install the right python", + "uses": "actions/setup-python@v2", + "with": { "python-version": "${{ matrix.pyenv }}" }, + }, + { + "name": "Create and upload Windows wheel", + "shell": "bash", + "run": "./ci/release-win.sh", + "env": { + "OS_NAME": "windows", + "TWINE_USER": "rharwood", + "TWINE_PASSWORD": "${{ secrets.pypi_password }}", + }, + }, + ], + }, + }, +} diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 62d4569..0000000 --- a/.travis.yml +++ /dev/null @@ -1,162 +0,0 @@ -sudo: required - -# not necessary, but less confusing if defined -language: python - -services: - - docker - -# we do everything in docker for non MacOS, MacOS setup is in .travis/build.sh -install: skip -before_install: skip - -stages: -- verify -- test -- name: deploy latest docs - if: (branch = master OR branch =~ ^infra/$) AND type = push -- name: deploy - if: tag is PRESENT - -script: -- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then - sudo sed -i '1i 127.0.0.1 test.box' /etc/hosts; - sudo hostname test.box; - source ./.travis/lib-util.sh; - util::docker-run $DISTRO ./.travis/build.sh; - fi -- if [[ "$TRAVIS_OS_NAME" != "linux" ]]; then ./.travis/build.sh; fi - -jobs: - include: - - &docker_verify - stage: verify - env: DISTRO=fedora:latest PYTHON="2" - script: - - source ./.travis/lib-util.sh - - util::docker-run $DISTRO ./.travis/verify.sh - - - <<: 
*docker_verify - env: DISTRO=fedora:latest PYTHON="3" - - - # need to explictly define each builder for test due to different os types - - stage: test - env: DISTRO=debian:stable PYTHON="2" - - - stage: test - env: DISTRO=debian:stable PYTHON="3" # 3.4, not 3.5 - - - stage: test - env: DISTRO=debian:stable PYTHON="3" KRB5_VER="heimdal" - - - stage: test - env: DISTRO=centos:7 PYTHON="2" # el7 doesn't do python3 modules - - - stage: test - env: DISTRO=fedora:latest PYTHON="3" - - - stage: test - env: DISTRO=fedora:latest PYTHON="2" - - - &osx_test - stage: test - env: PYTHON="2" KRB5_VER="heimdal" PYENV="2.7.14" - os: osx - osx_image: xcode9.2 - language: generic # causes issues with pyenv installer when set to python - - - <<: *osx_test - env: PYTHON="3" KRB5_VER="heimdal" PYENV="3.6.3" - - - &win_test - stage: test - env: PYTHON="2" PYENV="2.7.16" EXTRA_BUILDEXT="--compiler=mingw32" - os: windows - language: sh # Windows not supported yet - - - <<: *win_test - env: PYTHON="3" PYENV="3.6.8" - - - <<: *win_test - env: PYTHON="3" PYENV="3.7.3" - - - - stage: deploy latest docs - script: skip - env: - - DISTRO=fedora:latest - - PYTHON="3" - - secure: L5SpEj5+no20PWwC9Y/XNhAfmUvYiuykwSMa/YyqvUuBjdizzpZcHr7Ego5nMdM1TniTxj4pSTM+GbM0FHCzNmAINSRh9g/D3hheRqlRBacqR0XwC9ZZRvkKvtzwnLh4vYWiauq4AoDeR5U6tkEcay6LjE57iMQcLjcKYBc+Eos= - before_deploy: - - source ./.travis/lib-util.sh - - util::docker-run $DISTRO ./.travis/before-docs-deploy.sh - - deploy: - - provider: script - script: .travis/docs-deploy.sh travis_docs_build/html latest pythongssapi/python-gssapi - skip_cleanup: true - on: - all_branches: true - - - - stage: deploy - script: skip - env: - - DISTRO=fedora:latest - - PYTHON="3" - - secure: L5SpEj5+no20PWwC9Y/XNhAfmUvYiuykwSMa/YyqvUuBjdizzpZcHr7Ego5nMdM1TniTxj4pSTM+GbM0FHCzNmAINSRh9g/D3hheRqlRBacqR0XwC9ZZRvkKvtzwnLh4vYWiauq4AoDeR5U6tkEcay6LjE57iMQcLjcKYBc+Eos= - before_deploy: - - source ./.travis/lib-util.sh - - util::docker-run $DISTRO ./.travis/before-deploy.sh - - ls -alR `pwd` - - deploy: - - provider: releases - api_key: - secure: fAaSSSjd/nUrIpINBjAT590pGF2nGq3I8ee6aGq6IAFpXoa/9eeN5eyOrE4MYucWCwYcH28c7510n35vuZQQor+UZIDo6l0K5M64/NZE1cZ43zOMjw3yHlrsJG+ohPS7YvjqD8GaFlLhF6ZvWvrPmWeijvs8qAT1eL7QoEG0xBk= - file_glob: true - file: - - tag_build/* - skip_cleanup: true - on: - all_branches: true - - - provider: pypi - user: rharwood - password: - secure: "hN861mjtLeC8IysypC6Pqzlazq29I+c69XGjbUR53izYQ90cz2F+B2azVTl9Su9NbXzdsGnhWZrjY1jtYMPIZE15xDaC8vs61QijFClqmyuKNRVzCt1w/sj21hyLXnYIrkAo4e3bswPF+hRGNwfb+rVrR/dqUwd1wyjZBBYMcQE=" - skip_cleanup: true - docs_dir: travis_docs_build/html - on: - all_branches: true - # NB(directxman12): this is a hack. Check ./.travis/before-deploy.sh for an explanation. 
- distributions: "check" - - - provider: script - script: .travis/docs-deploy.sh travis_docs_build/html stable pythongssapi/python-gssapi - skip_cleanup: true - on: - all_branches: true - - - &win_deploy - stage: deploy - os: windows - script: # This is egregious hacks around Travis - - ./.travis/before-deploy-windows-wheels.sh - - ./.travis/deploy-win.sh - env: - - PYTHON="2" - - PYENV="2.7.16" - - EXTRA_BUILDEXT="--compiler=mingw32" - language: sh # Travis doesn't support python here - - - <<: *win_deploy - env: - - PYTHON="3" - - PYENV="3.6.8" - - - <<: *win_deploy - env: - - PYTHON="3" - - PYENV="3.7.3" diff --git a/.travis/before-deploy-windows-wheels.sh b/.travis/before-deploy-windows-wheels.sh deleted file mode 100644 index 000d62a..0000000 --- a/.travis/before-deploy-windows-wheels.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -ex - -# See before-deploy.sh for anything unexplained - -source ./.travis/lib-setup.sh -source ./.travis/lib-deploy.sh - -./.travis/build.sh - -# Sigh, go find paths again -PYPATH="/c/Python${PYENV:0:1}${PYENV:2:1}" -export PATH="$PYPATH:$PYPATH/Scripts:/c/Program Files/MIT/Kerberos/bin:$PATH" - -# build the wheel -python -m pip install wheel -python setup.py bdist_wheel - -cd dist - -# Rename and checksum the wheel -if [ x"${TRAVIS_TAG#v[0-9]}" = "x${TRAVIS_TAG}" ]; then - PYTHON_GSSAPI_VERSION=${TRAVIS_TAG} -else - PYTHON_GSSAPI_VERSION=${TRAVIS_TAG#v} -fi - -PKG_NAME_VER=$(ls *.whl | sed "s/gssapi-[^-]*-\(.*\)\.whl/python-gssapi-${PYTHON_GSSAPI_VERSION}-\1/") - -cp *.whl "${PKG_NAME_VER}.whl" - -sha512sum --binary ./${PKG_NAME_VER}.whl > ./${PKG_NAME_VER}.sha512sum - -cd .. diff --git a/.travis/before-docs-deploy.sh b/.travis/before-docs-deploy.sh deleted file mode 100644 index 6860771..0000000 --- a/.travis/before-docs-deploy.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash -ex - -source ./.travis/lib-setup.sh -source ./.travis/lib-deploy.sh - -# build again since I can't figure out how to get travis to recognize the old -# build in the new container. The other alternative (besides actually solving -# the issue) is to run the docs build and tarball generation every time. - -./.travis/build.sh - -setup::activate - -deploy::build-docs diff --git a/.travis/build.sh b/.travis/build.sh deleted file mode 100755 index 4d16b59..0000000 --- a/.travis/build.sh +++ /dev/null @@ -1,29 +0,0 @@ -#!/bin/bash -ex - -# set up dependencies, etc -source ./.travis/lib-setup.sh -setup::install - -# always build in-place so that Sphinx can find the modules -python setup.py build_ext --inplace $EXTRA_BUILDEXT -BUILD_RES=$? - -if [ x"$KRB5_VER" = "xheimdal" ]; then - # heimdal can't run the tests yet, so just exit - exit $BUILD_RES -fi - -if [ "$TRAVIS_OS_NAME" == "windows" ]; then - # Windows can't run tests yet, so just exit - exit $BUILD_RES -fi - -if [ $BUILD_RES -ne 0 ]; then - # if the build failed, don't run the tests - exit $BUILD_RES -fi - -python setup.py nosetests --verbosity=3 -TEST_RES=$? - -exit $TEST_RES diff --git a/.travis/deploy-win.sh b/.travis/deploy-win.sh deleted file mode 100755 index b66b56d..0000000 --- a/.travis/deploy-win.sh +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -e - -# Temporary hack while issue DPL issue persists -# Manually upload wheels via twine for windows -# https://github.com/travis-ci/dpl/issues/1009 - -success="yes" - -# Sigh, go find paths again -PYPATH="/c/Python${PYENV:0:1}${PYENV:2:1}" -export PATH="$PYPATH:$PYPATH/Scripts:/c/Program Files/MIT/Kerberos/bin:$PATH" - -echo 'Running: python -m pip install twine ...' 
-python -m pip install twine - -echo 'Running: set +x; python -m twine upload...' -# Please note this cannot be set -x or passwords will leak! -set +x - -python -m twine upload -u $TWINE_USER -p $TWINE_PASSWORD dist/gssapi* > out.log 2>&1 || true - -# and restore... -set -x -egrep -i 'fail|error' out.log && cat out.log && exit 1 - -exit 0 diff --git a/.travis/docs-deploy.sh b/.travis/docs-deploy.sh deleted file mode 100755 index 998dd55..0000000 --- a/.travis/docs-deploy.sh +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -ex - -# NB (very important): BE VERY CAREFUL WITH `set -x` FOR THIS FILE. -# The GitHub token is sensitive information, and should never -# be displayed on in the clear. - -source_directory=${1?need []} -target_directory=${2?need []} -target_repo=${3?need []} -target_branch=${4:-gh-pages} - -desc=$(git describe --tags) - -scratch_dir=$(mktemp -d) - -set +x # IMPORTANT -echo "cloning https://@github.com/${target_repo}.git#${target_branch} in to ${scratch_dir}/docs..." -git clone https://${GITHUB_TOKEN}@github.com/${target_repo}.git ${scratch_dir}/docs -b ${target_branch} -set -x - -mkdir -p ${scratch_dir}/docs/${target_directory} -cp -r ${source_directory}/. ${scratch_dir}/docs/${target_directory} -echo $desc > ${scratch_dir}/docs/${target_directory}/.from -pushd $scratch_dir/docs -git config user.email "deploy@travis-ci.org" -git config user.name "Deployment Bot (from Travis CI)" - -if [[ $(git status --porcelain | wc -l) -eq 0 ]]; then - echo "no docs changes in the latest commit" - exit 0 -fi - -git add ${target_directory} -git commit -m "Update ${target_directory} docs in based on ${desc}" - -set +x # IMPORTANT -echo "pushing to https://@github.com/${target_repo}.git#${target_branch}" -git push --quiet --force-with-lease origin ${target_branch}:${target_branch} -set -x - -popd -rm -rf ${scratch_dir} -echo "done!" diff --git a/.travis/lib-util.sh b/.travis/lib-util.sh deleted file mode 100644 index 2cf476a..0000000 --- a/.travis/lib-util.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -util::docker-run() { - local distro=$1 - shift - - docker run \ - -v `pwd`:/tmp/build \ - -w /tmp/build \ - -e TRAVIS_TAG=$TRAVIS_TAG \ - -e PKG_NAME_VER=$PKG_NAME_VER \ - -e KRB5_VER=$KRB5_VER \ - -e PYTHON=$PYTHON \ - $distro \ - /bin/bash -ex $@ -} diff --git a/.travis/lib-verify.sh b/.travis/lib-verify.sh deleted file mode 100644 index fe84670..0000000 --- a/.travis/lib-verify.sh +++ /dev/null @@ -1,25 +0,0 @@ -#!/bin/bash - -verify::flake8() { - flake8 setup.py - F8_SETUP=$? - - flake8 gssapi - F8_PY=$? - - # Cython requires special flags since it is not proper Python - # E225: missing whitespace around operator - # E226: missing whitespace around arithmetic operator - # E227: missing whitespace around bitwise or shift operator - # E402: module level import not at top of file (needed for the `GSSAPI="blah" lines) - # E901: SyntaxError or IndentationError - # E999: Internal AST compilation error (flake8 specific) - flake8 gssapi --filename='*.pyx,*.pxd' --ignore=E225,E226,E227,E402,E901,E999 - F8_MAIN_CYTHON=$? 
- - if [ $F8_SETUP -eq 0 -a $F8_PY -eq 0 -a $F8_MAIN_CYTHON -eq 0 ]; then - return 0 - else - return 1 - fi -} diff --git a/.travis/verify.sh b/.travis/verify.sh deleted file mode 100644 index cf4bb36..0000000 --- a/.travis/verify.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash -ex - -# set up dependencies, etc -source ./.travis/lib-setup.sh -setup::install - -source ./.travis/lib-verify.sh -verify::flake8 diff --git a/README.rst b/README.rst index 8873b40..f1bd246 120000 --- a/README.rst +++ b/README.rst @@ -1 +1 @@ -python-gssapi-1.6.1/README.txt \ No newline at end of file +./README.txt \ No newline at end of file diff --git a/README.txt b/README.txt index 1035210..62554e9 100644 --- a/README.txt +++ b/README.txt @@ -8,9 +8,6 @@ Python-GSSAPI .. role:: bash(code) :language: bash -.. image:: https://travis-ci.org/pythongssapi/python-gssapi.svg?branch=master - :target: https://travis-ci.org/pythongssapi/python-gssapi - .. image:: https://badge.fury.io/gh/pythongssapi%2Fpython-gssapi.svg :target: http://badge.fury.io/gh/pythongssapi%2Fpython-gssapi @@ -25,7 +22,7 @@ Documentation for the latest released version (including pre-release versions) can be found at `https://pythongssapi.github.io/python-gssapi/stable `_. -Documentation for the latest commit on master can be found at +Documentation for the latest commit on main can be found at `https://pythongssapi.github.io/python-gssapi/latest `_. Requirements @@ -35,13 +32,13 @@ Basic ----- * A working implementation of GSSAPI (such as from MIT Kerberos) - which includes header files + which supports delegation and includes header files * a C compiler (such as GCC) -* either the `enum34` Python package or Python 3.4+ +* Python 3.6+ (older releases support older versions, but are unsupported) -* the `six` and `decorator` python packages +* the `decorator` python package Compiling from Scratch ---------------------- @@ -51,9 +48,7 @@ To compile from scratch, you will need Cython >= 0.21.1. For Running the Tests --------------------- -* the `nose` package (for tests) - -* the `shouldbe` package (for tests) +* the `nose` package * the `k5test` package @@ -171,10 +166,10 @@ The Team (GitHub usernames in parentheses) -* Solly Ross (@directxman12) -* Robbie Harwood (@frozencemetery) -* Simo Sorce (@simo5) -* Hugh Cole-Baker (@sigmaris) +* Robbie Harwood (@frozencemetery) - current maintainer and developer +* Simo Sorce (@simo5) - developer +* Solly Ross (@directxman12) - author emeritus +* Hugh Cole-Baker (@sigmaris) - author emeritus Get Involved ============ diff --git a/.travis/before-deploy.sh b/ci/before-deploy.sh similarity index 65% rename from .travis/before-deploy.sh rename to ci/before-deploy.sh index 91e198b..1f86b7d 100755 --- a/.travis/before-deploy.sh +++ b/ci/before-deploy.sh @@ -1,13 +1,9 @@ #!/bin/bash -ex -source ./.travis/lib-setup.sh -source ./.travis/lib-deploy.sh +source ./ci/lib-setup.sh +source ./ci/lib-deploy.sh -# build again since I can't figure out how to get travis to recognize the old -# build in the new container. The other alternative (besides actually solving -# the issue) is to run the docs build and tarball generation every time. - -./.travis/build.sh +./ci/build.sh setup::activate @@ -17,8 +13,7 @@ yum -y install tar git deploy::build-docs # NB(directxman12): this is a *terrible* hack, but basically, -# dpl (the Travis deployer) uses `twine` instead of `setup.py sdist upload`. 
-# like this: +# `twine` gets called like this: # - python setup.py $PYPI_DISTRIBUTIONS # - twine upload -r pypi dist/* # - [some other stuff] @@ -44,10 +39,11 @@ mkdir ./tag_build # create and checksum the tarball -if [ x"${TRAVIS_TAG#v[0-9]}" = "x${TRAVIS_TAG}" ]; then - PYTHON_GSSAPI_VERSION=${TRAVIS_TAG} +tag=$(git describe --tags) +if [ x"${tag#v[0-9]}" = "x${tag}" ]; then + PYTHON_GSSAPI_VERSION=${tag} else - PYTHON_GSSAPI_VERSION=${TRAVIS_TAG#v} + PYTHON_GSSAPI_VERSION=${tag#v} fi PKG_NAME_VER="python-gssapi-${PYTHON_GSSAPI_VERSION}" diff --git a/ci/before-docs-deploy.sh b/ci/before-docs-deploy.sh new file mode 100755 index 0000000..a6bb1eb --- /dev/null +++ b/ci/before-docs-deploy.sh @@ -0,0 +1,12 @@ +#!/bin/bash -ex + +source ./ci/lib-setup.sh +source ./ci/lib-deploy.sh + +# GitHub Actions doesn't have a good concept of connected pipelines here, so +# just rebuild rather than trying to figure it out. +./ci/build.sh + +setup::activate + +deploy::build-docs diff --git a/ci/build.sh b/ci/build.sh new file mode 100755 index 0000000..2db0b6c --- /dev/null +++ b/ci/build.sh @@ -0,0 +1,47 @@ +#!/bin/bash -ex + +# set up dependencies, etc +source ./ci/lib-setup.sh +setup::install + +if [ x"$FLAKE" = "xyes" ]; then + flake8 setup.py + F8_SETUP=$? + + flake8 gssapi + F8_PY=$? + + # Cython requires special flags since it is not proper Python: + # - E225: missing whitespace around operator + # - E226: missing whitespace around arithmetic operator + # - E227: missing whitespace around bitwise or shift operator + # - E402: module level import not at top of file (needed for the `GSSAPI="blah" lines) + # - E901: SyntaxError or IndentationError + # - E999: Internal AST compilation error (flake8 specific) + flake8 gssapi --filename='*.pyx,*.pxd' --ignore=E225,E226,E227,E402,E901,E999 + F8_MAIN_CYTHON=$? + + if [ $F8_SETUP -ne 0 -o $F8_PY -ne 0 -o $F8_MAIN_CYTHON -ne 0 ]; then + exit 1 + fi +fi + +# always build in-place so that Sphinx can find the modules +python setup.py build_ext --inplace $EXTRA_BUILDEXT +BUILD_RES=$? + +if [ $BUILD_RES -ne 0 ]; then + # if the build failed, don't run the tests + exit $BUILD_RES +fi + +if [ x"$KRB5_VER" = "xheimdal" ] || [ "$OS_NAME" = "windows" ]; then + # heimdal/Windows can't run the tests yet, so just make sure it imports and exit + python -c "import gssapi" + exit $? +fi + +python setup.py nosetests --verbosity=3 +TEST_RES=$? + +exit $TEST_RES diff --git a/.travis/lib-deploy.sh b/ci/lib-deploy.sh old mode 100644 new mode 100755 similarity index 79% rename from .travis/lib-deploy.sh rename to ci/lib-deploy.sh index 8b20452..25c1130 --- a/.travis/lib-deploy.sh +++ b/ci/lib-deploy.sh @@ -7,7 +7,7 @@ deploy::build-docs() { pip install -e . # place in a non-standard location so that they don't get cleaned up - python setup.py build_sphinx --build-dir travis_docs_build + python setup.py build_sphinx --build-dir ci_docs_build - echo "travis_docs_build" + echo "docs_build" } diff --git a/.travis/lib-setup.sh b/ci/lib-setup.sh old mode 100644 new mode 100755 similarity index 58% rename from .travis/lib-setup.sh rename to ci/lib-setup.sh index be06945..c90b530 --- a/.travis/lib-setup.sh +++ b/ci/lib-setup.sh @@ -1,19 +1,9 @@ #!/bin/bash -setup::python-suffix() { - if [ x"$PYTHON" = "x3" ]; then - echo "3" - else - echo "" - fi -} - # We test Debian's cython. el7's cython is too old, and Rawhide's virtualenv # doesn't work right (usrmerge bugs) so we can only test Debian's cython. 
setup::debian::install() { - local IS3=$(setup::python-suffix) - export DEBIAN_FRONTEND=noninteractive apt-get update @@ -24,23 +14,21 @@ setup::debian::install() { gss-ntlmssp fi - apt-get -y install gcc virtualenv python$IS3-{virtualenv,dev} cython$IS3 + apt-get -y install gcc virtualenv python3-{virtualenv,dev} cython3 - virtualenv --system-site-packages -p $(which python${PYTHON}) .venv + virtualenv --system-site-packages -p $(which python3) .venv source ./.venv/bin/activate } setup::rh::yuminst() { - # yum has no update-only verb - yum -y --nogpgcheck install $@ + # yum has no update-only verb. Also: modularity just makes this slower. + yum -y --nogpgcheck --disablerepo=\*modul\* install $@ } setup::centos::install() { - local IS3=$(setup::python-suffix) - # Cython on el7 is too old - downstream patches - setup::rh::yuminst python$IS3-{virtualenv,devel} - virtualenv -p $(which python$IS3) .venv + setup::rh::yuminst python3-{virtualenv,devel} + virtualenv -p $(which python3) .venv source ./.venv/bin/activate pip install --upgrade pip # el7 pip doesn't quite work right pip install --install-option='--no-cython-compile' cython @@ -49,8 +37,8 @@ setup::centos::install() { setup::fedora::install() { # path to binary here in case Rawhide changes it setup::rh::yuminst redhat-rpm-config \ - /usr/bin/virtualenv python${PYTHON}-{virtualenv,devel} - virtualenv -p $(which python${PYTHON}) .venv + /usr/bin/virtualenv python3-{virtualenv,devel} + virtualenv -p $(which python3) .venv source ./.venv/bin/activate pip install --install-option='--no-cython-compile' cython } @@ -67,28 +55,20 @@ setup::rh::install() { } setup::macos::install() { - # install Python from pyenv so we know what version is being used - pyenv install $PYENV - pyenv global $PYENV - virtualenv -p $(pyenv which python) .venv - source ./.venv/bin/activate + sudo pip3 install virtualenv + python3 -m virtualenv -p $(which python3) .venv + source .venv/bin/activate pip install --install-option='--no-cython-compile' cython } setup::windows::install() { - # Install the right Python version and MIT Kerberos - choco install python"${PYENV:0:1}" --version $PYENV - choco install mitkerberos --install-arguments "'ADDLOCAL=ALL'" || true - PYPATH="/c/Python${PYENV:0:1}${PYENV:2:1}" - # Update path to include them - export PATH="$PYPATH:$PYPATH/Scripts:/c/Program Files/MIT/Kerberos/bin:$PATH" - - if [ "${PYENV:0:1}" == "2" ]; then - choco install vcredist2008 - # Skip dotnet dependency: - # https://github.com/fredrikaverpil/vcpython27/pull/3 - choco install --ignore-dependencies vcpython27 - fi + CHINST="choco install --no-progress --yes --ignore-detected-reboot --allow-downgrade" + + # Install MIT Kerberos. choco will fail despite the installation working. + $CHINST mitkerberos --install-arguments "'ADDLOCAL=ALL'" || true + + # Update path to include it + export PATH="/c/Program Files/MIT/Kerberos/bin:$PATH" python -m pip install --upgrade pip } @@ -100,7 +80,7 @@ setup::install() { setup::rh::install elif [ "$(uname)" == "Darwin" ]; then setup::macos::install - elif [ "$TRAVIS_OS_NAME" == "windows" ]; then + elif [ "$OS_NAME" == "windows" ]; then setup::windows::install else echo "Distro not found!" 
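
For reference, the Linux jobs in build.yml above are driven entirely by environment variables, so a single matrix entry can be reproduced locally with nothing more than Docker and the ci/ scripts (ci/run-on-linux.sh is added below). A minimal sketch, run from the repository root:

    # mirror the debian-heimdal matrix entry from build.yml
    DISTRO=debian:stable KRB5_VER=heimdal ./ci/run-on-linux.sh ./ci/build.sh

    # mirror the fedora-latest entry, which also enables the flake8 pass
    DISTRO=fedora:latest FLAKE=yes ./ci/run-on-linux.sh ./ci/build.sh
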
diff --git a/ci/release-win.sh b/ci/release-win.sh new file mode 100755 index 0000000..1e8aec4 --- /dev/null +++ b/ci/release-win.sh @@ -0,0 +1,49 @@ +#!/bin/bash -e + +source ./ci/lib-setup.sh +source ./ci/lib-deploy.sh + +./ci/build.sh + +# Sigh, go find paths again +export PATH="/c/Program Files/MIT/Kerberos/bin:$PATH" + +# build the wheel +python -m pip install wheel +python setup.py bdist_wheel + +cd dist + +tag=$(git describe --tags) + +# Rename and checksum the wheel +if [ x"${tag#v[0-9]}" = "x${tag}" ]; then + PYTHON_GSSAPI_VERSION=${tag} +else + PYTHON_GSSAPI_VERSION=${tag#v} +fi + +PKG_NAME_VER=$(ls *.whl | sed "s/gssapi-[^-]*-\(.*\)\.whl/python-gssapi-${PYTHON_GSSAPI_VERSION}-\1/") + +cp *.whl "${PKG_NAME_VER}.whl" + +sha512sum --binary ./${PKG_NAME_VER}.whl > ./${PKG_NAME_VER}.sha512sum + +cd .. + +# Hack around https://github.com/pypa/gh-action-pypi-publish/issues/32 + +echo 'Running: python -m pip install twine ...' +python -m pip install twine + +echo 'Running: set +x; python -m twine upload...' +# Please note this cannot be set -x or passwords will leak! +set +x + +python -m twine upload -u $TWINE_USER -p $TWINE_PASSWORD dist/gssapi* > out.log 2>&1 || true + +# and restore... +set -x +egrep -i 'fail|error' out.log && cat out.log && exit 1 + +exit 0 diff --git a/ci/run-on-linux.sh b/ci/run-on-linux.sh new file mode 100755 index 0000000..7e12d83 --- /dev/null +++ b/ci/run-on-linux.sh @@ -0,0 +1,10 @@ +#!/bin/bash -ex + +# If we try to use a normal Github Actions container with +# github-pages-deploy-action, it will fail due to inability to find git. + +docker run -h test.box \ + -v `pwd`:/tmp/build -w /tmp/build \ + -e KRB5_VER=${KRB5_VER:-mit} \ + -e FLAKE=${FLAKE:no} \ + $DISTRO /bin/bash -ex $@ diff --git a/debian/README.source b/debian/README.source new file mode 100644 index 0000000..4a025c8 --- /dev/null +++ b/debian/README.source @@ -0,0 +1,13 @@ +This package is managed with gbp. + +If you are not familiar with this workflow, treat it as a standard quilt +package in git. + + +Patches: + +Patches may be submitted to github (preferred), or emailed to me. If you are +familiar with quilt, please use quilt-friendly patches; otherwise, I will take +care of it. + + -- Robbie Harwood (frozencemetery) , Wed, 20 May 2015 17:01:18 -0400 diff --git a/debian/changelog b/debian/changelog new file mode 100644 index 0000000..2d7eb5b --- /dev/null +++ b/debian/changelog @@ -0,0 +1,64 @@ +python-gssapi (1.6.12-1) unstable; urgency=low + + * New upstream version 1.6.12 + * Drop Daf from uploaders list (Closes: #965391) + * Skip our test suite more clearly + * Bump standards and lintian cleaner + + -- Robbie Harwood (frozencemetery) Wed, 17 Mar 2021 12:10:16 -0400 + +python-gssapi (1.6.1-1) unstable; urgency=medium + + [ Robbie Harwood ] + * Make package lintian clean + * New upstream version 1.6.1 + * Remove references to nonexistent doc package (Closes: #900660) + + [ Chris Lamb ] + * Fix find(1) call in dh_auto_clean (Closes: #876720) + + -- Robbie Harwood (frozencemetery) Wed, 05 Feb 2020 20:03:19 -0500 + +python-gssapi (1.4.1-1.1) unstable; urgency=medium + + * Non-maintainer upload. + * Removed Python 2 support (Closes: #937801). 
+ + -- Thomas Goirand Fri, 13 Sep 2019 12:56:48 +0200 + +python-gssapi (1.4.1-1) unstable; urgency=medium + + * New upstream version 1.4.1 + * Add Timo to uploaders + + -- Robbie Harwood (frozencemetery) Tue, 13 Mar 2018 16:44:53 -0400 + +python-gssapi (1.2.0-1) unstable; urgency=low + + * New upstream release + * Drop enum34 patch that has merged upstream + * Remove build dependency on python-tox and virtualenv + * Fix X-Python3-Version to reflect enum34 state + * Fix lintian false positive ("tEH" in binary) + * Bump standards version to 3.9.7 + + -- Robbie Harwood (frozencemetery) Fri, 15 Apr 2016 16:09:28 -0400 + +python-gssapi (1.1.3-2) unstable; urgency=high + + * Rebuild and patch since python3-enum34 is gone (Closes: #799254) + * Remove old patches that have merged upstream + + -- Robbie Harwood (frozencemetery) Fri, 25 Sep 2015 12:31:19 -0400 + +python-gssapi (1.1.3-1) unstable; urgency=medium + + * New upstream release + + -- Robbie Harwood (frozencemetery) Fri, 04 Sep 2015 17:39:01 -0400 + +python-gssapi (1.1.2-1) unstable; urgency=medium + + * Initial release. (Closes: #794980) + + -- Robbie Harwood (frozencemetery) Sat, 08 Aug 2015 18:57:16 -0400 diff --git a/debian/control b/debian/control new file mode 100644 index 0000000..6aab28a --- /dev/null +++ b/debian/control @@ -0,0 +1,28 @@ +Source: python-gssapi +Section: python +Priority: optional +Maintainer: Robbie Harwood (frozencemetery) +Uploaders: Timo Aaltonen +Standards-Version: 4.5.1 +Homepage: https://github.com/pythongssapi/python-gssapi +Build-Depends: cython3, + debhelper-compat (>= 13), dh-python, + python3-all-dev, + python3-setuptools, + python3-flake8, + python3-nose, + python3-nose-parameterized, + python3-six, + libkrb5-dev + +Package: python3-gssapi +Architecture: any +Depends: ${python3:Depends}, ${shlibs:Depends}, ${misc:Depends} +Provides: ${python3:Provides} +Description: Python 3 interface to GSSAPI + Python3 Bindings for GSSAPI. These bindings are for both RFC 2743/2744 and + many extensions. They are native bindings produced using Cython. + . + Available extensions will vary based on what your GSSAPI implementation + supports; see package documentation for a detailed list of what is available. + diff --git a/debian/copyright b/debian/copyright new file mode 100644 index 0000000..1e29fcd --- /dev/null +++ b/debian/copyright @@ -0,0 +1,12 @@ +Format-Specification: http://svn.debian.org/wsvn/dep/web/deps/dep5.mdwn?op=file&rev=135 +Name: python-gssapi +Maintainer: Robbie Harwood (frozencemetery) +Source: https://github.com/pythongssapi/python-gssapi + +Copyright: 2014 The Python GSSAPI Team +License: ISC + +License: ISC + Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + . + THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
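
debian/README.source above notes that the package is managed with gbp. A minimal sketch of a local package build under that workflow, assuming git-buildpackage and the Build-Depends from debian/control are installed and you are on the packaging branch:

    # build unsigned source and binary packages using the gbp/quilt layout
    gbp buildpackage -us -uc
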
diff --git a/debian/gbp.conf b/debian/gbp.conf new file mode 100644 index 0000000..70ff3a1 --- /dev/null +++ b/debian/gbp.conf @@ -0,0 +1,3 @@ +[import-orig] +dch = True +pristine-tar = False diff --git a/debian/python3-gssapi.lintian-overrides b/debian/python3-gssapi.lintian-overrides new file mode 100644 index 0000000..e69de29 diff --git a/debian/rules b/debian/rules new file mode 100755 index 0000000..86fa2d0 --- /dev/null +++ b/debian/rules @@ -0,0 +1,19 @@ +#!/usr/bin/make -f + +# PIE currently breaks compilation +export DEB_BUILD_MAINT_OPTIONS = hardening=+all,-pie +DPKG_EXPORT_BUILDFLAGS = 1 +include /usr/share/dpkg/buildflags.mk + +LDFLAGS += -Wl,--as-needed +CFLAGS += $(CPPFLAGS) + +export PYBUILD_NAME=gssapi + +%: + dh $@ --with python3 --buildsystem=pybuild + +override_dh_auto_test: + +override_dh_auto_clean: + find . \( -name \*.pyc -or -name \*.c -or -name \*.so -or -name \*.egg \) -delete diff --git a/debian/source/format b/debian/source/format new file mode 100644 index 0000000..163aaf8 --- /dev/null +++ b/debian/source/format @@ -0,0 +1 @@ +3.0 (quilt) diff --git a/debian/watch b/debian/watch new file mode 100644 index 0000000..f3dac0a --- /dev/null +++ b/debian/watch @@ -0,0 +1,3 @@ +version=4 +opts=filenamemangle=s/.+\/v?(\d\S*)\.tar\.gz/python-gssapi-$1\.tar\.gz/ \ + https://github.com/pythongssapi/python-gssapi/tags .*/v?(\d\S*)\.tar\.gz diff --git a/docs/custom_extensions/gssapi_find_missing.py b/docs/custom_extensions/gssapi_find_missing.py index 4e064f8..9b8a748 100644 --- a/docs/custom_extensions/gssapi_find_missing.py +++ b/docs/custom_extensions/gssapi_find_missing.py @@ -29,7 +29,7 @@ def _missing_ref(app, env, node, contnode): non_raw_opts = [] for opt in options: full_name, type_info = opt - mod_name, _mod_type = type_info + lib_name, mod_name, _mod_type = type_info if mod_name.startswith('gssapi.raw'): raw_opts.append(opt) else: @@ -53,7 +53,7 @@ def _missing_ref(app, env, node, contnode): choice = options[0] choice_name, choice_info = choice - choice_mod, choice_type = choice_info + gssapi, choice_mod, choice_type = choice_info if choice_type == 'module': return env.domains['py']._make_module_refnode( diff --git a/docs/source/basic-tutorial.md b/docs/source/basic-tutorial.md index f11a3dd..25b9232 100644 --- a/docs/source/basic-tutorial.md +++ b/docs/source/basic-tutorial.md @@ -43,12 +43,12 @@ Suppose we wanted to refer to an HTTP server on the current host. We could refer to it as a *host-based service*, or in the default mechanism form (in this case, for krb5): - >>> server_hostbased_name = gssapi.Name('HTTP@' + FQDN, name_type=gssapi.NameType.hostbased_service) + >>> server_hostbased_name = gssapi.Name(f"HTTP@{FQDN}", name_type=gssapi.NameType.hostbased_service) >>> server_hostbased_name - Name(b'HTTP@sross', ) - >>> server_name = gssapi.Name('HTTP/sross@') + Name(b'HTTP@seton.mivehind.net', ) + >>> server_name = gssapi.Name(f"HTTP/{FQDN}@") >>> server_name - Name(b'HTTP/sross@', None) + Name(b'HTTP/seton.mivehind.net@', None) >>> These are both effectively the same, but if we *canonicalize* both diff --git a/docs/source/conf.py b/docs/source/conf.py index 5b8d1c7..b423b56 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -58,9 +58,9 @@ # built documents. # # The short X.Y version. -version = '1.6.1' +version = '1.6.12' # The full version, including alpha/beta/rc tags. -release = '1.6.1' +release = '1.6.12' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
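
The debian/watch file above can be exercised directly to confirm it matches upstream release tags. A quick check, assuming the devscripts package is installed and run from the source tree:

    # report what the watch file would download, without fetching anything
    uscan --no-download --verbose
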
diff --git a/docs/source/credstore.rst b/docs/source/credstore.rst index 32d8a3e..b777ca7 100644 --- a/docs/source/credstore.rst +++ b/docs/source/credstore.rst @@ -2,16 +2,21 @@ Common Values for Credentials Store Extensions ============================================== The credentials store extension is an extension introduced by the MIT krb5 -library implementation of GSSAPI. It allows for finer control of credentials -from within a GSSAPI application. -Each mechanism can define keywords to manipulate various aspects of their -credentials for storage or retrieval operations. +library implementation of GSSAPI. It allows for finer control of credentials +from within a GSSAPI application. Each mechanism can define keywords to +manipulate various aspects of their credentials for storage or retrieval +operations. .. note: - Only mechanisms that implement keywords can use them, some mechanism may - share the same or similar keywords, but their meaning is always local to - a specific mechanism. + Only mechanisms that implement keywords can use them: some mechanisms may + share the same or similar keywords, but their meaning is always local to a + specific mechanism. + +.. note: + + `None` is not a permitted value and will raise exceptions. Phrased + differently, values must be strings, not empty. The krb5 mechanism in MIT libraries ----------------------------------- @@ -24,12 +29,13 @@ client_keytab The `client_keytab` keyword can be used in a credential store when it is used with the :func:`gssapi.raw.ext_cred_store.acquire_cred_from` / -:func:`gssapi.raw.ext_cred_store.add_cred_from` functions, to indicate a -custom location for a keytab containing client keys. -It is not used in the context of calls used to store credentials. +:func:`gssapi.raw.ext_cred_store.add_cred_from` functions to indicate a custom +location for a keytab containing client keys. It is not used in the context +of calls used to store credentials. + The value is a string in the form **type:residual** where **type** can be any keytab storage type understood by the implementation and **residual** is the -keytab identifier (usually something like a path). If the string is just a path +keytab identifier (usually something like a path). If the string is a path, then the type is defaulted to `FILE`. keytab @@ -37,12 +43,13 @@ keytab The `keytab` keyword can be used in a credential store when it is used with the :func:`gssapi.raw.ext_cred_store.acquire_cred_from` / -:func:`gssapi.raw.ext_cred_store.add_cred_from` functions, to indicate a -custom location for a keytab containing service keys. -It is not used in the context of calls used to store credentials. +:func:`gssapi.raw.ext_cred_store.add_cred_from` functions to indicate a custom +location for a keytab containing service keys. It is not used in the context +of calls used to store credentials. + The value is a string in the form **type:residual** where **type** can be any keytab storage type understood by the implementation and **residual** is the -keytab identifier (usually something like a path). If the string is just a path +keytab identifier (usually something like a path). If the string is a path, then the type is defaulted to `FILE`. ccache @@ -54,11 +61,12 @@ It can be used both to indicate the source of existing credentials for the :func:`gssapi.raw.ext_cred_store.add_cred_from` functions, as well as the destination storage for the :func:`gssapi.raw.ext_cred_store.store_cred_into` function. 
-The value is a string in the form **type:residual** where type can be any + +The value is a string in the form **type:residual** where **type** can be any credential cache storage type understood by the implementation and -**residual** is the ccache identifier. If the string is just a path then -the type is defaulted to `FILE`. Other commonly used types are `DIR`, -`KEYRING`, `KCM`. Each type has a different format for the **residual**; +**residual** is the ccache identifier. If the string is a path, then the type +is defaulted to `FILE`. Other commonly used types are `DIR`, `KEYRING`, +`KCM`, and `MEMORY`. Each type has a different format for the **residual**; refer to the MIT krb5 documentation for more details. rcache @@ -68,8 +76,15 @@ The `rcache` keyword can be used to reference a custom replay cache storage. It is used only with the :func:`gssapi.raw.ext_cred_store.acquire_cred_from` / :func:`gssapi.raw.ext_cred_store.add_cred_from` functions for credentials used to accept context establishments, not to initiate contexts. -The value is a string in the form **type:residual** where type can be any + +The value is a string in the form **type:residual** where **type** can be any replay cache storage type understood by the implementation and **residual** is -the cache identifier (usually something like a path). If the string is just a -path then the type is defaulted to `FILE`. +the cache identifier (usually something like a path). If the string is a +path, then the type is defaulted to `FILE`. + +The krb5 mechanism in Heimdal +----------------------------- +Heimdal has recently implemented the credential store extensions with the same +interface as MIT krb5. However, it is not yet present in any released +version. diff --git a/docs/source/gssapi.rst b/docs/source/gssapi.rst index 75173f9..7c99fd4 100644 --- a/docs/source/gssapi.rst +++ b/docs/source/gssapi.rst @@ -65,6 +65,34 @@ imported in the high-level API :mod:`gssapi` module: .. autoclass:: gssapi.RequirementFlag :show-inheritance: +The ``ok_as_delegate`` flag corresponds to the C level flag +``GSS_C_DELEG_POLICY_FLAG``. This flag is similar to ``delegate_to_peer`` +except it only delegates if the KDC delegation policies for the service +principal allow it to use delegation. This is typically used on Microsoft +domain environments to control whether constrained or unconstrained delegation +is allowed for a service principal. By setting this flag, the delegation +process follows the same behaviour as delegation on SSPI/Windows. + +Here are the four cases when either of these flags are set or not. + +Neither flag set + No delegation occurs. + +delegate_to_peer + Always try to delegate regardless of the KDC delegation policies. + ``delegate_to_peer`` is set in the return flags if successful. + +ok_as_delegate + Try to delegate but only if the KDC trusts the service principal for + delegation. ``delegate_to_peer`` and ``ok_as_delegate`` are set in the + return flags if successful. + +delegate_to_peer | ok_as_delegate + Acts like ``delegate_to_peer`` being set but will also set + ``ok_as_delegate`` in the return flags if the service principal was trusted + for delegation by the KDC. + + .. autoclass:: gssapi.AddressType :show-inheritance: diff --git a/gssapi/__init__.py b/gssapi/__init__.py index 4d6eba4..cb89f73 100644 --- a/gssapi/__init__.py +++ b/gssapi/__init__.py @@ -26,6 +26,8 @@ low-level API functions. 
""" +import gssapi._win_config # noqa + from gssapi.raw.types import NameType, RequirementFlag, AddressType # noqa from gssapi.raw.types import MechType, IntEnumFlagSet # noqa from gssapi.raw.oids import OID # noqa diff --git a/gssapi/_utils.py b/gssapi/_utils.py index 5956ac4..d2da9e8 100644 --- a/gssapi/_utils.py +++ b/gssapi/_utils.py @@ -1,9 +1,10 @@ import sys import types -import six import decorator as deco +from typing import Optional + from gssapi.raw.misc import GSSError @@ -43,7 +44,7 @@ def getter(self): return property(getter, setter) -def inquire_property(name, doc=None): +def inquire_property(name: str, doc: Optional[str] = None): """Creates a property based on an inquire result This method creates a property that calls the @@ -59,7 +60,7 @@ def inquire_property(name, doc=None): def inquire_property(self): if not self._started: - msg = ("Cannot read {0} from a security context whose " + msg = (f"Cannot read {name} from a security context whose " "establishment has not yet been started.") raise AttributeError(msg) @@ -101,12 +102,12 @@ def set_encoding(enc): def _encode_dict(d): """Encodes any relevant strings in a dict""" def enc(x): - if isinstance(x, six.text_type): + if isinstance(x, str): return x.encode(_ENCODING) else: return x - return dict((enc(k), enc(v)) for k, v in six.iteritems(d)) + return {enc(k): enc(v) for k, v in d.items()} # in case of Python 3, just use exception chaining @@ -129,10 +130,7 @@ def catch_and_return_token(func, self, *args, **kwargs): if e.token is not None and self.__DEFER_STEP_ERRORS__: self._last_err = e # skip the "return func" line above in the traceback - if six.PY2: - self._last_tb = sys.exc_info()[2].tb_next.tb_next - else: - self._last_err.__traceback__ = e.__traceback__.tb_next + self._last_err.__traceback__ = e.__traceback__.tb_next return e.token else: @@ -150,18 +148,8 @@ def check_last_err(func, self, *args, **kwargs): if self._last_err is not None: try: - if six.PY2: - six.reraise(type(self._last_err), self._last_err, - self._last_tb) - else: - # NB(directxman12): not using six.reraise in Python 3 leads - # to cleaner tracebacks, and raise x is valid - # syntax in Python 3 (unlike raise x, y, z) - raise self._last_err + raise self._last_err finally: - if six.PY2: - del self._last_tb # in case of cycles, break glass - self._last_err = None else: return func(self, *args, **kwargs) diff --git a/gssapi/_win_config.py b/gssapi/_win_config.py new file mode 100644 index 0000000..ccbdb25 --- /dev/null +++ b/gssapi/_win_config.py @@ -0,0 +1,74 @@ +""" +Using GSSAPI on Windows requires having an installation of Kerberos for Windows +(KfW) available in the user's PATH. This module should be imported before +anything else to check for that installation, add it to the PATH if necessary, +and throw any errors before they manifest as cryptic missing DLL errors later +down the import tree. 
+""" + +import os +import shutil +import ctypes + +#: Path to normal KfW installed bin folder +KFW_BIN = os.path.join( + os.environ.get('ProgramFiles', r'C:\Program Files'), + 'MIT', 'Kerberos', 'bin', +) +#: Download location for KfW +KFW_DL = "https://web.mit.edu/KERBEROS/dist" + + +def kfw_available(): + """Return if the main GSSAPI DLL for KfW can be loaded""" + try: # to load the main GSSAPI DLL + ctypes.WinDLL('gssapi64.dll') + except OSError: # DLL is not in PATH + return False + else: # DLL is in PATH, everything should work + return True + + +def error_not_found(): + """Raise an OSError detailing that KfW is missing and how to get it""" + raise OSError( + "Could not find KfW installation. Please download and install " + "the 64bit Kerberos for Windows MSI from %s and ensure the " + "'bin' folder (%s) is in your PATH." + % (KFW_DL, KFW_BIN) + ) + + +def configure_windows(): + """ + Validate that KfW appears to be installed correctly and add it to the + DLL directories/PATH if necessary. In the case that it can't be located, + raise an error. + """ + if kfw_available(): + return # All set, necessary DLLs should be available + + if os.path.exists(KFW_BIN): # In standard location + try: # to use Python 3.8's DLL handling + os.add_dll_directory(KFW_BIN) + except AttributeError: # <3.8, use PATH + os.environ['PATH'] += os.pathsep + KFW_BIN + if kfw_available(): + return + + # Check if kinit is in the PATH which should lead us to the bin folder + kinit_path = shutil.which('kinit') # KfW provided binary + if kinit_path: # Non-standard install location + try: # Most likely >=3.8, otherwise it would have been found already + os.add_dll_directory(os.path.dirname(kinit_path)) + except AttributeError: # <3.8, corrupted installation? + pass + else: + if kfw_available(): + return + + error_not_found() + + +if os.name == 'nt': # Make sure we have the required DLLs + configure_windows() diff --git a/gssapi/mechs.py b/gssapi/mechs.py index c00f9bf..5e6b682 100644 --- a/gssapi/mechs.py +++ b/gssapi/mechs.py @@ -1,5 +1,3 @@ -import six - from gssapi.raw import oids as roids from gssapi._utils import import_gssapi_extension from gssapi.raw import misc as rmisc @@ -45,11 +43,7 @@ def _attrs(self): return rfc5587.inquire_attrs_for_mech(self) def __str__(self): - if issubclass(str, six.text_type): - # Python 3 -- we should return unicode - return self._bytes_desc().decode(_utils._get_encoding()) - else: - return self._bytes_desc() + return self._bytes_desc().decode(_utils._get_encoding()) def __unicode__(self): return self._bytes_desc().decode(_utils._get_encoding()) @@ -59,7 +53,7 @@ def _bytes_desc(self): if rfc5801 is not None and self._saslname and self._saslname.mech_name: base = self._saslname.mech_name - if isinstance(base, six.text_type): + if isinstance(base, str): base = base.encode(_utils._get_encoding()) return base @@ -156,7 +150,7 @@ def from_sasl_name(cls, name=None): if rfc5801 is None: raise NotImplementedError("Your GSSAPI implementation does not " "have support for RFC 5801") - if isinstance(name, six.text_type): + if isinstance(name, str): name = name.encode(_utils._get_encoding()) m = rfc5801.inquire_mech_for_saslname(name) diff --git a/gssapi/names.py b/gssapi/names.py index ee7a1db..acd4b8b 100644 --- a/gssapi/names.py +++ b/gssapi/names.py @@ -1,14 +1,9 @@ -import six - from gssapi.raw import names as rname from gssapi.raw import NameType from gssapi.raw import named_tuples as tuples from gssapi import _utils -if six.PY2: - from collections import MutableMapping, Iterable -else: - 
from collections.abc import MutableMapping, Iterable +from collections.abc import MutableMapping, Iterable rname_rfc6680 = _utils.import_gssapi_extension('rfc6680') @@ -69,7 +64,7 @@ def __new__(cls, base=None, name_type=None, token=None, elif isinstance(base, rname.Name): base_name = base else: - if isinstance(base, six.text_type): + if isinstance(base, str): base = base.encode(_utils._get_encoding()) base_name = rname.import_name(base, name_type) @@ -107,12 +102,7 @@ def __init__(self, base=None, name_type=None, token=None, composite=False): self._attr_obj = None def __str__(self): - if issubclass(str, six.text_type): - # Python 3 -- we should return unicode - return bytes(self).decode(_utils._get_encoding()) - else: - # Python 2 -- we should return a string - return self.__bytes__() + return bytes(self).decode(_utils._get_encoding()) def __unicode__(self): # Python 2 -- someone asked for unicode @@ -324,7 +314,7 @@ def __init__(self, name): self._name = name def __getitem__(self, key): - if isinstance(key, six.text_type): + if isinstance(key, str): key = key.encode(_utils._get_encoding()) res = rname_rfc6680.get_name_attribute(self._name, key) @@ -334,7 +324,7 @@ def __getitem__(self, key): res.complete) def __setitem__(self, key, value): - if isinstance(key, six.text_type): + if isinstance(key, str): key = key.encode(_utils._get_encoding()) rname_rfc6680.delete_name_attribute(self._name, key) @@ -348,7 +338,7 @@ def __setitem__(self, key, value): else: complete = False - if (isinstance(value, (six.string_types, bytes)) or + if (isinstance(value, (str, bytes)) or not isinstance(value, Iterable)): # NB(directxman12): this allows us to easily assign a single # value, since that's a common case @@ -358,7 +348,7 @@ def __setitem__(self, key, value): complete=complete) def __delitem__(self, key): - if isinstance(key, six.text_type): + if isinstance(key, str): key = key.encode(_utils._get_encoding()) rname_rfc6680.delete_name_attribute(self._name, key) diff --git a/gssapi/raw/creds.pyx b/gssapi/raw/creds.pyx index a424075..d123857 100644 --- a/gssapi/raw/creds.pyx +++ b/gssapi/raw/creds.pyx @@ -131,8 +131,11 @@ def acquire_cred(Name name=None, lifetime=None, mechs=None, usage='both'): c_usage = GSS_C_INITIATE elif usage == 'accept': c_usage = GSS_C_ACCEPT - else: + elif usage == 'both': c_usage = GSS_C_BOTH + else: + raise ValueError(f'Invalid usage "{usage}" - permitted values are ' + '"initiate", "accept", and "both"') cdef gss_cred_id_t creds cdef gss_OID_set actual_mechs @@ -227,8 +230,11 @@ accept_lifetime=None, mutate_input=False) c_usage = GSS_C_INITIATE elif usage == 'accept': c_usage = GSS_C_ACCEPT - else: # usage == 'both' + elif usage == 'both': c_usage = GSS_C_BOTH + else: + raise ValueError(f'Invalid usage "{usage}" - permitted values are ' + '"initiate", "accept", and "both"') cdef gss_cred_id_t raw_input_cred if input_cred is not None: diff --git a/gssapi/raw/cython_converters.pxd b/gssapi/raw/cython_converters.pxd index 499f21a..750b62e 100644 --- a/gssapi/raw/cython_converters.pxd +++ b/gssapi/raw/cython_converters.pxd @@ -9,7 +9,7 @@ from gssapi.raw.types import MechType, NameType cdef gss_OID_set c_get_mech_oid_set(object mechs) -cdef inline bint c_compare_oids(gss_OID a, gss_OID b) +cdef bint c_compare_oids(gss_OID a, gss_OID b) cdef object c_create_oid_set(gss_OID_set mech_set, bint free=*) cdef OID c_make_oid(gss_OID oid) diff --git a/gssapi/raw/cython_types.pxd b/gssapi/raw/cython_types.pxd index 987c922..1af03d7 100644 --- a/gssapi/raw/cython_types.pxd +++ 
b/gssapi/raw/cython_types.pxd @@ -105,6 +105,7 @@ cdef extern from "python_gssapi.h": OM_uint32 GSS_C_ANON_FLAG OM_uint32 GSS_C_TRANS_FLAG OM_uint32 GSS_C_PROT_READY_FLAG + OM_uint32 GSS_C_DELEG_POLICY_FLAG # address types OM_uint32 GSS_C_AF_UNSPEC diff --git a/gssapi/raw/ext_cred_store.pyx b/gssapi/raw/ext_cred_store.pyx index 9ee57c3..ccfe001 100644 --- a/gssapi/raw/ext_cred_store.pyx +++ b/gssapi/raw/ext_cred_store.pyx @@ -147,8 +147,11 @@ usage='both') c_usage = GSS_C_INITIATE elif usage == 'accept': c_usage = GSS_C_ACCEPT - else: + elif usage == 'both': c_usage = GSS_C_BOTH + else: + raise ValueError(f'Invalid usage "{usage}" - permitted values are ' + '"initiate", "accept", and "both"') cdef gss_key_value_set_desc *c_store if store is not None: @@ -232,8 +235,11 @@ init_lifetime=None, accept_lifetime=None) c_usage = GSS_C_INITIATE elif usage == 'accept': c_usage = GSS_C_ACCEPT - else: + elif usage == 'both': c_usage = GSS_C_BOTH + else: + raise ValueError(f'Invalid usage "{usage}" - permitted values are ' + '"initiate", "accept", and "both"') cdef gss_name_t c_name = name.raw_name cdef gss_OID c_mech = &mech.raw_oid @@ -325,8 +331,11 @@ set_default=False) c_usage = GSS_C_INITIATE elif usage == 'accept': c_usage = GSS_C_ACCEPT - else: + elif usage == 'both': c_usage = GSS_C_BOTH + else: + raise ValueError(f'Invalid usage "{usage}" - permitted values are ' + '"initiate", "accept", and "both"') cdef gss_key_value_set_desc *c_store if store is not None: diff --git a/gssapi/raw/ext_dce.pyx b/gssapi/raw/ext_dce.pyx index afc5654..52d87e5 100644 --- a/gssapi/raw/ext_dce.pyx +++ b/gssapi/raw/ext_dce.pyx @@ -10,16 +10,11 @@ from gssapi.raw.misc import GSSError from gssapi.raw import types as gssapi_types from gssapi.raw.named_tuples import IOVUnwrapResult, WrapResult, UnwrapResult from collections import namedtuple +from collections.abc import Sequence from enum import IntEnum -import six from gssapi.raw._enum_extensions import ExtendableEnum -if six.PY2: - from collections import Sequence -else: - from collections.abc import Sequence - cdef extern from "python_gssapi_ext.h": # NB(directxman12): this wiki page has a different argument order diff --git a/gssapi/raw/ext_password.pyx b/gssapi/raw/ext_password.pyx index be960f9..93f9c75 100644 --- a/gssapi/raw/ext_password.pyx +++ b/gssapi/raw/ext_password.pyx @@ -74,8 +74,11 @@ usage="initiate") c_usage = GSS_C_INITIATE elif usage == "accept": c_usage = GSS_C_ACCEPT - else: + elif usage == 'both': c_usage = GSS_C_BOTH + else: + raise ValueError(f'Invalid usage "{usage}" - permitted values are ' + '"initiate", "accept", and "both"') cdef gss_cred_id_t creds cdef gss_OID_set actual_mechs diff --git a/gssapi/raw/ext_password_add.pyx b/gssapi/raw/ext_password_add.pyx index aed3cd0..c020063 100644 --- a/gssapi/raw/ext_password_add.pyx +++ b/gssapi/raw/ext_password_add.pyx @@ -78,8 +78,11 @@ usage='initiate', init_lifetime=None, accept_lifetime=None) c_usage = GSS_C_INITIATE elif usage == "accept": c_usage = GSS_C_ACCEPT - else: + elif usage == 'both': c_usage = GSS_C_BOTH + else: + raise ValueError(f'Invalid usage "{usage}" - permitted values are ' + '"initiate", "accept", and "both"') cdef OM_uint32 input_initiator_ttl = c_py_ttl_to_c(init_lifetime) cdef OM_uint32 input_acceptor_ttl = c_py_ttl_to_c(accept_lifetime) diff --git a/gssapi/raw/ext_rfc5588.pyx b/gssapi/raw/ext_rfc5588.pyx index d87e618..6244573 100644 --- a/gssapi/raw/ext_rfc5588.pyx +++ b/gssapi/raw/ext_rfc5588.pyx @@ -63,8 +63,11 @@ set_default=False) c_usage = GSS_C_INITIATE elif usage 
== 'accept': c_usage = GSS_C_ACCEPT - else: + elif usage == 'both': c_usage = GSS_C_BOTH + else: + raise ValueError(f'Invalid usage "{usage}" - permitted values are ' + '"initiate", "accept", and "both"') cdef gss_cred_id_t c_creds = creds.raw_creds diff --git a/gssapi/raw/ext_s4u.pyx b/gssapi/raw/ext_s4u.pyx index a1269b3..d91986e 100644 --- a/gssapi/raw/ext_s4u.pyx +++ b/gssapi/raw/ext_s4u.pyx @@ -84,8 +84,11 @@ mechs=None, usage='initiate') c_usage = GSS_C_INITIATE elif usage == 'accept': c_usage = GSS_C_ACCEPT - else: + elif usage == 'both': c_usage = GSS_C_BOTH + else: + raise ValueError(f'Invalid usage "{usage}" - permitted values are ' + '"initiate", "accept", and "both"') cdef gss_cred_id_t creds cdef gss_OID_set actual_mechs @@ -162,8 +165,11 @@ usage='initiate', init_lifetime=None, accept_lifetime=None) c_usage = GSS_C_INITIATE elif usage == 'accept': c_usage = GSS_C_ACCEPT - else: + elif usage == 'both': c_usage = GSS_C_BOTH + else: + raise ValueError(f'Invalid usage "{usage}" - permitted values are ' + '"initiate", "accept", and "both"') cdef gss_cred_id_t raw_input_cred if input_cred is not None: diff --git a/gssapi/raw/misc.pyx b/gssapi/raw/misc.pyx index ca1be8c..622bfb7 100644 --- a/gssapi/raw/misc.pyx +++ b/gssapi/raw/misc.pyx @@ -291,7 +291,10 @@ class GSSError(Exception, metaclass=GSSErrorRegistry): given code """ - msg_encoding = locale.getlocale(locale.LC_MESSAGES)[1] or 'UTF-8' + try: + msg_encoding = locale.getlocale(locale.LC_MESSAGES)[1] or 'UTF-8' + except AttributeError: # Windows doesn't have LC_MESSAGES + msg_encoding = 'UTF-8' res = [] try: diff --git a/gssapi/raw/oids.pyx b/gssapi/raw/oids.pyx index 2f133da..b0bd554 100644 --- a/gssapi/raw/oids.pyx +++ b/gssapi/raw/oids.pyx @@ -1,7 +1,5 @@ GSSAPI="BASE" # This ensures that a full module is generated by Cython -import six - from libc.string cimport memcmp, memcpy from libc.stdlib cimport free, malloc @@ -97,7 +95,7 @@ cdef class OID: ValueError: the sequence is less than two elements long """ - if isinstance(integer_sequence, six.string_types): + if isinstance(integer_sequence, str): integer_sequence = integer_sequence.split('.') oid_seq = [int(x) for x in integer_sequence] @@ -134,10 +132,6 @@ cdef class OID: def _decode_asn1ber(self): ber_encoding = self.__bytes__() - # NB(directxman12): indexing a byte string yields an int in Python 3, - # but yields a substring in Python 2 - if six.PY2: - ber_encoding = [ord(c) for c in ber_encoding] decoded = [ber_encoding[0] // 40, ber_encoding[0] % 40] pos = 1 diff --git a/gssapi/raw/types.pyx b/gssapi/raw/types.pyx index 68d6af1..d99f74a 100644 --- a/gssapi/raw/types.pyx +++ b/gssapi/raw/types.pyx @@ -11,12 +11,8 @@ import collections import copy import numbers import operator -import six -if six.PY2: - from collections import MutableSet -else: - from collections.abc import MutableSet +from collections.abc import MutableSet class NameType(object): @@ -61,6 +57,11 @@ class RequirementFlag(IntEnum, metaclass=ExtendableEnum): protection_ready = GSS_C_PROT_READY_FLAG transferable = GSS_C_TRANS_FLAG + # GSS_C_DELEG_POLICY_FLAG. cython can't do compile-time detection of + # this, so take the value from RFC 5896. Implementations that don't + # support it will ignore it. 
+ ok_as_delegate = 32768 + class AddressType(IntEnum, metaclass=ExtendableEnum): """ diff --git a/gssapi/sec_contexts.py b/gssapi/sec_contexts.py index 9f8ec31..a1893d9 100644 --- a/gssapi/sec_contexts.py +++ b/gssapi/sec_contexts.py @@ -1,5 +1,3 @@ -import six - from gssapi.raw import sec_contexts as rsec_contexts from gssapi.raw import message as rmessage from gssapi.raw import named_tuples as tuples @@ -11,8 +9,8 @@ from gssapi.creds import Credentials -@six.add_metaclass(_utils.CheckLastError) -class SecurityContext(rsec_contexts.SecurityContext): +class SecurityContext(rsec_contexts.SecurityContext, + metaclass=_utils.CheckLastError): """A GSSAPI Security Context This class represents a GSSAPI security context that may be used diff --git a/gssapi/tests/test_high_level.py b/gssapi/tests/test_high_level.py index a3137c3..09cf05b 100644 --- a/gssapi/tests/test_high_level.py +++ b/gssapi/tests/test_high_level.py @@ -4,8 +4,6 @@ import sys import pickle -import should_be.all # noqa -import six from parameterized import parameterized from gssapi import creds as gsscreds @@ -126,9 +124,7 @@ def setUp(self): usage='both') def test_acquire_by_init(self, str_name, kwargs): creds = gsscreds.Credentials(name=self.name, **kwargs) - - creds.lifetime.should_be_an_integer() - + self.assertIsInstance(creds.lifetime, int) del creds @exist_perms(lifetime=30, mechs=[gb.MechType.kerberos], @@ -136,18 +132,12 @@ def test_acquire_by_init(self, str_name, kwargs): def test_acquire_by_method(self, str_name, kwargs): cred_resp = gsscreds.Credentials.acquire(name=self.name, **kwargs) + self.assertIsNotNone(cred_resp) - cred_resp.shouldnt_be_none() - - (creds, actual_mechs, ttl) = cred_resp - - creds.shouldnt_be_none() - creds.should_be_a(gsscreds.Credentials) - - actual_mechs.shouldnt_be_empty() - actual_mechs.should_include(gb.MechType.kerberos) - - ttl.should_be_an_integer() + creds, actual_mechs, ttl = cred_resp + self.assertIsInstance(creds, gsscreds.Credentials) + self.assertIn(gb.MechType.kerberos, actual_mechs) + self.assertIsInstance(ttl, int) del creds @@ -172,16 +162,16 @@ def test_store_acquire(self): server_ctx.step(client_token) deleg_creds = server_ctx.delegated_creds - deleg_creds.shouldnt_be_none() + self.assertIsNotNone(deleg_creds) store_res = deleg_creds.store(usage='initiate', set_default=True, overwrite=True) - store_res.usage.should_be('initiate') - store_res.mechs.should_include(gb.MechType.kerberos) + self.assertEqual(store_res.usage, "initiate") + self.assertIn(gb.MechType.kerberos, store_res.mechs) reacquired_creds = gsscreds.Credentials(name=deleg_creds.name, usage='initiate') - reacquired_creds.shouldnt_be_none() + self.assertIsNotNone(reacquired_creds) @ktu.gssapi_extension_test('cred_store', 'credentials store') def test_store_into_acquire_from(self): @@ -198,21 +188,19 @@ def test_store_into_acquire_from(self): usage='initiate') store_res = initial_creds.store(store, overwrite=True) - - store_res.mechs.shouldnt_be_none() - store_res.mechs.shouldnt_be_empty() - store_res.usage.should_be('initiate') + self.assertIsNotNone(store_res.mechs) + self.assertGreater(len(store_res.mechs), 0) + self.assertEqual(store_res.usage, "initiate") name = gssnames.Name(princ_name) retrieved_creds = gsscreds.Credentials(name=name, store=store) - - retrieved_creds.shouldnt_be_none() + self.assertIsNotNone(retrieved_creds) def test_create_from_other(self): raw_creds = gb.acquire_cred(None, usage='accept').creds high_level_creds = gsscreds.Credentials(raw_creds) - 
high_level_creds.usage.should_be('accept') + self.assertEqual(high_level_creds.usage, "accept") @true_false_perms('name', 'lifetime', 'usage', 'mechs') def test_inquire(self, str_name, kwargs): @@ -220,25 +208,24 @@ def test_inquire(self, str_name, kwargs): resp = creds.inquire(**kwargs) if kwargs['name']: - resp.name.should_be(self.name) + self.assertEqual(resp.name, self.name) else: - resp.name.should_be_none() + self.assertIsNone(resp.name) if kwargs['lifetime']: - resp.lifetime.should_be_an_integer() + self.assertIsInstance(resp.lifetime, int) else: - resp.lifetime.should_be_none() + self.assertIsNone(resp.lifetime) if kwargs['usage']: - resp.usage.should_be('both') + self.assertEqual(resp.usage, "both") else: - resp.usage.should_be_none() + self.assertIsNone(resp.usage) if kwargs['mechs']: - resp.mechs.shouldnt_be_empty() - resp.mechs.should_include(gb.MechType.kerberos) + self.assertIn(gb.MechType.kerberos, resp.mechs) else: - resp.mechs.should_be_none() + self.assertIsNone(resp.mechs) @true_false_perms('name', 'init_lifetime', 'accept_lifetime', 'usage') def test_inquire_by_mech(self, str_name, kwargs): @@ -246,33 +233,31 @@ def test_inquire_by_mech(self, str_name, kwargs): resp = creds.inquire_by_mech(mech=gb.MechType.kerberos, **kwargs) if kwargs['name']: - resp.name.should_be(self.name) + self.assertEqual(resp.name, self.name) else: - resp.name.should_be_none() + self.assertIsNone(resp.name) if kwargs['init_lifetime']: - resp.init_lifetime.should_be_an_integer() + self.assertIsInstance(resp.init_lifetime, int) else: - resp.init_lifetime.should_be_none() + self.assertIsNone(resp.init_lifetime) if kwargs['accept_lifetime']: - resp.accept_lifetime.should_be_an_integer() + self.assertIsInstance(resp.accept_lifetime, int) else: - resp.accept_lifetime.should_be_none() + self.assertIsNone(resp.accept_lifetime) if kwargs['usage']: - resp.usage.should_be('both') + self.assertEqual(resp.usage, "both") else: - resp.usage.should_be_none() + self.assertIsNone(resp.usage) def test_add(self): input_creds = gsscreds.Credentials(gb.Creds()) name = gssnames.Name(SERVICE_PRINCIPAL) new_creds = input_creds.add(name, gb.MechType.kerberos, usage='initiate') - - new_creds.shouldnt_be_none() - new_creds.should_be_a(gsscreds.Credentials) + self.assertIsInstance(new_creds, gsscreds.Credentials) @ktu.gssapi_extension_test('cred_store', 'credentials store') def test_store_into_add_from(self): @@ -289,24 +274,21 @@ def test_store_into_add_from(self): usage='initiate') store_res = initial_creds.store(store, overwrite=True) - - store_res.mechs.shouldnt_be_none() - store_res.mechs.shouldnt_be_empty() - store_res.usage.should_be('initiate') + self.assertIsNotNone(store_res.mechs) + self.assertGreater(len(store_res.mechs), 0) + self.assertEqual(store_res.usage, "initiate") name = gssnames.Name(princ_name) input_creds = gsscreds.Credentials(gb.Creds()) retrieved_creds = input_creds.add(name, gb.MechType.kerberos, store=store) - - retrieved_creds.shouldnt_be_none() - retrieved_creds.should_be_a(gsscreds.Credentials) + self.assertIsInstance(retrieved_creds, gsscreds.Credentials) @ktu.gssapi_extension_test('cred_imp_exp', 'credentials import-export') def test_export(self): creds = gsscreds.Credentials(name=self.name) token = creds.export() - token.should_be_a(bytes) + self.assertIsInstance(token, bytes) @ktu.gssapi_extension_test('cred_imp_exp', 'credentials import-export') def test_import_by_init(self): @@ -314,8 +296,8 @@ def test_import_by_init(self): token = creds.export() imported_creds = 
gsscreds.Credentials(token=token) - imported_creds.lifetime.should_be(creds.lifetime) - imported_creds.name.should_be(creds.name) + self.assertEqual(imported_creds.lifetime, creds.lifetime) + self.assertEqual(imported_creds.name, creds.name) @ktu.gssapi_extension_test('cred_imp_exp', 'credentials import-export') def test_pickle_unpickle(self): @@ -323,51 +305,57 @@ def test_pickle_unpickle(self): pickled_creds = pickle.dumps(creds) unpickled_creds = pickle.loads(pickled_creds) - unpickled_creds.lifetime.should_be(creds.lifetime) - unpickled_creds.name.should_be(creds.name) + self.assertEqual(unpickled_creds.lifetime, creds.lifetime) + self.assertEqual(unpickled_creds.name, creds.name) @exist_perms(lifetime=30, mechs=[gb.MechType.kerberos], usage='initiate') @ktu.gssapi_extension_test('s4u', 'S4U') def test_impersonate(self, str_name, kwargs): - target_name = gssnames.Name(TARGET_SERVICE_NAME, - gb.NameType.hostbased_service) - # TODO(directxman12): make this use the high-level SecurityContext - client_ctx_resp = gb.init_sec_context(target_name) - client_token = client_ctx_resp[3] - del client_ctx_resp # free everything but the token + server_name = gssnames.Name(SERVICE_PRINCIPAL, + gb.NameType.kerberos_principal) - server_name = self.name - server_creds = gsscreds.Credentials(name=server_name, - usage='both') - server_ctx_resp = gb.accept_sec_context(client_token, - acceptor_creds=server_creds) + password = self.realm.password("user") + self.realm.kinit(self.realm.user_princ, password=password, + flags=["-f"]) + client_ctx = gssctx.SecurityContext( + name=server_name, flags=gb.RequirementFlag.delegate_to_peer) + client_token = client_ctx.step() - imp_creds = server_creds.impersonate(server_ctx_resp[1], **kwargs) + self.realm.kinit(SERVICE_PRINCIPAL.decode("utf-8"), flags=["-k"]) + server_creds = gsscreds.Credentials(usage="both") + server_ctx = gssctx.SecurityContext(creds=server_creds) + server_ctx.step(client_token) + self.assertTrue(server_ctx.complete) - imp_creds.shouldnt_be_none() - imp_creds.should_be_a(gsscreds.Credentials) + imp_creds = server_ctx.delegated_creds.impersonate(server_name, + **kwargs) + self.assertIsInstance(imp_creds, gsscreds.Credentials) @ktu.gssapi_extension_test('s4u', 'S4U') def test_add_with_impersonate(self): - target_name = gssnames.Name(TARGET_SERVICE_NAME, - gb.NameType.hostbased_service) - client_ctx = gssctx.SecurityContext(name=target_name) + server_name = gssnames.Name(SERVICE_PRINCIPAL, + gb.NameType.kerberos_principal) + + password = self.realm.password("user") + self.realm.kinit(self.realm.user_princ, password=password, + flags=["-f"]) + client_ctx = gssctx.SecurityContext( + name=server_name, flags=gb.RequirementFlag.delegate_to_peer) client_token = client_ctx.step() - server_creds = gsscreds.Credentials(usage='both') - server_ctx = gssctx.SecurityContext(creds=server_creds, usage='accept') + self.realm.kinit(SERVICE_PRINCIPAL.decode("utf-8"), flags=["-k"]) + server_creds = gsscreds.Credentials(usage="both") + server_ctx = gssctx.SecurityContext(creds=server_creds) server_ctx.step(client_token) + self.assertTrue(server_ctx.complete) # use empty creds to test here input_creds = gsscreds.Credentials(gb.Creds()) - new_creds = input_creds.add(server_ctx.initiator_name, - gb.MechType.kerberos, - impersonator=server_creds, - usage='initiate') - - new_creds.shouldnt_be(None) - new_creds.should_be_a(gsscreds.Credentials) + new_creds = input_creds.add( + server_name, gb.MechType.kerberos, + impersonator=server_ctx.delegated_creds, usage='initiate') + 
self.assertIsInstance(new_creds, gsscreds.Credentials) class MechsTestCase(_GSSAPIKerberosTestCase): @@ -375,26 +363,26 @@ def test_indicate_mechs(self): mechs = gssmechs.Mechanism.all_mechs() for mech in mechs: s = str(mech) - s.shouldnt_be_empty() + self.assertGreater(len(s), 0) @ktu.gssapi_extension_test('rfc5801', 'RFC 5801: SASL Names') def test_sasl_properties(self): mechs = gssmechs.Mechanism.all_mechs() for mech in mechs: s = str(mech) - s.shouldnt_be_empty() - s.should_be_a(str) + self.assertGreater(len(s), 0) + self.assertIsInstance(s, str) # Note that some mechanisms don't have SASL names or SASL # descriptions; in this case, GSSAPI returns empty strings. if mech.sasl_name: - mech.sasl_name.should_be_a(six.text_type) + self.assertIsInstance(mech.sasl_name, str) if mech.description: - mech.description.should_be_a(six.text_type) + self.assertIsInstance(mech.description, str) cmp_mech = gssmechs.Mechanism.from_sasl_name(mech.sasl_name) - str(cmp_mech).should_be(str(mech)) + self.assertEqual(str(cmp_mech), str(mech)) @ktu.gssapi_extension_test('rfc5587', 'RFC 5587: Mech Inquiry') def test_mech_inquiry(self): @@ -414,9 +402,9 @@ def test_mech_inquiry(self): from_desired = list(from_desired) from_except = list(from_except) - (len(from_desired) + len(from_except)).should_be(c) - from_desired.should_include(mech) - from_except.shouldnt_include(mech) + self.assertEqual(len(from_desired) + len(from_except), c) + self.assertIn(mech, from_desired) + self.assertNotIn(mech, from_except) for attr in known_attrs: from_desired = g_M_from_attrs(desired_attrs=[attr]) @@ -425,7 +413,7 @@ def test_mech_inquiry(self): from_desired = list(from_desired) from_except = list(from_except) - (len(from_desired) + len(from_except)).should_be(c) + self.assertEqual(len(from_desired) + len(from_except), c) class NamesTestCase(_GSSAPIKerberosTestCase): @@ -433,18 +421,16 @@ def test_create_from_other(self): raw_name = gb.import_name(SERVICE_PRINCIPAL) high_level_name = gssnames.Name(raw_name) - bytes(high_level_name).should_be(SERVICE_PRINCIPAL) + self.assertEqual(bytes(high_level_name), SERVICE_PRINCIPAL) def test_create_from_name_no_type(self): name = gssnames.Name(SERVICE_PRINCIPAL) - - name.shouldnt_be_none() + self.assertIsNotNone(name) def test_create_from_name_and_type(self): name = gssnames.Name(SERVICE_PRINCIPAL, gb.NameType.kerberos_principal) - - name.shouldnt_be_none() - name.name_type.should_be(gb.NameType.kerberos_principal) + self.assertIsNotNone(name) + self.assertEqual(name.name_type, gb.NameType.kerberos_principal) def test_create_from_token(self): name1 = gssnames.Name(TARGET_SERVICE_NAME, @@ -452,8 +438,7 @@ def test_create_from_token(self): exported_name = name1.canonicalize(gb.MechType.kerberos).export() name2 = gssnames.Name(token=exported_name) - name2.shouldnt_be_none() - name2.name_type.should_be(gb.NameType.kerberos_principal) + self.assertEqual(name2.name_type, gb.NameType.kerberos_principal) @ktu.gssapi_extension_test('rfc6680', 'RFC 6680') def test_display_as(self): @@ -467,9 +452,9 @@ def test_display_as(self): gb.NameType.hostbased_service) princ_str = SERVICE_PRINCIPAL.decode('utf-8') + '@' - six.text_type(canonical_name).should_be(princ_str) - krb_name.should_be_a(six.text_type) - krb_name.should_be(princ_str) + self.assertEqual(str(canonical_name), princ_str) + self.assertIsInstance(krb_name, str) + self.assertEqual(krb_name, princ_str) @ktu.gssapi_extension_test('rfc6680', 'RFC 6680') def test_create_from_composite_token_no_attrs(self): @@ -479,7 +464,7 @@ def 
test_create_from_composite_token_no_attrs(self): gb.MechType.kerberos).export(composite=True) name2 = gssnames.Name(token=exported_name, composite=True) - name2.shouldnt_be_none() + self.assertIsNotNone(name2) @ktu.gssapi_extension_test('rfc6680', 'RFC 6680') @ktu.krb_plugin_test('authdata', 'greet_client') @@ -502,43 +487,35 @@ def test_create_from_composite_token_with_attrs(self): # name2 = name2_raw.canonicalize(gb.MechType.kerberos) name2 = gssnames.Name(token=exported_name) + self.assertIsNotNone(name2) - name2.shouldnt_be_none() - - name2.attributes['urn:greet:greeting'].values.should_be( - set([b'some val'])) - name2.attributes['urn:greet:greeting'].complete.should_be_true() - name2.attributes['urn:greet:greeting'].authenticated.should_be_false() + ugg = name2.attributes["urn:greet:greeting"] + self.assertEqual(ugg.values, set([b"some val"])) + self.assertTrue(ugg.complete) + self.assertFalse(ugg.authenticated) def test_to_str(self): name = gssnames.Name(SERVICE_PRINCIPAL, gb.NameType.kerberos_principal) name_str = str(name) - name_str.should_be_a(str) if sys.version_info[0] == 2: target_val = SERVICE_PRINCIPAL else: target_val = SERVICE_PRINCIPAL.decode(gssutils._get_encoding()) - name_str.should_be(target_val) + self.assertEqual(name_str, target_val) def test_to_unicode(self): name = gssnames.Name(SERVICE_PRINCIPAL, gb.NameType.kerberos_principal) - - name_str = six.text_type(name) - - name_str.should_be_a(six.text_type) - name_str.should_be(SERVICE_PRINCIPAL.decode(gssutils._get_encoding())) + self.assertEqual(str(name), + SERVICE_PRINCIPAL.decode(gssutils._get_encoding())) def test_to_bytes(self): name = gssnames.Name(SERVICE_PRINCIPAL, gb.NameType.kerberos_principal) # NB(directxman12): bytes only calles __bytes__ on Python 3+ - name_bytes = name.__bytes__() - - name_bytes.should_be_a(bytes) - name_bytes.should_be(SERVICE_PRINCIPAL) + self.assertEqual(name.__bytes__(), SERVICE_PRINCIPAL) def test_compare(self): name1 = gssnames.Name(SERVICE_PRINCIPAL) @@ -546,29 +523,29 @@ def test_compare(self): name3 = gssnames.Name(TARGET_SERVICE_NAME, gb.NameType.hostbased_service) - name1.should_be(name2) - name1.shouldnt_be(name3) + self.assertEqual(name1, name2) + self.assertNotEqual(name1, name3) def test_canoncialize_and_export(self): name = gssnames.Name(SERVICE_PRINCIPAL, gb.NameType.kerberos_principal) canonical_name = name.canonicalize(gb.MechType.kerberos) exported_name = canonical_name.export() - exported_name.should_be_a(bytes) + self.assertIsInstance(exported_name, bytes) def test_canonicalize(self): name = gssnames.Name(TARGET_SERVICE_NAME, gb.NameType.hostbased_service) canonicalized_name = name.canonicalize(gb.MechType.kerberos) - canonicalized_name.should_be_a(gssnames.Name) - bytes(canonicalized_name).should_be(SERVICE_PRINCIPAL + b'@') + self.assertIsInstance(canonicalized_name, gssnames.Name) + self.assertEqual(bytes(canonicalized_name), SERVICE_PRINCIPAL + b"@") def test_copy(self): name1 = gssnames.Name(SERVICE_PRINCIPAL) name2 = copy.copy(name1) - name1.should_be(name2) + self.assertEqual(name1, name2) # NB(directxman12): we don't test display_name_ext because the krb5 mech # doesn't actually implement it @@ -577,14 +554,12 @@ def test_copy(self): def test_is_mech_name(self): name = gssnames.Name(TARGET_SERVICE_NAME, gb.NameType.hostbased_service) - - name.is_mech_name.should_be_false() + self.assertFalse(name.is_mech_name) canon_name = name.canonicalize(gb.MechType.kerberos) - - canon_name.is_mech_name.should_be_true() - canon_name.mech.should_be_a(gb.OID) - 
canon_name.mech.should_be(gb.MechType.kerberos) + self.assertTrue(canon_name.is_mech_name) + self.assertIsInstance(canon_name.mech, gb.OID) + self.assertEqual(canon_name.mech, gb.MechType.kerberos) @ktu.gssapi_extension_test('rfc6680', 'RFC 6680') def test_export_name_composite_no_attrs(self): @@ -593,7 +568,7 @@ def test_export_name_composite_no_attrs(self): canon_name = name.canonicalize(gb.MechType.kerberos) exported_name = canon_name.export(composite=True) - exported_name.should_be_a(bytes) + self.assertIsInstance(exported_name, bytes) @ktu.gssapi_extension_test('rfc6680', 'RFC 6680') @ktu.krb_plugin_test('authdata', 'greet_client') @@ -604,7 +579,7 @@ def test_export_name_composite_with_attrs(self): canon_name.attributes['urn:greet:greeting'] = b'some val' exported_name = canon_name.export(composite=True) - exported_name.should_be_a(bytes) + self.assertIsInstance(exported_name, bytes) @ktu.gssapi_extension_test('rfc6680', 'RFC 6680') @ktu.krb_plugin_test('authdata', 'greet_client') @@ -614,20 +589,17 @@ def test_basic_get_set_del_name_attribute_no_auth(self): canon_name = name.canonicalize(gb.MechType.kerberos) canon_name.attributes['urn:greet:greeting'] = (b'some val', True) - canon_name.attributes['urn:greet:greeting'].values.should_be( - set([b'some val'])) - canon_name.attributes['urn:greet:greeting'].complete.should_be_true() - (canon_name.attributes['urn:greet:greeting'].authenticated - .should_be_false()) + ugg = canon_name.attributes["urn:greet:greeting"] + self.assertEqual(ugg.values, set([b"some val"])) + self.assertTrue(ugg.complete) + self.assertFalse(ugg.authenticated) del canon_name.attributes['urn:greet:greeting'] # NB(directxman12): for some reason, the greet:greeting handler plugin - # doesn't properly delete itself -- it just clears the value - # If we try to get its value now, we segfault (due to an issue with - # greet:greeting's delete). Instead, just try setting the value again - # canon_name.attributes.should_be_empty(), which would normally give - # an error. + # doesn't properly delete itself -- it just clears the value. If we + # try to get its value now, we segfault (due to an issue with + # greet:greeting's delete). Instead, just set the value again. 
canon_name.attributes['urn:greet:greeting'] = b'some other val' @@ -656,7 +628,7 @@ def _create_client_ctx(self, **kwargs): def test_create_from_other(self): raw_client_ctx, raw_server_ctx = self._create_completed_contexts() high_level_ctx = gssctx.SecurityContext(raw_client_ctx) - high_level_ctx.target_name.should_be(self.target_name) + self.assertEqual(high_level_ctx.target_name, self.target_name) @exist_perms(lifetime=30, flags=[], mech=gb.MechType.kerberos, @@ -665,30 +637,28 @@ def test_create_new_init(self, str_name, kwargs): client_ctx = gssctx.SecurityContext(name=self.target_name, creds=self.client_creds, **kwargs) - client_ctx.usage.should_be('initiate') + self.assertEqual(client_ctx.usage, "initiate") client_ctx = self._create_client_ctx(**kwargs) - client_ctx.usage.should_be('initiate') + self.assertEqual(client_ctx.usage, "initiate") def test_create_new_accept(self): server_ctx = gssctx.SecurityContext(creds=self.server_creds) - server_ctx.usage.should_be('accept') + self.assertEqual(server_ctx.usage, "accept") def test_init_throws_error_on_invalid_args(self): - def create_sec_context(): - gssctx.SecurityContext(usage='accept', name=self.target_name) - - create_sec_context.should_raise(TypeError) + self.assertRaises(TypeError, gssctx.SecurityContext, usage='accept', + name=self.target_name) def _create_completed_contexts(self): client_ctx = self._create_client_ctx(lifetime=400) client_token = client_ctx.step() - client_token.should_be_a(bytes) + self.assertIsInstance(client_token, bytes) server_ctx = gssctx.SecurityContext(creds=self.server_creds) server_token = server_ctx.step(client_token) - server_token.should_be_a(bytes) + self.assertIsInstance(server_token, bytes) client_ctx.step(server_token) @@ -697,33 +667,32 @@ def _create_completed_contexts(self): def test_complete_on_partially_completed(self): client_ctx = self._create_client_ctx() client_tok = client_ctx.step() - client_ctx.complete.should_be_false() + self.assertFalse(client_ctx.complete) server_ctx = gssctx.SecurityContext(creds=self.server_creds) server_tok = server_ctx.step(client_tok) client_ctx.step(server_tok) - - client_ctx.complete.should_be_true() - server_ctx.complete.should_be_true() + self.assertTrue(client_ctx.complete) + self.assertTrue(server_ctx.complete) def test_initiate_accept_steps(self): client_ctx, server_ctx = self._create_completed_contexts() # KDC may allow for clockskew by increasing acceptor context lifetime - server_ctx.lifetime.should_be_at_most(400 + 300) - server_ctx.initiator_name.should_be(client_ctx.initiator_name) - server_ctx.mech.should_be_a(gb.OID) - server_ctx.actual_flags.should_be_a(gb.IntEnumFlagSet) - server_ctx.locally_initiated.should_be_false() - server_ctx.complete.should_be_true() - - client_ctx.lifetime.should_be_at_most(400) - client_ctx.target_name.should_be(self.target_name) - client_ctx.mech.should_be_a(gb.OID) - client_ctx.actual_flags.should_be_a(gb.IntEnumFlagSet) - client_ctx.locally_initiated.should_be_true() - client_ctx.complete.should_be_true() + self.assertLessEqual(server_ctx.lifetime, 400 + 300) + self.assertEqual(server_ctx.initiator_name, client_ctx.initiator_name) + self.assertIsInstance(server_ctx.mech, gb.OID) + self.assertIsInstance(server_ctx.actual_flags, gb.IntEnumFlagSet) + self.assertFalse(server_ctx.locally_initiated) + self.assertTrue(server_ctx.complete) + + self.assertLessEqual(client_ctx.lifetime, 400) + self.assertEqual(client_ctx.target_name, self.target_name) + self.assertIsInstance(client_ctx.mech, gb.OID) + 
self.assertIsInstance(client_ctx.actual_flags, gb.IntEnumFlagSet) + self.assertTrue(client_ctx.locally_initiated) + self.assertTrue(client_ctx.complete) def test_channel_bindings(self): bdgs = gb.ChannelBindings(application_data=b'abcxyz', @@ -735,12 +704,12 @@ def test_channel_bindings(self): channel_bindings=bdgs) client_token = client_ctx.step() - client_token.should_be_a(bytes) + self.assertIsInstance(client_token, bytes) server_ctx = gssctx.SecurityContext(creds=self.server_creds, channel_bindings=bdgs) server_token = server_ctx.step(client_token) - server_token.should_be_a(bytes) + self.assertIsInstance(server_token, bytes) client_ctx.step(server_token) @@ -754,68 +723,66 @@ def test_bad_channel_bindings_raises_error(self): channel_bindings=bdgs) client_token = client_ctx.step() - client_token.should_be_a(bytes) + self.assertIsInstance(client_token, bytes) bdgs.acceptor_address = b'127.0.1.0' server_ctx = gssctx.SecurityContext(creds=self.server_creds, channel_bindings=bdgs) - server_ctx.step.should_raise(gb.BadChannelBindingsError, client_token) + self.assertRaises(gb.BadChannelBindingsError, server_ctx.step, + client_token) def test_export_create_from_token(self): client_ctx, server_ctx = self._create_completed_contexts() token = client_ctx.export() - - token.should_be_a(bytes) + self.assertIsInstance(token, bytes) imported_ctx = gssctx.SecurityContext(token=token) - - imported_ctx.usage.should_be('initiate') - imported_ctx.target_name.should_be(self.target_name) + self.assertEqual(imported_ctx.usage, "initiate") + self.assertEqual(imported_ctx.target_name, self.target_name) def test_pickle_unpickle(self): client_ctx, server_ctx = self._create_completed_contexts() pickled_ctx = pickle.dumps(client_ctx) unpickled_ctx = pickle.loads(pickled_ctx) - - unpickled_ctx.should_be_a(gssctx.SecurityContext) - unpickled_ctx.usage.should_be('initiate') - unpickled_ctx.target_name.should_be(self.target_name) + self.assertIsInstance(unpickled_ctx, gssctx.SecurityContext) + self.assertEqual(unpickled_ctx.usage, "initiate") + self.assertEqual(unpickled_ctx.target_name, self.target_name) def test_encrypt_decrypt(self): client_ctx, server_ctx = self._create_completed_contexts() encrypted_msg = client_ctx.encrypt(b'test message') - encrypted_msg.should_be_a(bytes) + self.assertIsInstance(encrypted_msg, bytes) decrypted_msg = server_ctx.decrypt(encrypted_msg) - decrypted_msg.should_be_a(bytes) - decrypted_msg.should_be(b'test message') + self.assertIsInstance(decrypted_msg, bytes) + self.assertEqual(decrypted_msg, b"test message") def test_encrypt_decrypt_throws_error_on_no_encryption(self): client_ctx, server_ctx = self._create_completed_contexts() wrap_res = client_ctx.wrap(b'test message', False) - wrap_res.should_be_a(gb.WrapResult) - wrap_res.encrypted.should_be_false() - wrap_res.message.should_be_a(bytes) + self.assertIsInstance(wrap_res, gb.WrapResult) + self.assertFalse(wrap_res.encrypted) + self.assertIsInstance(wrap_res.message, bytes) - server_ctx.decrypt.should_raise(excs.EncryptionNotUsed, - wrap_res.message) + self.assertRaises(excs.EncryptionNotUsed, server_ctx.decrypt, + wrap_res.message) def test_wrap_unwrap(self): client_ctx, server_ctx = self._create_completed_contexts() wrap_res = client_ctx.wrap(b'test message', True) - wrap_res.should_be_a(gb.WrapResult) - wrap_res.encrypted.should_be_true() - wrap_res.message.should_be_a(bytes) + self.assertIsInstance(wrap_res, gb.WrapResult) + self.assertTrue(wrap_res.encrypted) + self.assertIsInstance(wrap_res.message, bytes) unwrap_res = 
server_ctx.unwrap(wrap_res.message) - unwrap_res.should_be_a(gb.UnwrapResult) - unwrap_res.message.should_be_a(bytes) - unwrap_res.message.should_be(b'test message') - unwrap_res.encrypted.should_be_true() + self.assertIsInstance(unwrap_res, gb.UnwrapResult) + self.assertIsInstance(unwrap_res.message, bytes) + self.assertEqual(unwrap_res.message, b"test message") + self.assertTrue(unwrap_res.encrypted) def test_get_wrap_size_limit(self): client_ctx, server_ctx = self._create_completed_contexts() @@ -823,27 +790,25 @@ def test_get_wrap_size_limit(self): with_conf = client_ctx.get_wrap_size_limit(100) without_conf = client_ctx.get_wrap_size_limit(100, encrypted=True) - with_conf.should_be_an_integer() - without_conf.should_be_an_integer() - - with_conf.should_be_at_most(100) - without_conf.should_be_at_most(100) + self.assertIsInstance(with_conf, int) + self.assertIsInstance(without_conf, int) + self.assertLessEqual(with_conf, 100) + self.assertLessEqual(without_conf, 100) def test_get_signature(self): client_ctx, server_ctx = self._create_completed_contexts() mic_token = client_ctx.get_signature(b'some message') - mic_token.should_be_a(bytes) - mic_token.shouldnt_be_empty() + self.assertIsInstance(mic_token, bytes) + self.assertGreater(len(mic_token), 0) def test_verify_signature_raise(self): client_ctx, server_ctx = self._create_completed_contexts() mic_token = client_ctx.get_signature(b'some message') - server_ctx.verify_signature(b'some message', mic_token) - server_ctx.verify_signature.should_raise(gb.GSSError, - b'other message', mic_token) + self.assertRaises(gb.GSSError, server_ctx.verify_signature, + b"other message", mic_token) @ktu.krb_minversion_test("1.11", "returning tokens") def test_defer_step_error_on_method(self): @@ -853,13 +818,14 @@ def test_defer_step_error_on_method(self): channel_bindings=bdgs) client_token = client_ctx.step() - client_token.should_be_a(bytes) + self.assertIsInstance(client_token, bytes) bdgs.application_data = b'defuvw' server_ctx = gssctx.SecurityContext(creds=self.server_creds, channel_bindings=bdgs) - server_ctx.step(client_token).should_be_a(bytes) - server_ctx.encrypt.should_raise(gb.BadChannelBindingsError, b'test') + self.assertIsInstance(server_ctx.step(client_token), bytes) + self.assertRaises(gb.BadChannelBindingsError, server_ctx.encrypt, + b"test") @ktu.krb_minversion_test("1.11", "returning tokens") def test_defer_step_error_on_complete_property_access(self): @@ -869,14 +835,12 @@ def test_defer_step_error_on_complete_property_access(self): channel_bindings=bdgs) client_token = client_ctx.step() - client_token.should_be_a(bytes) + self.assertIsInstance(client_token, bytes) bdgs.application_data = b'defuvw' server_ctx = gssctx.SecurityContext(creds=self.server_creds, channel_bindings=bdgs) - server_ctx.step(client_token).should_be_a(bytes) - - def check_complete(): - return server_ctx.complete + self.assertIsInstance(server_ctx.step(client_token), bytes) - check_complete.should_raise(gb.BadChannelBindingsError) + self.assertRaises(gb.BadChannelBindingsError, + lambda: server_ctx.complete) diff --git a/gssapi/tests/test_raw.py b/gssapi/tests/test_raw.py index 74cf6ee..3742b27 100644 --- a/gssapi/tests/test_raw.py +++ b/gssapi/tests/test_raw.py @@ -3,18 +3,12 @@ import socket import unittest -import six -import should_be.all # noqa - import gssapi.raw as gb import gssapi.raw.misc as gbmisc import k5test.unit as ktu import k5test as kt -if six.PY2: - from collections import Set -else: - from collections.abc import Set +from collections.abc 
import Set TARGET_SERVICE_NAME = b'host' @@ -63,18 +57,12 @@ def setUp(self): def test_indicate_mechs(self): mechs = gb.indicate_mechs() - - mechs.shouldnt_be_none() - mechs.should_be_a(set) - mechs.shouldnt_be_empty() - - mechs.should_include(gb.MechType.kerberos) + self.assertIsInstance(mechs, set) + self.assertIn(gb.MechType.kerberos, mechs) def test_import_name(self): imported_name = gb.import_name(TARGET_SERVICE_NAME) - - imported_name.shouldnt_be_none() - imported_name.should_be_a(gb.Name) + self.assertIsInstance(imported_name, gb.Name) gb.release_name(imported_name) @@ -84,38 +72,28 @@ def test_canonicalize_export_name(self): canonicalized_name = gb.canonicalize_name(imported_name, gb.MechType.kerberos) - - canonicalized_name.shouldnt_be_none() - canonicalized_name.should_be_a(gb.Name) + self.assertIsInstance(canonicalized_name, gb.Name) exported_name = gb.export_name(canonicalized_name) - - exported_name.shouldnt_be_none() - exported_name.should_be_a(bytes) - exported_name.shouldnt_be_empty() + self.assertIsInstance(exported_name, bytes) + self.assertGreater(len(exported_name), 0) def test_duplicate_name(self): orig_name = gb.import_name(TARGET_SERVICE_NAME) new_name = gb.duplicate_name(orig_name) - - new_name.shouldnt_be_none() - gb.compare_name(orig_name, new_name).should_be_true() + self.assertIsNotNone(new_name) + self.assertTrue(gb.compare_name(orig_name, new_name)) def test_display_name(self): imported_name = gb.import_name(TARGET_SERVICE_NAME, gb.NameType.hostbased_service) displ_resp = gb.display_name(imported_name) + self.assertIsNotNone(displ_resp) - displ_resp.shouldnt_be_none() - - (displayed_name, out_type) = displ_resp - - displayed_name.shouldnt_be_none() - displayed_name.should_be_a(bytes) - displayed_name.should_be(TARGET_SERVICE_NAME) - - out_type.shouldnt_be_none() - out_type.should_be(gb.NameType.hostbased_service) + displayed_name, out_type = displ_resp + self.assertIsInstance(displayed_name, bytes) + self.assertEqual(displayed_name, TARGET_SERVICE_NAME) + self.assertEqual(out_type, gb.NameType.hostbased_service) # NB(directxman12): we don't test display_name_ext because the krb5 mech # doesn't actually implement it @@ -124,12 +102,11 @@ def test_display_name(self): def test_inquire_name_not_mech_name(self): base_name = gb.import_name(TARGET_SERVICE_NAME, gb.NameType.hostbased_service) - inquire_res = gb.inquire_name(base_name) - - inquire_res.shouldnt_be_none() - inquire_res.is_mech_name.should_be_false() - inquire_res.mech.should_be_none() + inquire_res = gb.inquire_name(base_name) + self.assertIsNotNone(inquire_res) + self.assertFalse(inquire_res.is_mech_name) + self.assertIsNone(inquire_res.mech) @ktu.gssapi_extension_test('rfc6680', 'RFC 6680') def test_inquire_name_mech_name(self): @@ -138,11 +115,10 @@ def test_inquire_name_mech_name(self): mech_name = gb.canonicalize_name(base_name, gb.MechType.kerberos) inquire_res = gb.inquire_name(mech_name) - inquire_res.shouldnt_be_none() - - inquire_res.is_mech_name.should_be_true() - inquire_res.mech.should_be_a(gb.OID) - inquire_res.mech.should_be(gb.MechType.kerberos) + self.assertIsNotNone(inquire_res) + self.assertTrue(inquire_res.is_mech_name) + self.assertIsInstance(inquire_res.mech, gb.OID) + self.assertEqual(inquire_res.mech, gb.MechType.kerberos) @ktu.gssapi_extension_test('rfc6680', 'RFC 6680') @ktu.gssapi_extension_test('rfc6680_comp_oid', @@ -150,17 +126,15 @@ def test_inquire_name_mech_name(self): def test_import_export_name_composite_no_attrs(self): base_name = gb.import_name(TARGET_SERVICE_NAME, 
gb.NameType.hostbased_service) - canon_name = gb.canonicalize_name(base_name, gb.MechType.kerberos) - exported_name = gb.export_name_composite(canon_name) - exported_name.should_be_a(bytes) + exported_name = gb.export_name_composite(canon_name) + self.assertIsInstance(exported_name, bytes) imported_name = gb.import_name(exported_name, gb.NameType.composite_export) - - imported_name.should_be_a(gb.Name) + self.assertIsInstance(imported_name, gb.Name) # NB(directxman12): the greet_client plugin only allows for one value @@ -174,10 +148,8 @@ def test_inquire_name_with_attrs(self): [b'some greeting']) inquire_res = gb.inquire_name(canon_name) - inquire_res.shouldnt_be_none() - - inquire_res.attrs.should_be_a(list) - inquire_res.attrs.should_be([b'urn:greet:greeting']) + self.assertIsInstance(inquire_res.attrs, list) + self.assertEqual(inquire_res.attrs, [b"urn:greet:greeting"]) @ktu.gssapi_extension_test('rfc6680', 'RFC 6680') @ktu.krb_plugin_test('authdata', 'greet_client') @@ -190,16 +162,13 @@ def test_basic_get_set_delete_name_attributes_no_auth(self): [b'some other val'], complete=True) get_res = gb.get_name_attribute(canon_name, b'urn:greet:greeting') - get_res.shouldnt_be_none() - - get_res.values.should_be_a(list) - get_res.values.should_be([b'some other val']) - - get_res.display_values.should_be_a(list) - get_res.display_values.should_be(get_res.values) - - get_res.complete.should_be_true() - get_res.authenticated.should_be_false() + self.assertIsNotNone(get_res) + self.assertIsInstance(get_res.values, list) + self.assertEqual(get_res.values, [b"some other val"]) + self.assertIsInstance(get_res.display_values, list) + self.assertEqual(get_res.display_values, get_res.values) + self.assertTrue(get_res.complete) + self.assertFalse(get_res.authenticated) gb.delete_name_attribute(canon_name, b'urn:greet:greeting') @@ -218,8 +187,7 @@ def test_import_export_name_composite(self): gb.set_name_attribute(canon_name, b'urn:greet:greeting', [b'some val']) exported_name = gb.export_name_composite(canon_name) - - exported_name.should_be_a(bytes) + self.assertIsInstance(exported_name, bytes) # TODO(directxman12): when you just import a token as composite, # appears as this name whose text is all garbled, since it contains @@ -234,11 +202,10 @@ def test_import_export_name_composite(self): # gb.MechType.kerberos) imported_name = gb.import_name(exported_name, gb.NameType.export) - - imported_name.should_be_a(gb.Name) + self.assertIsInstance(imported_name, gb.Name) get_res = gb.get_name_attribute(imported_name, b'urn:greet:greeting') - get_res.values.should_be([b'some val']) + self.assertEqual(get_res.values, [b"some val"]) def test_compare_name(self): service_name1 = gb.import_name(TARGET_SERVICE_NAME) @@ -246,10 +213,9 @@ def test_compare_name(self): init_name = gb.import_name(self.ADMIN_PRINC, gb.NameType.kerberos_principal) - gb.compare_name(service_name1, service_name2).should_be_true() - gb.compare_name(service_name2, service_name1).should_be_true() - - gb.compare_name(service_name1, init_name).should_be_false() + self.assertTrue(gb.compare_name(service_name1, service_name2)) + self.assertTrue(gb.compare_name(service_name2, service_name1)) + self.assertFalse(gb.compare_name(service_name1, init_name)) gb.release_name(service_name1) gb.release_name(service_name2) @@ -257,33 +223,25 @@ def test_compare_name(self): def test_display_status(self): status_resp = gbmisc._display_status(0, False) - status_resp.shouldnt_be_none() + self.assertIsNotNone(status_resp) - (status, ctx, cont) = status_resp - - 
status.should_be_a(bytes) - status.shouldnt_be_empty() - - ctx.should_be_an_integer() - - cont.should_be_a(bool) - cont.should_be_false() + status, ctx, cont = status_resp + self.assertIsInstance(status, bytes) + self.assertGreater(len(status), 0) + self.assertIsInstance(ctx, int) + self.assertIsInstance(cont, bool) + self.assertFalse(cont) def test_acquire_creds(self): name = gb.import_name(SERVICE_PRINCIPAL, gb.NameType.kerberos_principal) cred_resp = gb.acquire_cred(name) - cred_resp.shouldnt_be_none() - - (creds, actual_mechs, ttl) = cred_resp - - creds.shouldnt_be_none() - creds.should_be_a(gb.Creds) + self.assertIsNotNone(cred_resp) - actual_mechs.shouldnt_be_empty() - actual_mechs.should_include(gb.MechType.kerberos) - - ttl.should_be_an_integer() + creds, actual_mechs, ttl = cred_resp + self.assertIsInstance(creds, gb.Creds) + self.assertIn(gb.MechType.kerberos, actual_mechs) + self.assertIsInstance(ttl, int) gb.release_name(name) gb.release_cred(creds) @@ -296,8 +254,7 @@ def test_cred_import_export(self): inquire_orig = gb.inquire_cred(creds, name=True) inquire_imp = gb.inquire_cred(imported_creds, name=True) - - gb.compare_name(inquire_orig.name, inquire_imp.name).should_be_true() + self.assertTrue(gb.compare_name(inquire_orig.name, inquire_imp.name)) def test_context_time(self): target_name = gb.import_name(TARGET_SERVICE_NAME, @@ -319,9 +276,8 @@ def test_context_time(self): ctx = client_resp2[0] ttl = gb.context_time(ctx) - - ttl.should_be_an_integer() - ttl.should_be_greater_than(0) + self.assertIsInstance(ttl, int) + self.assertGreater(ttl, 0) def test_inquire_context(self): target_name = gb.import_name(TARGET_SERVICE_NAME, @@ -343,97 +299,83 @@ def test_inquire_context(self): ctx = client_resp2[0] inq_resp = gb.inquire_context(ctx) - inq_resp.shouldnt_be_none() - - (src_name, target_name, ttl, mech_type, - flags, local_est, is_open) = inq_resp - - src_name.shouldnt_be_none() - src_name.should_be_a(gb.Name) - - target_name.shouldnt_be_none() - target_name.should_be_a(gb.Name) - - ttl.should_be_an_integer() - - mech_type.shouldnt_be_none() - mech_type.should_be(gb.MechType.kerberos) - - flags.shouldnt_be_none() - flags.should_be_a(Set) - flags.shouldnt_be_empty() - - local_est.should_be_a(bool) - local_est.should_be_true() - - is_open.should_be_a(bool) - is_open.should_be_true() + self.assertIsNotNone(inq_resp) + + src_name, target_name, ttl, mech_type, flags, local_est, is_open = \ + inq_resp + self.assertIsInstance(src_name, gb.Name) + self.assertIsInstance(target_name, gb.Name) + self.assertIsInstance(ttl, int) + self.assertEqual(mech_type, gb.MechType.kerberos) + self.assertIsInstance(flags, Set) + self.assertGreater(len(flags), 0) + self.assertIsInstance(local_est, bool) + self.assertTrue(local_est) + self.assertIsInstance(is_open, bool) + self.assertTrue(is_open) # NB(directxman12): We don't test `process_context_token` because # there is no clear non-deprecated way to test it @ktu.gssapi_extension_test('s4u', 'S4U') def test_add_cred_impersonate_name(self): - target_name = gb.import_name(TARGET_SERVICE_NAME, - gb.NameType.hostbased_service) - client_ctx_resp = gb.init_sec_context(target_name) - client_token = client_ctx_resp[3] - del client_ctx_resp # free all the things (except the token)! 
- server_name = gb.import_name(SERVICE_PRINCIPAL, gb.NameType.kerberos_principal) - server_creds = gb.acquire_cred(server_name, usage='both')[0] - server_ctx_resp = gb.accept_sec_context(client_token, - acceptor_creds=server_creds) + + password = self.realm.password('user') + self.realm.kinit(self.realm.user_princ, password=password, + flags=["-f"]) + name = gb.import_name(b"user", gb.NameType.kerberos_principal) + client_creds = gb.acquire_cred(name, usage="initiate").creds + cctx_res = gb.init_sec_context( + server_name, creds=client_creds, + flags=gb.RequirementFlag.delegate_to_peer) + + self.realm.kinit(SERVICE_PRINCIPAL.decode("utf-8"), flags=["-k"]) + server_creds = gb.acquire_cred(server_name, usage="both").creds + sctx_res = gb.accept_sec_context(cctx_res.token, server_creds) + self.assertTrue(gb.inquire_context(sctx_res.context).complete) input_creds = gb.Creds() imp_resp = gb.add_cred_impersonate_name(input_creds, - server_creds, - server_ctx_resp[1], + sctx_res.delegated_creds, + server_name, gb.MechType.kerberos) - - imp_resp.shouldnt_be_none() - - new_creds, actual_mechs, output_init_ttl, output_accept_ttl = imp_resp - - actual_mechs.shouldnt_be_empty() - actual_mechs.should_include(gb.MechType.kerberos) - - output_init_ttl.should_be_a(int) - output_accept_ttl.should_be_a(int) - - new_creds.should_be_a(gb.Creds) + self.assertIsNotNone(imp_resp) + self.assertIsInstance(imp_resp, gb.AddCredResult) + self.assertIsInstance(imp_resp.creds, gb.Creds) + self.assertIn(gb.MechType.kerberos, imp_resp.mechs) + self.assertIsInstance(imp_resp.init_lifetime, int) + self.assertGreater(imp_resp.init_lifetime, 0) + self.assertIsInstance(imp_resp.accept_lifetime, int) + self.assertEqual(imp_resp.accept_lifetime, 0) @ktu.gssapi_extension_test('s4u', 'S4U') def test_acquire_creds_impersonate_name(self): - target_name = gb.import_name(TARGET_SERVICE_NAME, - gb.NameType.hostbased_service) - client_ctx_resp = gb.init_sec_context(target_name) - client_token = client_ctx_resp[3] - del client_ctx_resp # free all the things (except the token)! 
- server_name = gb.import_name(SERVICE_PRINCIPAL, gb.NameType.kerberos_principal) - server_creds = gb.acquire_cred(server_name, usage='both')[0] - server_ctx_resp = gb.accept_sec_context(client_token, - acceptor_creds=server_creds) - - imp_resp = gb.acquire_cred_impersonate_name(server_creds, - server_ctx_resp[1]) - imp_resp.shouldnt_be_none() - - imp_creds, actual_mechs, output_ttl = imp_resp - - imp_creds.shouldnt_be_none() - imp_creds.should_be_a(gb.Creds) + password = self.realm.password('user') + self.realm.kinit(self.realm.user_princ, password=password, + flags=["-f"]) + name = gb.import_name(b'user', gb.NameType.kerberos_principal) + client_creds = gb.acquire_cred(name, usage="initiate").creds + cctx_res = gb.init_sec_context( + server_name, creds=client_creds, + flags=gb.RequirementFlag.delegate_to_peer) - actual_mechs.shouldnt_be_empty() - actual_mechs.should_include(gb.MechType.kerberos) + self.realm.kinit(SERVICE_PRINCIPAL.decode("utf-8"), flags=["-k"]) + server_creds = gb.acquire_cred(server_name, usage='both').creds + sctx_res = gb.accept_sec_context(cctx_res.token, server_creds) + self.assertTrue(gb.inquire_context(sctx_res.context).complete) - output_ttl.should_be_a(int) - # no need to explicitly release any more -- we can just rely on - # __dealloc__ (b/c cython) + imp_resp = gb.acquire_cred_impersonate_name(sctx_res.delegated_creds, + server_name) + self.assertIsInstance(imp_resp, gb.AcquireCredResult) + self.assertIsInstance(imp_resp.creds, gb.Creds) + self.assertIn(gb.MechType.kerberos, imp_resp.mechs) + self.assertIsInstance(imp_resp.lifetime, int) + self.assertGreater(imp_resp.lifetime, 0) @ktu.gssapi_extension_test('s4u', 'S4U') @ktu.krb_minversion_test('1.11', @@ -452,10 +394,8 @@ def test_always_get_delegated_creds(self): server_creds = gb.acquire_cred(None, usage='both').creds server_ctx_resp = gb.accept_sec_context(client_token, acceptor_creds=server_creds) - - server_ctx_resp.shouldnt_be_none() - server_ctx_resp.delegated_creds.shouldnt_be_none() - server_ctx_resp.delegated_creds.should_be_a(gb.Creds) + self.assertIsNotNone(server_ctx_resp) + self.assertIsInstance(server_ctx_resp.delegated_creds, gb.Creds) @ktu.gssapi_extension_test('rfc5588', 'RFC 5588') def test_store_cred_acquire_cred(self): @@ -478,17 +418,17 @@ def test_store_cred_acquire_cred(self): acceptor_creds=server_creds) deleg_creds = server_ctx_resp.delegated_creds - deleg_creds.shouldnt_be_none() + self.assertIsNotNone(deleg_creds) + store_res = gb.store_cred(deleg_creds, usage='initiate', set_default=True, overwrite=True) - - store_res.shouldnt_be_none() - store_res.usage.should_be('initiate') - store_res.mechs.should_include(gb.MechType.kerberos) + self.assertIsNotNone(store_res) + self.assertEqual(store_res.usage, "initiate") + self.assertIn(gb.MechType.kerberos, store_res.mechs) deleg_name = gb.inquire_cred(deleg_creds).name acq_resp = gb.acquire_cred(deleg_name, usage='initiate') - acq_resp.shouldnt_be_none() + self.assertIsNotNone(acq_resp) @ktu.gssapi_extension_test('cred_store', 'credentials store') def test_store_cred_into_acquire_cred(self): @@ -506,21 +446,17 @@ def test_store_cred_into_acquire_cred(self): # NB(sross): overwrite because the ccache doesn't exist yet store_res = gb.store_cred_into(store, initial_creds, overwrite=True) - - store_res.mechs.shouldnt_be_none() - store_res.usage.should_be('initiate') + self.assertIsNotNone(store_res.mechs) + self.assertEqual(store_res.usage, "initiate") name = gb.import_name(princ_name.encode('UTF-8')) retrieve_res = gb.acquire_cred_from(store, 
name) - retrieve_res.shouldnt_be_none() - retrieve_res.creds.shouldnt_be_none() - retrieve_res.creds.should_be_a(gb.Creds) - - retrieve_res.mechs.shouldnt_be_empty() - retrieve_res.mechs.should_include(gb.MechType.kerberos) - - retrieve_res.lifetime.should_be_an_integer() + self.assertIsNotNone(retrieve_res) + self.assertIsNotNone(retrieve_res.creds) + self.assertIsInstance(retrieve_res.creds, gb.Creds) + self.assertIn(gb.MechType.kerberos, retrieve_res.mechs) + self.assertIsInstance(retrieve_res.lifetime, int) def test_add_cred(self): target_name = gb.import_name(TARGET_SERVICE_NAME, @@ -539,18 +475,13 @@ def test_add_cred(self): imp_resp = gb.add_cred(input_creds, server_ctx_resp[1], gb.MechType.kerberos) - - imp_resp.shouldnt_be_none() + self.assertIsNotNone(imp_resp) new_creds, actual_mechs, output_init_ttl, output_accept_ttl = imp_resp - - actual_mechs.shouldnt_be_empty() - actual_mechs.should_include(gb.MechType.kerberos) - - output_init_ttl.should_be_a(int) - output_accept_ttl.should_be_a(int) - - new_creds.should_be_a(gb.Creds) + self.assertIsInstance(new_creds, gb.Creds) + self.assertIn(gb.MechType.kerberos, actual_mechs) + self.assertIsInstance(output_init_ttl, int) + self.assertIsInstance(output_accept_ttl, int) # NB(sross): we skip testing add_cred with mutate for the same reasons # that testing add_cred in the high-level API is skipped @@ -561,49 +492,41 @@ def test_inquire_creds(self): cred = gb.acquire_cred(name).creds inq_resp = gb.inquire_cred(cred) - - inq_resp.shouldnt_be_none() - - inq_resp.name.should_be_a(gb.Name) - assert gb.compare_name(name, inq_resp.name) - - inq_resp.lifetime.should_be_an_integer() - - inq_resp.usage.should_be('both') - - inq_resp.mechs.shouldnt_be_empty() - inq_resp.mechs.should_include(gb.MechType.kerberos) + self.assertIsNotNone(inq_resp) + self.assertIsInstance(inq_resp.name, gb.Name) + self.assertTrue(gb.compare_name(name, inq_resp.name)) + self.assertIsInstance(inq_resp.lifetime, int) + self.assertEqual(inq_resp.usage, "both") + self.assertIn(gb.MechType.kerberos, inq_resp.mechs) def test_create_oid_from_bytes(self): kerberos_bytes = gb.MechType.kerberos.__bytes__() new_oid = gb.OID(elements=kerberos_bytes) - new_oid.should_be(gb.MechType.kerberos) + self.assertEqual(new_oid, gb.MechType.kerberos) del new_oid # make sure we can dealloc def test_error_dispatch(self): err_code1 = gb.ParameterReadError.CALLING_CODE err_code2 = gb.BadNameError.ROUTINE_CODE - err = gb.GSSError(err_code1 | err_code2, 0) - err.should_be_a(gb.NameReadError) - err.maj_code.should_be(err_code1 | err_code2) + err = gb.GSSError(err_code1 | err_code2, 0) + self.assertIsInstance(err, gb.NameReadError) + self.assertEqual(err.maj_code, err_code1 | err_code2) def test_inquire_names_for_mech(self): res = gb.inquire_names_for_mech(gb.MechType.kerberos) - - res.shouldnt_be_none() - res.should_include(gb.NameType.kerberos_principal) + self.assertIsNotNone(res) + self.assertIn(gb.NameType.kerberos_principal, res) def test_inquire_mechs_for_name(self): name = gb.import_name(self.USER_PRINC, gb.NameType.kerberos_principal) res = gb.inquire_mechs_for_name(name) - - res.shouldnt_be_none() - res.should_include(gb.MechType.kerberos) + self.assertIsNotNone(res) + self.assertIn(gb.MechType.kerberos, res) @ktu.gssapi_extension_test('password', 'Password') def test_acquire_cred_with_password(self): @@ -614,17 +537,13 @@ def test_acquire_cred_with_password(self): imp_resp = gb.acquire_cred_with_password(name, password.encode('UTF-8')) - imp_resp.shouldnt_be_none() + 
self.assertIsNotNone(imp_resp) imp_creds, actual_mechs, output_ttl = imp_resp - - imp_creds.shouldnt_be_none() - imp_creds.should_be_a(gb.Creds) - - actual_mechs.shouldnt_be_empty() - actual_mechs.should_include(gb.MechType.kerberos) - - output_ttl.should_be_a(int) + self.assertIsNotNone(imp_creds) + self.assertIsInstance(imp_creds, gb.Creds) + self.assertIn(gb.MechType.kerberos, actual_mechs) + self.assertIsInstance(output_ttl, int) @ktu.gssapi_extension_test('password_add', 'Password (add)') def test_add_cred_with_password(self): @@ -637,24 +556,19 @@ def test_add_cred_with_password(self): imp_resp = gb.add_cred_with_password(input_creds, name, gb.MechType.kerberos, password.encode('UTF-8')) - imp_resp.shouldnt_be_none() + self.assertIsNotNone(imp_resp) new_creds, actual_mechs, output_init_ttl, output_accept_ttl = imp_resp - - actual_mechs.shouldnt_be_empty() - actual_mechs.should_include(gb.MechType.kerberos) - - output_init_ttl.should_be_a(int) - output_accept_ttl.should_be_a(int) - - new_creds.should_be_a(gb.Creds) + self.assertIsInstance(new_creds, gb.Creds) + self.assertIn(gb.MechType.kerberos, actual_mechs) + self.assertIsInstance(output_init_ttl, int) + self.assertIsInstance(output_accept_ttl, int) @ktu.gssapi_extension_test('rfc5587', 'RFC 5587') def test_rfc5587(self): mechs = gb.indicate_mechs_by_attrs(None, None, None) - - mechs.should_be_a(set) - mechs.shouldnt_be_empty() + self.assertIsInstance(mechs, set) + self.assertGreater(len(mechs), 0) # We're validating RFC 5587 here: by iterating over all mechanisms, # we can query their attributes and build a mapping of attr->{mechs}. @@ -665,47 +579,37 @@ def test_rfc5587(self): known_attrs_dict = {} for mech in mechs: - mech.shouldnt_be_none() - mech.should_be_a(gb.OID) + self.assertIsInstance(mech, gb.OID) inquire_out = gb.inquire_attrs_for_mech(mech) mech_attrs = inquire_out.mech_attrs known_mech_attrs = inquire_out.known_mech_attrs - mech_attrs.should_be_a(set) - - known_mech_attrs.should_be_a(set) + self.assertIsInstance(mech_attrs, set) + self.assertIsInstance(known_mech_attrs, set) # Verify that we get data for every available # attribute. Testing the contents of a few known # attributes is done in test_display_mech_attr(). 
for mech_attr in mech_attrs: - mech_attr.shouldnt_be_none() - mech_attr.should_be_a(gb.OID) + self.assertIsInstance(mech_attr, gb.OID) display_out = gb.display_mech_attr(mech_attr) - display_out.name.shouldnt_be_none() - display_out.short_desc.shouldnt_be_none() - display_out.long_desc.shouldnt_be_none() - display_out.name.should_be_a(bytes) - display_out.short_desc.should_be_a(bytes) - display_out.long_desc.should_be_a(bytes) + self.assertIsInstance(display_out.name, bytes) + self.assertIsInstance(display_out.short_desc, bytes) + self.assertIsInstance(display_out.long_desc, bytes) if mech_attr not in attrs_dict: attrs_dict[mech_attr] = set() attrs_dict[mech_attr].add(mech) for mech_attr in known_mech_attrs: - mech_attr.shouldnt_be_none() - mech_attr.should_be_a(gb.OID) + self.assertIsInstance(mech_attr, gb.OID) display_out = gb.display_mech_attr(mech_attr) - display_out.name.shouldnt_be_none() - display_out.short_desc.shouldnt_be_none() - display_out.long_desc.shouldnt_be_none() - display_out.name.should_be_a(bytes) - display_out.short_desc.should_be_a(bytes) - display_out.long_desc.should_be_a(bytes) + self.assertIsInstance(display_out.name, bytes) + self.assertIsInstance(display_out.short_desc, bytes) + self.assertIsInstance(display_out.long_desc, bytes) if mech_attr not in known_attrs_dict: known_attrs_dict[mech_attr] = set() @@ -715,19 +619,19 @@ def test_rfc5587(self): attrs = set([attr]) mechs = gb.indicate_mechs_by_attrs(attrs, None, None) - mechs.shouldnt_be_empty() - mechs.should_be(expected_mechs) + self.assertGreater(len(mechs), 0) + self.assertEqual(mechs, expected_mechs) mechs = gb.indicate_mechs_by_attrs(None, attrs, None) for expected_mech in expected_mechs: - mechs.shouldnt_include(expected_mech) + self.assertNotIn(expected_mech, mechs) for attr, expected_mechs in known_attrs_dict.items(): attrs = set([attr]) mechs = gb.indicate_mechs_by_attrs(None, None, attrs) - mechs.shouldnt_be_empty() - mechs.should_be(expected_mechs) + self.assertGreater(len(mechs), 0) + self.assertEqual(mechs, expected_mechs) @ktu.gssapi_extension_test('rfc5587', 'RFC 5587') def test_display_mech_attr(self): @@ -744,9 +648,9 @@ def test_display_mech_attr(self): for attr in test_attrs: display_out = gb.display_mech_attr(attr[0]) - display_out.name.should_be(attr[1]) - display_out.short_desc.should_be(attr[2]) - display_out.long_desc.should_be(attr[3]) + self.assertEqual(display_out.name, attr[1]) + self.assertEqual(display_out.short_desc, attr[2]) + self.assertEqual(display_out.long_desc, attr[3]) @ktu.gssapi_extension_test('rfc5801', 'SASL Names') def test_sasl_names(self): @@ -756,21 +660,18 @@ def test_sasl_names(self): out = gb.inquire_saslname_for_mech(mech) out_smn = out.sasl_mech_name - out_smn.shouldnt_be_none() - out_smn.should_be_a(bytes) - out_smn.shouldnt_be_empty() + self.assertIsInstance(out_smn, bytes) + self.assertGreater(len(out_smn), 0) out_mn = out.mech_name - out_mn.shouldnt_be_none() - out_mn.should_be_a(bytes) + self.assertIsInstance(out_mn, bytes) out_md = out.mech_description - out_md.shouldnt_be_none() - out_md.should_be_a(bytes) + self.assertIsInstance(out_md, bytes) cmp_mech = gb.inquire_mech_for_saslname(out_smn) - cmp_mech.shouldnt_be_none() - cmp_mech.should_be(mech) + self.assertIsNotNone(cmp_mech) + self.assertEqual(cmp_mech, mech) @ktu.gssapi_extension_test('rfc4178', 'Negotiation Mechanism') def test_set_neg_mechs(self): @@ -800,7 +701,7 @@ def test_set_neg_mechs(self): mechs=all_mechs).creds neg_resp = gb.set_neg_mechs(server_creds, [ntlm_mech]) - 
neg_resp.should_be_none() + self.assertIsNone(neg_resp) client_ctx_resp = gb.init_sec_context(server_name, creds=ntlm_client_creds, @@ -809,18 +710,18 @@ def test_set_neg_mechs(self): server_ctx_resp = gb.accept_sec_context(client_token, acceptor_creds=server_creds) - server_ctx_resp.shouldnt_be_none() + self.assertIsNotNone(server_ctx_resp) client_ctx_resp = gb.init_sec_context(server_name, creds=krb5_client_creds, mech=spnego_mech) client_token = client_ctx_resp.token - gb.accept_sec_context.should_raise(gb.GSSError, client_token, - acceptor_creds=server_creds) + self.assertRaises(gb.GSSError, gb.accept_sec_context, client_token, + acceptor_creds=server_creds) neg_resp = gb.set_neg_mechs(server_creds, [krb5_mech]) - neg_resp.should_be_none() + self.assertIsNone(neg_resp) client_ctx_resp = gb.init_sec_context(server_name, creds=krb5_client_creds, @@ -829,15 +730,15 @@ def test_set_neg_mechs(self): server_ctx_resp = gb.accept_sec_context(client_token, acceptor_creds=server_creds) - server_ctx_resp.shouldnt_be_none() + self.assertIsNotNone(server_ctx_resp) client_ctx_resp = gb.init_sec_context(server_name, creds=ntlm_client_creds, mech=spnego_mech) client_token = client_ctx_resp.token - gb.accept_sec_context.should_raise(gb.GSSError, client_token, - acceptor_creds=server_creds) + self.assertRaises(gb.GSSError, gb.accept_sec_context, client_token, + acceptor_creds=server_creds) @ktu.gssapi_extension_test('ggf', 'Global Grid Forum') @ktu.gssapi_extension_test('s4u', 'S4U') @@ -860,19 +761,19 @@ def test_inquire_cred_by_oid_impersonator(self): server_ctx_resp = gb.accept_sec_context(client_token, acceptor_creds=server_creds) - server_ctx_resp.shouldnt_be_none() - server_ctx_resp.delegated_creds.shouldnt_be_none() - server_ctx_resp.delegated_creds.should_be_a(gb.Creds) + self.assertIsNotNone(server_ctx_resp) + self.assertIsNotNone(server_ctx_resp.delegated_creds) + self.assertIsInstance(server_ctx_resp.delegated_creds, gb.Creds) # GSS_KRB5_GET_CRED_IMPERSONATOR oid = gb.OID.from_int_seq("1.2.840.113554.1.2.2.5.14") info = gb.inquire_cred_by_oid(server_ctx_resp.delegated_creds, oid) - info.should_be_a(list) - info.shouldnt_be_empty() - info[0].should_be_a(bytes) - info[0].should_be(b"%s@%s" % (SERVICE_PRINCIPAL, - self.realm.realm.encode('utf-8'))) + self.assertIsInstance(info, list) + self.assertGreater(len(info), 0) + self.assertIsInstance(info[0], bytes) + self.assertEqual(info[0], b"%s@%s" % ( + SERVICE_PRINCIPAL, self.realm.realm.encode('utf-8'))) @ktu.gssapi_extension_test('ggf', 'Global Grid Forum') def test_inquire_sec_context_by_oid(self): @@ -899,11 +800,11 @@ def test_inquire_sec_context_by_oid(self): client_key = gb.inquire_sec_context_by_oid(client_ctx, session_key_oid) server_key = gb.inquire_sec_context_by_oid(server_ctx, session_key_oid) - client_key.should_be_a(list) - client_key.shouldnt_be_empty() - server_key.should_be_a(list) - server_key.shouldnt_be_empty() - client_key.should_have_same_items_as(server_key) + self.assertIsInstance(client_key, list) + self.assertGreater(len(client_key), 0) + self.assertIsInstance(server_key, list) + self.assertGreater(len(server_key), 0) + self.assertCountEqual(client_key, server_key) @ktu.gssapi_extension_test('ggf', 'Global Grid Forum') def test_inquire_sec_context_by_oid_should_raise_error(self): @@ -923,8 +824,8 @@ def test_inquire_sec_context_by_oid_should_raise_error(self): client_ctx = client_resp2[0] invalid_oid = gb.OID.from_int_seq("1.2.3.4.5.6.7.8.9") - gb.inquire_sec_context_by_oid.should_raise(gb.GSSError, client_ctx, - 
invalid_oid) + self.assertRaises(gb.GSSError, gb.inquire_sec_context_by_oid, + client_ctx, invalid_oid) @ktu.gssapi_extension_test('ggf', 'Global Grid Forum') @ktu.gssapi_extension_test('password', 'Add Credential with Password') @@ -950,7 +851,7 @@ def test_set_sec_context_option(self): out_context = gb.set_sec_context_option(reset_mech, context=orig_context, value=b"\x00" * 4) - out_context.should_be_a(gb.SecurityContext) + self.assertIsInstance(out_context, gb.SecurityContext) @ktu.gssapi_extension_test('ggf', 'Global Grid Forum') @ktu.gssapi_extension_test('password', 'Add Credential with Password') @@ -975,8 +876,8 @@ def test_set_sec_context_option_fail(self): reset_mech = gb.OID.from_int_seq("1.3.6.1.4.1.7165.655.1.3") # will raise a GSSError if no data was passed in - gb.set_sec_context_option.should_raise(gb.GSSError, reset_mech, - context) + self.assertRaises(gb.GSSError, gb.set_sec_context_option, reset_mech, + context) @ktu.gssapi_extension_test('set_cred_opt', 'Kitten Set Credential Option') @ktu.krb_minversion_test('1.14', @@ -992,7 +893,7 @@ def test_set_cred_option(self): # nothing much we can test here apart from it doesn't fail and the # id of the return cred is the same as the input one output_cred = gb.set_cred_option(no_ci_flags_x, creds=orig_cred) - output_cred.should_be_a(gb.Creds) + self.assertIsInstance(output_cred, gb.Creds) @ktu.gssapi_extension_test('set_cred_opt', 'Kitten Set Credential Option') def test_set_cred_option_should_raise_error(self): @@ -1002,8 +903,8 @@ def test_set_cred_option_should_raise_error(self): # this is a fake OID and shouldn't work at all invalid_oid = gb.OID.from_int_seq("1.2.3.4.5.6.7.8.9") - gb.set_cred_option.should_raise(gb.GSSError, invalid_oid, orig_cred, - b"\x00") + self.assertRaises(gb.GSSError, gb.set_cred_option, invalid_oid, + orig_cred, b"\x00") class TestIntEnumFlagSet(unittest.TestCase): @@ -1012,7 +913,7 @@ def test_create_from_int(self): gb.RequirementFlag.confidentiality) fset = gb.IntEnumFlagSet(gb.RequirementFlag, int_val) - int(fset).should_be(int_val) + self.assertEqual(int(fset), int_val) def test_create_from_other_set(self): int_val = (gb.RequirementFlag.integrity | @@ -1020,18 +921,18 @@ def test_create_from_other_set(self): fset1 = gb.IntEnumFlagSet(gb.RequirementFlag, int_val) fset2 = gb.IntEnumFlagSet(gb.RequirementFlag, fset1) - fset1.should_be(fset2) + self.assertEqual(fset1, fset2) def test_create_from_list(self): lst = [gb.RequirementFlag.integrity, gb.RequirementFlag.confidentiality] fset = gb.IntEnumFlagSet(gb.RequirementFlag, lst) - list(fset).should_have_same_items_as(lst) + self.assertCountEqual(list(fset), lst) def test_create_empty(self): fset = gb.IntEnumFlagSet(gb.RequirementFlag) - fset.should_be_empty() + self.assertEqual(len(fset), 0) def _create_fset(self): lst = [gb.RequirementFlag.integrity, @@ -1040,41 +941,41 @@ def _create_fset(self): def test_contains(self): fset = self._create_fset() - fset.should_include(gb.RequirementFlag.integrity) - fset.shouldnt_include(gb.RequirementFlag.protection_ready) + self.assertIn(gb.RequirementFlag.integrity, fset) + self.assertNotIn(gb.RequirementFlag.protection_ready, fset) def test_len(self): - self._create_fset().should_have_length(2) + self.assertEqual(len(self._create_fset()), 2) def test_add(self): fset = self._create_fset() - fset.should_have_length(2) + self.assertEqual(len(fset), 2) fset.add(gb.RequirementFlag.protection_ready) - fset.should_have_length(3) - fset.should_include(gb.RequirementFlag.protection_ready) + self.assertEqual(len(fset), 
3) + self.assertIn(gb.RequirementFlag.protection_ready, fset) def test_discard(self): fset = self._create_fset() - fset.should_have_length(2) + self.assertEqual(len(fset), 2) fset.discard(gb.RequirementFlag.protection_ready) - fset.should_have_length(2) + self.assertEqual(len(fset), 2) fset.discard(gb.RequirementFlag.integrity) - fset.should_have_length(1) - fset.shouldnt_include(gb.RequirementFlag.integrity) + self.assertEqual(len(fset), 1) + self.assertNotIn(gb.RequirementFlag.integrity, fset) def test_and_enum(self): fset = self._create_fset() - (fset & gb.RequirementFlag.integrity).should_be_true() - (fset & gb.RequirementFlag.protection_ready).should_be_false() + self.assertTrue(fset & gb.RequirementFlag.integrity) + self.assertFalse(fset & gb.RequirementFlag.protection_ready) def test_and_int(self): fset = self._create_fset() int_val = int(gb.RequirementFlag.integrity) - (fset & int_val).should_be(int_val) + self.assertEqual(fset & int_val, int_val) def test_and_set(self): fset1 = self._create_fset() @@ -1084,20 +985,20 @@ def test_and_set(self): fset1.add(gb.RequirementFlag.protection_ready) fset2.add(gb.RequirementFlag.out_of_sequence_detection) - (fset1 & fset2).should_be(fset3) + self.assertEqual(fset1 & fset2, fset3) def test_or_enum(self): fset1 = self._create_fset() fset2 = fset1 | gb.RequirementFlag.protection_ready - (fset1 < fset2).should_be_true() - fset2.should_include(gb.RequirementFlag.protection_ready) + self.assertLess(fset1, fset2) + self.assertIn(gb.RequirementFlag.protection_ready, fset2) def test_or_int(self): fset = self._create_fset() int_val = int(gb.RequirementFlag.integrity) - (fset | int_val).should_be(int(fset)) + self.assertEqual(fset | int_val, int(fset)) def test_or_set(self): fset1 = self._create_fset() @@ -1109,7 +1010,7 @@ def test_or_set(self): fset3.add(gb.RequirementFlag.protection_ready) fset3.add(gb.RequirementFlag.out_of_sequence_detection) - (fset1 | fset2).should_be(fset3) + self.assertEqual(fset1 | fset2, fset3) def test_xor_enum(self): fset1 = self._create_fset() @@ -1117,20 +1018,19 @@ def test_xor_enum(self): fset2 = fset1 ^ gb.RequirementFlag.protection_ready fset3 = fset1 ^ gb.RequirementFlag.integrity - fset2.should_have_length(3) - fset2.should_include(gb.RequirementFlag.protection_ready) - - fset3.should_have_length(1) - fset3.shouldnt_include(gb.RequirementFlag.integrity) + self.assertEqual(len(fset2), 3) + self.assertIn(gb.RequirementFlag.protection_ready, fset2) + self.assertEqual(len(fset3), 1) + self.assertNotIn(gb.RequirementFlag.integrity, fset3) def test_xor_int(self): fset = self._create_fset() - (fset ^ int(gb.RequirementFlag.protection_ready)).should_be( - int(fset) ^ gb.RequirementFlag.protection_ready) + self.assertEqual(fset ^ int(gb.RequirementFlag.protection_ready), + int(fset) ^ gb.RequirementFlag.protection_ready) - (fset ^ int(gb.RequirementFlag.integrity)).should_be( - int(fset) ^ gb.RequirementFlag.integrity) + self.assertEqual(fset ^ int(gb.RequirementFlag.integrity), + int(fset) ^ gb.RequirementFlag.integrity) def test_xor_set(self): fset1 = self._create_fset() @@ -1140,11 +1040,11 @@ def test_xor_set(self): fset2.add(gb.RequirementFlag.out_of_sequence_detection) fset3 = fset1 ^ fset2 - fset3.should_have_length(2) - fset3.shouldnt_include(gb.RequirementFlag.integrity) - fset3.shouldnt_include(gb.RequirementFlag.confidentiality) - fset3.should_include(gb.RequirementFlag.protection_ready) - fset3.should_include(gb.RequirementFlag.out_of_sequence_detection) + self.assertEqual(len(fset3), 2) + 
self.assertNotIn(gb.RequirementFlag.integrity, fset3) + self.assertNotIn(gb.RequirementFlag.confidentiality, fset3) + self.assertIn(gb.RequirementFlag.protection_ready, fset3) + self.assertIn(gb.RequirementFlag.out_of_sequence_detection, fset3) class TestInitContext(_GSSAPIKerberosTestCase): @@ -1157,24 +1057,17 @@ def tearDown(self): def test_basic_init_default_ctx(self): ctx_resp = gb.init_sec_context(self.target_name) - ctx_resp.shouldnt_be_none() + self.assertIsNotNone(ctx_resp) (ctx, out_mech_type, out_req_flags, out_token, out_ttl, cont_needed) = ctx_resp - - ctx.shouldnt_be_none() - ctx.should_be_a(gb.SecurityContext) - - out_mech_type.should_be(gb.MechType.kerberos) - - out_req_flags.should_be_a(Set) - out_req_flags.should_be_at_least_length(2) - - out_token.shouldnt_be_empty() - - out_ttl.should_be_greater_than(0) - - cont_needed.should_be_a(bool) + self.assertIsInstance(ctx, gb.SecurityContext) + self.assertEqual(out_mech_type, gb.MechType.kerberos) + self.assertIsInstance(out_req_flags, Set) + self.assertGreaterEqual(len(out_req_flags), 2) + self.assertGreater(len(out_token), 0) + self.assertGreater(out_ttl, 0) + self.assertIsInstance(cont_needed, bool) gb.delete_sec_context(ctx) @@ -1188,7 +1081,7 @@ def setUp(self): self.client_token = ctx_resp[3] self.client_ctx = ctx_resp[0] - self.client_ctx.shouldnt_be_none() + self.assertIsNotNone(self.client_ctx) self.server_name = gb.import_name(SERVICE_PRINCIPAL, gb.NameType.kerberos_principal) @@ -1207,58 +1100,40 @@ def tearDown(self): def test_basic_accept_context_no_acceptor_creds(self): server_resp = gb.accept_sec_context(self.client_token) - server_resp.shouldnt_be_none() + self.assertIsNotNone(server_resp) (self.server_ctx, name, mech_type, out_token, out_req_flags, out_ttl, delegated_cred, cont_needed) = server_resp - - self.server_ctx.shouldnt_be_none() - self.server_ctx.should_be_a(gb.SecurityContext) - - name.shouldnt_be_none() - name.should_be_a(gb.Name) - - mech_type.should_be(gb.MechType.kerberos) - - out_token.shouldnt_be_empty() - - out_req_flags.should_be_a(Set) - out_req_flags.should_be_at_least_length(2) - - out_ttl.should_be_greater_than(0) + self.assertIsInstance(self.server_ctx, gb.SecurityContext) + self.assertIsInstance(name, gb.Name) + self.assertEqual(mech_type, gb.MechType.kerberos) + self.assertGreater(len(out_token), 0) + self.assertIsInstance(out_req_flags, Set) + self.assertGreaterEqual(len(out_req_flags), 2) + self.assertGreater(out_ttl, 0) + self.assertIsInstance(cont_needed, bool) if delegated_cred is not None: - delegated_cred.should_be_a(gb.Creds) - - cont_needed.should_be_a(bool) + self.assertIsInstance(delegated_cred, gb.Creds) def test_basic_accept_context(self): server_resp = gb.accept_sec_context(self.client_token, acceptor_creds=self.server_creds) - server_resp.shouldnt_be_none() + self.assertIsNotNone(server_resp) (self.server_ctx, name, mech_type, out_token, out_req_flags, out_ttl, delegated_cred, cont_needed) = server_resp - - self.server_ctx.shouldnt_be_none() - self.server_ctx.should_be_a(gb.SecurityContext) - - name.shouldnt_be_none() - name.should_be_a(gb.Name) - - mech_type.should_be(gb.MechType.kerberos) - - out_token.shouldnt_be_empty() - - out_req_flags.should_be_a(Set) - out_req_flags.should_be_at_least_length(2) - - out_ttl.should_be_greater_than(0) + self.assertIsInstance(self.server_ctx, gb.SecurityContext) + self.assertIsInstance(name, gb.Name) + self.assertEqual(mech_type, gb.MechType.kerberos) + self.assertGreater(len(out_token), 0) + self.assertIsInstance(out_req_flags, Set) 
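# Illustrative sketch (not part of this patch): the minimal handshake that these
# TestInitContext/TestAcceptContext cases exercise, using only calls that appear
# in the tests. Assumes a working Kerberos environment with default credentials
# on both sides; the host-based service name is a placeholder.
import gssapi.raw as gb

target = gb.import_name(b"host@server.example.com",
                        gb.NameType.hostbased_service)

init_resp = gb.init_sec_context(target)               # initiator: first token
assert len(init_resp.token) > 0

accept_resp = gb.accept_sec_context(init_resp.token)  # acceptor: default creds
assert accept_resp.context is not None

gb.delete_sec_context(init_resp.context)
gb.delete_sec_context(accept_resp.context)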
+ self.assertGreaterEqual(len(out_req_flags), 2) + self.assertGreater(out_ttl, 0) + self.assertIsInstance(cont_needed, bool) if delegated_cred is not None: - delegated_cred.should_be_a(gb.Creds) - - cont_needed.should_be_a(bool) + self.assertIsInstance(delegated_cred, gb.Creds) def test_channel_bindings(self): bdgs = gb.ChannelBindings(application_data=b'abcxyz', @@ -1273,7 +1148,7 @@ def test_channel_bindings(self): self.client_token = ctx_resp[3] self.client_ctx = ctx_resp[0] - self.client_ctx.shouldnt_be_none() + self.assertIsNotNone(self.client_ctx) self.server_name = gb.import_name(SERVICE_PRINCIPAL, gb.NameType.kerberos_principal) @@ -1282,7 +1157,7 @@ def test_channel_bindings(self): server_resp = gb.accept_sec_context(self.client_token, acceptor_creds=self.server_creds, channel_bindings=bdgs) - server_resp.shouldnt_be_none + self.assertIsNotNone(server_resp) self.server_ctx = server_resp.context def test_bad_channel_binding_raises_error(self): @@ -1298,16 +1173,16 @@ def test_bad_channel_binding_raises_error(self): self.client_token = ctx_resp[3] self.client_ctx = ctx_resp[0] - self.client_ctx.shouldnt_be_none() + self.assertIsNotNone(self.client_ctx) self.server_name = gb.import_name(SERVICE_PRINCIPAL, gb.NameType.kerberos_principal) self.server_creds = gb.acquire_cred(self.server_name)[0] bdgs.acceptor_address = b'127.0.1.0' - gb.accept_sec_context.should_raise(gb.GSSError, self.client_token, - acceptor_creds=self.server_creds, - channel_bindings=bdgs) + self.assertRaises(gb.GSSError, gb.accept_sec_context, + self.client_token, acceptor_creds=self.server_creds, + channel_bindings=bdgs) class TestWrapUnwrap(_GSSAPIKerberosTestCase): @@ -1341,67 +1216,54 @@ def tearDown(self): def test_import_export_sec_context(self): tok = gb.export_sec_context(self.client_ctx) - - tok.shouldnt_be_none() - tok.should_be_a(bytes) - tok.shouldnt_be_empty() + self.assertIsInstance(tok, bytes) + self.assertGreater(len(tok), 0) imported_ctx = gb.import_sec_context(tok) - imported_ctx.shouldnt_be_none() - imported_ctx.should_be_a(gb.SecurityContext) + self.assertIsInstance(imported_ctx, gb.SecurityContext) self.client_ctx = imported_ctx # ensure that it gets deleted def test_get_mic(self): mic_token = gb.get_mic(self.client_ctx, b"some message") - - mic_token.shouldnt_be_none() - mic_token.should_be_a(bytes) - mic_token.shouldnt_be_empty() + self.assertIsInstance(mic_token, bytes) + self.assertGreater(len(mic_token), 0) def test_basic_verify_mic(self): mic_token = gb.get_mic(self.client_ctx, b"some message") qop_used = gb.verify_mic(self.server_ctx, b"some message", mic_token) - - qop_used.should_be_an_integer() + self.assertIsInstance(qop_used, int) # test a bad MIC - gb.verify_mic.should_raise(gb.GSSError, self.server_ctx, - b"some other message", b"some invalid mic") + self.assertRaises(gb.GSSError, gb.verify_mic, self.server_ctx, + b"some other message", b"some invalid mic") def test_wrap_size_limit(self): with_conf = gb.wrap_size_limit(self.client_ctx, 100) without_conf = gb.wrap_size_limit(self.client_ctx, 100, confidential=False) - - with_conf.should_be_an_integer() - without_conf.should_be_an_integer() - - without_conf.should_be_less_than(100) - with_conf.should_be_less_than(100) + self.assertIsInstance(with_conf, int) + self.assertIsInstance(without_conf, int) + self.assertLess(without_conf, 100) + self.assertLess(with_conf, 100) def test_basic_wrap_unwrap(self): - (wrapped_message, conf) = gb.wrap(self.client_ctx, b'test message') - - conf.should_be_a(bool) - conf.should_be_true() - - 
wrapped_message.should_be_a(bytes) - wrapped_message.shouldnt_be_empty() - wrapped_message.should_be_longer_than('test message') - - (unwrapped_message, conf, qop) = gb.unwrap(self.server_ctx, - wrapped_message) - conf.should_be_a(bool) - conf.should_be_true() - - qop.should_be_an_integer() - qop.should_be_at_least(0) - - unwrapped_message.should_be_a(bytes) - unwrapped_message.shouldnt_be_empty() - unwrapped_message.should_be(b'test message') + wrapped_message, conf = gb.wrap(self.client_ctx, b"test message") + self.assertIsInstance(conf, bool) + self.assertTrue(conf) + self.assertIsInstance(wrapped_message, bytes) + self.assertGreater(len(wrapped_message), len("test message")) + + unwrapped_message, conf, qop = gb.unwrap(self.server_ctx, + wrapped_message) + self.assertIsInstance(unwrapped_message, bytes) + self.assertEqual(unwrapped_message, b'test message') + + self.assertIsInstance(conf, bool) + self.assertTrue(conf) + self.assertIsInstance(qop, int) + self.assertGreaterEqual(qop, 0) @ktu.gssapi_extension_test('dce', 'DCE (IOV/AEAD)') def test_basic_iov_wrap_unwrap_prealloc(self): @@ -1410,42 +1272,37 @@ def test_basic_iov_wrap_unwrap_prealloc(self): init_signed_info = b'some sig data' init_message = gb.IOV((gb.IOVBufferType.sign_only, init_signed_info), init_data, init_other_data, auto_alloc=False) - - init_message[0].allocate.should_be_false() - init_message[4].allocate.should_be_false() - init_message[5].allocate.should_be_false() + self.assertFalse(init_message[0].allocate) + self.assertFalse(init_message[4].allocate) + self.assertFalse(init_message[5].allocate) conf = gb.wrap_iov_length(self.client_ctx, init_message) - - conf.should_be_a(bool) - conf.should_be_true() - - init_message[0].should_be_at_least_size(1) - init_message[5].should_be_at_least_size(1) + self.assertIsInstance(conf, bool) + self.assertTrue(conf) + self.assertGreaterEqual(len(init_message[0]), 1) + self.assertGreaterEqual(len(init_message[5]), 1) conf = gb.wrap_iov(self.client_ctx, init_message) - - conf.should_be_a(bool) - conf.should_be_true() + self.assertIsInstance(conf, bool) + self.assertTrue(conf) # make sure we didn't strings used - init_data.should_be(b'some encrypted data') - init_other_data.should_be(b'some other encrypted data') - init_signed_info.should_be(b'some sig data') - - init_message[2].value.shouldnt_be(b'some encrypted data') - init_message[3].value.shouldnt_be(b'some other encrypted data') - - (conf, qop) = gb.unwrap_iov(self.server_ctx, init_message) + self.assertEqual(init_data, b'some encrypted data') + self.assertEqual(init_other_data, b'some other encrypted data') + self.assertEqual(init_signed_info, b'some sig data') - conf.should_be_a(bool) - conf.should_be_true() + self.assertNotEqual(init_message[2].value, b'some encrypted data') + self.assertNotEqual(init_message[3].value, + b'some other encrypted data') - qop.should_be_a(int) + conf, qop = gb.unwrap_iov(self.server_ctx, init_message) + self.assertIsInstance(conf, bool) + self.assertTrue(conf) + self.assertIsInstance(qop, int) - init_message[1].value.should_be(init_signed_info) - init_message[2].value.should_be(init_data) - init_message[3].value.should_be(init_other_data) + self.assertEqual(init_message[1].value, init_signed_info) + self.assertEqual(init_message[2].value, init_data) + self.assertEqual(init_message[3].value, init_other_data) @ktu.gssapi_extension_test('dce', 'DCE (IOV/AEAD)') def test_basic_iov_wrap_unwrap_autoalloc(self): @@ -1456,94 +1313,75 @@ def test_basic_iov_wrap_unwrap_autoalloc(self): init_data, 
init_other_data) conf = gb.wrap_iov(self.client_ctx, init_message) - - conf.should_be_a(bool) - conf.should_be_true() + self.assertIsInstance(conf, bool) + self.assertTrue(conf) # make sure we didn't strings used - init_data.should_be(b'some encrypted data') - init_other_data.should_be(b'some other encrypted data') - init_signed_info.should_be(b'some sig data') - - init_message[2].value.shouldnt_be(b'some encrypted data') - init_message[3].value.shouldnt_be(b'some other encrypted data') - - (conf, qop) = gb.unwrap_iov(self.server_ctx, init_message) + self.assertEqual(init_data, b'some encrypted data') + self.assertEqual(init_other_data, b'some other encrypted data') + self.assertEqual(init_signed_info, b'some sig data') - conf.should_be_a(bool) - conf.should_be_true() + self.assertNotEqual(init_message[2].value, b'some encrypted data') + self.assertNotEqual(init_message[3].value, + b'some other encrypted data') - qop.should_be_a(int) + conf, qop = gb.unwrap_iov(self.server_ctx, init_message) + self.assertIsInstance(conf, bool) + self.assertTrue(conf) + self.assertIsInstance(qop, int) - init_message[1].value.should_be(init_signed_info) - init_message[2].value.should_be(init_data) - init_message[3].value.should_be(init_other_data) + self.assertEqual(init_message[1].value, init_signed_info) + self.assertEqual(init_message[2].value, init_data) + self.assertEqual(init_message[3].value, init_other_data) @ktu.gssapi_extension_test('dce', 'DCE (IOV/AEAD)') def test_basic_aead_wrap_unwrap(self): assoc_data = b'some sig data' - (wrapped_message, conf) = gb.wrap_aead(self.client_ctx, - b'test message', assoc_data) - - conf.should_be_a(bool) - conf.should_be_true() - - wrapped_message.should_be_a(bytes) - wrapped_message.shouldnt_be_empty() - wrapped_message.should_be_longer_than('test message') - - (unwrapped_message, conf, qop) = gb.unwrap_aead(self.server_ctx, - wrapped_message, - assoc_data) - conf.should_be_a(bool) - conf.should_be_true() - - qop.should_be_an_integer() - qop.should_be_at_least(0) - - unwrapped_message.should_be_a(bytes) - unwrapped_message.shouldnt_be_empty() - unwrapped_message.should_be(b'test message') + wrapped_message, conf = gb.wrap_aead(self.client_ctx, b"test message", + assoc_data) + self.assertIsInstance(wrapped_message, bytes) + self.assertGreater(len(wrapped_message), len('test message')) + self.assertIsInstance(conf, bool) + self.assertTrue(conf) + + unwrapped_message, conf, qop = \ + gb.unwrap_aead(self.server_ctx, wrapped_message, assoc_data) + self.assertIsInstance(unwrapped_message, bytes) + self.assertEqual(unwrapped_message, b'test message') + self.assertIsInstance(conf, bool) + self.assertTrue(conf) + self.assertIsInstance(qop, int) + self.assertGreaterEqual(qop, 0) @ktu.gssapi_extension_test('dce', 'DCE (IOV/AEAD)') def test_basic_aead_wrap_unwrap_no_assoc(self): - (wrapped_message, conf) = gb.wrap_aead(self.client_ctx, - b'test message') - - conf.should_be_a(bool) - conf.should_be_true() - - wrapped_message.should_be_a(bytes) - wrapped_message.shouldnt_be_empty() - wrapped_message.should_be_longer_than('test message') - - (unwrapped_message, conf, qop) = gb.unwrap_aead(self.server_ctx, - wrapped_message) - conf.should_be_a(bool) - conf.should_be_true() - - qop.should_be_an_integer() - qop.should_be_at_least(0) - - unwrapped_message.should_be_a(bytes) - unwrapped_message.shouldnt_be_empty() - unwrapped_message.should_be(b'test message') + wrapped_message, conf = gb.wrap_aead(self.client_ctx, b"test message") + self.assertIsInstance(wrapped_message, bytes) + 
self.assertGreater(len(wrapped_message), len("test message")) + self.assertIsInstance(conf, bool) + self.assertTrue(conf) + + unwrapped_message, conf, qop = gb.unwrap_aead(self.server_ctx, + wrapped_message) + self.assertIsInstance(unwrapped_message, bytes) + self.assertEqual(unwrapped_message, b"test message") + self.assertIsInstance(conf, bool) + self.assertTrue(conf) + self.assertIsInstance(qop, int) + self.assertGreaterEqual(qop, 0) @ktu.gssapi_extension_test('dce', 'DCE (IOV/AEAD)') def test_basic_aead_wrap_unwrap_bad_assoc_raises_error(self): assoc_data = b'some sig data' - (wrapped_message, conf) = gb.wrap_aead(self.client_ctx, - b'test message', assoc_data) - - conf.should_be_a(bool) - conf.should_be_true() - - wrapped_message.should_be_a(bytes) - wrapped_message.shouldnt_be_empty() - wrapped_message.should_be_longer_than('test message') + wrapped_message, conf = gb.wrap_aead(self.client_ctx, b"test message", + assoc_data) + self.assertIsInstance(wrapped_message, bytes) + self.assertGreater(len(wrapped_message), len("test message")) + self.assertIsInstance(conf, bool) + self.assertTrue(conf) - gb.unwrap_aead.should_raise(gb.BadMICError, self.server_ctx, - wrapped_message, b'some other sig data') + self.assertRaises(gb.BadMICError, gb.unwrap_aead, self.server_ctx, + wrapped_message, b'some other sig data') @ktu.gssapi_extension_test('iov_mic', 'IOV MIC') def test_get_mic_iov(self): @@ -1552,9 +1390,8 @@ def test_get_mic_iov(self): gb.IOVBufferType.mic_token, std_layout=False) gb.get_mic_iov(self.client_ctx, init_message) - - init_message[2].type.should_be(gb.IOVBufferType.mic_token) - init_message[2].value.shouldnt_be_empty() + self.assertEqual(init_message[2].type, gb.IOVBufferType.mic_token) + self.assertGreater(len(init_message[2].value), 0) @ktu.gssapi_extension_test('iov_mic', 'IOV MIC') def test_basic_verify_mic_iov(self): @@ -1563,13 +1400,11 @@ def test_basic_verify_mic_iov(self): gb.IOVBufferType.mic_token, std_layout=False) gb.get_mic_iov(self.client_ctx, init_message) - - init_message[2].type.should_be(gb.IOVBufferType.mic_token) - init_message[2].value.shouldnt_be_empty() + self.assertEqual(init_message[2].type, gb.IOVBufferType.mic_token) + self.assertGreater(len(init_message[2].value), 0) qop_used = gb.verify_mic_iov(self.server_ctx, init_message) - - qop_used.should_be_an_integer() + self.assertIsInstance(qop_used, int) @ktu.gssapi_extension_test('iov_mic', 'IOV MIC') def test_verify_mic_iov_bad_mic_raises_error(self): @@ -1579,8 +1414,8 @@ def test_verify_mic_iov_bad_mic_raises_error(self): std_layout=False) # test a bad MIC - gb.verify_mic_iov.should_raise(gb.GSSError, self.server_ctx, - init_message) + self.assertRaises(gb.GSSError, gb.verify_mic_iov, self.server_ctx, + init_message) @ktu.gssapi_extension_test('iov_mic', 'IOV MIC') def test_get_mic_iov_length(self): @@ -1590,9 +1425,8 @@ def test_get_mic_iov_length(self): auto_alloc=False) gb.get_mic_iov_length(self.client_ctx, init_message) - - init_message[2].type.should_be(gb.IOVBufferType.mic_token) - init_message[2].value.shouldnt_be_empty() + self.assertEqual(init_message[2].type, gb.IOVBufferType.mic_token) + self.assertGreater(len(init_message[2].value), 0) TEST_OIDS = {'SPNEGO': {'bytes': b'\053\006\001\005\005\002', @@ -1611,26 +1445,26 @@ class TestOIDTransforms(unittest.TestCase): def test_decode_from_bytes(self): for oid in TEST_OIDS.values(): o = gb.OID(elements=oid['bytes']) - text = repr(o) - text.should_be("".format(oid['string'])) + self.assertEqual(repr(o), f"") def test_encode_from_string(self): 
for oid in TEST_OIDS.values(): o = gb.OID.from_int_seq(oid['string']) - o.__bytes__().should_be(oid['bytes']) + self.assertEqual(o.__bytes__(), oid['bytes']) def test_encode_from_int_seq(self): for oid in TEST_OIDS.values(): int_seq = oid['string'].split('.') o = gb.OID.from_int_seq(int_seq) - o.__bytes__().should_be(oid['bytes']) + self.assertEqual(o.__bytes__(), oid['bytes']) def test_comparisons(self): krb5 = gb.OID.from_int_seq(TEST_OIDS['KRB5']['string']) krb5_other = gb.OID.from_int_seq(TEST_OIDS['KRB5']['string']) spnego = gb.OID.from_int_seq(TEST_OIDS['SPNEGO']['string']) - (krb5 == krb5_other).should_be(True) - (krb5 == spnego).should_be(False) - (krb5 != krb5_other).should_be(False) - (krb5 != spnego).should_be(True) + # Purpose here is to test comparisons themselves - don't simplify + self.assertTrue(krb5 == krb5_other) + self.assertFalse(krb5 == spnego) + self.assertFalse(krb5 != krb5_other) + self.assertTrue(krb5 != spnego) diff --git a/setup.py b/setup.py index 9d26fea..37bef14 100755 --- a/setup.py +++ b/setup.py @@ -37,8 +37,10 @@ def get_output(*args, **kwargs): # get the compile and link args +kc = "krb5-config" +posix = os.name != 'nt' link_args, compile_args = [ - shlex.split(os.environ[e]) if e in os.environ else None + shlex.split(os.environ[e], posix=posix) if e in os.environ else None for e in ['GSSAPI_LINKER_ARGS', 'GSSAPI_COMPILER_ARGS'] ] @@ -70,6 +72,26 @@ def get_output(*args, **kwargs): except ValueError: cygwinccompiler.get_msvcr = lambda *a, **kw: [] +if sys.platform.startswith("freebsd"): + # FreeBSD does $PATH backward, for our purposes. That is, the package + # manager's version of the software is in /usr/local, which is in PATH + # *after* the version in /usr. We prefer the package manager's version + # because the Heimdal in base is truly ancient, but this can be overridden + # - either in the "normal" fashion by putting something in PATH in front + # of it, or by removing /usr/local from PATH. 
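# Illustrative sketch (not part of this patch): the krb5-config selection rule
# described in the comment above, in isolation. It walks a POSIX-style PATH in
# order and only prefers /usr/local/bin/krb5-config when /usr/bin/krb5-config
# would otherwise win, which is what the lines that follow implement.
import os


def pick_krb5_config(path=None):
    path = os.environ.get("PATH", "") if path is None else path
    hits = [os.path.join(d, "krb5-config")
            for d in path.split(":")
            if os.path.exists(os.path.join(d, "krb5-config"))]
    if (len(hits) > 1 and hits[0] == "/usr/bin/krb5-config"
            and "/usr/local/bin/krb5-config" in hits):
        return "/usr/local/bin/krb5-config"
    return hits[0] if hits else "krb5-config"

# On a FreeBSD box with both the base and packaged Kerberos installed,
# pick_krb5_config("/usr/bin:/usr/local/bin") returns "/usr/local/bin/krb5-config".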
+ + bins = [] + for b in os.environ["PATH"].split(":"): + p = f"{b}/krb5-config" + if not os.path.exists(p): + continue + bins.append(p) + + if len(bins) > 1 and bins[0] == "/usr/bin/krb5-config" and \ + "/usr/local/bin/krb5-config" in bins: + kc = "/usr/local/bin/krb5-config" + print(f"Detected: {kc}") + if link_args is None: if osx_has_gss_framework: link_args = ['-framework', 'GSS'] @@ -84,7 +106,7 @@ def get_output(*args, **kwargs): elif os.environ.get('MINGW_PREFIX'): link_args = ['-lgss'] else: - link_args = shlex.split(get_output('krb5-config --libs gssapi')) + link_args = shlex.split(get_output(f"{kc} --libs gssapi")) if compile_args is None: if osx_has_gss_framework: @@ -97,14 +119,14 @@ def get_output(*args, **kwargs): elif os.environ.get('MINGW_PREFIX'): compile_args = ['-fPIC'] else: - compile_args = shlex.split(get_output('krb5-config --cflags gssapi')) + compile_args = shlex.split(get_output(f"{kc} --cflags gssapi")) # add in the extra workarounds for different include structures if winkrb_path: prefix = winkrb_path else: try: - prefix = get_output('krb5-config gssapi --prefix') + prefix = get_output(f"{kc} gssapi --prefix") except Exception: print("WARNING: couldn't find krb5-config; assuming prefix of %s" % str(sys.prefix)) @@ -140,6 +162,14 @@ def get_output(*args, **kwargs): main_path = "" if main_lib is None and osx_has_gss_framework: main_lib = ctypes.util.find_library('GSS') + if not main_lib: + # https://github.com/pythongssapi/python-gssapi/issues/235 + # CPython has a bug on Big Sur where find_library will fail to + # find the library path of shared frameworks. This has been fixed + # in newer versions but we have this fallback in case an older + # version is still in use. This fix is expected to be included in + # 3.8.8 and 3.9.2. + main_lib = '/System/Library/Frameworks/GSS.framework/GSS' elif os.environ.get('MINGW_PREFIX'): main_lib = os.environ.get('MINGW_PREFIX')+'/bin/libgss-3.dll' elif sys.platform == 'msys': @@ -164,11 +194,16 @@ def get_output(*args, **kwargs): # To support Heimdal on Debian, read the linker path. if opt.startswith('-Wl,/'): main_path = opt[4:] + "/" + if main_path == "": + for d in library_dirs: + if os.path.exists(os.path.join(d, main_lib)): + main_path = d + break if main_lib is None: raise Exception("Could not find main GSSAPI shared library. 
Please " "try setting GSSAPI_MAIN_LIB yourself or setting " - "ENABLE_SUPPORT_DETECTION to 'false'") + "GSSAPI_SUPPORT_DETECT to 'false'") GSSAPI_LIB = ctypes.CDLL(os.path.join(main_path, main_lib)) @@ -288,28 +323,28 @@ def gssapi_modules(lst): install_requires = [ 'decorator', - 'six >= 1.4.0' ] -if sys.version_info < (3, 4): - install_requires.append('enum34') setup( name='gssapi', - version='1.6.1', + version='1.6.12', author='The Python GSSAPI Team', - author_email='sross@redhat.com', + author_email='rharwood@redhat.com', packages=['gssapi', 'gssapi.raw', 'gssapi.raw._enum_extensions', 'gssapi.tests'], description='Python GSSAPI Wrapper', long_description=long_desc, license='LICENSE.txt', url="https://github.com/pythongssapi/python-gssapi", + python_requires=">=3.6.*", classifiers=[ - 'Development Status :: 4 - Beta', + 'Development Status :: 5 - Production/Stable', 'Programming Language :: Python', - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.3', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', 'Intended Audience :: Developers', 'License :: OSI Approved :: ISC License (ISCL)', 'Programming Language :: Python :: Implementation :: CPython', diff --git a/test-requirements.txt b/test-requirements.txt index c4cc5df..7d800c7 100644 --- a/test-requirements.txt +++ b/test-requirements.txt @@ -1,7 +1,6 @@ flake8 nose parameterized -shouldbe -six Cython k5test +decorator diff --git a/tox.ini b/tox.ini index cb9a385..30da1a0 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ # and then run "tox" from this directory. [tox] -envlist = py27,py33,py34,py35,py36,py37 +envlist = py36,py37,py38 [testenv] whitelist_externals=bash
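# Illustrative sketch (not part of this patch): the macOS fallback pattern from
# the setup.py hunk above. On affected CPython builds (Big Sur, before the fixes
# expected in 3.8.8/3.9.2), ctypes.util.find_library can return None for shared
# frameworks, so the well-known framework path is supplied when detection fails.
import ctypes.util

main_lib = ctypes.util.find_library("GSS")
if not main_lib:
    # Standard install location of the GSS shared framework on macOS.
    main_lib = "/System/Library/Frameworks/GSS.framework/GSS"
print(main_lib)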