From 2cec037b1cf5c72de89aee003ecd8a3891d1292e Mon Sep 17 00:00:00 2001 From: Henry Schreiner Date: Sat, 25 Feb 2023 08:12:29 -0500 Subject: [PATCH 1/5] chore: adding some helper files, furo Signed-off-by: Henry Schreiner --- .gitignore | 4 +++ .pre-commit-config.yaml | 44 ++++++++++++++++++++++++++++ noxfile.py | 25 ++++++++++++++++ sphinx_presentation/requirements.txt | 1 + sphinx_presentation/source/conf.py | 6 ++-- 5 files changed, 77 insertions(+), 3 deletions(-) create mode 100644 .pre-commit-config.yaml create mode 100644 noxfile.py diff --git a/.gitignore b/.gitignore index 525f2e2..fe450ff 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,7 @@ .DS_Store .idea sphinx_presentation/build/ +sphinx_presentation/source/_build/ + +__pycache__ +.nox diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..f823f3d --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,44 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: "v4.4.0" + hooks: + - id: check-added-large-files + - id: check-case-conflict + - id: check-merge-conflict + - id: check-symlinks + - id: check-yaml + - id: debug-statements + - id: end-of-file-fixer + - id: mixed-line-ending + - id: name-tests-test + args: ["--pytest-test-first"] + - id: requirements-txt-fixer + - id: trailing-whitespace + + - repo: https://github.com/psf/black + rev: "23.1.0" + hooks: + - id: black + + - repo: https://github.com/asottile/blacken-docs + rev: "1.13.0" + hooks: + - id: blacken-docs + additional_dependencies: [black==23.1.0] + + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: "v0.0.252" + hooks: + - id: ruff + + - repo: https://github.com/codespell-project/codespell + rev: "v2.2.2" + hooks: + - id: codespell + + - repo: https://github.com/pre-commit/pygrep-hooks + rev: "v1.10.0" + hooks: + - id: rst-backticks + - id: rst-directive-colons + - id: rst-inline-touching-normal diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 
0000000..6a6fee8 --- /dev/null +++ b/noxfile.py @@ -0,0 +1,25 @@ +from __future__ import annotations + +import nox + + +@nox.session(reuse_venv=True) +def docs(session: nox.Session) -> None: + """ + Build the docs. Pass "serve" to serve. + """ + + session.install("-r", "sphinx_presentation/requirements.txt") + session.chdir("sphinx_presentation/source") + + if "pdf" in session.posargs: + session.run("sphinx-build", "-M", "latexpdf", ".", "_build") + return + + session.run("sphinx-build", "-M", "html", ".", "_build") + + if "serve" in session.posargs: + session.log("Launching docs at http://localhost:8000/ - use Ctrl-C to quit") + session.run("python", "-m", "http.server", "8000", "-d", "_build/html") + elif session.posargs: + session.error("Unsupported argument to docs") diff --git a/sphinx_presentation/requirements.txt b/sphinx_presentation/requirements.txt index f4c15c3..8ffbf2a 100644 --- a/sphinx_presentation/requirements.txt +++ b/sphinx_presentation/requirements.txt @@ -1,2 +1,3 @@ +furo hieroglyph sphinx diff --git a/sphinx_presentation/source/conf.py b/sphinx_presentation/source/conf.py index b8ae3a4..0ef809f 100644 --- a/sphinx_presentation/source/conf.py +++ b/sphinx_presentation/source/conf.py @@ -60,7 +60,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -76,7 +76,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'alabaster' +html_theme = 'furo' # Theme options are theme-specific and customize the look and feel of a theme # further. 
For a list of options available for each theme, see the @@ -157,4 +157,4 @@ ] -# -- Extension configuration ------------------------------------------------- \ No newline at end of file +# -- Extension configuration ------------------------------------------------- From f6fdf5e70695eecd1a2cded4e3db5f1a47162786 Mon Sep 17 00:00:00 2001 From: Henry Schreiner Date: Sat, 25 Feb 2023 12:10:42 -0500 Subject: [PATCH 2/5] style: fix issues Signed-off-by: Henry Schreiner --- .pre-commit-config.yaml | 7 +-- CHANGES.rst | 8 +-- LICENSE | 16 +++--- README.md | 2 +- conda_build_recipes/03_copy_file/bld.bat | 2 +- .../04_python_in_build/bld.bat | 2 +- conda_build_recipes/05_test_python/bld.bat | 2 +- .../06_has_run_exports/meta.yaml | 2 +- .../capitalize/capitalize/cap_data.txt | 4 +- .../capitalize/capitalize/capital_mod.py | 10 ++-- .../capitalize/test/test_capital_mod.py | 33 +++++------ sphinx_presentation/Makefile | 2 +- sphinx_presentation/README.rst | 2 +- sphinx_presentation/slides2rst.py | 15 ++--- .../source/binaries_dependencies.rst | 16 +++--- sphinx_presentation/source/changes.rst | 2 +- sphinx_presentation/source/conda.rst | 2 +- sphinx_presentation/source/conf.py | 55 +++++++++++-------- sphinx_presentation/source/index.rst | 3 +- sphinx_presentation/source/schedule.rst | 3 +- sphinx_presentation/source/setup_py.rst | 4 +- sphinx_presentation/source/uploading_pypi.rst | 1 - test_recipes/hello-cython/hello/__main__.py | 2 +- test_recipes/hello-cython/hello/_hello.pyx | 1 - test_recipes/hello-cython/setup.py | 6 +- 25 files changed, 96 insertions(+), 106 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f823f3d..dc81e8c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,6 +7,7 @@ repos: - id: check-merge-conflict - id: check-symlinks - id: check-yaml + exclude: test_recipes/bitarray/meta.yaml - id: debug-statements - id: end-of-file-fixer - id: mixed-line-ending @@ -20,12 +21,6 @@ repos: hooks: - id: black - - 
repo: https://github.com/asottile/blacken-docs - rev: "1.13.0" - hooks: - - id: blacken-docs - additional_dependencies: [black==23.1.0] - - repo: https://github.com/charliermarsh/ruff-pre-commit rev: "v0.0.252" hooks: diff --git a/CHANGES.rst b/CHANGES.rst index 171177e..29cba9c 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -18,9 +18,9 @@ The sections are ordered from most recent to the oldest. Better handling data file in :ref:`setup_py_exercise_small_example_package` section ----------------------------------------------------------------------------------- -* Put package data in `data` directory. +* Put package data in ``data`` directory. * Reflect this change in the code. -* Add `package_data` to setup function. +* Add ``package_data`` to setup function. 2018-07 @@ -38,6 +38,4 @@ Making a Python Package Building and Uploading to PyPI ------------------------------ -* Update `Installing a wheel` tutorial adding :ref:`Install a package from TestPyPI ` section. - - +* Update ``Installing a wheel`` tutorial adding :ref:`Install a package from TestPyPI ` section. diff --git a/LICENSE b/LICENSE index abe0eac..b31879a 100644 --- a/LICENSE +++ b/LICENSE @@ -1,13 +1,13 @@ This repository contain the materials for a tutorial. - + The code is licences under the Apache licence, version 2.0. - + Text and other non-code written materials are licenced under the Creative Commons Attribution-ShareAlike 4.0 International licence. - + Text of both licenses below: - - + + Apache License ============== Version 2.0, January 2004 @@ -531,8 +531,8 @@ Section 8 – Interpretation. --------------------------- a. 
For the avoidance of doubt, this Public License does not, and - shall not be interpreted to, reduce, limit, restrict, or impose - conditions on any use of the Licensed Material that could lawfully + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully be mad without permission under this Public License. b. To the extent possible, if any provision of this Public License is @@ -548,5 +548,5 @@ Section 8 – Interpretation. d. Nothing in this Public License constitutes or may be interpreted as a limitation upon, or waiver of, any privileges and immunities - that apply to the Licensor or You, including from the legal + that apply to the Licensor or You, including from the legal processes of any jurisdiction or authority. diff --git a/README.md b/README.md index 9a09bba..aff0479 100644 --- a/README.md +++ b/README.md @@ -71,7 +71,7 @@ anaconda upload /Users/msarahan/miniconda3/conda-bld/osx-64/bitarray-0.8.1-py36h anaconda_upload is not set. 
Not uploading wheels: [] #################################################################################### -Resoource usage summary: +Resource usage summary: Total time: 0:00:36.9 CPU usage: sys=0:00:00.2, user=0:00:00.2 diff --git a/conda_build_recipes/03_copy_file/bld.bat b/conda_build_recipes/03_copy_file/bld.bat index 576c301..f91ba5a 100644 --- a/conda_build_recipes/03_copy_file/bld.bat +++ b/conda_build_recipes/03_copy_file/bld.bat @@ -1 +1 @@ -COPY somefile.txt %PREFIX% \ No newline at end of file +COPY somefile.txt %PREFIX% diff --git a/conda_build_recipes/04_python_in_build/bld.bat b/conda_build_recipes/04_python_in_build/bld.bat index d96252f..48057d2 100644 --- a/conda_build_recipes/04_python_in_build/bld.bat +++ b/conda_build_recipes/04_python_in_build/bld.bat @@ -1 +1 @@ -COPY somefile.py %SP_DIR% \ No newline at end of file +COPY somefile.py %SP_DIR% diff --git a/conda_build_recipes/05_test_python/bld.bat b/conda_build_recipes/05_test_python/bld.bat index d96252f..48057d2 100644 --- a/conda_build_recipes/05_test_python/bld.bat +++ b/conda_build_recipes/05_test_python/bld.bat @@ -1 +1 @@ -COPY somefile.py %SP_DIR% \ No newline at end of file +COPY somefile.py %SP_DIR% diff --git a/conda_build_recipes/06_has_run_exports/meta.yaml b/conda_build_recipes/06_has_run_exports/meta.yaml index eb09ba4..8e0a856 100644 --- a/conda_build_recipes/06_has_run_exports/meta.yaml +++ b/conda_build_recipes/06_has_run_exports/meta.yaml @@ -4,4 +4,4 @@ package: build: run_exports: - - {{ pin_subpackage("has_run_exports") }} + - '{{ pin_subpackage("has_run_exports") }}' diff --git a/setup_example/capitalize/capitalize/cap_data.txt b/setup_example/capitalize/capitalize/cap_data.txt index 10bbc59..d2e8e48 100644 --- a/setup_example/capitalize/capitalize/cap_data.txt +++ b/setup_example/capitalize/capitalize/cap_data.txt @@ -1,7 +1,7 @@ # list of words that don't get capitalized is -or +or a the it -to \ No newline at end of file +to diff --git 
a/setup_example/capitalize/capitalize/capital_mod.py b/setup_example/capitalize/capitalize/capital_mod.py index 66ddd6d..ef635fe 100644 --- a/setup_example/capitalize/capitalize/capital_mod.py +++ b/setup_example/capitalize/capitalize/capital_mod.py @@ -14,13 +14,13 @@ def load_special_words(data_file_name, words=None): from the data file in the package data file is a text file with one work per line - the # charactor is a comment -- everything after it will be ignored + the # character is a comment -- everything after it will be ignored """ words = set() if words is None else words with open(data_file_name) as data_file: for line in data_file: - word = line.split('#')[0].strip() + word = line.split("#")[0].strip() if word: words.add(word.lower()) return words @@ -45,7 +45,7 @@ def capitalize_line(instr, special_words=special_words): :type instr: string :param special_words: set of words that should not be capitalized - defaults to the words in the encosed data file + defaults to the words in the enclosed data file :type special_words: set of str :returns: a capitalized version of instr @@ -83,8 +83,8 @@ def capitalize(infilename, outfilename): :raises: IOError if infilename doesn't exist. 
""" - infile = open(infilename, 'U') - outfile = open(outfilename, 'w') + infile = open(infilename, "U") + outfile = open(outfilename, "w") for line in infile: outfile.write(capitalize_line(line)) diff --git a/setup_example/capitalize/capitalize/test/test_capital_mod.py b/setup_example/capitalize/capitalize/test/test_capital_mod.py index c6f16c7..f97a3c8 100644 --- a/setup_example/capitalize/capitalize/test/test_capital_mod.py +++ b/setup_example/capitalize/capitalize/test/test_capital_mod.py @@ -13,8 +13,9 @@ import capital_mod + # fixture that creates and removes a file with special words in it -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def special_words_path(): """ fixture to generate a special words file to test reading @@ -25,35 +26,37 @@ def special_words_path(): # A couple words for the file words = ["in", "As", "the"] temp_path = Path("special_words_file") - with open(temp_path, 'w') as outfile: + with open(temp_path, "w") as outfile: for word in words: outfile.write(word + "\n") # test comments, too: outfile.write("# random stuff") outfile.write(" in # comment after a word\n") - # the file wil be created and filled, then the path passed on + # the file will be created and filled, then the path passed on yield temp_path # at "teardown", the file will be removed os.remove(temp_path) # fixture that creates and removes a file with some test lines in it. -@pytest.fixture(scope='module') +@pytest.fixture(scope="module") def test_file_path(): """ Fixture to generate a file with some sample data in it """ # A couple words for the file temp_path = Path("input_test_file.txt") - with open(temp_path, 'w') as outfile: - outfile.write("""This is a really simple Text file. + with open(temp_path, "w") as outfile: + outfile.write( + """This is a really simple Text file. It is here so that I can test the capitalize script. And that's only there to try out packaging. So there. 
-""") - # the file wil be created and filled, then the path passed on +""" + ) + # the file will be created and filled, then the path passed on yield temp_path # at "teardown", the file will be removed os.remove(temp_path) @@ -70,15 +73,16 @@ def test_load_special_words(special_words_path): def test_capitalize_line(): - special = {'is', 'a', 'to'} - line = "this is a Line to capitalize" + special = {"is", "a", "to"} + line = "this is a Line to capitalize" expected = "This is a Line to Capitalize" result = capital_mod.capitalize_line(line, special_words=special) - assert result == expected + assert result == expected + def test_capitalize(test_file_path): - """ test an actual file """ + """test an actual file""" p = test_file_path new_file_path = (p.parent / (p.stem + "_cap")).with_suffix(p.suffix) @@ -92,7 +96,4 @@ def test_capitalize(test_file_path): And That's Only There to Try Out Packaging. So There.""" - assert contents.strip() == expected - - - + assert contents.strip() == expected diff --git a/sphinx_presentation/Makefile b/sphinx_presentation/Makefile index f122f0d..94e8cda 100644 --- a/sphinx_presentation/Makefile +++ b/sphinx_presentation/Makefile @@ -17,4 +17,4 @@ help: # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/sphinx_presentation/README.rst b/sphinx_presentation/README.rst index 75d23ff..85ef5ff 100644 --- a/sphinx_presentation/README.rst +++ b/sphinx_presentation/README.rst @@ -35,7 +35,7 @@ Thanks to the integration of this GitHub project with readthedocs, this happens automatically after the ``master`` branch is updated. 
Historically, the website was only updated if a contributor was locally generating -the associated web pages and commiting them on the `gh-pages` branch. +the associated web pages and committing them on the ``gh-pages`` branch. Building the slides diff --git a/sphinx_presentation/slides2rst.py b/sphinx_presentation/slides2rst.py index c0373f8..4580608 100755 --- a/sphinx_presentation/slides2rst.py +++ b/sphinx_presentation/slides2rst.py @@ -17,7 +17,7 @@ rst.append("") # find end of title page: line = inlines.pop(0) -while line.strip() != '1': +while line.strip() != "1": rst.append(line) line = inlines.pop(0) @@ -44,22 +44,17 @@ rst.append("-" * len(header)) # content is the stuff above it - for line in inlines[:start_ind - 1]: + for line in inlines[: start_ind - 1]: rst.append(line) # footer is the stuff in between rst.append("") - for line in inlines[start_ind + 1:end_ind]: + for line in inlines[start_ind + 1 : end_ind]: rst.append(line) # clear it all out: - del inlines[:end_ind + 1] + del inlines[: end_ind + 1] # and clear out any empty lines while inlines and (not inlines[0].strip()): del inlines[0] - - - - -open(outfilename, 'w').write("\n".join(rst)) - +open(outfilename, "w").write("\n".join(rst)) diff --git a/sphinx_presentation/source/binaries_dependencies.rst b/sphinx_presentation/source/binaries_dependencies.rst index 9287335..3f385f1 100644 --- a/sphinx_presentation/source/binaries_dependencies.rst +++ b/sphinx_presentation/source/binaries_dependencies.rst @@ -16,7 +16,7 @@ In this section we will ... 
*headers, libraries, compilers, linkers, build systems, system introspection tools, package managers* * Understand basic requirements for binary compatibility: a) **C-runtime library - compatibility** and b) **shared library compatibilty** + compatibility** and b) **shared library compatibility** * Understand **scikit-build**'s role in coordinating components of the binary build process and **conda**'s role in resolving dependencies and creating compatible platform binaries @@ -39,7 +39,7 @@ Tutorial Within this the context, we explain how *scikit-build* and *conda-build* make life easier when we want to satisfy these requirements. - Finally, run an exercise where we build a native Python wth native binaries + Finally, run an exercise where we build a native Python with native binaries package and analyze the different stages of the build process. Motivation @@ -55,7 +55,7 @@ domains because of the: In order to achieve **high performance**, programs can: -1. **Minimized the number of operations** on the CPU required to acheive a certain +1. **Minimized the number of operations** on the CPU required to achieve a certain task 2. **Execute in parallel** to leverage multi-core, many-core, and GPGPU system architectures @@ -179,7 +179,7 @@ available for the build.* build host artifacts These are files required on the *host* system performing the build. This - includes **header files**, `*.h` files, which define the C program **symbols**, + includes **header files**, ``*.h`` files, which define the C program **symbols**, i.e. variable and function names, for the native binary with which we want to integrate. This also usually includes the native binaries themselves, i.e. the **executable or shared library**. An important exception to this rule @@ -248,15 +248,15 @@ packages. .. nextslide:: The C-runtime on macOS is determined by a build time option, the *osx -deployment target*, which defines the minmum version of macOS to support, e.g. -`10.9`. 
+deployment target*, which defines the minimum version of macOS to support, e.g. +``10.9``. A macOS system comes with support for running building binaries for its version of OSX and older versions of OSX. The XCode toolchain comes with SDK's that support multiple target versions of OSX. -When building a wheel, this can be specified with `--plat-name`:: +When building a wheel, this can be specified with ``--plat-name``:: python setup.py bdist_wheel --plat-name macosx-10.6-x86_64 @@ -303,7 +303,7 @@ build requirements. .. nextslide:: The **scikit-build** package is fundamentally just glue between -the `setuptools` Python module and `CMake `_. +the ``setuptools`` Python module and `CMake `_. .. nextslide:: diff --git a/sphinx_presentation/source/changes.rst b/sphinx_presentation/source/changes.rst index 1cb32f6..d76c92b 100644 --- a/sphinx_presentation/source/changes.rst +++ b/sphinx_presentation/source/changes.rst @@ -1 +1 @@ -.. include:: ../../CHANGES.rst \ No newline at end of file +.. include:: ../../CHANGES.rst diff --git a/sphinx_presentation/source/conda.rst b/sphinx_presentation/source/conda.rst index 357638d..b5be7a9 100644 --- a/sphinx_presentation/source/conda.rst +++ b/sphinx_presentation/source/conda.rst @@ -47,7 +47,7 @@ Introducing conda-build https://github.com/conda/conda-build -Excercise: let’s use ``conda-build`` +Exercise: let’s use ``conda-build`` ------------------------------------ .. 
code-block:: bash diff --git a/sphinx_presentation/source/conf.py b/sphinx_presentation/source/conf.py index 0ef809f..7776556 100644 --- a/sphinx_presentation/source/conf.py +++ b/sphinx_presentation/source/conf.py @@ -19,14 +19,14 @@ # -- Project information ----------------------------------------------------- -project = 'The Joy of Packaging' -copyright = '2018, Assorted' -author = 'Assorted' +project = "The Joy of Packaging" +copyright = "2018, Assorted" +author = "Assorted" # The short X.Y version -version = '' +version = "" # The full version, including alpha/beta/rc tags -release = '0.1' +release = "0.1" # -- General configuration --------------------------------------------------- @@ -39,28 +39,28 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - 'sphinx.ext.githubpages', - 'hieroglyph', + "sphinx.ext.githubpages", + "hieroglyph", ] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The master toctree document. -master_doc = 'index' +master_doc = "index" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = 'en' +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -68,7 +68,7 @@ exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. 
-pygments_style = 'sphinx' +pygments_style = "sphinx" # -- Options for HTML output ------------------------------------------------- @@ -76,7 +76,7 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -html_theme = 'furo' +html_theme = "furo" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the @@ -103,7 +103,7 @@ # -- Options for HTMLHelp output --------------------------------------------- # Output file base name for HTML help builder. -htmlhelp_basename = 'TheJoyofPackagingdoc' +htmlhelp_basename = "TheJoyofPackagingdoc" # -- Options for LaTeX output ------------------------------------------------ @@ -112,15 +112,12 @@ # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. # # 'preamble': '', - # Latex figure (float) alignment # # 'figure_align': 'htbp', @@ -130,8 +127,13 @@ # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - (master_doc, 'TheJoyofPackaging.tex', 'The Joy of Packaging Documentation', - 'Assorted', 'manual'), + ( + master_doc, + "TheJoyofPackaging.tex", + "The Joy of Packaging Documentation", + "Assorted", + "manual", + ), ] @@ -140,8 +142,7 @@ # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - (master_doc, 'thejoyofpackaging', 'The Joy of Packaging Documentation', - [author], 1) + (master_doc, "thejoyofpackaging", "The Joy of Packaging Documentation", [author], 1) ] @@ -151,9 +152,15 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - (master_doc, 'TheJoyofPackaging', 'The Joy of Packaging Documentation', - author, 'TheJoyofPackaging', 'One line description of project.', - 'Miscellaneous'), + ( + master_doc, + "TheJoyofPackaging", + "The Joy of Packaging Documentation", + author, + "TheJoyofPackaging", + "One line description of project.", + "Miscellaneous", + ), ] diff --git a/sphinx_presentation/source/index.rst b/sphinx_presentation/source/index.rst index bb6fe23..f7d3070 100644 --- a/sphinx_presentation/source/index.rst +++ b/sphinx_presentation/source/index.rst @@ -1,7 +1,7 @@ .. The Joy of Packaging documentation master file, created by sphinx-quickstart on Fri Jul 6 14:55:10 2018. You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. + contain the root ``toctree`` directive. *************************** The Sheer Joy of Packaging! @@ -85,4 +85,3 @@ Jonathan Helmus: .. * :ref:`genindex` .. * :ref:`modindex` .. * :ref:`search` - diff --git a/sphinx_presentation/source/schedule.rst b/sphinx_presentation/source/schedule.rst index 9d9f26d..9323a50 100644 --- a/sphinx_presentation/source/schedule.rst +++ b/sphinx_presentation/source/schedule.rst @@ -105,7 +105,7 @@ Clone that repo now -- so you can follow along. 
*headers, libraries, compilers, linkers, build systems, system introspection tools, package managers* * Basic requirements for binary compatibility: a) **C-runtime library - compatibility** and b) **shared library compatibilty** + compatibility** and b) **shared library compatibility** * Joyous tools: **scikit-build**'s role in coordinating components of the binary build process and **conda**'s role in resolving dependencies and creating compatible platform binaries @@ -171,4 +171,3 @@ CI service overview & Conda-forge -- what are the pieces and how do they fit tog * Example to go from the conda-skeleton to a PR on staged-recipes * Comment on some special cases: cython extensions, non-python pkgs, the use of the CIs, etc. * Exercise: put a package on staged-recipes - diff --git a/sphinx_presentation/source/setup_py.rst b/sphinx_presentation/source/setup_py.rst index d75bb6d..4986d85 100644 --- a/sphinx_presentation/source/setup_py.rst +++ b/sphinx_presentation/source/setup_py.rst @@ -708,7 +708,7 @@ And try it:: FileNotFoundError: [Errno 2] No such file or directory: '.../capitalize/cap_data.txt' Our script cannot find the data file. We changed it's location but not the path -in the `capital_mod.py`. +in the ``capital_mod.py``. Let's fix this. On line 32 replace:: @@ -872,5 +872,3 @@ Break time! ----------- Up next: producing redistributable artifacts - - diff --git a/sphinx_presentation/source/uploading_pypi.rst b/sphinx_presentation/source/uploading_pypi.rst index b4b6407..6f65cb2 100644 --- a/sphinx_presentation/source/uploading_pypi.rst +++ b/sphinx_presentation/source/uploading_pypi.rst @@ -485,4 +485,3 @@ And one written by the author of the opinionated blog post above: https://github.com/ionelmc/cookiecutter-pylibrary Either are great starting points. 
- diff --git a/test_recipes/hello-cython/hello/__main__.py b/test_recipes/hello-cython/hello/__main__.py index 59019cf..c229bec 100644 --- a/test_recipes/hello-cython/hello/__main__.py +++ b/test_recipes/hello-cython/hello/__main__.py @@ -1,4 +1,4 @@ - if __name__ == "__main__": from . import _hello as hello + hello.hello("World") diff --git a/test_recipes/hello-cython/hello/_hello.pyx b/test_recipes/hello-cython/hello/_hello.pyx index 1ae0086..4c237b7 100644 --- a/test_recipes/hello-cython/hello/_hello.pyx +++ b/test_recipes/hello-cython/hello/_hello.pyx @@ -6,4 +6,3 @@ cpdef void hello(str strArg): cpdef long size(): "Returns elevation of Nevado Sajama." return 21463L - diff --git a/test_recipes/hello-cython/setup.py b/test_recipes/hello-cython/setup.py index 1618a2d..8e7d049 100644 --- a/test_recipes/hello-cython/setup.py +++ b/test_recipes/hello-cython/setup.py @@ -4,8 +4,8 @@ name="hello-cython", version="1.2.3", description="a minimal example package (cython version)", - author='The scikit-build team', + author="The scikit-build team", license="MIT", - packages=['hello_cython'], - package_dir={'hello_cython': 'hello'}, + packages=["hello_cython"], + package_dir={"hello_cython": "hello"}, ) From 08a2972ef0581dd5402bc487e1395b2b7e7db2df Mon Sep 17 00:00:00 2001 From: Henry Schreiner Date: Mon, 27 Feb 2023 16:19:10 -0500 Subject: [PATCH 3/5] feat: Content for 2023 Signed-off-by: Henry Schreiner --- sphinx_presentation/requirements.txt | 2 + sphinx_presentation/source/2_env.md | 290 +++++ sphinx_presentation/source/3_pyproject.md | 526 +++++++++ sphinx_presentation/source/6_ci.md | 151 +++ .../source/binaries_dependencies.rst | 428 ------- sphinx_presentation/source/changes.md | 38 + sphinx_presentation/source/changes.rst | 1 - sphinx_presentation/source/conda.rst | 1005 ----------------- sphinx_presentation/source/conf.py | 17 +- sphinx_presentation/source/index.md | 43 + sphinx_presentation/source/index.rst | 87 -- sphinx_presentation/source/overview.rst | 129 
--- sphinx_presentation/source/schedule.md | 82 ++ sphinx_presentation/source/schedule.rst | 173 --- sphinx_presentation/source/setup_py.rst | 874 -------------- sphinx_presentation/source/uploading_pypi.rst | 487 -------- 16 files changed, 1143 insertions(+), 3190 deletions(-) create mode 100644 sphinx_presentation/source/2_env.md create mode 100644 sphinx_presentation/source/3_pyproject.md create mode 100644 sphinx_presentation/source/6_ci.md delete mode 100644 sphinx_presentation/source/binaries_dependencies.rst create mode 100644 sphinx_presentation/source/changes.md delete mode 100644 sphinx_presentation/source/changes.rst delete mode 100644 sphinx_presentation/source/conda.rst create mode 100644 sphinx_presentation/source/index.md delete mode 100644 sphinx_presentation/source/index.rst delete mode 100644 sphinx_presentation/source/overview.rst create mode 100644 sphinx_presentation/source/schedule.md delete mode 100644 sphinx_presentation/source/schedule.rst delete mode 100644 sphinx_presentation/source/setup_py.rst delete mode 100644 sphinx_presentation/source/uploading_pypi.rst diff --git a/sphinx_presentation/requirements.txt b/sphinx_presentation/requirements.txt index 8ffbf2a..debaf93 100644 --- a/sphinx_presentation/requirements.txt +++ b/sphinx_presentation/requirements.txt @@ -1,3 +1,5 @@ furo hieroglyph +myst-docutils sphinx +sphinx-copybutton diff --git a/sphinx_presentation/source/2_env.md b/sphinx_presentation/source/2_env.md new file mode 100644 index 0000000..3ba742f --- /dev/null +++ b/sphinx_presentation/source/2_env.md @@ -0,0 +1,290 @@ +# Environments and task runners + +You will see two _very_ common recommendations when installing a package: + +```console +$ pip install # Use only in virtual environment! +$ pip install --user # Almost never use +``` + +Don't use them unless you know exactly what you are doing! The first one will +try to install globally, and if you don't have permission, will install to your +user site packages. 
In global site packages, you can get conflicting versions +of libraries, you can't tell what you've installed for what, packages can +update and break your system; it's a mess. And user site packages are worse, +because all installs of Python on your computer share it, so you might override +and break things you didn't intend to. And with pip's new smart solver, +updating packages inside a global environment can take many minutes and produce +unexpected solves that are technically "correct" but don't work because it +backsolved conflicts to before issues were discovered. + +There is a solution: virtual environments (libraries) or pipx (applications). + +There are likely a _few_ libraries (ideally just `pipx`) that you just have to +install globally. Go ahead, but be careful (and always use your system package +manager instead if you can, like [`brew` on macOS](https://brew.sh) or the +Windows ones -- Linux package managers tend to be too old to use for Python libraries). + + +## Virtual Environments + +:::{note} +The following uses the standard library `venv` module. The `virtualenv` +module can be installed from PyPI, and works identically, though is a bit +faster and provides newer pip by default. +::: + +Python 3 comes with the `venv` module built-in, which supports making virtual environments. +To make one, you call the module with + + +```console +$ python3 -m venv .venv +``` + +This creates links to Python and pip in `.venv/bin`, and creates a +site-packages directory at `.venv/lib`. You can just use `.venv/bin/python` if +you want, but many users prefer to source the activation script: + +```console +$ . .venv/bin/activate +``` + +(Shell specific, but there are activation scripts for all common shells here). +Now `.venv/bin` has been added to your PATH, and usually your shell's prompt +will be modified to indicate you are "in" a virtual environment. 
You can now +use `python`, `pip`, and anything you install into the virtualenv without +having to prefix it with `.venv/bin/`. + +:::{attention} +Check the version of pip installed! If it's old, you might want to run +`pip install -U pip` or, for modern versions of Python, you can add +`--upgrade-deps` to the venv creation line. +::: + +To "leave" the virtual environment, you +undo those changes by running the deactivate function the activation added to +your shell: + +```bash +deactivate +``` + +:::{admonition} What about conda? + +The same concerns apply to Conda. You should avoid installing things to the +`base` environment, and instead make environments and use those above. Quick tips: + +```console +$ conda config --set auto_activate_base false # turn off the default environment +$ conda env create -n some_name # or use paths with `-p` +$ conda activate some_name +$ conda deactivate +``` +::: + + +## Pipx + +There are many Python packages that provide a command line interface and are +not really intended to be imported (`pip`, for example, should not be +imported). It is really inconvenient to have to set up venvs for every command +line tool you want to install, however. `pipx`, from the makers of `pip`, +solves this problem for you. If you `pipx install` a package, it will be +created inside a new virtual environment, and just the executable scripts will +be exposed in your regular shell. + +Pipx also has a `pipx run ` command, which will download a package and +run a script of the same name, and will cache the temporary environment for a +week. This means you have all of PyPI at your fingertips in one line on any +computer that has pipx installed! + +## Task runner (nox) + + +A task runner, like [make][] (fully general), [rake][] (Ruby general), +[invoke][] (Python general), [tox][] (Python packages), or [nox][] (Python +simi-general), is a tool that lets you specify a set of tasks via a common +interface. 
These can be a crutch, allowing poor packaging practices to be +employed behind a custom script, and they can hide what is actually happening. + +Nox has two strong points that help with this concern. First, it is very +explicit, and even prints what it is doing as it operates. Unlike the older +tox, it does not have any implicit assumptions built-in. Second, it has very +elegant built-in support for both virtual and Conda environments. This can +greatly reduce new contributor friction with your codebase. + +A daily developer is not expected to use nox for simple tasks, like running +tests or linting. You should not rely on nox to make a task that should be made +simple and standard (like building a package) complicated. You are not expected +to use nox for linting on CI, or sometimes even for testing on CI, even if +those tasks are provided for users. Nox is a few seconds slower than running +directly in a custom environment - but for new users and rarely run tasks, it +is _much_ faster than explaining how to get setup or manually messing with +virtual environments. It is also highly reproducible, creating and destroying +the temporary environment each time by default. + +You should use nox to make it easy and simple for new contributors to run +things. You should use nox to make specialized developer tasks easy. You should +use nox to avoid making single-use virtual environments for docs and other +rarely run tasks. + +[nox]: https://nox.thea.codes +[tox]: https://tox.readthedocs.io +[invoke]: https://www.pyinvoke.org +[rake]: https://ruby.github.io/rake/ +[make]: https://www.gnu.org/software/make/ + +Since nox is an application, you should install it with `pipx`. If you use +Homebrew, you can install `nox` with that (Homebrew isolates Python apps it +distributes too, just like pipx). + +## Running nox + +If you see a `noxfile.py` in a repository, that means nox is supported. 
You can start +by checking to see what the different tasks (called `sessions` in nox) are provided +by the noxfile author. For example, if we do this on `packaging.python.org`'s repository: + +```console +$ nox -l # or --list-sessions +Sessions defined in /github/pypa/packaging.python.org/noxfile.py: + +- translation -> Build the gettext .pot files. +- build -> Make the website. +- preview -> Make and preview the website. +- linkcheck -> Check for broken links. + +sessions marked with * are selected, sessions marked with - are skipped. +``` + +You can see that there are several different sessions. You can run them with `-s`: + +```console +$ nox -s preview +``` + +Will build and start up a preview of the site. + +If you need to pass options to the session, you can separate nox options with +and the session options with `--`. + +## Writing a Noxfile + +For this example, we'll need a minimal test file for pytest to run. Let's make +this file in a local directory: + +```python +# test_nox.py + +def test_runs(): + assert True +``` + +Let's write our own noxfile. If you are familiar with pytest, this should look +familiar as well; it's intentionally rather close to pytest. We'll make a +minimal session that runs pytest: + +```python +# noxfile.py +import nox + +@nox.session() +def tests(session): + session.install("pytest") + session.run("pytest") +``` + +A noxfile is valid Python, so we import nox. The session decorator tells nox +that this function is going to be a session. By default, the name will be the +function name, the description will be the function docstring, it will run on +the current version of Python (the one nox is using), and it will make a +virtual environment each time the session runs, though all of this is +changeable via keyword arguments to session. + +The session function will be given a `nox.Session` object that has various +useful methods. `.install` will install things with pip, and `.run` will run a +command in a sesson. 
The `.run` command will print a warning if you use an +executable outside the virtual environment unless `external=True` is passed. +Errors will exit the session. + +Let's expand this a little: + + +```python +# noxfile.py +import nox + +@nox.session() +def tests(session: nox.Session) -> None: + """ + Run our tests. + """ + session.install("pytest") + session.run("pytest", *session.posargs) +``` + +This adds a type annotation to the session object, so that IDE's and type +checkers can help you write the code in the function. There's a docstring, +which will print out nice help text when a user lists the sessions. And we pass +through to pytest anything the user passes in via `session.posargs` + + +Let's try running it: + +```console +$ nox -s tests +nox > Running session tests +nox > Creating virtual environment (virtualenv) using python3.10 in .nox/tests +nox > python -m pip install pytest +nox > pytest +==================================== test session starts ==================================== +platform darwin -- Python 3.10.5, pytest-7.1.2, pluggy-1.0.0 +rootdir: /Users/henryschreiner/git/teaching/packaging +collected 1 item + +test_nox.py . [100%] + +===================================== 1 passed in 0.05s ===================================== +nox > Session tests was successful. +``` + + + + +Nox is really just doing the same thing we would do manually (and printing all +the steps except the exact details of creating the virtual environment). You can +see the virtual environment in `.nox/tests`! + + + +:::{admonition} Passing arguments through +Try passing `-v` to pytest. + + +
Solution + +```console +$ nox -s tests -- -v +``` + +
+ +::: + +:::{admonition} Virtual environments +How would you activate this environment? + + +
Solution + +```console +$ source .nox/tests/bin/activate +``` + +
+ +::: + +In general, packages you work on daily are worth fully setting up with virtual +environments, but if you are new to development or just occasionally +contributing to a package, nox is a huge help. diff --git a/sphinx_presentation/source/3_pyproject.md b/sphinx_presentation/source/3_pyproject.md new file mode 100644 index 0000000..0fc2462 --- /dev/null +++ b/sphinx_presentation/source/3_pyproject.md @@ -0,0 +1,526 @@ +# The pyproject.toml + +Much research software is initially developed by hacking away in an interactive +setting, such as in a [Jupyter Notebook](https://jupyter.org) or a Python shell. +However, at some point when you have a more-complicated workflow that you want +to repeat, and/or make available to others, it makes sense to package your +functions into modules and ultimately a software package that can be installed. +This lesson will walk you through that process. + +Consider the `rescale()` function written as an exercise in the Software +Carpentry [Programming with Python](https://swcarpentry.github.io/python-novice-inflammation/08-func/index.html) +lesson. + +First, as needed, create your virtual environment and install NumPy with + +```console +$ virtualenv .venv +$ source .venv/bin/activate +$ pip install numpy +``` + +Then, in a Python shell or Jupyter Notebook, declare the function: + +```python +import numpy as np + +def rescale(input_array): + """Rescales an array from 0 to 1. + + Takes an array as input, and returns a corresponding array scaled so that 0 + corresponds to the minimum and 1 to the maximum value of the input array. + """ + L = np.min(input_array) + H = np.max(input_array) + output_array = (input_array - L) / (H - L) + return output_array +``` + +and call the function: + +```python +>>> rescale(np.linspace(0, 100, 5)) +array([ 0. , 0.25, 0.5 , 0.75, 1. ]) +``` + +## Creating our package in six lines + +Let's create a Python package that contains this function. 
+ +First, create a new directory for your software package, called `package`, and move into that: + +```console +$ mkdir package +$ cd package +``` + +You should immediately initialize an empty Git repository in this directory; if +you need a refresher on using Git for version control, check out the Software +Carpentry [Version Control with Git](https://swcarpentry.github.io/git-novice/) +lesson. (This lesson will not explicitly remind you to commit your work after +this point.) + +```console +$ git init +``` + +Next, we want to create the necessary directory structure for your package. +This includes: +- a `src` directory, which will contain another directory called `rescale` for the source files of your package itself; +- a `tests` directory, which will hold tests for your package and its modules/functions (this can also go inside the `rescale` directory, but we recommend keeping it at the top level so that your test suite is not installed along with the package itself); +- a `docs` directory, which will hold the files necessary for documenting your software package. + +```console +$ mkdir -p src/rescale tests docs +``` + +(The `-p` flag tells `mkdir` to create the `src` parent directory for `rescale`.) + +Putting the package directory and source code inside the `src` directory is not actually *required*; +instead, if you put the `` directory at the same level as `tests` and `docs` then you could actually import or call the package directory from that location. +However, this can cause several issues, such as running tests with the local version instead of the installed version. +In addition, this package structure matches that of compiled languages, and lets your package easily contain non-Python compiled code, if necessary. 
+ +Inside `src/rescale`, create the files `__init__.py` and `rescale.py`: + +```console +$ touch src/rescale/__init__.py src/rescale/rescale.py +``` + +`__init__.py` is required to import this directory as a package, and should remain empty (for now). +`rescale.py` is the module inside this package that will contain the `rescale()` function; +copy the contents of that function into this file. (Don't forget the NumPy import!) + +The last element your package needs is a `pyproject.toml` file. Create this with + +```console +$ touch pyproject.toml +``` + +and then provide the minimally required metadata, which include information about the build system (hatchling) and the package itself (`name` and `version`): + +```toml +# contents of pyproject.toml +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "package" +version = "0.1.0" +``` + +The package name given here, "package," matches the directory `package` that contains our project's code. We've chosen 0.1.0 as the starting version for this package; you'll see more in a later episode about versioning, and how to specify this without manually writing it here. + +The only elements of your package truly **required** to install and import it are the `pyproject.toml`, `__init__.py`, and `rescale.py` files. +At this point, your package's file structure should look like this: + +```bash +. +├── docs +├── pyproject.toml +├── src +│ └── package +│ │ ├── __init__.py +│ │ └── rescale.py +└── tests +``` + +## Installing and using your package + +Now that your package has the necessary elements, you can install it into your virtual environment (which should already be active). From the top level of your project's directory, enter + +```bash +$ pip install -e . +``` + +The `-e` flag tells `pip` to install in editable mode, meaning that you can continue developing your package on your computer as you test it. 
+ +Then, in a Python shell or Jupyter Notebook, import your package and call the (single) function: + +```python +>>> import numpy as np +>>> from package.rescale import rescale +>>> rescale(np.linspace(0, 100, 5)) +``` + +``` +array([0. , 0.25, 0.5 , 0.75, 1. ]) +``` +{: .output} + +This matches the output we expected based on our interactive testing above! 😅 + +## Your first test + +Now that we have installed our package and we have manually tested that it works, let's set up this situation as a test that can be automatically run using `nox` and `pytest`. + +In the `tests` directory, create the `test_rescale.py` file: + +```bash +touch tests/test_rescale.py +``` + +In this file, we need to import the package, and check that a call to the `rescale` function with our known input returns the expected output: +```python +# contents of tests/test_rescale.py +import numpy as np +from package.rescale import rescale + +def test_rescale(): + np.testing.assert_allclose( + rescale(np.linspace(0, 100, 5)), + np.array([0., 0.25, 0.5, 0.75, 1.0 ]), + ) +``` + +Next, take the `noxfile.py` you created in an earlier episode, and modify it to + - install `numpy`, necessary to run the package; + - install `pytest`, necessary to automatically find and run the test(s); + - install the package itself; and + - run the test(s) + +with: + +```python +# contents of noxfile.py +import nox + +@nox.session +def tests(session): + session.install('numpy', 'pytest') + session.install('.') + session.run('pytest') +``` + +Now, with the added test file and `noxfile.py`, your package's directory structure should look like: + +```bash +. +├── docs +├── noxfile.py +├── pyproject.toml +├── src +│ └── package +│ │ ├── __init__.py +│ │ └── rescale.py +└── tests + └── test_rescale.py +``` + +(You may also see some `__pycache__` directories, which contain compiled Python bytecode that was generated when calling your package.) + +Have `nox` run your tests. 
This should give you some information about what +`nox` is doing, and show output along the lines of + +```console +$ nox +nox > Running session tests +nox > Creating virtual environment (virtualenv) using python in .nox/tests +nox > python -m pip install numpy pytest +nox > python -m pip install . +nox > pytest +======================================================================= test session starts ================================================= +platform darwin -- Python 3.9.13, pytest-7.1.2, pluggy-1.0.0 +rootdir: /Users/niemeyek/Desktop/rescale +collected 1 item + +tests/test_rescale.py . [100%] + +======================================================================== 1 passed in 0.07s ================================================== +nox > Session tests was successful. +``` + +This tells us that the output of the test function matches the expected result, and therefore the test passes! 🎉 + +We now have a package that is installed, can be interacted with properly, and has a passing test. +Next, we'll look at other files that should be included with your package. + + +## Informational metadata + +We left the metadata in our `project.toml` quite minimal; we just had +a name and a version. There are quite a few other fields that can really help +your package on PyPI, however. We'll look at them, split into categories: +Informational (like author, description) and Functional (like requirements). +There's also a special `dynamic` field that lets you list values that are going +to come from some other source. + +### Name + +Required. `.`, `-`, and `_` are all equivalent characters, and may be normalized +to `_`. Case is unimportant. This is the only field that must exist statically +in this table. + +```toml +name = "some_project" +``` + +### Version + +Required. Many backends provide ways to read this from a file or from a version +control system, so in those cases you would add `"version"` to the `dynamic` +field and leave it off here. 
+ +```toml +version = "1.2.3" +version = "0.2.1b1" +``` + + +### Description + +A string with a short description of your project. + + +```toml +description = "This is a very short summary of a very cool project." +``` + + + +### Readme + +The name of the readme. Most of the time this is `README.md` or `README.rst`, +though there is a more complex mechanism if a user really desires to embed the +readme into your `pyproject.toml` file (you don't). + +```toml +readme = "README.md" +readme = "README.rst" +``` + +### Authors and maintainers + +This is a list of authors (or maintainers) as (usually inline) tables. A TOML table is very much like a Python dict. + +```python +authors = [ + {name="Me Myself", email="email@mail.com"}, + {name="You Yourself", email="email2@mail.com"}, +] +maintainers = [ + {name="It Itself", email="email3@mail.com"}, +] +``` + +Note that TOML supports two ways two write tables and two ways to write arrays, so you might see this in a different form, but it should be recognizable. + +### Keywords + +A list of keywords for the project. This is mostly used to improve searchability. + +```toml +keywords = ["example", "tutorial"] +``` + +### URLs + +A set of links to help users find various things for your code; some common ones +are `Homepage`, `Source Code`, `Documentation`, `Bug Tracker`, `Changelog`, +`Discussions`, and `Chat`. It's a free-form name, though many common names get +recognized and have nice icons on PyPI. + +```toml +# Inline form +urls.Homepage = "https://pypi.org" +urls."Source Code" = "https://pypi.org" + +# Sectional form +[project.urls] +Homepage = "https://pypi.org" +"Source Code" = "https://pypi.org" +``` + +### Classifiers + +This is a collection of classifiers as listed at +. You select the classifiers that match your +projects from there. Usually, this includes a "Development Status" to tell users +how stable you think your project is, and a few things like "Intended Audience" +and "Topic" to help with search engines. 
There are some important ones though: +the "License" (s) is used to indicate your license. You also can give an idea of +supported Python versions, Python implementations, and "Operating System"s as +well. If you have statically typed Python code, you can tell users about that, +too. + +```toml +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Information Analysis", + "Topic :: Scientific/Engineering :: Mathematics", + "Topic :: Scientific/Engineering :: Physics", + "Typing :: Typed", +] +``` + +### License (special mention) + +There also is a license field, but that was rather inadequate; it didn't support +multiple licenses, for example. Currently, it's best to indicate the license +with a Trove Classifier, and make sure your file is called `LICENSE*` so build +backends pick it up and include it in SDist and wheels. There's work on +standardizing an update to the format in the future. You can manually specify a +license file if you want: + +```toml +license = {file = "LICENSE"} +``` + +:::{admonition} Verify file contents +Always verify the contents of your SDist and Wheel(s) manually to make sure the license file is included. +```bash +tar -tvf dist/package-0.0.1.tar.gz +unzip -l dist/package-0.0.1-py3-none-any.whl +``` +::: + +## Functional metadata + +The remaining fields actually change the usage of the package. + +### Requires-Python + +This is an important and sometimes misunderstood field. 
It looks like this: + +```toml +requires-python = ">=3.7" +``` + +Pip will see if the current version of Python it's installing for passes this +expression. If it doesn't, pip will start checking older versions of the package +until it finds on that passes. This is how `pip install numpy` still works on +Python 3.7, even though NumPy has already dropped support for it. + +You need to make sure you always have this and it stays accurate, since you +can't edit metadata after releasing - you can only yank or delete release(s) and +try again. + +:::{admonition} Upper caps +Upper caps are generally discouraged in the Python ecosystem, but they are (even +more that usual) broken here, since this field was added to help users drop old +Python versions, and the idea it would be used to restrict newer versions was +not considered. The above procedures is not the right one for an upper cap! +Never upper cap this and instead use Trove Classifiers to tell users what +versions of Python your code was tested with. +::: + +### Dependencies + +Your package likely will need other packages from PyPI to run. + +```toml +dependencies = [ + "numpy>=1.18", +] +``` + +You can list dependencies here without minimum versions, but if you have a lot of users, you might want minimum versions; pip will only upgrade an installed package if it's no longer viable via your requirements. You can also use a variety of markers to specify operating system specific packages. + +:::{admonition} project.dependencies vs. build-system.requires + +What is the difference between `project.dependencies` vs. `build-system.requires`? +
Answer + +`build-system.requires` describes what your project needs to "build", that is, +produce an SDist or wheel. Installing a built wheel will _not_ install anything +from `build-system.requires`, in fact, the `pyproject.toml` is not even present +in the wheel! `project.dependencies`, on the other hand, is added to the wheel +metadata, and pip will install anything in that field if not already present +when installing your wheel. + +
+::: + +### Optional Dependencies + +Sometimes you have dependencies that are only needed some of the time. These can +be specified as optional dependencies. Unlike normal dependencies, these are +specified in a table, with the key being the option you pass to pip to install +it. For example: + +```toml +[project.optional-dependenices] +test = ["pytest>=6"] +check = ["flake8"] +plot = ["matplotlib"] +``` + +Now, you can run `pip install 'package[test,check]'`, and pip will install both +the required and optional dependencies `pytest` and `flake8`, but not +`matplotlib`. + +### Entry Points + +A Python package can have entry points. There are three kinds: command-line +entry points (`scripts`), graphical entry points (`gui-scripts`), and general +entry points (`entry-points`). As an example, let's say you have a `main()` +function inside `__main__.py` that you want to run to create a command +`project-cli`. You'd write: + +```toml +[project.scripts] +project-cli = "project.__main__:main" +``` + +The command line name is the table key, and the form of the entry point is +`package.module:function`. Now, when you install your package, you'll be able to +type `project-cli` on the command line and it will run your Python function. + +## Dynamic + +Any field from above that are specified by your build backend instead should be +listed in the special `dynamic` field. For example, if you want `hatchling` to +read `__version__.py` from `src/package/__init__.py`: + +```toml +[project] +name = "package" +dynamic = ["version"] + +[tool.hatch] +version.path = "src/package/__init__.py" +``` + +## All together + + +Now let's take our previous example and expand it with more information. 
Here's an example: + +```toml +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "package" +version = "0.0.1" +authors = [ + { name="Example Author", email="author@example.com" }, +] +description = "A small example package" +readme = "README.md" +license = { file="LICENSE" } +requires-python = ">=3.7" +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", +] + +[project.urls] +"Homepage" = "https://github.com/pypa/sampleproject" +"Bug Tracker" = "https://github.com/pypa/sampleproject/issues" +``` diff --git a/sphinx_presentation/source/6_ci.md b/sphinx_presentation/source/6_ci.md new file mode 100644 index 0000000..5b2dba1 --- /dev/null +++ b/sphinx_presentation/source/6_ci.md @@ -0,0 +1,151 @@ +# Continuous Integration + +Continuous Integration (CI) allows you to perform tasks on a server +for various events on your repository (called triggers). For example, +you can use GitHub Actions (GHA) to run a test suite on every pull request. + +GHA is made up of workflows which consist of actions. Workflows are files +in the `.github/workflows` folder ending in `.yml`. + +## Triggers + +Workflows start with triggers, which define when things run. Here are three +triggers: + +```yaml +on: + pull_request: + push: + branches: + - main +``` + +This will run on all pull requests and pushes to main. You can also specify +specific branches for pull requests instead of running on all PRs (will run on +PRs targeting those branches only). + +## Running unit tests + +Let's set up a basic test. We will define a jobs dict, with a single job named +"tests". For all jobs, you need to select an image to run on - there are images +for Linux, macOS, and Windows. We'll use `ubuntu-latest`. 
+ +```yaml +jobs: + tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Install package + run: python -m pip install -e .[test] + + - name: Test package + run: python -m pytest +``` + +This has five steps: + +1. Checkout the source (your repo). +2. Prepare Python 3.10 (will use a preinstalled version if possible, otherwise will download a binary). +3. Install your package with testing extras - this is just an image that will be removed at the end of the run, so "global" installs are fine. We also provide a nice name for the step. +4. Run your package's tests. + +By default, if any step fails, the run immediately quits and fails. + + +## Running in a matrix + +You can parametrize values, such as Python version or operating system. Do do +this, make a `strategy: matrix:` dict. Every key in that dict (except `include:` +and `exclude` should be set with a list, and a job will be generated with every +possible combination of values. You can access these values via the `matrix` +variable; they do not "automatically" change anything. + +For example: + +```yaml +example: + strategy: + matrix: + onetwothree: [1, 2, 3] + name: Job ${{ matrix.onetwothree }} +``` + + +would produce three jobs, with names `Job 1`, `Job 2`, and `Job 3`. Elsewhere, +if you refer to the `exmaple` job, it will implicitly refer to all three. 
+ +This is commonly used to set Python and operating system versions: + +```yaml +tests: + strategy: + fail-fast: false + matrix: + python-version: ["3.7", "3.11"] + runs-on: [ubuntu-latest, windows-latest, macos-latest] + name: Check Python ${{ matrix.python-version }} on ${{ matrix.runs-on }} + runs-on: ${{ matrix.runs-on }} + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 # Only needed if using setuptools-scm + + - name: Setup Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install package + run: python -m pip install -e .[test] + + - name: Test package + run: python -m pytest +``` + +There are two special keys: `include:` will take a list of jobs to include one +at a time. For example, you could add Python 3.9 on Linux (but not the others): + +```yaml +include: + - python-version: 3.9 + runs-on: ubuntu-latest +``` + +`include` can also list more keys than were present in the original +parametrization; this will add a key to an existing job. + +The `exclude:` key does the opposite, and lets you remove jobs from the matrix. + +## Other actions + +GitHub Actions has the concept of actions, which are just GitHub repositories of the form `org/name@tag`, and there are lots of useful actions to choose from (and you can write your own by composing other actions, or you can also create them with JavaScript or Dockerfiles). Here are a few: + +There are some GitHub supplied ones: + +- [actions/checkout](https://github.com/actions/checkout): Almost always the first action. v2+ does not keep Git history unless `with: fetch-depth: 0` is included (important for SCM versioning). v1 works on very old docker images. +- [actions/setup-python](https://github.com/actions/setup-python): Do not use v1; v2+ can setup any Python, including uninstalled ones and pre-releases. v4 requires a Python version to be selected. 
+- [actions/cache](https://github.com/actions/cache): Can store files and restore them on future runs, with a settable key. +- [actions/upload-artifact](https://github.com/actions/upload-artifact): Upload a file to be accessed from the UI or from a later job. +- [actions/download-artifact](https://github.com/actions/download-artifact): Download a file that was previously uploaded, often for releasing. Match upload-artifact version. + +And many other useful ones: + +- [ilammy/msvc-dev-cmd](https://github.com/ilammy/msvc-dev-cmd): Setup MSVC compilers. +- [jwlawson/actions-setup-cmake](https://github.com/jwlawson/actions-setup-cmake): Setup any version of CMake on almost any image. +- [wntrblm/nox](https://github.com/wntrblm/nox): Setup all versions of Python and provide nox. +- [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish): Publish Python packages to PyPI. +- [pre-commit/action](https://github.com/pre-commit/action): Run pre-commit with built-in caching. +- [conda-incubator/setup-miniconda](https://github.com/conda-incubator/setup-miniconda): Setup conda or mamba on GitHub Actions. +- [peaceiris/actions-gh-pages](https://github.com/peaceiris/actions-gh-pages): Deploy built files to to GitHub Pages +- [ruby/setup-miniconda](https://github.com/ruby/setup-ruby) Setup Ruby if you need it for something. + + +## Exercise + +Add a CI file for your package. diff --git a/sphinx_presentation/source/binaries_dependencies.rst b/sphinx_presentation/source/binaries_dependencies.rst deleted file mode 100644 index 3f385f1..0000000 --- a/sphinx_presentation/source/binaries_dependencies.rst +++ /dev/null @@ -1,428 +0,0 @@ -.. _binaries: - -************************* -Binaries and Dependencies -************************* - -Learning Objectives -=================== - -In this section we will ... 
---------------------------- - -* Understand why we build Python packages with native binaries: 1) - **performance** and 2) **library integration** -* Understand different components of the binary build process and their role: - *headers, libraries, compilers, linkers, build systems, system introspection - tools, package managers* -* Understand basic requirements for binary compatibility: a) **C-runtime library - compatibility** and b) **shared library compatibility** -* Understand **scikit-build**'s role in coordinating components of the binary - build process and **conda**'s role in resolving dependencies and creating compatible platform binaries - - -Tutorial -======== - -.. ifnotslides:: - - Introduction - ------------ - - This section discusses the creation of Python packages that contain **native - binaries**. - - First, we explain why building Python packages with native binaries is often - *desirable* or *necessary* for *scientific applications*. - - Next, an overview of the requirements to build native binaries is provided. - Within this the context, we explain how *scikit-build* and *conda-build* make - life easier when we want to satisfy these requirements. - - Finally, run an exercise where we build a native Python with native binaries - package and analyze the different stages of the build process. - -Motivation ----------- - -Scientific computing applications demand **higher performance** than other -domains because of the: - -1. **Size** of the **datasets** to be analyzed -2. **Complexity** of the **algorithms** evaluated - -.. nextslide:: - -In order to achieve **high performance**, programs can: - -1. **Minimized the number of operations** on the CPU required to achieve a certain - task -2. **Execute in parallel** to leverage multi-core, many-core, and GPGPU system - architectures -3. Carefully and precisely **manage memory** allocation and use - -.. nextslide:: - -Greater performance is achieved with native binaries over CPython because: - -1. 
Tasks are **compiled down to minimal processor operations**, - as opposed to high level programming language instructions that must be - **interpreted** -2. Parallel computing is not impared by CPython's `Global Interpreter Lock - (GIL) `_ -3. **Memory** can be managed **explicitly** and **deterministically** - -.. nextslide:: - -Many existing scientific codes are written in **programming languages other than Python**. -It is necessary to **re-use** these libraries since: - -- **Resources** are not available to re-implement work that is sometimes the - result of multiple decades of effort from multiple researchers. -- The **scientific endeavor** is built on the practice of **reproducing** and **building on the top** of the efforts of our predecessors. - -.. nextslide:: - -The *lingua franca* of computing is the **C programming language** because -most operating systems themselves are written in C. - -As a consequence, - -* **Native binaries** reflect characteristics and compatibility with of the C language -* The reference implementation of Python, *CPython*, is implemented in C -* CPython supports **binary extension modules written in C** -* Most other pre-compiled programming languages have a **compatibility layer - with C** -* CPython is an excellent language to **integrate scientific codes**! - -.. nextslide:: - -Common programming languages compiled into native libraries for scientific -computing include: - -- Fortran -- C -- C++ -- Cython -- Rust - -Build Components and Requirements ---------------------------------- - -Build component categories: - -build tools - Tools use in the build process, such as the compiler, linker, build systems, - system introspection tool, and package manager - -.. nextslide:: - -Example compilers: - -- GCC -- Clang -- Visual Studio - -*Compilers translate source code from a human readable to a machine readable -form.* - -.. 
nextslide:: - -Example linkers: - -- ld -- ld.gold -- link.exe - -*Linkers combine the results of compilers into a shared library that is -executed at program runtime.* - -.. nextslide:: - -Example build systems: - -- distutils.build_ext -- Unix Makefiles -- Ninja -- MSBuild in Visual Studio - -*Builds systems coordinate invocation of the compiler and linker, passing -flags, and only out-of-date build targets are built.* - -.. nextslide:: - -Example system introspection tools: - -- CMake -- GNU Autotools -- Meson - -*System introspection tools examine the host system for available build tools, -the location of build dependencies, and properties of the build target to -generate the appropriate build system configuration files.* - -.. nextslide:: - -Example package managers: - -- conda -- pip -- apt -- yum -- chocolatey -- homebrew - -*Package managers resolve dependencies so the required build host artifacts are -available for the build.* - -.. nextslide:: - -build host artifacts - These are files required on the *host* system performing the build. This - includes **header files**, ``*.h`` files, which define the C program **symbols**, - i.e. variable and function names, for the native binary with which we want - to integrate. This also usually includes the native binaries themselves, - i.e. the **executable or shared library**. An important exception to this rule - is *libpython*, which we do not need on some platforms due to `weak linking - rules `_. - -.. nextslide:: - -target system artifacts - These are artifacts intended to be run on the **target** system, typically the - shared library C-extension. - -.. nextslide:: - -When the build *host* system is different from the *target* system, we are -**cross-compiling**. - -For example, when we are building a Linux Python package on macOS is -cross-compiling. In this case macOS is the *host* system and Linux is the -*target* system. - -.. nextslide:: - -Distributable binaries must use a **compatible C-runtime**. 
- -The table below lists the different C runtime implementations, compilers and -their usual distribution mechanisms for each operating systems. - -.. table:: - - +------------------+---------------------------+-------------------------+-----------------------------------+ - | | Linux | MacOSX | Windows | - +==================+===========================+=========================+===================================+ - | **C runtime** | `GNU C Library (glibc)`_ | `libSystem library`_ | `Microsoft C run-time library`_ | - +------------------+---------------------------+-------------------------+-----------------------------------+ - | **Compiler** | `GNU compiler (gcc)`_ | `clang`_ | Microsoft C/C++ Compiler (cl.exe) | - +------------------+---------------------------+-------------------------+-----------------------------------+ - | **Provenance** | `Package manager`_ | OSX SDK within `XCode`_ | - `Microsoft Visual Studio`_ | - | | | | - `Microsoft Windows SDK`_ | - +------------------+---------------------------+-------------------------+-----------------------------------+ - -.. _GNU C Library (glibc): https://en.wikipedia.org/wiki/GNU_C_Library -.. _Package manager: https://en.wikipedia.org/wiki/Package_manager -.. _Microsoft C run-time library: https://en.wikipedia.org/wiki/Microsoft_Windows_library_files#Runtime_libraries -.. _libSystem library: https://www.safaribooksonline.com/library/view/mac-os-x/0596003560/ch05s02.html -.. _XCode: https://en.wikipedia.org/wiki/Xcode#Version_comparison_table -.. _Microsoft Windows SDK: https://en.wikipedia.org/wiki/Microsoft_Windows_SDK -.. _Microsoft Visual Studio: https://en.wikipedia.org/wiki/Microsoft_Visual_Studio -.. _GNU compiler (gcc): https://en.wikipedia.org/wiki/GNU_Compiler_Collection -.. _clang: https://en.wikipedia.org/wiki/Clang - -.. nextslide:: - -Linux C-runtime compatibility is determined by the version of **glibc** used -for the build. 
- -The glibc library shared by the system is forwards compatible but not -backwards compatible. That is, a package built on an older system *will* -work on a newer system, while a package built on a newer system will not -work on an older system. - -The `manylinux `_ project provides Docker -images that have an older version of glibc to use for distributable Linux -packages. - -.. nextslide:: - -The C-runtime on macOS is determined by a build time option, the *osx -deployment target*, which defines the minimum version of macOS to support, e.g. -``10.9``. - -A macOS system comes with support for running building binaries for its version of -OSX and older versions of OSX. - -The XCode toolchain comes with SDK's that support multiple target versions of OSX. - -When building a wheel, this can be specified with ``--plat-name``:: - - python setup.py bdist_wheel --plat-name macosx-10.6-x86_64 - -.. nextslide:: - -The C-runtime used on Windows is associated with the version of Visual Studio. - -.. table:: - - +-------------------+------------------------------------------------------+ - | | Architecture | - +-------------------+------------------------+-----------------------------+ - | CPython Version | x86 (32-bit) | x64 (64-bit) | - +===================+========================+=============================+ - | **3.5 and above** | Visual Studio 14 2015 | Visual Studio 14 2015 Win64 | - +-------------------+------------------------+-----------------------------+ - | **3.3 to 3.4** | Visual Studio 10 2010 | Visual Studio 10 2010 Win64 | - +-------------------+------------------------+-----------------------------+ - | **2.7 to 3.2** | Visual Studio 9 2008 | Visual Studio 9 2008 Win64 | - +-------------------+------------------------+-----------------------------+ - -.. nextslide:: - -Distributable binaries are also built to be compatible with a certain -CPU architecture class. 
For example - -- x86_64 (currently the most common) -- x86 -- ppc64le - - -Scientific Python Build Tools ------------------------------ - -**scikit-build** is an improved build system generator for CPython C/C++/Fortran/Cython -extensions. - -.. nextslide:: - -**scikit-build** provides better support for additional compilers, build -systems, cross compilation, and locating dependencies and their associated -build requirements. - -.. nextslide:: - -The **scikit-build** package is fundamentally just glue between -the ``setuptools`` Python module and `CMake `_. - -.. nextslide:: - -To build and install a project configured with scikit-build:: - - pip install . - -.. nextslide:: - -To build and install a project configured with scikit-build for development:: - - pip install -e . - -.. nextslide:: - -To build and package a project configured with scikit-build:: - - pip wheel -w dist . - -.. nextslide:: - -**Conda** is an open source package management system and environment management system that runs on Windows, macOS and Linux. - -.. nextslide:: - -**Conda** quickly installs, runs and updates packages and their dependencies. Conda easily creates, saves, loads and switches between environments on your local computer. - -.. nextslide:: - -**Conda** was created for Python programs, but it can package and distribute software for any language. - -.. nextslide:: - -*scikit-build* and *conda* **abstract away** and **manage platform-specific details** for you! - -Exercises -========= - -Exercise 1: Build a Python Package with a C++ Extension Module ---------------------------------------------------------------- - -Download the `hello-cpp `_ example C++ project and build a wheel package -with the commands:: - - cd hello-cpp - pip wheel -w dist --verbose . - -Examine files referenced in the build output. What is the purpose of all -referenced files? 
- -Exercise 2: Build a Python Package with a Cython Extension Module ------------------------------------------------------------------ - -Download the `hello-cython -`_ example C++ project and build a wheel package -with the commands:: - - cd hello-cython - pip wheel -w dist --verbose . - -Examine files referenced in the build output. What is the purpose of all -referenced files? - -Bonus Exercise 3: Build a Distributable Linux Wheel Package ------------------------------------------------------------ - -If Docker is installed, create a `dockcross -`_ `manylinux`_ bash driver script. -From a bash shell, run:: - - # cd into the hello-cpp project from Exercise 1 - cd hello-cpp - docker run --rm dockcross/manylinux-x64 > ./dockcross-manylinux-x64 - chmod +x ./dockcross-manylinux-x64 - -The *dockcross* driver script simplifies execution of commands in the isolated -Docker build environment that use sources in the current working directory. - -.. nextslide:: - -To build a distributable Python 3.6 Python wheel, run:: - - ./dockcross-manylinux-x64 /opt/python/cp36-cp36m/bin/pip wheel -w dist . - -Which will output:: - - Processing /work - Building wheels for collected packages: hello-cpp - Running setup.py bdist_wheel for hello-cpp ... done - Stored in directory: /work/dist - Successfully built hello-cpp - -and produce the wheel:: - - ./dist/hello_cpp-1.2.3-cp36-cp36m-linux_x86_64.whl - -.. nextslide:: - -To find the version of glibc required by the extension, run:: - - ./dockcross-manylinux-x64 bash -c 'cd dist && unzip -o hello_cpp-1.2.3-cp36-cp36m-linux_x86_64.whl && objdump -T hello/_hello.cpython-36m-x86_64-linux-gnu.so | grep GLIBC' - -What glibc version compatibility is required for this binary? 
- - -manylinux: https://github.com/pypa/manylinux - - -Bonus Exercise 4: Setting up continuous integration ---------------------------------------------------- - -* See branch `master-with-ci `_ - branch associated with ``hello-cpp`` example: - - * Use `scikit-ci `_ for simpler and centralized CI configuration for - Python extensions. - - * Use `scikit-ci-addons `_, a set of scripts useful - to help drive CI. - - * On CircleCI, use manylinux dockcross images including `scikit-build `_, - `cmake `__ and `ninja `_ packages. diff --git a/sphinx_presentation/source/changes.md b/sphinx_presentation/source/changes.md new file mode 100644 index 0000000..73492b8 --- /dev/null +++ b/sphinx_presentation/source/changes.md @@ -0,0 +1,38 @@ +# Tutorial Content Updates + +You will find here the list of changes integrated in the tutorial after +it was first given at the SciPy 2018 conference. + +Changes are grouped in sections identified using `YYYY-MM` representing +the year and month when the related changes were done. + +The sections are ordered from most recent to the oldest. + +## 2023-02 + +Started rewrite for modern packaging. + +## 2018-08 + +### Better handling data file in `setup_py_exercise_small_example_package` section + +- Put package data in `data` directory. +- Reflect this change in the code. +- Add `package_data` to setup function. + +## 2018-07 + +This is the first set of changes incorporating the feedback from +attendees. + +### Making a Python Package + +- Add directory + [setup_example/capitalize](https://github.com/python-packaging-tutorial/python-packaging-tutorial/tree/master/setup_example/capitalize) + discussed in `setup_py_exercise_small_example_package` section. + +### Building and Uploading to PyPI + +- Update `Installing a wheel` tutorial adding + `Install a package from TestPyPI ` + section.
diff --git a/sphinx_presentation/source/changes.rst b/sphinx_presentation/source/changes.rst deleted file mode 100644 index d76c92b..0000000 --- a/sphinx_presentation/source/changes.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../../CHANGES.rst diff --git a/sphinx_presentation/source/conda.rst b/sphinx_presentation/source/conda.rst deleted file mode 100644 index b5be7a9..0000000 --- a/sphinx_presentation/source/conda.rst +++ /dev/null @@ -1,1005 +0,0 @@ -.. _conda_build: - -############## -Conda Packages -############## - - -Building Conda Packages -======================= - -A package system for anything... - - -Wheels vs. Conda packages -------------------------- - -+-------------------------------------+-------------------------------------+ -| Wheels | Conda packages | -+=====================================+=====================================+ -| Employed by pip, blessed by PyPA | Foundation of Anaconda ecosystem | -+-------------------------------------+-------------------------------------+ -| Used by any python installation | Used by conda python installations | -+-------------------------------------+-------------------------------------+ -| Mostly specific to Python ecosystem | General purpose (any ecosystem) | -+-------------------------------------+-------------------------------------+ -| Good mechanism for specifying range | Primitive support for multiple | -| of python compatibility | python versions (noarch) | -+-------------------------------------+-------------------------------------+ -| Depends on static linking or other | Can bundle core system-level shared | -| system package managers to provide | libraries as packages, and resolve | -| core libraries | dependencies | -+-------------------------------------+-------------------------------------+ - - - -Introducing conda-build ------------------------ - -* Orchestrates environment creation, activation, and build/test processes - -* Can build conda packages and/or wheels - -* Separate project 
from conda, but very tightly integrated - -* Open-source, actively developed: - - https://github.com/conda/conda-build - - -Exercise: let’s use ``conda-build`` ------------------------------------- - -.. code-block:: bash - - conda install conda-build - -* Windows only: - -.. code-block:: bash - - conda install m2-patch posix - -* All platforms: - -.. code-block:: bash - - cd python-packaging-tutorial/conda_build_recipes - conda build 01_minimum - - -What happened? --------------- - -* Templates filled in, recipe interpreted - -* Build environment created (isolated) - -* Build script run - -* New files in build environment bundled into package - -* Test environment created (isolated) - -* Tests run on new package - -* cleanup - - -Obtaining recipes ------------------- - -* Existing recipes (best) - - - https://github.com/AnacondaRecipes - - - https://github.com/conda-forge - -* Skeletons from other repositories - (PyPI, CRAN, CPAN, RPM) - -* DIY - - -Anaconda Recipes ----------------- - -* https://github.com/AnacondaRecipes - -* Official recipes that Anaconda uses for building packages - -* Since Anaconda 5.0, forked from conda-forge recipes. - -* Intended to be compatible with conda-forge long-term - -* Presently, ahead of conda-forge on use of conda-build 3 features - - -Conda-forge ------------ - -.. image:: images/conda-forge.png - -https://conda-forge.org - -.. 
nextslide:: - -* https://conda-forge.org - -* Numfocus-affiliated community organization made up of volunteers - -* One github repository per recipe - - - Fine granularity over permissions - -* Heavy use of automation for building, deploying, and updating recipes - -* Free builds on public CI services (TravisCI, CircleCI, Appveyor) - - -Skeletons ---------- - -* Read metadata from upstream repository - -* Translate that into a recipe - -| - -* **Will** save you some boilerplate work - -* **Might** work out of the box - - - (should not assume automatic, though) - - - -conda skeleton --------------- - -**conda skeleton pypi:** - -.. code-block:: bash - - conda skeleton pypi - - conda skeleton pypi click - - conda skeleton pypi --recursive pyinstrument - - -**conda skeleton cran** - -.. code-block:: bash - - conda skeleton cran - - conda skeleton cran acs - - conda skeleton cran --recursive biwt - - - -When all else fails, write a recipe ------------------------------------ - -Only required section: - - -.. 
code-block:: yaml - - package: - name: abc - version: 1.2.3 - -Exercise: create a basic recipe -------------------------------- - -https://github.com/python-packaging-tutorial/python-packaging-tutorial/tree/master/conda_build_recipes/01_minimum - - -Source types ------------- - -* url - -* git - -* hg - -* svn - -* local path - -| - -`meta.yaml source section `_ - - -Exercise: point your recipe at local files ------------------------------------------- - -https://github.com/python-packaging-tutorial/python-packaging-tutorial/tree/master/conda_build_recipes/02_local_source - - - -Building packages ------------------ - -Lots of ways, but let’s start simple: - -* build.sh (unix) -* bld.bat (windows) - -Filenames are of paramount importance here - - -build.sh: stuff to run on mac/linux ------------------------------------ - -* It’s a shell script: do what you want -* Snapshot files in $PREFIX before running script; again after -* Files that are new in $PREFIX are what make up your package -* Several useful env vars for use in build.sh: https://conda.io/docs/user-guide/tasks/build-packages/environment-variables.html - - -bld.bat: stuff to run on windows --------------------------------- - -* It’s a batch script: do what you want -* Snapshot files in %PREFIX% before running script; again after -* Files that are new in %PREFIX% are what make up your package -* Several useful env vars for use in bld.bat: https://conda.io/docs/user-guide/tasks/build-packages/environment-variables.html - - -Exercise: Copy a file into the package --------------------------------------- - -https://github.com/python-packaging-tutorial/python-packaging-tutorial/tree/master/conda_build_recipes/03_copy_file - - -Build options -------------- - -``number``: - version reference of recipe (as opposed to version of source code) - -``script``: - quick build steps, avoid separate build.sh/bld.bat files - -``skip``: - skip building recipe on some platforms - -``entry_points``: - python code locations to 
create executables for - -``run_exports``: - add dependencies to downstream consumers to ensure compatibility - -`meta.yaml build section `_ - - -Requirements ------------- - -.. image:: images/build_host_run.png - - -Build requirements ------------------- - -* Tools to build packages with; things that don’t directly go into headers or linking -* Compilers -* autotools, pkg-config, m4, cmake -* archive tools - - -Host requirements ------------------ - -* External dependencies for the package that need to be present at build time -* Headers, libraries, python/R/perl -* Python deps used in setup.py -* Not available at runtime, unless also specified in run section - - -Run requirements ----------------- - -* Things that need to be present when the package is installed on the end-user system -* Runtime libraries -* Python dependencies at runtime -* Not available at build time unless also specified in build/host section - - -Requirements: build vs. host ----------------------------- - -* Historically, only build - -* Still fine to use only build - -* host introduced for cross compiling - -* host also useful for separating build tools from packaging environment - - -**If in doubt, put everything in host** - -* build is treated same as host for old-style recipes - (only build, no ``{{ compiler() }}``) - -* packages are bundled from host env, not build env - - -Exercise: use Python in a build script --------------------------------------- - -https://github.com/python-packaging-tutorial/python-packaging-tutorial/tree/master/conda_build_recipes/04_python_in_build - - -Post-build Tests ----------------- - -* Help ensure that you didn’t make a packaging mistake - -* Ideally checks that necessary shared libraries are included as dependencies - - - -**Dependencies** - -Describe dependencies that are required for the tests -(but not for normal package usage) - -.. 
code-block:: yaml - - test: - requires: - - pytest - - - -Post-build tests: test files ----------------------------- - -All platforms: - ``run_test.pl``, ``run_test.py``, ``run_test.r``, ``run_test.lua`` - -| - -Windows: - ``run_test.bat`` - -| - -Linux / Mac: - ``run_test.sh`` - - - -Post-build tests ----------------- - -* May have specific requirements - -* May specify files that must be bundled for tests (``source_files``) - -* ``imports:`` - language specific imports to try, to verify correct installation - -* ``commands:`` - sequential shell-based commands to run (not OS-specific) - -https://conda.io/docs/user-guide/tasks/build-packages/define-metadata.html#test-section - - -Import Tests ------------- - -.. code-block:: yaml - - test: - imports: - - dateutil - - dateutil.rrule - - dateutil.parser - - dateutil.tz - - -Test commands -------------- - -.. code-block:: yaml - - test: - commands: - - curl --version - - curl-config --features # [not win] - - curl-config --protocols # [not win] - - curl https://some.website.com - - -Exercise: add some tests ------------------------- - -https://github.com/python-packaging-tutorial/python-packaging-tutorial/tree/master/conda_build_recipes/05_test_python - - -Outputs - more than one pkg per recipe --------------------------------------- - -.. code-block:: yaml - - package: - name: some-split - version: 1.0 - - outputs: - - name: subpkg - - name: subpkg2 - - -.. nextslide:: - -* Useful for consolidating related recipes that share (large) source - -* Reduce update burden - -* Reduce build time by keeping some parts of the build, while looping over other parts - -* Also output different types of packages from one recipe (wheels) - - -https://conda.io/docs/user-guide/tasks/build-packages/define-metadata.html#outputs-section - - -About section -------------- - -| - -.. 
image:: images/about_section.png - - - -Extra section: free-for-all ---------------------------- - -* Used for external tools or state management - -* No schema - -* Conda-forge’s maintainer list - -* Conda-build’s notion of whether a recipe is “final” - - -https://conda.io/docs/user-guide/tasks/build-packages/define-metadata.html#extra-section - - - -Conditional lines (selectors) ------------------------------ - -:: - - some_content # [some expression] - - -* content inside ``[...]`` is eval’ed - -* namespace includes OS info, python info, and a few others - - -https://conda.io/docs/user-guide/tasks/build-packages/define-metadata.html#preprocessing-selectors - - -Exercise: Limit a Recipe to Only Linux --------------------------------------- - -.. code-block:: yaml - - package: - name: example_skip_recipe - version: 1.0 - - build: - skip: True - -.. nextslide:: - -.. code-block:: yaml - - package: - name: example_skip_recipe - version: 1.0 - - build: - skip: True # [not linux] - - -Intro to Templating with Jinja2 --------------------------------- - -* Fill in information dynamically - - - git tag info - - - setup.py recipe data - - - centralized version numbering - - - string manipulation - -How does Templating Save You Time? ----------------------------------- - -:: - - {% set version = "3.0.2" %} - - package: - name: example - version: {{ version }} - source: - url: https://site/{{version}}.tgz - - -Jinja2 Templating in ``meta.yaml`` ----------------------------------- - -Set variables:: - - {% set somevar=”someval” %} - -Use variables:: - - {{ somevar }} - -Expressions in ``{{ }}`` are roughly python - - -Jinja2 conditionals -------------------- - -Selectors are one line only. When you want to toggle a block, use jinja2:: - - {%- if foo -%} - - toggled content - - on many lines - - {% endif %} - - -Exercise: use Jinja2 to reduce edits ------------------------------------- - -.. 
code-block:: yaml - - package: - name: abc - version: 1.2.3 - - source: - url: http://my.web/abc-1.2.3.tgz - - -.. nextslide:: - -:: - - {% set version=”1.2.3” %} - package: - name: abc - version: {{ version }} - - source: - url: http://w/abc-{{version}}.tgz - - -Variants: Jinja2 on steroids ----------------------------- - -Matrix specification in yaml files - -.. code-block:: yaml - - somevar: - - 1.0 - - 2.0 - - anothervar: - - 1.0 - - -All variant variables exposed in jinja2 ---------------------------------------- - -In meta.yaml, - -``{{ somevar }}`` - -And this loops over values - - -Exercise: try looping ---------------------- - -meta.yaml: - -.. code-block:: yaml - - package: - name: abc - version: 1.2.3 - - build: - skip: True # [skipvar] - -conda_build_config.yaml: - -.. code-block:: yaml - - skipvar: - - True - - False - - -.. nextslide:: - -meta.yaml: - -.. code-block:: yaml - - package: - name: abc - version: 1.2.3 - - requirements: - build: - - python {{ python }} - - run: - - python {{ python }} - -conda_build_config.yaml: - -.. code-block:: yaml - - python: - - 2.7 - - 3.6 - -.. nextslide:: - -meta.yaml: - -.. code-block:: yaml - - package: - name: abc - version: 1.2.3 - - requirements: - build: - - python - run: - - python - -.. nextslide:: - -conda_build_config.yaml: - -.. 
code-block:: yaml - - python: - - 2.7 - - 3.6 - - -Jinja2 functions ----------------- - -loading source data: - - ``load_setup_py_data`` - - ``load_file_regex`` - -Dynamic Pinning: - - ``pin_compatible`` - - ``pin_subpackage`` - -Compatibility Control: - - ``compiler`` - - ``cdt`` - - -Loading setup.py data ---------------------- - -:: - - {% set setup_data = load_setup_py_data() %} - - package: - name: abc - version: {{ setup_data[‘version’] }} - - -* Primarily a development recipe tool - release recipes specify version instead, and template source download link - -* Centralizing version info is very nice - see also ``versioneer``, ``setuptools_scm``, ``autover``, and many other auto-version tools - - -Loading arbitrary data ----------------------- - -:: - - {% set data = load_file_regex(load_file='meta.yaml', - regex_pattern='git_tag: ([\\d.]+)') %} - - package: - name: conda-build-test-get-regex-data - version: {{ data.group(1) }} - -* Useful when software provides version in some arbitrary file - -* Primarily a development recipe tool - release recipes specify version instead, and template source download link - - -Dynamic pinning ---------------- - -Use in meta.yaml, generally in requirements section: - -.. code-block:: yaml - - requirements: - host: - - numpy - run: - - {{ pin_compatible(‘numpy’) }} - -.. nextslide:: - -Use in meta.yaml, generally in requirements section: - -.. 
code-block:: yaml - - requirements: - host: - - numpy - run: - - {{ pin_compatible(‘numpy’) }} - - -* Pin run req based on what is present at build time - - -Dynamic pinning in practice ---------------------------- - -Used a lot with numpy: - -https://github.com/AnacondaRecipes/scikit-image-feedstock/blob/master/recipe/meta.yaml - - -Dynamic pinning within recipes ------------------------------- - -Refer to other outputs within the same recipe - - - When intradependencies exist - - - When shared libraries are consumed by other libraries - -https://github.com/AnacondaRecipes/aggregate/blob/master/clang/meta.yaml - - -Compilers ---------- - -Use in meta.yaml in requirements section: - -.. code-block:: yaml - - requirements: - build: - - {{ compiler(‘c’) }} - -* explicitly declare language needs - -* compiler packages can be actual compilers, or just activation scripts - -* Compiler packages utilize run_exports to add necessary runtime dependencies automatically - - -Why put compilers into Conda? ------------------------------ - -* Explicitly declaring language needs makes reproducing packages with recipe simpler -* Binary compatibility can be versioned and tracked better -* No longer care what the host OS used to build packages is -* Can still use system compilers - just need to give conda-build information on metadata about them. Opportunity for version check enforcement. - -``run_exports`` ---------------- - -“if you build and link against library abc, you need a runtime dependency on library abc” - -This is annoying to keep track of in recipes. - - -.. nextslide:: - -.. image:: images/run_exports.png - -.. 
nextslide:: - -* Add host or run dependencies for downstream packages that depend on upstream that specifies run_exports - -* Expresses idea that “if you build and link against library abc, you need a runtime dependency on library abc” - -* Simplifies version tracking - -Exercise: make a run_exports package ------------------------------------- - -https://github.com/python-packaging-tutorial/python-packaging-tutorial/tree/master/conda_build_recipes/06_has_run_exports - - -Exercise: use a run_exports package ------------------------------------- - -https://github.com/python-packaging-tutorial/python-packaging-tutorial/tree/master/conda_build_recipes/07_uses_run_exports - - -Uploading packages: anaconda.org --------------------------------- - -* Sign-up: - - - ``https://anaconda.org/`` - -* Requirement: - - - ``conda install anaconda-client`` - -* CLI: anaconda upload path-to-package - -* conda-build auto-upload: - - - ``conda config --set anaconda_upload True`` - -Fin -=== - -Extra slides -============ - -Source Patches --------------- - -* patch files live alongside meta.yaml - -* create patches with: - - - ``diff`` - - - ``git diff`` - - - ``git format-patch`` - -| - -`meta.yaml source section `_ - - -Exercise: let’s make a patch ----------------------------- - -.. code-block:: yaml - - package: - name: test-patch - version: 1.2.3 - - source: - url: https://zlib.net/zlib-1.2.11.tar.gz - - build: - script: exit 1 - - -.. nextslide:: - -* Builds that fail leave their build folders in place - -* look in output for source tree in: - - ``*/conda-bld/test-patch_/work`` - -* ``cd`` there - -.. nextslide:: - -.. code-block:: bash - - git init - - git add * - - git commit -am “init” - - edit file of choice - - git commit -m “changing file because …” - - git format-patch HEAD~1 - - -* copy that patch back alongside meta.yaml - -* modify meta.yaml to include the patch - - -Multiple sources ----------------- - -.. 
code-block:: yaml - - source: - - url: https://package1.com/a.tar.bz2 - folder: stuff - - url: https://package1.com/b.tar.bz2 - folder: stuff - patches: - - something.patch - - git_url: https://github.com/conda/conda-build - folder: conda-build - -`meta.yaml source section `_ - - -Outputs rules -------------- - -* List of dicts - -* Each list must have ``name`` or ``type`` key - -* May use all entries from ``build``, ``requirements``, ``test``, ``about`` sections - -* May specify files to bundle either using globs or by running a script - - -**Outputs Examples** - -https://github.com/AnacondaRecipes/curl-feedstock/blob/master/recipe/meta.yaml - - -https://github.com/AnacondaRecipes/aggregate/blob/master/ctng-compilers-activation-feedstock/recipe/meta.yaml - - -Exercise: Split a Package -------------------------- - -Curl is a library and an executable. Splitting them lets us clarify where Curl is only a build time dependency, and where it also needs to be a runtime dependency. - -**Starting point:** - -https://github.com/conda-forge/curl-feedstock/tree/master/recipe - - -**Solution:** - -https://github.com/AnacondaRecipes/curl-feedstock/tree/master/recipe diff --git a/sphinx_presentation/source/conf.py b/sphinx_presentation/source/conf.py index 7776556..03e12fa 100644 --- a/sphinx_presentation/source/conf.py +++ b/sphinx_presentation/source/conf.py @@ -19,14 +19,14 @@ # -- Project information ----------------------------------------------------- -project = "The Joy of Packaging" -copyright = "2018, Assorted" +project = "The Sheer Joy of Modern Binary Packaging" +copyright = "2023, Assorted" author = "Assorted" # The short X.Y version version = "" # The full version, including alpha/beta/rc tags -release = "0.1" +release = "0.2" # -- General configuration --------------------------------------------------- @@ -39,8 +39,10 @@ # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
extensions = [ - "sphinx.ext.githubpages", "hieroglyph", + "myst_parser", + "sphinx.ext.githubpages", + "sphinx_copybutton", ] # Add any paths that contain templates here, relative to this directory. @@ -49,8 +51,7 @@ # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # -# source_suffix = ['.rst', '.md'] -source_suffix = ".rst" +source_suffix = [".rst", ".md"] # The master toctree document. master_doc = "index" @@ -165,3 +166,7 @@ # -- Extension configuration ------------------------------------------------- +myst_enable_extensions = [ + "colon_fence", + "deflist", +] diff --git a/sphinx_presentation/source/index.md b/sphinx_presentation/source/index.md new file mode 100644 index 0000000..b6f1cea --- /dev/null +++ b/sphinx_presentation/source/index.md @@ -0,0 +1,43 @@ +# The sheer joy of modern binary packaging + +Scipy 2023 Tutorial + +## Packaging + +Packaging from start to finish including binary extensions using modern tools. + + +## Topics + +```{toctree} +:maxdepth: 1 + +schedule +2_env +3_pyproject +6_ci +``` + + +```{toctree} +:hidden: + +changes +``` + + +## Your Guides + +Henry Schreiner +: Maintainer of scikit-build, scikit-hep, cibuildwheel, build, meson-python, python-metadata, and other packages. + +Matt McCormick: +: Maintainer of dockcross, of Python packages for the Insight Toolkit (ITK) + +Jean-Christophe Fillion-Robin: +: Maintainer of scikit-build, scikit-ci, scikit-ci-addons and python-cmake-buildsystem diff --git a/sphinx_presentation/source/index.rst b/sphinx_presentation/source/index.rst deleted file mode 100644 index f7d3070..0000000 --- a/sphinx_presentation/source/index.rst +++ /dev/null @@ -1,87 +0,0 @@ -.. The Joy of Packaging documentation master file, created by - sphinx-quickstart on Fri Jul 6 14:55:10 2018. - You can adapt this file completely to your liking, but it should at least - contain the root ``toctree`` directive. - -*************************** -The Sheer Joy of Packaging! 
-*************************** - -Scipy 2018 Tutorial - -Packaging -========= - -Packaging from start to finish for both PyPI and conda - -.. slide:: - :level: 3 - - .. warning:: - - The list of changes integrated in the tutorial after it was first - given at the SciPy 2018 conference can be found in the - :ref:`tutorial_content_updates` document. - -.. ifnotslides:: - - .. warning:: - - The list of changes integrated in the tutorial after it was first - given at the SciPy 2018 conference can be found in the - :ref:`tutorial_content_updates` document. - -Topics ------- - -.. toctree:: - :maxdepth: 1 - - schedule - overview - setup_py - uploading_pypi - binaries_dependencies - conda - -If we have time: `conda-forge `_ - -.. toctree:: - :hidden: - - changes - - -Your Guides ------------ - -Michael Sarahan: - Conda-build tech lead, Anaconda, Inc. - -Matt McCormick: - Maintainer of dockcross, of Python packages for the Insight Toolkit (ITK) - -Jean-Christophe Fillion-Robin: - Maintainer of scikit-build, scikit-ci, scikit-ci-addons and python-cmake-buildsystem - - -.. nextslide:: - -Filipe Fernandes: - Conda-forge core team, Maintainer of folium and a variety of libraries for ocean sciences. - -Chris Barker: - Python instructor for the Univ. Washington Continuing Education Program, Contributor to conda-forge project. Lead developer for assorted oceanography / oil spill packages. - -Jonathan Helmus: - Conda-forge core team. Maintainer of Berryconda. Anaconda, Inc. Builds Tensorflow for fun. - - - - -.. Indices and tables -.. ================== - -.. * :ref:`genindex` -.. * :ref:`modindex` -.. * :ref:`search` diff --git a/sphinx_presentation/source/overview.rst b/sphinx_presentation/source/overview.rst deleted file mode 100644 index d9c7578..0000000 --- a/sphinx_presentation/source/overview.rst +++ /dev/null @@ -1,129 +0,0 @@ -.. _overview: - -******** -Overview -******** - - -Packages -======== - - -What is a “package”? 
--------------------- - -* In a broad sense, anything you install using your package manager - -* Some kinds of packages have implied behavior and requirements - -* Unfortunate overloading: python “package”: a folder that python imports - - -Package Managers and Repos --------------------------- - -* Many package managers: some OS specific: - - - apt, yum, dnf, chocolatey, homebrew, etc. - -* Some language specific: - - - NPM, pip, RubyGems - -* And there are many online repositories of packages: - - - PyPI, anaconda.org, CRAN, CPAN - -But they all contain: - -* Some form of dependency management - -* Artifact and/or source repository - -The idea is that you install something, and have it *just work*. - - -Package types: --------------- - -A package can be essentially in two forms: - -* source -* binary - -.. nextslide:: - -Focusing now on the Python world: - -As Python is a dynamic language, this distinction can get a bit blurred: - -There is little difference between a source and binary package *for a pure python package* - -But if there is any compiled code in there, building from source can be a challenge: - - - Binary packages are very helpful - -Source Packages ---------------- - -A source package is all the source code required to build the package. - -Package managers (like pip) can automatically build your package from source. - -**But:** - - - Your system needs the correct tools installed, compilers, build tools, etc - - You need to have the dependencies available - - Sometimes it takes time, sometimes a LONG time - - -Binary Packages ---------------- - -A collection of code all ready to run. - - - Everything is already compiled and ready to go -- makes it easy. - -**But:** - - - It's likely to be platform dependent - - May require dependencies to be installed - -How do you manage that if the dependencies aren't in the same language/system? - - -Python Packaging ----------------- - -There are two package managers widely used for Python. 
- -**pip:** The "official" solution. - - - Pulls packages from PyPI - - - Handles both source and binary packages (wheels) - - - Python only - -.. nextslide:: - -**conda:** Widely used in the scipy community. - - - Pulls packages from anaconda.org - - - Binary only (does not compile code when installing) - - - Supports other languages / libraries: C, Fortran, R, Perl, Java (anything, really) - - - Manages Python itself! - -OS package managers: --------------------- - -* Linux: apt, conda, dnf, homebrew, nix, pacman, spack, yum - -* OS-X: conda, homebrew, macports, spack - -* Windows: chocolatey, conda, cygwin, pacman (msys2) - -Sometimes handle python packages -- but we won't talk about those here. diff --git a/sphinx_presentation/source/schedule.md b/sphinx_presentation/source/schedule.md new file mode 100644 index 0000000..b9a2f66 --- /dev/null +++ b/sphinx_presentation/source/schedule.md @@ -0,0 +1,82 @@ +# Schedule + +0:00: Overview of packaging +: - SDists vs. wheels + - Pure Python vs. 
compiled packages + - Wheel vs conda packages + - PyPI / anaconda.org + - Links packaging documentation such as PyPA, Packaging Native + +0:20: Exercise +: - Identify platforms supported for the xxx packages on PyPI and anaconda.org + +0:25: Virtual environments +: - Setting up a virtual environment + - Setting up a conda environment + - Using a task runner (nox) + +0:45: Exercise writing a noxfile +: - Take existing working package and add a simple noxfile + +0:50: Break & catch up + +1:00: Pyproject.toml +: - Essential specifications + - Optional specifications + - Specifying requirements + - Introduce the concept of "build-backend" + +1:10: Exercise +: - Fill in the missing pieces in a project.toml for a sample package + - Build a source distribution for the package + +1:20: Building and uploading to PyPI: tools and package types +: - Core tools + - Pipx + - build + - twine: the secure way to upload to PyPI + - For consolidated experience & dependency management + - Pdm (https://pdm.fming.dev/latest/) + - May be Hatch (https://hatch.pypa.io) (more like a replacement for tox and nox) + - flit: great for simple packages + - Building a source distribution + - Building a wheel + - Discuss use of delocate/Auditwheel/… + - Difference between linux & manylinux wheels (internalize dependencies, glibc compatibility, …) + +1:45: Worked example/exercise: building a package and uploading to pypi +: - Continuing from the the previous exercise, build a wheel for the package + - Register the package on the pypi testing server + - Upload the built distributions using twine + - Delete one of the uploaded files on pypi and try re-uploading (will fail) + - Introduce the idea of .post releases (it will happen to everyone who uploads) + +1:55: Coffee break + +2:15: Binaries and dependencies: how scikit-build can make life easier +: - Scikit-build overview & motivation + +2:40: Exercise: add CMake project that generates python extension. +: - Tie it into previous python project. 
+ +3:00: Break & catch up + +3:10: Automated building with cloud-based CI services +: - GitHub action + - Pre-commit.yml + - Ruff + - Static analysis + - https://cibuildwheel.readthedocs.io/en/stable/ + +3:30: Exercise: (10 min) +: - Update previous example adding cibuildwheel support + - Linting using pre-commit + ruff + - Automated PyPI release + + +3:40: Handling dependencies +: - "In-project" compilation (pybind11) + - External: see https://github.com/pypa/cibuildwheel/issues/1251#issuecomment-1236364876 for example + +3:50: Exercise +: - Add a dependency to the project diff --git a/sphinx_presentation/source/schedule.rst b/sphinx_presentation/source/schedule.rst deleted file mode 100644 index 9323a50..0000000 --- a/sphinx_presentation/source/schedule.rst +++ /dev/null @@ -1,173 +0,0 @@ -***************** -Tutorial Schedule -***************** - -Outline -======= - -How are we spending our afternoon? - -Agenda ------- - -* 0:00-0:20 Getting setup and overview of packaging -* 0:20-0:45 python packages: the setup.py file -* **Break** -* 1:00-1:30 Building and uploading to PyPI -* 1:30-2:00 Binaries and dependencies -* 2:00-2:45 Exercises -* **Break** -* 3:00-3:15 Conda-build overview -* 3:15-3:45 Exercise -* 3:45-4:00 conda-forge - -0:00-00:10 Getting setup for this Tutorial ------------------------------------------- - -There is a repo for this tutorial here: - -https://github.com/python-packaging-tutorial/python-packaging-tutorial - -or: - -http://bit.ly/JoyOfPackaging - -And the materials are rendered as html here: - -https://python-packaging-tutorial.github.io/python-packaging-tutorial/ - -(linked from the git repo) - -Clone that repo now -- so you can follow along. - -``git clone https://github.com/python-packaging-tutorial/python-packaging-tutorial.git`` - -0:10-00:20 Overview of packaging --------------------------------- - -.. ifnotslides:: - - :ref:`overview` - -* What is a package, anyway? 
-* Source/binary -* Wheel vs conda packages -* PyPI/anaconda.org - - -0:20-0:45 python packages: the setup.py file --------------------------------------------- - -.. ifnotslides:: - - :ref:`setup` - - -* Python packages -- what are they? -* The setup.py file -* Specifying requirements -* When and how to "pin" requirements -* Let's make a package! - - -0:45-1:00 Building and uploading to PyPI ----------------------------------------- - -.. ifnotslides:: - - :ref:`uploading` - -* Packaging Terminology 101 -* Building and publishing a python distribution - - -1:00-1:10 Break ---------------- - -1:10-1:30 Exercises -------------------- - -* Prepare environment. -* Build source distribution and wheel. -* Publish artifacts on PyPI. - - -1:30-2:00 Binaries and dependencies ------------------------------------ - -.. ifnotslides:: - - :ref:`binaries` - -* Why we build Python packages with native binaries: 1) - **performance** and 2) **library integration** -* Different components of the binary build process and their role: - *headers, libraries, compilers, linkers, build systems, system introspection - tools, package managers* -* Basic requirements for binary compatibility: a) **C-runtime library - compatibility** and b) **shared library compatibility** -* Joyous tools: **scikit-build**'s role in coordinating components of the binary - build process and **conda**'s role in resolving dependencies and creating compatible platform binaries - -2:00-2:45 Exercises -------------------- - -* Build a Python package with a C++-based C-extension. -* Build a Python package with a Cython-based C-extension. -* Build a distributable Linux wheel package. - - -2:45-3:00 Break ---------------- - - -3:00-3:15 Conda-build overview ------------------------------- - -.. 
ifnotslides:: - - :ref:`conda_build` - - -3:15-3:30 Exercises -------------------- - -* Write a conda recipe for the sample package from previous exercises (pure python) -* noarch packages -* Upload to anaconda cloud - - -3:30-3:45 Exercises -------------------- - -* Recipe for package with compiled extensions -* Add compiled extension (source will be provided to students) to sample package -* Modify recipe, if needed -* Rebuild the package -* Version pinning (python, numpy) -* Split packages - multi-ecosystem ones -* Compiler packages + pin_downstream -* Interoperation with scikit-build - - -3:45-4:00 Automated building with cloud-based CI services ---------------------------------------------------------- - - -**conda-forge** (optional; as time allows) - -CI service overview & Conda-forge -- what are the pieces and how do they fit together? - -* Recipe format -* staged-recipes -* feedstocks -* Re-rendering and conda-smithy -* Updating package when new version released -* Future direction/community needs -* Invitation to sprints -* Contributing to Conda-forge -* Intro to conda-forge: staged-recipes, maintainer role, contributing to an existing package -* conda-smithy lint/rerender -* Example to go from the conda-skeleton to a PR on staged-recipes -* Comment on some special cases: cython extensions, non-python pkgs, the use of the CIs, etc. -* Exercise: put a package on staged-recipes diff --git a/sphinx_presentation/source/setup_py.rst b/sphinx_presentation/source/setup_py.rst deleted file mode 100644 index 4986d85..0000000 --- a/sphinx_presentation/source/setup_py.rst +++ /dev/null @@ -1,874 +0,0 @@ -.. _setup: - -*********************** -Making a Python Package -*********************** - -Specifying how to build your python package - - -Python Packages -=============== - -What is a "package" in Python ? - - -Packages, modules, imports, oh my! 
----------------------------------- - -**Modules** - -A python "module" is a single namespace, with a collection of values: - - * functions - * constants - * class definitions - * really any old value. - -A module usually corresponds to a single file: ``something.py`` - - -Packages --------- - -A "package" is essentially a module, except it can have other modules (and indeed other packages) inside it. - -A package usually corresponds to a directory with a file in it called ``__init__.py`` and any number of python files or other package directories:: - - a_package - __init__.py - module_a.py - a_sub_package - __init__.py - module_b.py - -.. nextslide:: - -The ``__init__.py`` can be totally empty -- or it can have arbitrary python code in it. - -The code will be run when the package is imported -- just like a module, - -modules inside packages are *not* automatically imported. So, with the above structure:: - - import a_package - -will run the code in ``a_package/__init__.py``. - -.. nextslide:: - -Any names defined in the -``__init__.py`` will be available in:: - - a_package.a_name - -but:: - - a_package.module_a - -will not exist. To get submodules, you need to explicitly import them: - -``import a_package.module_a`` - - -https://docs.python.org/3/tutorial/modules.html#packages - - -The module search path ----------------------- - -The interpreter keeps a list of all the places that it looks for modules or packages when you do an import: - -.. code-block:: python - - import sys - for p in sys.path: - print p - -You can manipulate that list to add or remove paths to let python find modules on a new place. - -And every module has a ``__file__`` name that points to the path it lives in. This lets you add paths relative to where you are, etc. - -*NOTE:* it's usually better to use setuptools' "develop" mode instead -- see below. - - -Building Your Own Package -========================= - -The very basics of what you need to know to make your own package. 
- - -Why Build a Package? --------------------- - -There are a bunch of nifty tools that help you build, install and -distribute packages. - -Using a well structured, standard layout for your package makes it -easy to use those tools. - -Even if you never want to give anyone else your code, a well -structured package simplifies development. - - -What is a Package? --------------------- - -**A collection of modules** - -... and the documentation - -... and the tests - -... and any top-level scripts - -... and any data files required - -... and a way to build and install it... - - -Python packaging tools: ------------------------- - -``distutils``: included with python - -.. code-block:: python - - from distutils.core import setup - -Getting clunky, hard to extend, maybe destined for deprecation ... - -``setuptools``: for extra features, technically third party - -- present in most modern Python installations - -"The Python Packaging Authority" -- PyPA - -https://www.pypa.io/en/latest/ - -setuptools ------------ - -``setuptools`` is an extension to ``distutils`` that provides a number of extensions: - -.. code-block:: python - - from setuptools import setup - -superset of the ``distutils setup`` - -This buys you a bunch of additional functionality: - - * auto-finding packages - * better script installation - * resource (non-code files) management - * **develop mode** - * a LOT more - -http://pythonhosted.org//setuptools/ - - -Where do I go to figure this out? ---------------------------------- - -This is a really good guide: - -Python Packaging User Guide: - -https://packaging.python.org/ - -and a more detailed tutorial: - -http://python-packaging.readthedocs.io/en/latest/ - -**Follow one of them** - -.. nextslide:: - -There is a sample project here: - -https://github.com/pypa/sampleproject - -(this has all the complexity you might need...) - -You can use this as a template for your own packages. 
- -Here is an opinionated update -- a little more fancy, but some good ideas: - -https://blog.ionelmc.ro/2014/05/25/python-packaging/ - - -Basic Package Structure: ------------------------- - -:: - - package_name/ - bin/ - CHANGES.txt - docs/ - LICENSE.txt - MANIFEST.in - README.txt - setup.py - package_name/ - __init__.py - module1.py - module2.py - test/ - __init__.py - test_module1.py - test_module2.py - -.. nextslide:: - -``CHANGES.txt``: log of changes with each release - -``LICENSE.txt``: text of the license you choose (do choose one!) - -``MANIFEST.in``: description of what non-code files to include - -``README.txt``: description of the package -- should be written in ReST -or Markdown (for PyPi): - -``setup.py``: the script for building/installing package. - -.. nextslide:: - -``bin/``: This is where you put top-level scripts - -( some folks use ``scripts`` ) - -``docs/``: the documentation - -``package_name/``: The main package -- this is where the code goes. - -.. nextslide:: - -``test/``: your unit tests. Options here: - -Put it inside the package -- supports :: - - $ pip install package_name - >> import package_name.test - >> package_name.test.runall() - -Or keep it at the top level. - -Some notes on that: - -` Where to put Tests `_ - -The ``setup.py`` File ----------------------- - -Your ``setup.py`` file is what describes your package, and tells setuptools how to package, build and install it - -It is python code, so you can add anything custom you need to it - -But in the simple case, it is essentially declarative. - -http://docs.python.org/3/distutils/ - - -What Does ``setup.py`` Do? --------------------------- - -* Version & package metadata - -* List of packages to include - -* List of other files to include - -* List of dependencies - -* List of extensions to be compiled (if you are not using `scikit-build `_. - - -An example ``setup.py``: ------------------------- - -.. 
code-block:: python - - from setuptools import setup - - setup( - name='PackageName', - version='0.1.0', - author='An Awesome Coder', - author_email='aac@example.com', - packages=['package_name', 'package_name.test'], - scripts=['bin/script1','bin/script2'], - url='http://pypi.python.org/pypi/PackageName/', - license='LICENSE.txt', - description='An awesome package that does something', - long_description=open('README.txt').read(), - install_requires=[ - "Django >= 1.1.1", - "pytest", - ], - ) - - -``setup.cfg`` --------------- - -Provides a way to give the end user some ability to customize the install - -It's an ``ini`` style file:: - - [command] - option=value - ... - -simple to read and write. - -``command`` is one of the Distutils commands (e.g. build_py, install) - -``option`` is one of the options that command supports. - -Note that an option spelled ``--foo-bar`` on the command-line is spelled -``foo_bar`` in configuration files. - - -Running ``setup.py`` --------------------- - -With a ``setup.py`` script defined, setuptools can do a lot: - -Builds a source distribution (a tar archive of all the files needed to build and install the package):: - - python setup.py sdist - -Builds wheels:: - - ./setup.py bdist_wheel - -(you need the wheel package for this to work:) - -``pip install wheel`` - -.. nextslide:: - -Build from source:: - - python setup.py build - -And install:: - - python setup.py install - -Develop mode ------------- - -Install in "develop" or "editable" mode:: - - python setup.py develop - -or:: - - pip install . - - -Under Development ------------------- - -Develop mode is *really*, *really* nice:: - - $ python setup.py develop - -or:: - - $ pip install -e ./ - -(the e stands for "editable" -- it is the same thing) - -.. nextslide:: - -It puts a link (actually ``*.pth`` files) into the python installation to your code, so that your package is installed, but any changes will immediately take effect. 
- -This way all your test code, and client code, etc, can all import your package the usual way. - -No ``sys.path`` hacking - -Good idea to use it for anything more than a single file project. - -.. nextslide:: - -+--------------------------------------+----------------------------------------+ -| Install | Development Install | -+======================================+========================================+ -| Copies package into site-packages | Adds a ``.pth`` file to site-packages, | -| | pointed at package source root | -+--------------------------------------+----------------------------------------+ -| Used when creating conda packages | Used when developing software locally | -+--------------------------------------+----------------------------------------+ -| Normal priority in sys.path | End of ``sys.path`` (only found if | -| | nothing else comes first) | -+--------------------------------------+----------------------------------------+ - - -https://grahamwideman.wikispaces.com/Python-+site-package+dirs+and+.pth+files - - -Aside on pip and dependencies ------------------------------ - -* ``pip`` does not currently have a solver: http://github.com/pypa/pip/issues/988 - -* pip may replace packages in your environment with incompatible versions. Things will break when that happens. - -* use caution (and ideally, disposable environments) when using pip - - -Getting Started With a New Package ----------------------------------- - -For anything but a single-file script (and maybe even then): - -1. Create the basic package structure - -2. Write a ``setup.py`` - -3. ``pip install -e .`` - -4. Put some tests in ``package/test`` - -5. ``pytest`` in the test dir, or ``pytest --pyargs package_name`` - -or use "Cookie Cutter": - -https://cookiecutter.readthedocs.io/en/latest/ - -.. 
_setup_py_exercise_small_example_package: - -Exercise: A Small Example Package ---------------------------------- - -* Create a small package - - - package structure - - - ``setup.py`` - - - ``python setup.py develop`` - - - ``at least one working test`` - -Start with the silly code in the tutorial repo in: - -``python-packaging-tutorial/setup_example/`` - -or you can download a zip file here: - -:download:`capitalize.zip ` - - -capitalize ----------- - -capitalize is a useless little utility that will capitalize the words in a text file. - -But it has the core structure of a python package: - -* a library of "logic code" -* a command line script -* a data file -* tests - -.. nextslide:: - -So let's see what's in there:: - - $ ls - capital_mod.py test_capital_mod.py - cap_data.txt main.py - cap_script.py sample_text_file.txt - - -What are these files? ---------------------- - -``capital_mod.py`` - The core logic code - -``main.py`` - The command line app - -``test_capital_mod.py`` - Test code for the logic - -``cap_script.py`` - top-level script - -``cap_data.txt`` - data file - -``sample_text_file.txt`` - sample example file to test with. - -.. nextslide:: - -Try it out: - -:: - - $ cd capitalize/ - - $ python3 cap_script.py sample_text_file.txt - - Capitalizing: sample_text_file.txt and storing it in - sample_text_file_cap.txt - - I'm done - -So it works, as long as you are in the directory with all the code. - - -Setting up a package structure ------------------------------- - -Create a basic package structure:: - - package_name/ - bin/ - README.txt - setup.py - package_name/ - __init__.py - module1.py - test/ - __init__.py - test_module1.py - -Let's create all that for capitalize: - - -.. nextslide:: - -Make the package: - -.. code-block:: bash - - $ mkdir capitalize - - $ cd capitalize/ - - $ touch __init__.py - -Move the code into it: - -.. code-block:: bash - - $ mv ../capital_mod.py ./ - $ mv ../main.py ./ - -.. 
nextslide:: - -Create a dir for the tests: - -.. code-block:: bash - - $ mkdir test - -Move the tests into that: - -.. code-block:: bash - - $ mv ../test_capital_mod.py test/ - - -.. nextslide:: - -Create a dir for the script: - -.. code-block:: bash - - $ mkdir bin - -Move the script into that: - -.. code-block:: bash - - $ mv ../cap_script.py bin - -Create directory for data: - -.. code-block:: bash - - $ mkdir data - -Move data into that: - -.. code-block:: bash - - $ mv ../cap_data.txt data - -Now we have a package! - -.. nextslide:: - -Let's try it:: - - $ python bin/cap_script.py - Traceback (most recent call last): - File "bin/cap_script.py", line 8, in - import capital_mod - ImportError: No module named capital_mod - -OK, that didn't work. Why not? - -Well, we've moved everytihng around: - -The modules don't know how to find each other. - -Let’s Write a ``setup.py`` --------------------------- - -.. code-block:: python - - #!/usr/bin/env python - - from setuptools import setup - - setup(name='capitalize', - version='1.0', - # list folders, not files - packages=['capitalize', - 'capitalize.test'], - scripts=['capitalize/bin/cap_script.py'], - package_data={'capitalize': ['data/cap_data.txt']}, - ) - - -(remember that a "package" is a folder with a ``__init__.py__`` file) - -That's about the minimum you can do. - -.. nextslide:: - -Save it as ``setup.py`` *outside* the capitalize package dir. - -Install it in "editable" mode: - -.. code-block:: bash - - $ pip install -e ./ - Obtaining file:///Users/chris.barker/HAZMAT/Conferences/SciPy-2018/PackagingTutorial/TutorialDay/capitalize - Installing collected packages: capitalize - Running setup.py develop for capitalize - Successfully installed capitalize - -.. 
nextslide:: - -Try it out:: - - $ cap_script.py - Traceback (most recent call last): - File "/Users/chris.barker/miniconda2/envs/py3/bin/cap_script.py", line 6, in - exec(compile(open(__file__).read(), __file__, 'exec')) - File "/Users/chris.barker/HAZMAT/Conferences/SciPy-2018/PackagingTutorial/TutorialDay/capitalize/capitalize/bin/cap_script.py", line 8, in - import capital_mod - ModuleNotFoundError: No module named 'capital_mod' - -Still didn't work -- why not? - -We need to update some imports. - -.. nextslide:: - -in cap_script.py:: - - import main - import capital_mod - -should be:: - - from capitalize import main - from capitalize import capital_mod - -and similarly in main.py:: - - from capitalize import capital_mod - -.. nextslide:: - -And try it:: - - $ cap_script.py sample_text_file.txt - - Traceback (most recent call last): - File ".../cap_script.py", line 6, in - exec(compile(open(__file__).read(), __file__, 'exec')) - File ".../cap_script.py", line 8, in - from capitalize import capital_mod - File "/.../capital_mod.py", line 35, in - special_words = load_special_words(get_datafile_name()) - File ".../capital_mod.py", line 21, in load_special_words - with open(data_file_name) as data_file: - FileNotFoundError: [Errno 2] No such file or directory: '.../capitalize/cap_data.txt' - -Our script cannot find the data file. We changed it's location but not the path -in the ``capital_mod.py``. - -Let's fix this. On line 32 replace:: - - return Path(__file__).parent / "cap_data.txt" - -with:: - - return Path(__file__).parent / "data/cap_data.txt" - - -Running the tests: ------------------- - -Option 1: cd to the test dir:: - - $ cd capitalize/test/ - - $ pytest - $ =================================== - test session starts - ==================================== - ... 
- - Traceback: - test_capital_mod.py:14: in - import capital_mod - E ModuleNotFoundError: No module named 'capital_mod' - -Whoops -- we need to fix that import, too:: - - from capitalize import capital_mod - -.. nextslide:: - - -And now we're good:: - - $ pytest - ======test session starts ===== - - collected 3 items - - test_capital_mod.py ... - - ============== 3 passed in 0.06 seconds ============ - -.. nextslide:: - -You can also run the tests from anywhere on the command line:: - - $ pytest --pyargs capitalize - - collected 3 items - - capitalize/capitalize/test/test_capital_mod.py ... [100%] - - =============== 3 passed in 0.03 seconds ========== - - - -Making Packages the Easy Way ----------------------------- - -To auto-build a full package structure: - -| - -.. image:: images/cookiecutter.png - - -.. nextslide:: - -Rather than doing it by hand, you can use the nifty "cookie cutter" project: - -https://cookiecutter.readthedocs.io/en/latest/ - -And there are a few templates that can be used with that. - -The core template written by the author: - -https://github.com/audreyr/cookiecutter-pypackage - -And one written by the author of the opinionated blog post above: - -https://github.com/ionelmc/cookiecutter-pylibrary - -Either are great starting points. - -.. nextslide:: - -.. code-block:: bash - - conda install -c conda-forge cookiecutter - -or - -.. code-block:: bash - - pip install cookiecutter - -No time for that now :-( - - -Handling Requirements -===================== - -Only the simplest of packages need only the Python standard library. - - -Requirements in ``setup.py`` ----------------------------- - -.. 
code-block:: python - - #!/usr/bin/env python - from distutils.core import setup - - setup(name='mypkg', - version='1.0', - # list folders, not files - packages=['mypkg', 'mypkg.subpkg'], - install_requires=['click'], - ) - - -Requirements in ``requirements.txt`` ------------------------------------- - -**Common Mistake:** - -* requirements.txt often from pip freeze - -* Pinned way too tightly. OK for env creation, bad for packaging. - -| - -* Donald Stufft (PyPA): `Abstract vs. Concrete dependencies `_ - - -Requirements in ``setup.cfg`` (ideal) -------------------------------------- - -:: - - [metadata] - name = my_package - version = attr: - src.VERSION - - [options] - packages = find: - install_requires = click - - -Parse-able without execution, unlike ``setup.py`` - -`configuring setup using setup cfg files `_ - - -Break time! ------------ - -Up next: producing redistributable artifacts diff --git a/sphinx_presentation/source/uploading_pypi.rst b/sphinx_presentation/source/uploading_pypi.rst deleted file mode 100644 index 6f65cb2..0000000 --- a/sphinx_presentation/source/uploading_pypi.rst +++ /dev/null @@ -1,487 +0,0 @@ -.. _uploading: - -****************************** -Building and Uploading to PyPi -****************************** - -Learning Objectives -=================== - -In the following section we will ... ------------------------------------- - -* Review the packaging terminology -* Understand how to build, package and publish a python package - - -Packaging Terminology 101 -========================= - -Introduction ------------- - -This section reviews the key python packaging concepts and definitions. - - -PyPI ----- - -PyPI is the default `Package Index `_ for the Python community. -It is open to all Python developers to consume and distribute their **distributions**. - -.. 
nextslide:: - -There are two instances of the Package Index: - -* PyPI: Python Package Index hosted at https://pypi.org/ - -* TestPyPI: a separate instance of the Python Package Index (PyPI) that allows you to try out the - distribution tools and process without worrying about affecting the real index. - TestPyPI is hosted at https://test.pypi.org - -Reference: https://packaging.python.org/glossary/#term-python-package-index-pypi - - -pip ---- - -The `PyPA `_ recommended tool for installing Python packages. - -.. nextslide:: - -A multi-faceted tool: - -* It is an *integration frontend* that takes a set of package requirements (e.g. a requirements.txt file) - and attempts to update a working environment to satisfy those requirements. This may require locating, - building, and installing a combination of **distributions**. - -* It is a **build frontend** that can takes arbitrary source trees or source distributions and builds wheels - from them. - -Reference: http://pip.readthedocs.io/ - - -PyPA ----- - -The Python Packaging Authority (PyPA) is a working group that maintains many of the relevant -projects in Python packaging. - -.. nextslide:: - -The associated website https://www.pypa.io references the PyPA Goals, Specifications and Roadmap -as well as `Python Packaging User Guide `_, a collection of tutorials -and references to help you distribute and install Python packages with modern tools. 
- -Reference: https://www.pypa.io - - -Source distribution -------------------- - -* Synonyms: sdist, Source release - -* provides metadata + source files - -* needed for installing - - * by a tool like pip - * or for generating a Built Distribution - -Reference: https://packaging.python.org/glossary/#term-source-distribution-or-sdist - - -Built Distribution ------------------- - -* Synonyms: bdist - -* provides metadata + pre-built files - -* only need to be moved (usually by pip) to the correct locations on the target system - -Reference: https://packaging.python.org/glossary/#term-built-distribution - - -Python Distribution: pure vs non-pure -------------------------------------- - -* **pure**: - - * Not specific to a CPU architecture - * No `ABI (Application Binary Interface) `_ - -.. nextslide:: - -* **non-pure** - - * `ABI `_ - * Platform specific - -Reference: https://packaging.python.org/glossary/#term-module - - -Binary Distribution -------------------- - -* is a **Built Distribution** -* is **non-pure** -* uses platform-specific compiled extensions - -Reference: https://packaging.python.org/glossary/#term-binary-distribution - - -Wheel ------ - -* a **Built Distribution** - -* a ZIP-format archive with .whl extension - - * ``{distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl`` - -* described by `PEP 427 `_ - -Reference: https://packaging.python.org/glossary/#term-wheel - - -Wheels vs. 
Conda packages -------------------------- - -+-------------------------------------+-------------------------------------+ -| Wheels | Conda packages | -+=====================================+=====================================+ -| Employed by pip, blessed by PyPA | Foundation of Anaconda ecosystem | -+-------------------------------------+-------------------------------------+ -| Used by any python installation | Used by conda python installations | -+-------------------------------------+-------------------------------------+ -| Mostly specific to Python ecosystem | General purpose (any ecosystem) | -+-------------------------------------+-------------------------------------+ -| Good mechanism for specifying range | Primitive support for multiple | -| of python compatibility | python versions (noarch) | -+-------------------------------------+-------------------------------------+ -| Depends on static linking or other | Can bundle core system-level shared | -| system package managers to provide | libraries as packages, and resolve | -| core libraries | dependencies | -+-------------------------------------+-------------------------------------+ - - -To learn more about Conda, see :ref:`conda_build` section. - - -Virtual Environment -------------------- - -An isolated Python environment that allows packages to be installed for use by a -particular application, rather than being installed system wide. - -Learn more reading `Creating Virtual Environments `_ - - -Build system ------------- - -Synonym: Build backend - -* `setuptools `_ associated with the `wheel `_ package - form the default build system. They support the creation of source and **built distributions** based on a ``setup.py`` and - optionally a ``setup.cfg`` file. - -* `flit `_ is an alternative backend allowing to also create (and also publish) - **built distributions**. - - -Python Package Lifecycle ------------------------- - -.. 
image:: images/python-package-life-cycle.png - - -Tutorial -======== - -Introduction ------------- - -This section discusses how to build python packages (or distributions) and publish -them in a central repository to streamline their installation. Finally, we conclude -with exercises where we publish a package with the `Test Python Package Index `_. - - -Creating an environment ------------------------ - -Before developing or building your distribution, we highly recommend to create a -dedicated environment. This is supported by both ``conda`` and ``pip``. - - -Building a source distribution ------------------------------- - -By leveraging the ``setup.py`` script, setuptools can build a source -distribution (a tar archive of all the files needed to build and install the package): - -.. code-block:: bash - - $ python setup.py sdist - - $ ls -1 dist - SomePackage-1.0.tar.gz - - -Building a wheel ----------------- - -.. code-block:: bash - - $ pip wheel . -w dist - - $ ls -1 dist - SomePackage-1.0-py2.py3-none-any.whl - - -.. nextslide:: - -This is equivalent to: - -.. code-block:: bash - - $ python setup.py bdist_wheel - - -Installing a wheel ------------------- - -* Install a package from PyPI: - -.. code-block:: bash - - $ pip install SomePackage - [...] - Successfully installed SomePackage - -.. nextslide:: - -.. _install_wheel_from_testpypi: - -* Install a package from TestPyPI: - -.. code-block:: bash - - $ pip install -i https://test.pypi.org/simple SomePackage - [...] - Successfully installed SomePackage - -.. nextslide:: - -* Install a package file: - -.. code-block:: bash - - $ pip install SomePackage-1.0-py2.py3-none-any.whl - [...] - Successfully installed SomePackage - -For more details, see `QuickStart guide from pip documentation `_. - - -Installing a source distribution --------------------------------- - -.. code-block:: bash - - $ pip install SomePackage-1.0.tar.gz - [...] 
- Successfully installed SomePackage - -It transparently builds the associated wheel and install it. - - -Publishing to PyPI ------------------- - -`twine `_ utility is used for publishing -Python packages on PyPI. - -It is available as both a conda and a pypi package. - -Learn more reading `Using TestPiPY `_. - - -Exercises -========= - -Exercise 1: Prepare environment -------------------------------- - -* In the context of this tutorial, because participants already `installed miniconda `_, - we will create a conda environment and install packages using ``conda install SomePackage``. - -.. code-block:: bash - - # create and activate a dedicated environment named "hello-pypi" - conda create -n hello-pypi -y -c conda-forge - conda activate hello-pypi - - # install pip, wheel and twine - conda install -y twine wheel pip - -.. nextslide:: - -* Create an account on TestPyPI (https://test.pypi.org/account/register/) - - -Exercise 2: Build source distribution and wheel ------------------------------------------------ - -* `Download `_ (or - `checkout `_ using git) the sources - of our ``hello-pypi`` sample project: - -.. code-block:: bash - - conda install -y wget - wget https://github.com/python-packaging-tutorial/hello-pypi/archive/master.zip - - -.. nextslide:: - -* Extract sources - -.. code-block:: bash - - conda install -y unzip - unzip master.zip - cd hello-pypi-master - -.. nextslide:: - -* Modify package name so that it is unique - -.. nextslide:: - -* Then, build the source distribution: - - -.. code-block:: bash - - $ python setup.py sdist - - -* And finally, build the wheel: - -.. code-block:: bash - - $ pip wheel . -w dist - -* Make sure artifacts have been generated in the ``dist`` subdirectory. - - -Exercise 3: Publish artifacts on PyPI -------------------------------------- - -.. 
code-block:: bash - - $ twine upload --repository-url https://test.pypi.org/legacy/ dist/* - - -Bonus Exercise 4: Publish artifacts automating authentication -------------------------------------------------------------- - -* Delete ``hello-pypi-master`` directory and extract archive again. - -* Update name of package and rebuild source distribution and wheel. - -.. nextslide:: - -* Create file ``.pypirc`` in your home directory with the following content: - -:: - - [distutils] - index-servers= - pypi - testpypi - - [testpypi] - repository: https://test.pypi.org/legacy/ - username: your testpypi username - password: your testpypi password - - [pypi] - username: your testpypi username - password: your testpypi password - -.. nextslide:: - -* Publish package on TestPyPI: - -.. code-block:: bash - - $ twine upload --repository testpypi dist/* - - -Omitting the ``-repository testpypi`` argument allows to upload -to the regular PyPI server. - - -Bonus Exercise 5: Setting up continuous integration ---------------------------------------------------- - -* See branch `master-with-ci `_ - branch associated with ``hello-pypi`` example. - - -Resources -========= - -Where do I go to figure this out? ---------------------------------- - -This is a really good guide: - -Python Packaging User Guide: - -https://packaging.python.org/ - -and a more detailed tutorial: - -http://python-packaging.readthedocs.io/en/latest/ - -**Follow one of them** - -.. nextslide:: - -There is a sample project here: - -https://github.com/pypa/sampleproject - -(this has all the complexity you might need...) - -You can use this as a template for your own packages. - -.. nextslide:: - -Here is an opinionated update -- a little more fancy, but some good ideas: - -https://blog.ionelmc.ro/2014/05/25/python-packaging/ - -.. nextslide:: - -Rather than doing it by hand, you can use the nifty "cookie cutter" project: - -https://cookiecutter.readthedocs.io/en/latest/ - -.. 
nextslide:: - -And there are a few templates that can be used with that. - -The core template written by the author: - -https://github.com/audreyr/cookiecutter-pypackage - -And one written by the author of the opinionated blog post above: - -https://github.com/ionelmc/cookiecutter-pylibrary - -Either are great starting points. From d2729b81787fd674af709429145e00dd9caff6d0 Mon Sep 17 00:00:00 2001 From: Henry Schreiner Date: Tue, 28 Feb 2023 14:35:29 -0500 Subject: [PATCH 4/5] feat: adding stubs Signed-off-by: Henry Schreiner --- sphinx_presentation/source/1_overview.md | 7 +++++++ sphinx_presentation/source/3_pyproject.md | 2 +- sphinx_presentation/source/4_tools.md | 12 ++++++++++++ sphinx_presentation/source/5_binary.md | 6 ++++++ sphinx_presentation/source/6_ci.md | 15 +++++++++++---- sphinx_presentation/source/7_dependencies.md | 5 +++++ sphinx_presentation/source/index.md | 8 +++----- sphinx_presentation/source/schedule.md | 1 - 8 files changed, 45 insertions(+), 11 deletions(-) create mode 100644 sphinx_presentation/source/1_overview.md create mode 100644 sphinx_presentation/source/4_tools.md create mode 100644 sphinx_presentation/source/5_binary.md create mode 100644 sphinx_presentation/source/7_dependencies.md diff --git a/sphinx_presentation/source/1_overview.md b/sphinx_presentation/source/1_overview.md new file mode 100644 index 0000000..d12fe8c --- /dev/null +++ b/sphinx_presentation/source/1_overview.md @@ -0,0 +1,7 @@ +# Overview of packaging for Python + +## SDists vs. wheels +## Pure Python vs. 
compiled packages +## Wheel vs conda packages +## PyPI / anaconda.org +## Links packaging documentation such as PyPA, Packaging Native diff --git a/sphinx_presentation/source/3_pyproject.md b/sphinx_presentation/source/3_pyproject.md index 0fc2462..2648e19 100644 --- a/sphinx_presentation/source/3_pyproject.md +++ b/sphinx_presentation/source/3_pyproject.md @@ -1,4 +1,4 @@ -# The pyproject.toml +# The pyproject.toml file Much research software is initially developed by hacking away in an interactive setting, such as in a [Jupyter Notebook](https://jupyter.org) or a Python shell. diff --git a/sphinx_presentation/source/4_tools.md b/sphinx_presentation/source/4_tools.md new file mode 100644 index 0000000..02f5678 --- /dev/null +++ b/sphinx_presentation/source/4_tools.md @@ -0,0 +1,12 @@ +# Tools for building and uploading +## Core tools +### Pipx +### build +### twine: the secure way to upload to PyPI +## For consolidated experience & dependency management +### Pdm +### Hatch +## Building a source distribution +## Building a wheel +## Discuss use of delocate/Auditwheel/... +## Difference between linux & manylinux wheels diff --git a/sphinx_presentation/source/5_binary.md b/sphinx_presentation/source/5_binary.md new file mode 100644 index 0000000..88cc1b0 --- /dev/null +++ b/sphinx_presentation/source/5_binary.md @@ -0,0 +1,6 @@ +# Binary extensions + + +## Scikit-build overview & motivation + + diff --git a/sphinx_presentation/source/6_ci.md b/sphinx_presentation/source/6_ci.md index 5b2dba1..ac6813d 100644 --- a/sphinx_presentation/source/6_ci.md +++ b/sphinx_presentation/source/6_ci.md @@ -4,10 +4,12 @@ Continuous Integration (CI) allows you to perform tasks on a server for various events on your repository (called triggers). For example, you can use GitHub Actions (GHA) to run a test suite on every pull request. +## GitHub Actions + GHA is made up of workflows which consist of actions. Workflows are files in the `.github/workflows` folder ending in `.yml`. 
-## Triggers +### Triggers Workflows start with triggers, which define when things run. Here are three triggers: @@ -24,7 +26,7 @@ This will run on all pull requests and pushes to main. You can also specify specific branches for pull requests instead of running on all PRs (will run on PRs targeting those branches only). -## Running unit tests +### Running unit tests Let's set up a basic test. We will define a jobs dict, with a single job named "tests". For all jobs, you need to select an image to run on - there are images @@ -58,7 +60,7 @@ This has five steps: By default, if any step fails, the run immediately quits and fails. -## Running in a matrix +### Running in a matrix You can parametrize values, such as Python version or operating system. Do do this, make a `strategy: matrix:` dict. Every key in that dict (except `include:` @@ -122,7 +124,7 @@ parametrization; this will add a key to an existing job. The `exclude:` key does the opposite, and lets you remove jobs from the matrix. -## Other actions +### Other actions GitHub Actions has the concept of actions, which are just GitHub repositories of the form `org/name@tag`, and there are lots of useful actions to choose from (and you can write your own by composing other actions, or you can also create them with JavaScript or Dockerfiles). Here are a few: @@ -146,6 +148,11 @@ And many other useful ones: - [ruby/setup-miniconda](https://github.com/ruby/setup-ruby) Setup Ruby if you need it for something. +## Pre-commit + +## Building wheels with cibuildwheel + + ## Exercise Add a CI file for your package. 
diff --git a/sphinx_presentation/source/7_dependencies.md b/sphinx_presentation/source/7_dependencies.md new file mode 100644 index 0000000..821b50c --- /dev/null +++ b/sphinx_presentation/source/7_dependencies.md @@ -0,0 +1,5 @@ +# Handling Dependencies +## "In-project" compilation (pybind11) +## External + +See https://github.com/pypa/cibuildwheel/issues/1251#issuecomment-1236364876 for example diff --git a/sphinx_presentation/source/index.md b/sphinx_presentation/source/index.md index b6f1cea..2725fe5 100644 --- a/sphinx_presentation/source/index.md +++ b/sphinx_presentation/source/index.md @@ -13,16 +13,14 @@ Packaging from start to finish including binary extensions using modern tools. :maxdepth: 1 schedule +1_overview 2_env 3_pyproject -6_ci -``` - +``` ```{toctree} :hidden: diff --git a/sphinx_presentation/source/schedule.md b/sphinx_presentation/source/schedule.md index b9a2f66..807205f 100644 --- a/sphinx_presentation/source/schedule.md +++ b/sphinx_presentation/source/schedule.md @@ -38,7 +38,6 @@ - For consolidated experience & dependency management - Pdm (https://pdm.fming.dev/latest/) - May be Hatch (https://hatch.pypa.io) (more like a replacement for tox and nox) - - flit: great for simple packages - Building a source distribution - Building a wheel - Discuss use of delocate/Auditwheel/… From 464e20b9e0d4251252cfbd679e8da6f45a48b352 Mon Sep 17 00:00:00 2001 From: Henry Schreiner Date: Tue, 28 Feb 2023 16:24:10 -0500 Subject: [PATCH 5/5] chore: some updates for submission Signed-off-by: Henry Schreiner --- sphinx_presentation/source/schedule.md | 39 +++++++++++++++----------- 1 file changed, 22 insertions(+), 17 deletions(-) diff --git a/sphinx_presentation/source/schedule.md b/sphinx_presentation/source/schedule.md index 807205f..41e514c 100644 --- a/sphinx_presentation/source/schedule.md +++ b/sphinx_presentation/source/schedule.md @@ -7,15 +7,15 @@ - PyPI / anaconda.org - Links packaging documentation such as PyPA, Packaging Native -0:20: Exercise 
+0:15: Exercise : - Identify platforms supported for the xxx packages on PyPI and anaconda.org -0:25: Virtual environments +0:20: Virtual environments : - Setting up a virtual environment - Setting up a conda environment - Using a task runner (nox) -0:45: Exercise writing a noxfile +0:30: Exercise writing a noxfile : - Take existing working package and add a simple noxfile 0:50: Break & catch up @@ -43,39 +43,44 @@ - Discuss use of delocate/Auditwheel/… - Difference between linux & manylinux wheels (internalize dependencies, glibc compatibility, …) -1:45: Worked example/exercise: building a package and uploading to pypi +1:35: Worked example/exercise: building a package and uploading to pypi : - Continuing from the the previous exercise, build a wheel for the package - Register the package on the pypi testing server - Upload the built distributions using twine - Delete one of the uploaded files on pypi and try re-uploading (will fail) - Introduce the idea of .post releases (it will happen to everyone who uploads) -1:55: Coffee break +1:45: Coffee break -2:15: Binaries and dependencies: how scikit-build can make life easier +2:05: Binaries and dependencies: how scikit-build can make life easier : - Scikit-build overview & motivation + - Adding a minimal CMakeLists.txt + - Building the extension + - Adding options and controlling the build -2:40: Exercise: add CMake project that generates python extension. +2:30: Exercise: add CMake project that generates python extension. : - Tie it into previous python project. 
+ - Setup build caching -3:00: Break & catch up +2:50: Break & catch up -3:10: Automated building with cloud-based CI services +3:00: Automated building with cloud-based CI services : - GitHub action - Pre-commit.yml - - Ruff - - Static analysis + - Ruff - https://cibuildwheel.readthedocs.io/en/stable/ -3:30: Exercise: (10 min) +3:15: Exercise: : - Update previous example adding cibuildwheel support - - Linting using pre-commit + ruff + - Linting using pre-commit + Ruff - Automated PyPI release -3:40: Handling dependencies -: - "In-project" compilation (pybind11) - - External: see https://github.com/pypa/cibuildwheel/issues/1251#issuecomment-1236364876 for example +3:30: Handling dependencies +: - "In-project" compilation + - External -3:50: Exercise +3:45: Exercise : - Add a dependency to the project + - pybind11 (in-project) + - lz4 (external)