diff --git a/.cirrus.yml b/.cirrus.yml new file mode 100644 index 00000000..0aff9456 --- /dev/null +++ b/.cirrus.yml @@ -0,0 +1,30 @@ +build_and_store_wheels: &BUILD_AND_STORE_WHEELS + install_cibuildwheel_script: + - python -m pip install cibuildwheel==2.16.2 + run_cibuildwheel_script: + - cibuildwheel + wheels_artifacts: + path: "wheelhouse/*" + + # Upload only for tagged commit + only_if: $CIRRUS_TAG != '' + publish_script: + - python -m pip install twine + - python -m twine upload --repository-url https://upload.pypi.org/legacy/ --username __token__ wheelhouse/*.whl + + +linux_aarch64_task: + name: Build Linux aarch64 wheels. + compute_engine_instance: + image_project: cirrus-images + image: family/docker-builder-arm64 + architecture: arm64 + platform: linux + cpu: 4 + memory: 4G + environment: + TWINE_PASSWORD: ENCRYPTED[ade2037764e68fea251152f7585f3f77cdd748af06dc0f06942c45a8a8770fff19032c985f8dc193229c8adb2c0fecb9] + + install_pre_requirements_script: + - apt install -y python3-venv python-is-python3 + <<: *BUILD_AND_STORE_WHEELS diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 00000000..0fd988d9 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,15 @@ +# These are supported funding model platforms + +github: syoyo # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry +polar: # Replace with a single Polar username +buy_me_a_coffee: # Replace with a single Buy Me a Coffee username +thanks_dev: # Replace with a single thanks.dev username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml new file mode 100644 index 00000000..b37104a2 --- /dev/null +++ b/.github/workflows/wheels.yml @@ -0,0 +1,115 @@ +name: Build and upload to PyPI + +# Build on every branch push, tag push, and pull request change: +on: [push, pull_request] + +jobs: + + build_wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + fetch-tags: true # Optional, use if you use setuptools_scm + + - name: Build wheels + uses: pypa/cibuildwheel@v2.16.5 + # to supply options, put them in 'env', like: + # env: + # CIBW_SOME_OPTION: value + # Disable building PyPy wheels on all platforms + env: + CIBW_ARCHS_MACOS: "x86_64 universal2 arm64" + CIBW_ARCHS_WINDOWS: "AMD64 x86" + # disable aarm64 build since its too slow to build(docker + qemu) + CIBW_ARCHS_LINUX: "x86_64 i686" + # it looks cibuildwheel fails to add version string to wheel file for python 3.6, so skip it + CIBW_SKIP: pp* + + - uses: actions/upload-artifact@v4 + with: + name: cibw-wheels-${{ matrix.os }}-${{ strategy.job-index }} + path: ./wheelhouse/*.whl + + # It looks cibuildwheels did not clean build folder(CMake), and it results to Windows arm64 build failure(trying to reuse x86 build of .obj) + # So supply 
a separate build job for the Windows ARM64 build.
+  # TODO: clean the build folder using CIBW_BEFORE_ALL?
+  build_win_arm64_wheels:
+    name: Build ARM64 wheels on Windows
+    runs-on: windows-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+          fetch-tags: true  # Optional, use if you use setuptools_scm
+
+      - name: Build wheels
+        uses: pypa/cibuildwheel@v2.16.5
+        # to supply options, put them in 'env', like:
+        # env:
+        #   CIBW_SOME_OPTION: value
+        # Disable building PyPy wheels on all platforms
+        env:
+          CIBW_ARCHS_WINDOWS: "ARM64"
+          CIBW_SKIP: pp*
+
+      - uses: actions/upload-artifact@v4
+        with:
+          name: cibw-wheels-windows-arm64
+          path: ./wheelhouse/*.whl
+
+  make_sdist:
+    name: Make SDist
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0  # Optional, use if you use setuptools_scm
+          fetch-tags: true  # Optional, use if you use setuptools_scm
+
+      - name: Build SDist
+        run: pipx run build --sdist
+
+      - uses: actions/upload-artifact@v4
+        with:
+          name: cibw-sdist
+          path: dist/*.tar.gz
+
+  upload_all:
+    needs: [build_wheels, build_win_arm64_wheels, make_sdist]
+    runs-on: ubuntu-latest
+    environment: release
+    permissions:
+      # IMPORTANT: this permission is mandatory for trusted publishing
+      id-token: write
+    # Upload to PyPI on every tag starting with 'v'.
+    # NOTE: Without the github.event_name & github.ref check, the `upload_all` job is still triggered on a 'main' branch push
+    # (which then fails with 'Branch "main" is not allowed to deploy to release due to environment protection rules.'),
+    # so keep the event_name and github.ref check.
+    # TODO: Make this work using only the GitHub `environment` feature.
+    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/v')
+    # Alternatively, to publish when a GitHub Release is created, use the following rule:
+    # if: github.event_name == 'release' && github.event.action == 'published'
+    steps:
+      - uses: actions/download-artifact@v4
+        with:
+          pattern: cibw-*
+          path: dist
+          merge-multiple: true
+
+      - uses: pypa/gh-action-pypi-publish@release/v1
+        with:
+          # Use the Trusted Publisher feature:
+          # https://docs.pypi.org/trusted-publishers/
+          # so PYPI_API_TOKEN is not needed.
+          #password: ${{ secrets.PYPI_API_TOKEN }}
+          #
+          # Avoid a race condition when using multiple CIs.
+          skip-existing: true
+          verbose: true
diff --git a/.gitignore b/.gitignore
index cd219d8e..f9b4d691 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,3 +4,5 @@ build/
 /python/*.egg-info
 /python/.eggs
 /python/tiny_obj_loader.h
+/tests/tester
+/tests/tester.dSYM
diff --git a/BUILD.bazel b/BUILD.bazel
new file mode 100644
index 00000000..dda60c76
--- /dev/null
+++ b/BUILD.bazel
@@ -0,0 +1,9 @@
+cc_library(
+    name = "tinyobjloader",
+    hdrs = ["tiny_obj_loader.h"],
+    copts = select({
+        "@platforms//os:windows": [],
+        "//conditions:default": ["-Wno-maybe-uninitialized"],
+    }),
+    visibility = ["//visibility:public"],
+)
diff --git a/CMakeLists.txt b/CMakeLists.txt
index f64b42ce..9aea91fa 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,13 +1,23 @@
 #Tiny Object Loader Cmake configuration file.
 #This configures the Cmake system with multiple properties, depending
 #on the platform and configuration it is set to build in.
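The cibuildwheel jobs above only verify that the wheels build; an import-level smoke test can additionally be wired in through cibuildwheel's `CIBW_TEST_COMMAND` option. A minimal sketch, assuming the v2 `ObjReader` API that `python/sample.py` uses (hypothetical test script, not part of this PR):

```python
# Hypothetical smoke test for a built wheel, e.g. run via CIBW_TEST_COMMAND.
# Assumes the ObjReader / ParseFromFile API exposed by the Python binding.
import tempfile

import tinyobjloader

OBJ = "v 0 0 0\nv 1 0 0\nv 0 1 0\nf 1 2 3\n"

with tempfile.NamedTemporaryFile("w", suffix=".obj", delete=False) as f:
    f.write(OBJ)
    path = f.name

reader = tinyobjloader.ObjReader()
if not reader.ParseFromFile(path):
    raise SystemExit("parse failed: " + reader.Error())

attrib = reader.GetAttrib()
assert len(attrib.vertices) == 9  # 3 vertices * (x, y, z)
assert len(reader.GetShapes()) == 1
print("wheel smoke test OK")
```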
-project(tinyobjloader) -cmake_minimum_required(VERSION 3.2) +cmake_minimum_required(VERSION 3.16) +project(tinyobjloader CXX) set(TINYOBJLOADER_SOVERSION 2) -set(TINYOBJLOADER_VERSION 2.0.0-rc.9) +set(TINYOBJLOADER_VERSION 2.0.0-rc.13) +set(PY_TARGET "pytinyobjloader") #optional double precision support option(TINYOBJLOADER_USE_DOUBLE "Build library with double precision instead of single (float)" OFF) +option(TINYOBJLOADER_WITH_PYTHON "Build Python module(for developer). Use pyproject.toml/setup.py to build Python module for end-users" OFF) +option(TINYOBJLOADER_PREFER_LOCAL_PYTHON_INSTALLATION + "Prefer locally-installed Python interpreter than system or conda/brew installed Python. Please specify your Python interpreter with `Python3_EXECUTABLE` cmake option if you enable this option." + OFF) + +list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake) +list(APPEND CMAKE_MODULE_PATH ${PROJECT_SOURCE_DIR}/cmake/sanitizers) +find_package(Sanitizers) # Address sanitizer (-DSANITIZE_ADDRESS=ON) + if(TINYOBJLOADER_USE_DOUBLE) set(LIBRARY_NAME ${PROJECT_NAME}_double) @@ -15,7 +25,6 @@ else() set(LIBRARY_NAME ${PROJECT_NAME}) endif() - #Folder Shortcuts set(TINYOBJLOADEREXAMPLES_DIR ${CMAKE_CURRENT_SOURCE_DIR}/examples) @@ -46,7 +55,36 @@ set(TINYOBJLOADER_RUNTIME_DIR ${CMAKE_INSTALL_BINDIR}) option(TINYOBJLOADER_BUILD_TEST_LOADER "Build Example Loader Application" OFF) +set(CMAKE_CXX_STANDARD 11) +set(CMAKE_CXX_STANDARD_REQUIRED ON) +set(CMAKE_CXX_EXTENSIONS OFF) + +# Build standalone .so for Python binding(for developer) +if (TINYOBJLOADER_WITH_PYTHON) + + if(TINYOBJLOADER_PREFER_LOCAL_PYTHON_INSTALLATION) + #message(STATUS "Local Python") + set(Python3_FIND_FRAMEWORK NEVER) # Do not search framework python + set(Python3_FIND_STRATEGY LOCATION) + set(Python3_FIND_REGISTRY NEVER) # Windows only + else() + set(Python3_FIND_FRAMEWORK LAST + )# Prefer Brew/Conda to Apple framework python + endif() + + find_package( + Python3 + COMPONENTS Interpreter Development + REQUIRED) + + find_package(pybind11 CONFIG REQUIRED) + +endif() + + + add_library(${LIBRARY_NAME} ${tinyobjloader-Source}) +add_sanitizers(${LIBRARY_NAME}) if(BUILD_SHARED_LIBS) set_target_properties(${LIBRARY_NAME} PROPERTIES @@ -85,6 +123,24 @@ if(TINYOBJLOADER_BUILD_OBJ_STICHER) ) endif() +if (TINYOBJLOADER_WITH_PYTHON) + # pybind11 method: + pybind11_add_module(${PY_TARGET} ${CMAKE_SOURCE_DIR}/python/bindings.cc ${CMAKE_SOURCE_DIR}/python/tiny_obj_loader.cc) + + add_sanitizers(${PY_TARGET}) + set_target_properties(${PY_TARGET} PROPERTIES OUTPUT_NAME "tinyobjloader") + + # copy .so to jdepp/ + add_custom_command( + TARGET ${PY_TARGET} + POST_BUILD + COMMAND "${CMAKE_COMMAND}" -E copy "$" + "${CMAKE_SOURCE_DIR}/python/$" + COMMENT "copying tinyobjloader native python module file to python/" + VERBATIM) + +endif() + #Write CMake package config files include(CMakePackageConfigHelpers) diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 00000000..d2632da5 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,8 @@ +include pyproject.toml +include setup.py +include README.md +include LICENSE +include python/sample.py +include python/bindings.cc +include python/tiny_obj_loader.cc +include tiny_obj_loader.h diff --git a/MODULE.bazel b/MODULE.bazel new file mode 100644 index 00000000..f8859286 --- /dev/null +++ b/MODULE.bazel @@ -0,0 +1,9 @@ +module( + name = "tinyobjloader", + compatibility_level = 1, +) + +bazel_dep( + name = "platforms", + version = "0.0.8", +) diff --git a/README.md b/README.md index 143e0426..8569a352 100644 --- 
a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # tinyobjloader -[![Build Status](https://travis-ci.org/tinyobjloader/tinyobjloader.svg?branch=master)](https://travis-ci.org/tinyobjloader/tinyobjloader) +[![PyPI version](https://badge.fury.io/py/tinyobjloader.svg)](https://badge.fury.io/py/tinyobjloader) [![AZ Build Status](https://dev.azure.com/tinyobjloader/tinyobjloader/_apis/build/status/tinyobjloader.tinyobjloader?branchName=master)](https://dev.azure.com/tinyobjloader/tinyobjloader/_build/latest?definitionId=1&branchName=master) @@ -14,19 +14,19 @@ Tiny but powerful single file wavefront obj loader written in C++03. No dependen `tinyobjloader` is good for embedding .obj loader to your (global illumination) renderer ;-) -If you are looking for C89 version, please see https://github.com/syoyo/tinyobjloader-c . +If you are looking for C99 version, please see https://github.com/syoyo/tinyobjloader-c . Version notice -------------- -We recommend to use `master`(`main`) branch. Its v2.0 release candidate. Most features are now nearly robust and stable(Remaining task for release v2.0 is polishing C++ and Python API). +We recommend to use `master`(`main`) branch. Its v2.0 release candidate. Most features are now nearly robust and stable(Remaining task for release v2.0 is polishing C++ and Python API, and fix built-in triangulation code). We have released new version v1.0.0 on 20 Aug, 2016. Old version is available as `v0.9.x` branch https://github.com/syoyo/tinyobjloader/tree/v0.9.x ## What's new -* 29 Jul, 2021 : Added Mapbox's earcut for robust triangulation. Also fixes triangulation bug. +* 29 Jul, 2021 : Added Mapbox's earcut for robust triangulation. Also fixes triangulation bug(still there is some issue in built-in triangulation algorithm: https://github.com/tinyobjloader/tinyobjloader/issues/319). * 19 Feb, 2020 : The repository has been moved to https://github.com/tinyobjloader/tinyobjloader ! * 18 May, 2019 : Python binding!(See `python` folder. Also see https://pypi.org/project/tinyobjloader/) * 14 Apr, 2019 : Bump version v2.0.0 rc0. New C++ API and python bindings!(1.x API still exists for backward compatibility) @@ -72,6 +72,9 @@ TinyObjLoader is successfully used in ... * liblava - A modern C++ and easy-to-use framework for the Vulkan API. [MIT]: https://github.com/liblava/liblava * rtxON - Simple Vulkan raytracing tutorials https://github.com/iOrange/rtxON * metal-ray-tracer - Writing ray-tracer using Metal Performance Shaders https://github.com/sergeyreznik/metal-ray-tracer https://sergeyreznik.github.io/metal-ray-tracer/index.html +* Supernova Engine - 2D and 3D projects with Lua or C++ in data oriented design: https://github.com/supernovaengine/supernova +* AGE (Arc Game Engine) - An open-source engine for building 2D & 3D real-time rendering and interactive contents: https://github.com/MohitSethi99/ArcGameEngine +* [Wicked Engine](https://github.com/turanszkij/WickedEngine) - 3D engine with modern graphics * Your project here! (Letting us know via github issue is welcome!) ### Old version(v0.9.x) @@ -106,10 +109,7 @@ TinyObjLoader is successfully used in ... * Vertex color(as an extension: https://blender.stackexchange.com/questions/31997/how-can-i-get-vertex-painted-obj-files-to-import-into-blender) * Texcoord * Normal -* Material - * Unknown material attributes are returned as key-value(value is string) map. * Crease tag('t'). This is OpenSubdiv specific(not in wavefront .obj specification) -* PBR material extension for .MTL. 
Its proposed here: http://exocortex.com/blog/extending_wavefront_mtl_to_support_pbr * Callback API for custom loading. * Double precision support(for HPC application). * Smoothing group @@ -126,12 +126,16 @@ TinyObjLoader is successfully used in ... * [ ] surface. * [ ] Free form curve/surfaces +### Material + +* PBR material extension for .MTL. Please see [pbr-mtl.md](pbr-mtl.md) for details. +* Texture options +* Unknown material attributes are returned as key-value(value is string) map. ## TODO * [ ] Fix obj_sticker example. * [ ] More unit test codes. -* [x] Texture options ## License @@ -150,7 +154,7 @@ One option is to simply copy the header file into your project and to make sure ### Building tinyobjloader - Using vcpkg(not recommended though) -Alghouth it is not a recommended way, you can download and install tinyobjloader using the [vcpkg](https://github.com/Microsoft/vcpkg) dependency manager: +Although it is not a recommended way, you can download and install tinyobjloader using the [vcpkg](https://github.com/Microsoft/vcpkg) dependency manager: git clone https://github.com/Microsoft/vcpkg.git cd vcpkg @@ -246,7 +250,7 @@ You can enable `double(64bit)` precision by using `TINYOBJLOADER_USE_DOUBLE` def When you enable `triangulation`(default is enabled), TinyObjLoader triangulate polygons(faces with 4 or more vertices). -Built-in trinagulation code may not work well in some polygon shape. +Built-in triangulation code may not work well in some polygon shape. You can define `TINYOBJLOADER_USE_MAPBOX_EARCUT` for robust triangulation using `mapbox/earcut.hpp`. This requires C++11 compiler though. And you need to copy `mapbox/earcut.hpp` to your project. @@ -256,7 +260,7 @@ If you have your own `mapbox/earcut.hpp` file incuded in your project, you can d ```c++ #define TINYOBJLOADER_IMPLEMENTATION // define this in only *one* .cc -// Optional. define TINYOBJLOADER_USE_MAPBOX_EARCUT gives robust trinagulation. Requires C++11 +// Optional. define TINYOBJLOADER_USE_MAPBOX_EARCUT gives robust triangulation. Requires C++11 //#define TINYOBJLOADER_USE_MAPBOX_EARCUT #include "tiny_obj_loader.h" @@ -328,7 +332,7 @@ for (size_t s = 0; s < shapes.size(); s++) { ```c++ #define TINYOBJLOADER_IMPLEMENTATION // define this in only *one* .cc -// Optional. define TINYOBJLOADER_USE_MAPBOX_EARCUT gives robust trinagulation. Requires C++11 +// Optional. define TINYOBJLOADER_USE_MAPBOX_EARCUT gives robust triangulation. Requires C++11 //#define TINYOBJLOADER_USE_MAPBOX_EARCUT #include "tiny_obj_loader.h" @@ -413,18 +417,25 @@ Here is some benchmark result. Time are measured on MacBook 12(Early 2016, Core ## Python binding +``` +$ python -m pip install tinyobjloader +``` + +See [python/sample.py](python/sample.py) for example use of Python binding of tinyobjloader. + ### CI + PyPI upload -cibuildwheels + twine upload for each git tagging event is handled in Azure Pipeline. +cibuildwheels + twine upload for each git tagging event is handled in Github Actions and Cirrus CI(arm builds). #### How to bump version(For developer) +* Apply `black` to python files(`python/sample.py`) * Bump version in CMakeLists.txt -* Update version in `python/setup.py` -* Commit and push `master`. Confirm C.I. build is OK. +* Commit and push `release`. Confirm C.I. build is OK. * Create tag starting with `v`(e.g. `v2.1.0`) * `git push --tags` - * cibuildwheels + pypi upload(through twine) will be automatically triggered in Azure Pipeline. + * version settings is automatically handled in python binding through setuptools_scm. 
+ * cibuildwheels + pypi upload(through twine) will be automatically triggered in Github Actions + Cirrus CI. ## Tests diff --git a/azure-pipelines.yml b/azure-pipelines.yml index b9c4fcd5..2580f172 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -1,8 +1,18 @@ +# +# Python wheels build is now done in Github Actions + Cirrus CI(for arm build) +# so python build is disabled in Azure pipelines. +# + variables: # https://cibuildwheel.readthedocs.io/en/stable/cpp_standards/ - # python2.7 + C++11(pybind11) is not available. - CIBW_SKIP: "cp27-win* pp27-win32 pp36-win32" + # cibuildwheel now supports python 3.6+(as of 2022 Oct) + #CIBW_SKIP: "pp*" CIBW_BEFORE_BUILD: "pip install pybind11" + CIBW_ARCHS_LINUXBEFORE_BUILD: "pip install pybind11" + # disable aarch64 build for a while since it(pulling docker aarch64 image) exceeds Azure's 60 min limit + # NOTE: aarch64 linux support in Azure pipeline is not yet officially supported(as of 2022 Oct) https://github.com/microsoft/azure-pipelines-agent/issues/3935 + #CIBW_ARCHS_LINUX: auto aarch64 + CIBW_ARCHS_MACOS: x86_64 universal2 arm64 #CIBW_BEFORE_BUILD_MACOS: "pip install -U pip setuptools" #CIBW_BEFORE_BUILD_LINUX: "pip install -U pip setuptools" #CIBW_TEST_COMMAND: TODO "python -c \"import tinyobjloader; tinyobjloader.test()\"" @@ -24,132 +34,127 @@ jobs: steps: - task: UsePythonVersion@0 - script: | - pip install black==19.10b0 + # 19.10b0 triggers 'cannot import name '_unicodefun' from 'click'' error. + # https://stackoverflow.com/questions/71673404/importerror-cannot-import-name-unicodefun-from-click + #pip install black==19.10b0 + #pip install black==22.3.0 + pip install black==22.10.0 + black --check python/ displayName: Check Python code format - # Ubuntu16.04 seems now deprecated(as of 2021/12/01), - # so use `ubuntu-latest` - - job: linux - pool: {vmImage: "ubuntu-latest"} - steps: - - task: UsePythonVersion@0 - - bash: | - python3 -m pip install --upgrade pip - pip3 install cibuildwheel twine - # Make the header files available to the build. - cp *.h python - mkdir python/mapbox - cp mapbox/earcut.hpp python/mapbox/ - cd python - - # Source dist - python3 setup.py sdist - ls -la dist/* - - # build binary wheels - cibuildwheel --output-dir wheelhouse . - - - task: CopyFiles@2 - inputs: - contents: 'python/wheelhouse/**' - targetFolder: $(Build.ArtifactStagingDirectory) - - - task: CopyFiles@2 - inputs: - contents: 'python/dist/**' - targetFolder: $(Build.ArtifactStagingDirectory) - - - task: PublishBuildArtifacts@1 - inputs: - path: $(Build.ArtifactStagingDirectory) - artifactName: tinyobjDeployLinux - - - job: macos - pool: {vmImage: 'macOS-10.15'} - variables: - # Support C++11: https://github.com/joerick/cibuildwheel/pull/156 - MACOSX_DEPLOYMENT_TARGET: 10.9 - steps: - - task: UsePythonVersion@0 - - bash: | - python3 -m pip install --upgrade pip - pip3 install cibuildwheel - # Make the header files available to the build. - cp *.h python - mkdir python/mapbox - cp mapbox/earcut.hpp python/mapbox/earcut.hpp - cd python - cibuildwheel --output-dir wheelhouse . - - task: CopyFiles@2 - inputs: - contents: 'python/wheelhouse/*.whl' - targetFolder: $(Build.ArtifactStagingDirectory) - - task: PublishBuildArtifacts@1 - inputs: - path: $(Build.ArtifactStagingDirectory) - artifactName: tinyobjDeployMacOS - - - job: windows - pool: {vmImage: 'windows-2019'} - steps: - - task: UsePythonVersion@0 - - bash: | - python -m pip install --upgrade pip - pip install cibuildwheel - # Make the header files available to the build. 
- cp *.h python - mkdir python/mapbox - cp mapbox/earcut.hpp python/mapbox/ - cd python - cibuildwheel --output-dir wheelhouse . - - task: CopyFiles@2 - inputs: - contents: 'python/wheelhouse/*.whl' - targetFolder: $(Build.ArtifactStagingDirectory) - - task: PublishBuildArtifacts@1 - inputs: - path: $(Build.ArtifactStagingDirectory) - artifactName: tinyobjDeployWindows - - - job: deployPyPI - # Based on vispy: https://github.com/vispy/vispy/blob/master/azure-pipelines.yml - pool: {vmImage: 'Ubuntu-16.04'} - condition: and(succeeded(), startsWith(variables['Build.SourceBranch'], 'refs/tags/v')) - dependsOn: - - linux - - macos - - windows - steps: - - task: UsePythonVersion@0 - - # TODO(syoyo): Use buildType: specific to download multiple artifacts at once? - - task: DownloadBuildArtifacts@0 - inputs: - artifactName: 'tinyobjDeployLinux' - downloadPath: $(Pipeline.Workspace) - - - task: DownloadBuildArtifacts@0 - inputs: - artifactName: 'tinyobjDeployMacOS' - downloadPath: $(Pipeline.Workspace) - - - task: DownloadBuildArtifacts@0 - inputs: - artifactName: 'tinyobjDeployWindows' - downloadPath: $(Pipeline.Workspace) - - # Publish to PyPI through twine - - bash: | - cd $(Pipeline.Workspace) - find . - python -m pip install --upgrade pip - pip install twine - echo tinyobjDeployLinux/python/dist/* - echo tinyobjDeployLinux/python/wheelhouse/* tinyobjDeployMacOS/python/wheelhouse/* tinyobjDeployWindows/python/wheelhouse/* - twine upload -u "__token__" --skip-existing tinyobjDeployLinux/python/dist/* tinyobjDeployLinux/python/wheelhouse/* tinyobjDeployMacOS/python/wheelhouse/* tinyobjDeployWindows/python/wheelhouse/* - env: - TWINE_PASSWORD: $(pypiToken2) + # Disabled: python build + ## + ## Ubuntu16.04 seems now deprecated(as of 2021/12/01), + ## so use `ubuntu-latest` + #- job: linux + # pool: {vmImage: "ubuntu-latest"} + # steps: + # - task: UsePythonVersion@0 + # - bash: | + # python3 -m pip install --upgrade pip + # pip3 install cibuildwheel twine + + # # Use pipx to build source dist + # pip3 install pipx + + # # Source dist + # pipx run build --sdist + # ls -la dist/* + + # # build binary wheels + # cibuildwheel --platform linux --output-dir wheelhouse . + + # - task: CopyFiles@2 + # inputs: + # contents: 'wheelhouse/**' + # targetFolder: $(Build.ArtifactStagingDirectory) + + # - task: CopyFiles@2 + # inputs: + # contents: 'dist/**' + # targetFolder: $(Build.ArtifactStagingDirectory) + + # - task: PublishBuildArtifacts@1 + # inputs: + # path: $(Build.ArtifactStagingDirectory) + # artifactName: tinyobjDeployLinux + + #- job: macos + # pool: {vmImage: 'macOS-latest'} + # variables: + # # Support C++11: https://github.com/joerick/cibuildwheel/pull/156 + # MACOSX_DEPLOYMENT_TARGET: 10.9 + # steps: + # - task: UsePythonVersion@0 + # - bash: | + # python3 -m pip install --upgrade pip + # pip3 install cibuildwheel + # cibuildwheel --platform macos --output-dir wheelhouse . + # - task: CopyFiles@2 + # inputs: + # contents: 'wheelhouse/*.whl' + # targetFolder: $(Build.ArtifactStagingDirectory) + # - task: PublishBuildArtifacts@1 + # inputs: + # path: $(Build.ArtifactStagingDirectory) + # artifactName: tinyobjDeployMacOS + + #- job: windows + # pool: {vmImage: 'windows-latest'} + # steps: + # - task: UsePythonVersion@0 + # - bash: | + # python -m pip install --upgrade pip + # pip install cibuildwheel + # cibuildwheel --platform windows --output-dir wheelhouse . 
+ # - task: CopyFiles@2 + # inputs: + # contents: 'wheelhouse/*.whl' + # targetFolder: $(Build.ArtifactStagingDirectory) + # - task: PublishBuildArtifacts@1 + # inputs: + # path: $(Build.ArtifactStagingDirectory) + # artifactName: tinyobjDeployWindows + + #- job: deployPyPI + # # Based on vispy: https://github.com/vispy/vispy/blob/master/azure-pipelines.yml + # pool: {vmImage: 'ubuntu-latest'} + # condition: and(succeeded(), startsWith(variables['Build.SourceBranch'], 'refs/tags/v')) + # dependsOn: + # - linux + # - macos + # - windows + # steps: + # - task: UsePythonVersion@0 + + # # TODO(syoyo): Use buildType: specific to download multiple artifacts at once? + # - task: DownloadBuildArtifacts@0 + # inputs: + # artifactName: 'tinyobjDeployLinux' + # downloadPath: $(Pipeline.Workspace) + + # - task: DownloadBuildArtifacts@0 + # inputs: + # artifactName: 'tinyobjDeployMacOS' + # downloadPath: $(Pipeline.Workspace) + + # - task: DownloadBuildArtifacts@0 + # inputs: + # artifactName: 'tinyobjDeployWindows' + # downloadPath: $(Pipeline.Workspace) + + # # Publish to PyPI through twine + # - bash: | + # cd $(Pipeline.Workspace) + # find . + # python -m pip install --upgrade pip + # pip install twine + # echo tinyobjDeployLinux/dist/* + # echo tinyobjDeployLinux/wheelhouse/* tinyobjDeployMacOS/wheelhouse/* tinyobjDeployWindows/wheelhouse/* + # twine upload -u "__token__" --skip-existing tinyobjDeployLinux/dist/* tinyobjDeployLinux/wheelhouse/* tinyobjDeployMacOS/wheelhouse/* tinyobjDeployWindows/wheelhouse/* + # env: + # TWINE_PASSWORD: $(pypiToken2) trigger: branches: diff --git a/bootstrap-cmake-linux-with-pyhthon.sh b/bootstrap-cmake-linux-with-pyhthon.sh new file mode 100755 index 00000000..96cf4bf6 --- /dev/null +++ b/bootstrap-cmake-linux-with-pyhthon.sh @@ -0,0 +1,20 @@ +curdir=`pwd` + +builddir=${curdir}/build_python_module + +rm -rf ${builddir} +mkdir ${builddir} + +# set path to pybind11 +# If you install pybind11 through pip, its usually installed to /pybind11. +pybind11_path=`python -c "import site; print (site.getsitepackages()[0])"` +echo ${pybind11_path} + +CC=clang CXX=clang++ \ + pybind11_DIR=${pybind11_path}/pybind11 \ + cmake \ + -B${builddir} \ + -DCMAKE_VERBOSE_MAKEFILE=1 \ + -DTINYOBJLOADER_WITH_PYTHON=1 + +cd ${curdir} diff --git a/cmake/ClangClCMakeCompileRules.cmake b/cmake/ClangClCMakeCompileRules.cmake new file mode 100644 index 00000000..a3bcf1c2 --- /dev/null +++ b/cmake/ClangClCMakeCompileRules.cmake @@ -0,0 +1,9 @@ +# macOS paths usually start with /Users/*. Unfortunately, clang-cl interprets +# paths starting with /U as macro undefines, so we need to put a -- before the +# input file path to force it to be treated as a path. CMake's compilation rules +# should be tweaked accordingly, but until that's done, and to support older +# CMake versions, overriding compilation rules works well enough. This file will +# be included by cmake after the default compilation rules have already been set +# up, so we can just modify them instead of duplicating them entirely. 
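The bootstrap script above derives `pybind11_DIR` from `site.getsitepackages()`, which can point at the wrong location inside virtual environments. pybind11 also ships a helper that reports its CMake config directory directly; a small sketch of that alternative (not used by the script as written):

```python
# Print pybind11's CMake package directory, suitable for -Dpybind11_DIR=...
# Equivalent to `python -m pybind11 --cmakedir`.
import pybind11

print(pybind11.get_cmake_dir())
```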
+string(REPLACE "-c " "-c -- " CMAKE_C_COMPILE_OBJECT "${CMAKE_C_COMPILE_OBJECT}") +string(REPLACE "-c " "-c -- " CMAKE_CXX_COMPILE_OBJECT "${CMAKE_CXX_COMPILE_OBJECT}") diff --git a/cmake/aarch64-linux-gnu.toolchain b/cmake/aarch64-linux-gnu.toolchain new file mode 100644 index 00000000..cdcdaf25 --- /dev/null +++ b/cmake/aarch64-linux-gnu.toolchain @@ -0,0 +1,14 @@ +set(CMAKE_SYSTEM_NAME Linux) +set(CMAKE_SYSTEM_PROCESSOR aarch64) +set(CMAKE_C_COMPILER_TARGET aarch64-linux-gnu) + +set(CMAKE_FIND_ROOT_PATH /usr/aarch64-linux-gnu/) + +# Sync with GitHub Actions config +set(CMAKE_C_COMPILER aarch64-linux-gnu-gcc) +set(CMAKE_CXX_COMPILER aarch64-linux-gnu-g++) + +set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) diff --git a/cmake/clang-cl-msvc-windows.cmake b/cmake/clang-cl-msvc-windows.cmake new file mode 100644 index 00000000..e2eac142 --- /dev/null +++ b/cmake/clang-cl-msvc-windows.cmake @@ -0,0 +1,327 @@ +# From llvm/cmake/platforms/WinMsvc.cmake +# Modified to use clang-cl on native Windows. + +# Cross toolchain configuration for using clang-cl on non-Windows hosts to +# target MSVC. +# +# Usage: +# cmake -G Ninja +# -DCMAKE_TOOLCHAIN_FILE=/path/to/this/file +# -DHOST_ARCH=[aarch64|arm64|armv7|arm|i686|x86|x86_64|x64] +# -DLLVM_NATIVE_TOOLCHAIN=/path/to/llvm/installation +# -DMSVC_BASE=/path/to/MSVC/system/libraries/and/includes +# -DWINSDK_BASE=/path/to/windows-sdk +# -DWINSDK_VER=windows sdk version folder name +# +# HOST_ARCH: +# The architecture to build for. +# +# LLVM_NATIVE_TOOLCHAIN: +# *Absolute path* to a folder containing the toolchain which will be used to +# build. At a minimum, this folder should have a bin directory with a +# copy of clang-cl, clang, clang++, and lld-link, as well as a lib directory +# containing clang's system resource directory. +# +# MSVC_BASE: +# *Absolute path* to the folder containing MSVC headers and system libraries. +# The layout of the folder matches that which is intalled by MSVC 2017 on +# Windows, and should look like this: +# +# ${MSVC_BASE} +# include +# vector +# stdint.h +# etc... +# lib +# x64 +# libcmt.lib +# msvcrt.lib +# etc... +# x86 +# libcmt.lib +# msvcrt.lib +# etc... +# +# For versions of MSVC < 2017, or where you have a hermetic toolchain in a +# custom format, you must use symlinks or restructure it to look like the above. +# +# WINSDK_BASE: +# Together with WINSDK_VER, determines the location of Windows SDK headers +# and libraries. +# +# WINSDK_VER: +# Together with WINSDK_BASE, determines the locations of Windows SDK headers +# and libraries. +# +# WINSDK_BASE and WINSDK_VER work together to define a folder layout that matches +# that of the Windows SDK installation on a standard Windows machine. It should +# match the layout described below. +# +# Note that if you install Windows SDK to a windows machine and simply copy the +# files, it will already be in the correct layout. +# +# ${WINSDK_BASE} +# Include +# ${WINSDK_VER} +# shared +# ucrt +# um +# windows.h +# etc... +# Lib +# ${WINSDK_VER} +# ucrt +# x64 +# x86 +# ucrt.lib +# etc... +# um +# x64 +# x86 +# kernel32.lib +# etc +# +# IMPORTANT: In order for this to work, you will need a valid copy of the Windows +# SDK and C++ STL headers and libraries on your host. 
Additionally, since the +# Windows libraries and headers are not case-correct, this toolchain file sets +# up a VFS overlay for the SDK headers and case-correcting symlinks for the +# libraries when running on a case-sensitive filesystem. + + +# When configuring CMake with a toolchain file against a top-level CMakeLists.txt, +# it will actually run CMake many times, once for each small test program used to +# determine what features a compiler supports. Unfortunately, none of these +# invocations share a CMakeCache.txt with the top-level invocation, meaning they +# won't see the value of any arguments the user passed via -D. Since these are +# necessary to properly configure MSVC in both the top-level configuration as well as +# all feature-test invocations, we set environment variables with the values so that +# these environments get inherited by child invocations. We can switch to +# CMAKE_TRY_COMPILE_PLATFORM_VARIABLES once our minimum supported CMake version +# is 3.6 or greater. +function(init_user_prop prop) + if(${prop}) + set(ENV{_${prop}} "${${prop}}") + else() + set(${prop} "$ENV{_${prop}}" PARENT_SCOPE) + endif() +endfunction() + +function(generate_winsdk_vfs_overlay winsdk_include_dir output_path) + set(include_dirs) + file(GLOB_RECURSE entries LIST_DIRECTORIES true "${winsdk_include_dir}/*") + foreach(entry ${entries}) + if(IS_DIRECTORY "${entry}") + list(APPEND include_dirs "${entry}") + endif() + endforeach() + + file(WRITE "${output_path}" "version: 0\n") + file(APPEND "${output_path}" "case-sensitive: false\n") + file(APPEND "${output_path}" "roots:\n") + + foreach(dir ${include_dirs}) + file(GLOB headers RELATIVE "${dir}" "${dir}/*.h") + if(NOT headers) + continue() + endif() + + file(APPEND "${output_path}" " - name: \"${dir}\"\n") + file(APPEND "${output_path}" " type: directory\n") + file(APPEND "${output_path}" " contents:\n") + + foreach(header ${headers}) + file(APPEND "${output_path}" " - name: \"${header}\"\n") + file(APPEND "${output_path}" " type: file\n") + file(APPEND "${output_path}" " external-contents: \"${dir}/${header}\"\n") + endforeach() + endforeach() +endfunction() + +function(generate_winsdk_lib_symlinks winsdk_um_lib_dir output_dir) + execute_process(COMMAND "${CMAKE_COMMAND}" -E make_directory "${output_dir}") + file(GLOB libraries RELATIVE "${winsdk_um_lib_dir}" "${winsdk_um_lib_dir}/*") + foreach(library ${libraries}) + string(TOLOWER "${library}" all_lowercase_symlink_name) + if(NOT library STREQUAL all_lowercase_symlink_name) + execute_process(COMMAND "${CMAKE_COMMAND}" + -E create_symlink + "${winsdk_um_lib_dir}/${library}" + "${output_dir}/${all_lowercase_symlink_name}") + endif() + + get_filename_component(name_we "${library}" NAME_WE) + get_filename_component(ext "${library}" EXT) + string(TOLOWER "${ext}" lowercase_ext) + set(lowercase_ext_symlink_name "${name_we}${lowercase_ext}") + if(NOT library STREQUAL lowercase_ext_symlink_name AND + NOT all_lowercase_symlink_name STREQUAL lowercase_ext_symlink_name) + execute_process(COMMAND "${CMAKE_COMMAND}" + -E create_symlink + "${winsdk_um_lib_dir}/${library}" + "${output_dir}/${lowercase_ext_symlink_name}") + endif() + endforeach() +endfunction() + +set(CMAKE_SYSTEM_NAME Windows) +set(CMAKE_SYSTEM_VERSION 10.0) +set(CMAKE_SYSTEM_PROCESSOR AMD64) + +init_user_prop(HOST_ARCH) +init_user_prop(LLVM_NATIVE_TOOLCHAIN) +init_user_prop(MSVC_BASE) +init_user_prop(WINSDK_BASE) +init_user_prop(WINSDK_VER) + +if(NOT HOST_ARCH) + set(HOST_ARCH x86_64) +endif() +if(HOST_ARCH STREQUAL "aarch64" OR HOST_ARCH 
STREQUAL "arm64") + set(TRIPLE_ARCH "aarch64") + set(WINSDK_ARCH "arm64") +elseif(HOST_ARCH STREQUAL "armv7" OR HOST_ARCH STREQUAL "arm") + set(TRIPLE_ARCH "armv7") + set(WINSDK_ARCH "arm") +elseif(HOST_ARCH STREQUAL "i686" OR HOST_ARCH STREQUAL "x86") + set(TRIPLE_ARCH "i686") + set(WINSDK_ARCH "x86") +elseif(HOST_ARCH STREQUAL "x86_64" OR HOST_ARCH STREQUAL "x64") + set(TRIPLE_ARCH "x86_64") + set(WINSDK_ARCH "x64") +else() + message(SEND_ERROR "Unknown host architecture ${HOST_ARCH}. Must be aarch64 (or arm64), armv7 (or arm), i686 (or x86), or x86_64 (or x64).") +endif() + +set(MSVC_INCLUDE "${MSVC_BASE}/include") +set(ATLMFC_INCLUDE "${MSVC_BASE}/atlmfc/include") +set(MSVC_LIB "${MSVC_BASE}/lib") +set(ATLMFC_LIB "${MSVC_BASE}/atlmfc/lib") +set(WINSDK_INCLUDE "${WINSDK_BASE}/Include/${WINSDK_VER}") +set(WINSDK_LIB "${WINSDK_BASE}/Lib/${WINSDK_VER}") + +# Do some sanity checking to make sure we can find a native toolchain and +# that the Windows SDK / MSVC STL directories look kosher. +if(NOT EXISTS "${LLVM_NATIVE_TOOLCHAIN}/bin/clang-cl.exe" OR + NOT EXISTS "${LLVM_NATIVE_TOOLCHAIN}/bin/lld-link.exe") + message(SEND_ERROR + "LLVM_NATIVE_TOOLCHAIN folder '${LLVM_NATIVE_TOOLCHAIN}' does not " + "point to a valid directory containing bin/clang-cl.exe and bin/lld-link.exe " + "binaries") +endif() + +if(NOT EXISTS "${MSVC_BASE}" OR + NOT EXISTS "${MSVC_INCLUDE}" OR + NOT EXISTS "${MSVC_LIB}") + message(SEND_ERROR + "CMake variable MSVC_BASE must point to a folder containing MSVC " + "system headers and libraries") +endif() + +if(NOT EXISTS "${WINSDK_BASE}" OR + NOT EXISTS "${WINSDK_INCLUDE}" OR + NOT EXISTS "${WINSDK_LIB}") + message(SEND_ERROR + "CMake variable WINSDK_BASE and WINSDK_VER must resolve to a valid " + "Windows SDK installation") +endif() + +if(NOT EXISTS "${WINSDK_INCLUDE}/um/Windows.h") + message(SEND_ERROR "Cannot find Windows.h") +endif() +if(NOT EXISTS "${WINSDK_INCLUDE}/um/WINDOWS.H") + set(case_sensitive_filesystem TRUE) +endif() + +set(CMAKE_C_COMPILER "${LLVM_NATIVE_TOOLCHAIN}/bin/clang-cl.exe" CACHE FILEPATH "") +set(CMAKE_CXX_COMPILER "${LLVM_NATIVE_TOOLCHAIN}/bin/clang-cl.exe" CACHE FILEPATH "") +set(CMAKE_LINKER "${LLVM_NATIVE_TOOLCHAIN}/bin/lld-link.exe" CACHE FILEPATH "") + +# Even though we're cross-compiling, we need some native tools (e.g. llvm-tblgen), and those +# native tools have to be built before we can start doing the cross-build. LLVM supports +# a CROSS_TOOLCHAIN_FLAGS_NATIVE argument which consists of a list of flags to pass to CMake +# when configuring the NATIVE portion of the cross-build. By default we construct this so +# that it points to the tools in the same location as the native clang-cl that we're using. +list(APPEND _CTF_NATIVE_DEFAULT "-DCMAKE_ASM_COMPILER=${LLVM_NATIVE_TOOLCHAIN}/bin/clang") +list(APPEND _CTF_NATIVE_DEFAULT "-DCMAKE_C_COMPILER=${LLVM_NATIVE_TOOLCHAIN}/bin/clang") +list(APPEND _CTF_NATIVE_DEFAULT "-DCMAKE_CXX_COMPILER=${LLVM_NATIVE_TOOLCHAIN}/bin/clang++") + +set(CROSS_TOOLCHAIN_FLAGS_NATIVE "${_CTF_NATIVE_DEFAULT}" CACHE STRING "") + +set(COMPILE_FLAGS + -D_CRT_SECURE_NO_WARNINGS + --target=${TRIPLE_ARCH}-windows-msvc + -fms-compatibility-version=19.11 + -imsvc "\"${ATLMFC_INCLUDE}\"" + -imsvc "\"${MSVC_INCLUDE}\"" + -imsvc "\"${WINSDK_INCLUDE}/ucrt\"" + -imsvc "\"${WINSDK_INCLUDE}/shared\"" + -imsvc "\"${WINSDK_INCLUDE}/um\"" + -imsvc "\"${WINSDK_INCLUDE}/winrt\"") + +if(case_sensitive_filesystem) + # Ensure all sub-configures use the top-level VFS overlay instead of generating their own. 
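The overlay referenced here is produced by the `generate_winsdk_vfs_overlay()` function defined earlier in this file. For illustration only, the same logic expressed as a Python sketch (the toolchain itself stays pure CMake):

```python
import os

def generate_winsdk_vfs_overlay(winsdk_include_dir, output_path):
    # Emit a clang VFS overlay that lets the case-incorrect Windows SDK
    # headers resolve on a case-sensitive filesystem.
    lines = ["version: 0", "case-sensitive: false", "roots:"]
    for root, _dirs, files in os.walk(winsdk_include_dir):
        headers = [f for f in files if f.lower().endswith(".h")]
        if not headers:
            continue
        lines.append(f'  - name: "{root}"')
        lines.append("    type: directory")
        lines.append("    contents:")
        for header in headers:
            lines.append(f'      - name: "{header}"')
            lines.append("        type: file")
            lines.append(f'        external-contents: "{root}/{header}"')
    with open(output_path, "w") as out:
        out.write("\n".join(lines) + "\n")
```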
+ init_user_prop(winsdk_vfs_overlay_path) + if(NOT winsdk_vfs_overlay_path) + set(winsdk_vfs_overlay_path "${CMAKE_BINARY_DIR}/winsdk_vfs_overlay.yaml") + generate_winsdk_vfs_overlay("${WINSDK_BASE}/Include/${WINSDK_VER}" "${winsdk_vfs_overlay_path}") + init_user_prop(winsdk_vfs_overlay_path) + endif() + list(APPEND COMPILE_FLAGS + -Xclang -ivfsoverlay -Xclang "${winsdk_vfs_overlay_path}") +endif() + +string(REPLACE ";" " " COMPILE_FLAGS "${COMPILE_FLAGS}") + +# We need to preserve any flags that were passed in by the user. However, we +# can't append to CMAKE_C_FLAGS and friends directly, because toolchain files +# will be re-invoked on each reconfigure and therefore need to be idempotent. +# The assignments to the _INITIAL cache variables don't use FORCE, so they'll +# only be populated on the initial configure, and their values won't change +# afterward. +set(_CMAKE_C_FLAGS_INITIAL "${CMAKE_C_FLAGS}" CACHE STRING "") +set(CMAKE_C_FLAGS "${_CMAKE_C_FLAGS_INITIAL} ${COMPILE_FLAGS}" CACHE STRING "" FORCE) + +set(_CMAKE_CXX_FLAGS_INITIAL "${CMAKE_CXX_FLAGS}" CACHE STRING "") +set(CMAKE_CXX_FLAGS "${_CMAKE_CXX_FLAGS_INITIAL} ${COMPILE_FLAGS}" CACHE STRING "" FORCE) + +set(LINK_FLAGS + # Prevent CMake from attempting to invoke mt.exe. It only recognizes the slashed form and not the dashed form. + /manifest:no + + -libpath:"${ATLMFC_LIB}/${WINSDK_ARCH}" + -libpath:"${MSVC_LIB}/${WINSDK_ARCH}" + -libpath:"${WINSDK_LIB}/ucrt/${WINSDK_ARCH}" + -libpath:"${WINSDK_LIB}/um/${WINSDK_ARCH}") + +if(case_sensitive_filesystem) + # Ensure all sub-configures use the top-level symlinks dir instead of generating their own. + init_user_prop(winsdk_lib_symlinks_dir) + if(NOT winsdk_lib_symlinks_dir) + set(winsdk_lib_symlinks_dir "${CMAKE_BINARY_DIR}/winsdk_lib_symlinks") + generate_winsdk_lib_symlinks("${WINSDK_BASE}/Lib/${WINSDK_VER}/um/${WINSDK_ARCH}" "${winsdk_lib_symlinks_dir}") + init_user_prop(winsdk_lib_symlinks_dir) + endif() + list(APPEND LINK_FLAGS + -libpath:"${winsdk_lib_symlinks_dir}") +endif() + +string(REPLACE ";" " " LINK_FLAGS "${LINK_FLAGS}") + +# See explanation for compiler flags above for the _INITIAL variables. +set(_CMAKE_EXE_LINKER_FLAGS_INITIAL "${CMAKE_EXE_LINKER_FLAGS}" CACHE STRING "") +set(CMAKE_EXE_LINKER_FLAGS "${_CMAKE_EXE_LINKER_FLAGS_INITIAL} ${LINK_FLAGS}" CACHE STRING "" FORCE) + +set(_CMAKE_MODULE_LINKER_FLAGS_INITIAL "${CMAKE_MODULE_LINKER_FLAGS}" CACHE STRING "") +set(CMAKE_MODULE_LINKER_FLAGS "${_CMAKE_MODULE_LINKER_FLAGS_INITIAL} ${LINK_FLAGS}" CACHE STRING "" FORCE) + +set(_CMAKE_SHARED_LINKER_FLAGS_INITIAL "${CMAKE_SHARED_LINKER_FLAGS}" CACHE STRING "") +set(CMAKE_SHARED_LINKER_FLAGS "${_CMAKE_SHARED_LINKER_FLAGS_INITIAL} ${LINK_FLAGS}" CACHE STRING "" FORCE) + +# CMake populates these with a bunch of unnecessary libraries, which requires +# extra case-correcting symlinks and what not. Instead, let projects explicitly +# control which libraries they require. +set(CMAKE_C_STANDARD_LIBRARIES "" CACHE STRING "" FORCE) +set(CMAKE_CXX_STANDARD_LIBRARIES "" CACHE STRING "" FORCE) + +# Allow clang-cl to work with macOS paths. +set(CMAKE_USER_MAKE_RULES_OVERRIDE "${CMAKE_CURRENT_LIST_DIR}/ClangClCMakeCompileRules.cmake") diff --git a/cmake/clang-cl-msvc-wsl.cmake b/cmake/clang-cl-msvc-wsl.cmake new file mode 100644 index 00000000..ffe21314 --- /dev/null +++ b/cmake/clang-cl-msvc-wsl.cmake @@ -0,0 +1,327 @@ +# From llvm/cmake/platforms/WinMsvc.cmake +# Modified to use clang-cl on native Windows. 
+ +# Cross toolchain configuration for using clang-cl on non-Windows hosts to +# target MSVC. +# +# Usage: +# cmake -G Ninja +# -DCMAKE_TOOLCHAIN_FILE=/path/to/this/file +# -DHOST_ARCH=[aarch64|arm64|armv7|arm|i686|x86|x86_64|x64] +# -DLLVM_NATIVE_TOOLCHAIN=/path/to/llvm/installation +# -DMSVC_BASE=/path/to/MSVC/system/libraries/and/includes +# -DWINSDK_BASE=/path/to/windows-sdk +# -DWINSDK_VER=windows sdk version folder name +# +# HOST_ARCH: +# The architecture to build for. +# +# LLVM_NATIVE_TOOLCHAIN: +# *Absolute path* to a folder containing the toolchain which will be used to +# build. At a minimum, this folder should have a bin directory with a +# copy of clang-cl, clang, clang++, and lld-link, as well as a lib directory +# containing clang's system resource directory. +# +# MSVC_BASE: +# *Absolute path* to the folder containing MSVC headers and system libraries. +# The layout of the folder matches that which is intalled by MSVC 2017 on +# Windows, and should look like this: +# +# ${MSVC_BASE} +# include +# vector +# stdint.h +# etc... +# lib +# x64 +# libcmt.lib +# msvcrt.lib +# etc... +# x86 +# libcmt.lib +# msvcrt.lib +# etc... +# +# For versions of MSVC < 2017, or where you have a hermetic toolchain in a +# custom format, you must use symlinks or restructure it to look like the above. +# +# WINSDK_BASE: +# Together with WINSDK_VER, determines the location of Windows SDK headers +# and libraries. +# +# WINSDK_VER: +# Together with WINSDK_BASE, determines the locations of Windows SDK headers +# and libraries. +# +# WINSDK_BASE and WINSDK_VER work together to define a folder layout that matches +# that of the Windows SDK installation on a standard Windows machine. It should +# match the layout described below. +# +# Note that if you install Windows SDK to a windows machine and simply copy the +# files, it will already be in the correct layout. +# +# ${WINSDK_BASE} +# Include +# ${WINSDK_VER} +# shared +# ucrt +# um +# windows.h +# etc... +# Lib +# ${WINSDK_VER} +# ucrt +# x64 +# x86 +# ucrt.lib +# etc... +# um +# x64 +# x86 +# kernel32.lib +# etc +# +# IMPORTANT: In order for this to work, you will need a valid copy of the Windows +# SDK and C++ STL headers and libraries on your host. Additionally, since the +# Windows libraries and headers are not case-correct, this toolchain file sets +# up a VFS overlay for the SDK headers and case-correcting symlinks for the +# libraries when running on a case-sensitive filesystem. + + +# When configuring CMake with a toolchain file against a top-level CMakeLists.txt, +# it will actually run CMake many times, once for each small test program used to +# determine what features a compiler supports. Unfortunately, none of these +# invocations share a CMakeCache.txt with the top-level invocation, meaning they +# won't see the value of any arguments the user passed via -D. Since these are +# necessary to properly configure MSVC in both the top-level configuration as well as +# all feature-test invocations, we set environment variables with the values so that +# these environments get inherited by child invocations. We can switch to +# CMAKE_TRY_COMPILE_PLATFORM_VARIABLES once our minimum supported CMake version +# is 3.6 or greater. 
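The CMake function that follows implements exactly this environment round-trip. For illustration, the same pattern in Python (a sketch, not part of the toolchain):

```python
import os

def init_user_prop(name, value=None):
    # If the caller supplied a value (a -D argument), stash it in the
    # environment so feature-test sub-configures inherit it; otherwise
    # recover whatever the parent invocation stored earlier.
    env_key = "_" + name
    if value:
        os.environ[env_key] = value
        return value
    return os.environ.get(env_key)
```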
+function(init_user_prop prop) + if(${prop}) + set(ENV{_${prop}} "${${prop}}") + else() + set(${prop} "$ENV{_${prop}}" PARENT_SCOPE) + endif() +endfunction() + +function(generate_winsdk_vfs_overlay winsdk_include_dir output_path) + set(include_dirs) + file(GLOB_RECURSE entries LIST_DIRECTORIES true "${winsdk_include_dir}/*") + foreach(entry ${entries}) + if(IS_DIRECTORY "${entry}") + list(APPEND include_dirs "${entry}") + endif() + endforeach() + + file(WRITE "${output_path}" "version: 0\n") + file(APPEND "${output_path}" "case-sensitive: false\n") + file(APPEND "${output_path}" "roots:\n") + + foreach(dir ${include_dirs}) + file(GLOB headers RELATIVE "${dir}" "${dir}/*.h") + if(NOT headers) + continue() + endif() + + file(APPEND "${output_path}" " - name: \"${dir}\"\n") + file(APPEND "${output_path}" " type: directory\n") + file(APPEND "${output_path}" " contents:\n") + + foreach(header ${headers}) + file(APPEND "${output_path}" " - name: \"${header}\"\n") + file(APPEND "${output_path}" " type: file\n") + file(APPEND "${output_path}" " external-contents: \"${dir}/${header}\"\n") + endforeach() + endforeach() +endfunction() + +function(generate_winsdk_lib_symlinks winsdk_um_lib_dir output_dir) + execute_process(COMMAND "${CMAKE_COMMAND}" -E make_directory "${output_dir}") + file(GLOB libraries RELATIVE "${winsdk_um_lib_dir}" "${winsdk_um_lib_dir}/*") + foreach(library ${libraries}) + string(TOLOWER "${library}" all_lowercase_symlink_name) + if(NOT library STREQUAL all_lowercase_symlink_name) + execute_process(COMMAND "${CMAKE_COMMAND}" + -E create_symlink + "${winsdk_um_lib_dir}/${library}" + "${output_dir}/${all_lowercase_symlink_name}") + endif() + + get_filename_component(name_we "${library}" NAME_WE) + get_filename_component(ext "${library}" EXT) + string(TOLOWER "${ext}" lowercase_ext) + set(lowercase_ext_symlink_name "${name_we}${lowercase_ext}") + if(NOT library STREQUAL lowercase_ext_symlink_name AND + NOT all_lowercase_symlink_name STREQUAL lowercase_ext_symlink_name) + execute_process(COMMAND "${CMAKE_COMMAND}" + -E create_symlink + "${winsdk_um_lib_dir}/${library}" + "${output_dir}/${lowercase_ext_symlink_name}") + endif() + endforeach() +endfunction() + +set(CMAKE_SYSTEM_NAME Windows) +set(CMAKE_SYSTEM_VERSION 10.0) +set(CMAKE_SYSTEM_PROCESSOR AMD64) + +init_user_prop(HOST_ARCH) +init_user_prop(LLVM_NATIVE_TOOLCHAIN) +init_user_prop(MSVC_BASE) +init_user_prop(WINSDK_BASE) +init_user_prop(WINSDK_VER) + +if(NOT HOST_ARCH) + set(HOST_ARCH x86_64) +endif() +if(HOST_ARCH STREQUAL "aarch64" OR HOST_ARCH STREQUAL "arm64") + set(TRIPLE_ARCH "aarch64") + set(WINSDK_ARCH "arm64") +elseif(HOST_ARCH STREQUAL "armv7" OR HOST_ARCH STREQUAL "arm") + set(TRIPLE_ARCH "armv7") + set(WINSDK_ARCH "arm") +elseif(HOST_ARCH STREQUAL "i686" OR HOST_ARCH STREQUAL "x86") + set(TRIPLE_ARCH "i686") + set(WINSDK_ARCH "x86") +elseif(HOST_ARCH STREQUAL "x86_64" OR HOST_ARCH STREQUAL "x64") + set(TRIPLE_ARCH "x86_64") + set(WINSDK_ARCH "x64") +else() + message(SEND_ERROR "Unknown host architecture ${HOST_ARCH}. 
Must be aarch64 (or arm64), armv7 (or arm), i686 (or x86), or x86_64 (or x64).") +endif() + +set(MSVC_INCLUDE "${MSVC_BASE}/include") +set(ATLMFC_INCLUDE "${MSVC_BASE}/atlmfc/include") +set(MSVC_LIB "${MSVC_BASE}/lib") +set(ATLMFC_LIB "${MSVC_BASE}/atlmfc/lib") +set(WINSDK_INCLUDE "${WINSDK_BASE}/Include/${WINSDK_VER}") +set(WINSDK_LIB "${WINSDK_BASE}/Lib/${WINSDK_VER}") + +# Do some sanity checking to make sure we can find a native toolchain and +# that the Windows SDK / MSVC STL directories look kosher. +if(NOT EXISTS "${LLVM_NATIVE_TOOLCHAIN}/bin/clang-cl" OR + NOT EXISTS "${LLVM_NATIVE_TOOLCHAIN}/bin/lld-link") + message(SEND_ERROR + "LLVM_NATIVE_TOOLCHAIN folder '${LLVM_NATIVE_TOOLCHAIN}' does not " + "point to a valid directory containing bin/clang-cl and bin/lld-link " + "binaries") +endif() + +if(NOT EXISTS "${MSVC_BASE}" OR + NOT EXISTS "${MSVC_INCLUDE}" OR + NOT EXISTS "${MSVC_LIB}") + message(SEND_ERROR + "CMake variable MSVC_BASE must point to a folder containing MSVC " + "system headers and libraries") +endif() + +if(NOT EXISTS "${WINSDK_BASE}" OR + NOT EXISTS "${WINSDK_INCLUDE}" OR + NOT EXISTS "${WINSDK_LIB}") + message(SEND_ERROR + "CMake variable WINSDK_BASE and WINSDK_VER must resolve to a valid " + "Windows SDK installation") +endif() + +if(NOT EXISTS "${WINSDK_INCLUDE}/um/Windows.h") + message(SEND_ERROR "Cannot find Windows.h") +endif() +if(NOT EXISTS "${WINSDK_INCLUDE}/um/WINDOWS.H") + set(case_sensitive_filesystem TRUE) +endif() + +set(CMAKE_C_COMPILER "${LLVM_NATIVE_TOOLCHAIN}/bin/clang-cl" CACHE FILEPATH "") +set(CMAKE_CXX_COMPILER "${LLVM_NATIVE_TOOLCHAIN}/bin/clang-cl" CACHE FILEPATH "") +set(CMAKE_LINKER "${LLVM_NATIVE_TOOLCHAIN}/bin/lld-link" CACHE FILEPATH "") + +# Even though we're cross-compiling, we need some native tools (e.g. llvm-tblgen), and those +# native tools have to be built before we can start doing the cross-build. LLVM supports +# a CROSS_TOOLCHAIN_FLAGS_NATIVE argument which consists of a list of flags to pass to CMake +# when configuring the NATIVE portion of the cross-build. By default we construct this so +# that it points to the tools in the same location as the native clang-cl that we're using. +list(APPEND _CTF_NATIVE_DEFAULT "-DCMAKE_ASM_COMPILER=${LLVM_NATIVE_TOOLCHAIN}/bin/clang") +list(APPEND _CTF_NATIVE_DEFAULT "-DCMAKE_C_COMPILER=${LLVM_NATIVE_TOOLCHAIN}/bin/clang") +list(APPEND _CTF_NATIVE_DEFAULT "-DCMAKE_CXX_COMPILER=${LLVM_NATIVE_TOOLCHAIN}/bin/clang++") + +set(CROSS_TOOLCHAIN_FLAGS_NATIVE "${_CTF_NATIVE_DEFAULT}" CACHE STRING "") + +set(COMPILE_FLAGS + -D_CRT_SECURE_NO_WARNINGS + --target=${TRIPLE_ARCH}-windows-msvc + -fms-compatibility-version=19.11 + -imsvc "\"${ATLMFC_INCLUDE}\"" + -imsvc "\"${MSVC_INCLUDE}\"" + -imsvc "\"${WINSDK_INCLUDE}/ucrt\"" + -imsvc "\"${WINSDK_INCLUDE}/shared\"" + -imsvc "\"${WINSDK_INCLUDE}/um\"" + -imsvc "\"${WINSDK_INCLUDE}/winrt\"") + +if(case_sensitive_filesystem) + # Ensure all sub-configures use the top-level VFS overlay instead of generating their own. + init_user_prop(winsdk_vfs_overlay_path) + if(NOT winsdk_vfs_overlay_path) + set(winsdk_vfs_overlay_path "${CMAKE_BINARY_DIR}/winsdk_vfs_overlay.yaml") + generate_winsdk_vfs_overlay("${WINSDK_BASE}/Include/${WINSDK_VER}" "${winsdk_vfs_overlay_path}") + init_user_prop(winsdk_vfs_overlay_path) + endif() + list(APPEND COMPILE_FLAGS + -Xclang -ivfsoverlay -Xclang "${winsdk_vfs_overlay_path}") +endif() + +string(REPLACE ";" " " COMPILE_FLAGS "${COMPILE_FLAGS}") + +# We need to preserve any flags that were passed in by the user. 
However, we +# can't append to CMAKE_C_FLAGS and friends directly, because toolchain files +# will be re-invoked on each reconfigure and therefore need to be idempotent. +# The assignments to the _INITIAL cache variables don't use FORCE, so they'll +# only be populated on the initial configure, and their values won't change +# afterward. +set(_CMAKE_C_FLAGS_INITIAL "${CMAKE_C_FLAGS}" CACHE STRING "") +set(CMAKE_C_FLAGS "${_CMAKE_C_FLAGS_INITIAL} ${COMPILE_FLAGS}" CACHE STRING "" FORCE) + +set(_CMAKE_CXX_FLAGS_INITIAL "${CMAKE_CXX_FLAGS}" CACHE STRING "") +set(CMAKE_CXX_FLAGS "${_CMAKE_CXX_FLAGS_INITIAL} ${COMPILE_FLAGS}" CACHE STRING "" FORCE) + +set(LINK_FLAGS + # Prevent CMake from attempting to invoke mt.exe. It only recognizes the slashed form and not the dashed form. + /manifest:no + + -libpath:"${ATLMFC_LIB}/${WINSDK_ARCH}" + -libpath:"${MSVC_LIB}/${WINSDK_ARCH}" + -libpath:"${WINSDK_LIB}/ucrt/${WINSDK_ARCH}" + -libpath:"${WINSDK_LIB}/um/${WINSDK_ARCH}") + +if(case_sensitive_filesystem) + # Ensure all sub-configures use the top-level symlinks dir instead of generating their own. + init_user_prop(winsdk_lib_symlinks_dir) + if(NOT winsdk_lib_symlinks_dir) + set(winsdk_lib_symlinks_dir "${CMAKE_BINARY_DIR}/winsdk_lib_symlinks") + generate_winsdk_lib_symlinks("${WINSDK_BASE}/Lib/${WINSDK_VER}/um/${WINSDK_ARCH}" "${winsdk_lib_symlinks_dir}") + init_user_prop(winsdk_lib_symlinks_dir) + endif() + list(APPEND LINK_FLAGS + -libpath:"${winsdk_lib_symlinks_dir}") +endif() + +string(REPLACE ";" " " LINK_FLAGS "${LINK_FLAGS}") + +# See explanation for compiler flags above for the _INITIAL variables. +set(_CMAKE_EXE_LINKER_FLAGS_INITIAL "${CMAKE_EXE_LINKER_FLAGS}" CACHE STRING "") +set(CMAKE_EXE_LINKER_FLAGS "${_CMAKE_EXE_LINKER_FLAGS_INITIAL} ${LINK_FLAGS}" CACHE STRING "" FORCE) + +set(_CMAKE_MODULE_LINKER_FLAGS_INITIAL "${CMAKE_MODULE_LINKER_FLAGS}" CACHE STRING "") +set(CMAKE_MODULE_LINKER_FLAGS "${_CMAKE_MODULE_LINKER_FLAGS_INITIAL} ${LINK_FLAGS}" CACHE STRING "" FORCE) + +set(_CMAKE_SHARED_LINKER_FLAGS_INITIAL "${CMAKE_SHARED_LINKER_FLAGS}" CACHE STRING "") +set(CMAKE_SHARED_LINKER_FLAGS "${_CMAKE_SHARED_LINKER_FLAGS_INITIAL} ${LINK_FLAGS}" CACHE STRING "" FORCE) + +# CMake populates these with a bunch of unnecessary libraries, which requires +# extra case-correcting symlinks and what not. Instead, let projects explicitly +# control which libraries they require. +set(CMAKE_C_STANDARD_LIBRARIES "" CACHE STRING "" FORCE) +set(CMAKE_CXX_STANDARD_LIBRARIES "" CACHE STRING "" FORCE) + +# Allow clang-cl to work with macOS paths. 
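Both clang-cl toolchain files also rely on `generate_winsdk_lib_symlinks()` to make the SDK import libraries resolvable regardless of casing. A simplified Python sketch of that helper (it only emits the all-lowercase variant, whereas the CMake version also lowercases just the extension):

```python
import os

def generate_winsdk_lib_symlinks(winsdk_um_lib_dir, output_dir):
    # Create lowercase symlinks so link lines written with arbitrary casing
    # (kernel32.lib vs Kernel32.Lib) resolve on a case-sensitive filesystem.
    os.makedirs(output_dir, exist_ok=True)
    for library in os.listdir(winsdk_um_lib_dir):
        lowered = library.lower()
        if lowered == library:
            continue  # already all-lowercase, nothing to correct
        link = os.path.join(output_dir, lowered)
        if not os.path.lexists(link):
            os.symlink(os.path.join(winsdk_um_lib_dir, library), link)
```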
+set(CMAKE_USER_MAKE_RULES_OVERRIDE "${CMAKE_CURRENT_LIST_DIR}/ClangClCMakeCompileRules.cmake") diff --git a/cmake/linux_i386.toolchain.cmake b/cmake/linux_i386.toolchain.cmake new file mode 100644 index 00000000..9c4a5094 --- /dev/null +++ b/cmake/linux_i386.toolchain.cmake @@ -0,0 +1,17 @@ +set(CMAKE_SYSTEM_NAME Linux) +set(CMAKE_SYSTEM_PROCESSOR "i386") +set(CMAKE_C_COMPILER_TARGET i386-linux-gnu) + +# Assume debian/ubuntu +#set(CMAKE_FIND_ROOT_PATH /usr/lib/i386-linux-gnu/) + +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -m32") +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -m32") + +set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) +set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY) + +# https://stackoverflow.com/questions/41557927/using-usr-lib-i386-linux-gnu-instead-of-usr-lib-x86-64-linux-gnu-to-find-libra +set(FIND_LIBRARY_USE_LIB64_PATHS OFF) diff --git a/cmake/llvm-mingw-cross.cmake b/cmake/llvm-mingw-cross.cmake new file mode 100644 index 00000000..f7e1759c --- /dev/null +++ b/cmake/llvm-mingw-cross.cmake @@ -0,0 +1,24 @@ +SET(CMAKE_SYSTEM_NAME Windows) + +IF (DEFINED ENV{LLVM_MINGW_DIR}) + SET(LLVM_MINGW_ROOT "$ENV{LLVM_MINGW_DIR}") +ELSE () + SET(LLVM_MINGW_ROOT "/mnt/data/local/llvm-mingw-20200325-ubuntu-18.04") +ENDIF() + + +SET(CMAKE_C_COMPILER ${LLVM_MINGW_ROOT}/bin/x86_64-w64-mingw32-clang) +SET(CMAKE_CXX_COMPILER ${LLVM_MINGW_ROOT}/bin/x86_64-w64-mingw32-clang++) +SET(CMAKE_RC_COMPILER ${LLVM_MINGW_ROOT}/bin/x86_64-w64-mingw32-windres) + +#SET(CMAKE_C_LINK_EXECUTABLE x86_64-w64-mingw32-gcc) +#SET(CMAKE_CXX_LINK_EXECUTABLE x86_64-w64-mingw32-g++) + +SET(CMAKE_FIND_ROOT_PATH ${LLVM_MINGW_ROOT}/x86_64-w64-mingw32) + +# We may need some advanced thread APIs to compile, so enable 0x601(Win7) if required. +# SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_WIN32_WINNT=0x601") + +SET(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +SET(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +SET(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) diff --git a/cmake/llvm-mingw-win64.cmake b/cmake/llvm-mingw-win64.cmake new file mode 100644 index 00000000..439b5240 --- /dev/null +++ b/cmake/llvm-mingw-win64.cmake @@ -0,0 +1,20 @@ +SET(CMAKE_SYSTEM_NAME Windows) + +IF (DEFINED ENV{LLVM_MINGW_DIR}) + SET(LLVM_MINGW_ROOT "$ENV{LLVM_MINGW_DIR}") +ELSE () + SET(LLVM_MINGW_ROOT "C:/ProgramData/llvm-mingw") +ENDIF() + +SET(CMAKE_C_COMPILER ${LLVM_MINGW_ROOT}/bin/x86_64-w64-mingw32-clang.exe) +SET(CMAKE_CXX_COMPILER ${LLVM_MINGW_ROOT}/bin/x86_64-w64-mingw32-clang++.exe) +SET(CMAKE_RC_COMPILER ${LLVM_MINGW_ROOT}/bin/x86_64-w64-mingw32-windres.exe) + +SET(CMAKE_FIND_ROOT_PATH ${LLVM_MINGW_ROOT}/x86_64-w64-mingw32) + +# We may need some advanced thread APIs to compile tinyusz. use 0x601(Win7) if required +# SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_WIN32_WINNT=0x601") + +SET(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +SET(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +SET(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) diff --git a/cmake/mingw64-cross.cmake b/cmake/mingw64-cross.cmake new file mode 100644 index 00000000..479cdd4d --- /dev/null +++ b/cmake/mingw64-cross.cmake @@ -0,0 +1,20 @@ +SET(CMAKE_SYSTEM_NAME Windows) + +IF (DEFINED ENV{MINGW_GCC_DIR}) + SET(MINGW_GCC_ROOT "$ENV{MINGW_GCC_DIR}") +ELSE () + # Assume mingw cross compiler is installed in your system + SET(MINGW_GCC_ROOT "/usr") +ENDIF() + +# win32 may fail to compile with C++11 threads. 
+ +SET(CMAKE_C_COMPILER ${MINGW_GCC_ROOT}/bin/x86_64-w64-mingw32-gcc-posix) +SET(CMAKE_CXX_COMPILER ${MINGW_GCC_ROOT}/bin/x86_64-w64-mingw32-g++-posix) +SET(CMAKE_RC_COMPILER ${MINGW_GCC_ROOT}/bin/x86_64-w64-mingw32-windres) + +SET(CMAKE_FIND_ROOT_PATH ${MINGW_GCC_ROOT}/x86_64-w64-mingw32) + +SET(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER) +SET(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY) +SET(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY) diff --git a/cmake/sanitizers/FindASan.cmake b/cmake/sanitizers/FindASan.cmake new file mode 100644 index 00000000..98ea7cb3 --- /dev/null +++ b/cmake/sanitizers/FindASan.cmake @@ -0,0 +1,59 @@ +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +option(SANITIZE_ADDRESS "Enable AddressSanitizer for sanitized targets." Off) + +set(FLAG_CANDIDATES + # Clang 3.2+ use this version. The no-omit-frame-pointer option is optional. 
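+    # (Each candidate below is test-compiled in order by sanitize-helpers.cmake;
+    #  the first flag the compiler accepts is cached as ASan_<COMPILER>_FLAGS and
+    #  later applied to targets by add_sanitize_address().)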
+ "-g -fsanitize=address -fno-omit-frame-pointer" + "-g -fsanitize=address" + + # Older deprecated flag for ASan + "-g -faddress-sanitizer" +) + + +if (SANITIZE_ADDRESS AND (SANITIZE_THREAD OR SANITIZE_MEMORY)) + message(FATAL_ERROR "AddressSanitizer is not compatible with " + "ThreadSanitizer or MemorySanitizer.") +endif () + + +include(sanitize-helpers) + +if (SANITIZE_ADDRESS) + sanitizer_check_compiler_flags("${FLAG_CANDIDATES}" "AddressSanitizer" + "ASan") + + find_program(ASan_WRAPPER "asan-wrapper" PATHS ${CMAKE_MODULE_PATH}) + mark_as_advanced(ASan_WRAPPER) +endif () + +function (add_sanitize_address TARGET) + if (NOT SANITIZE_ADDRESS) + return() + endif () + + sanitizer_add_flags(${TARGET} "AddressSanitizer" "ASan") +endfunction () diff --git a/cmake/sanitizers/FindMSan.cmake b/cmake/sanitizers/FindMSan.cmake new file mode 100644 index 00000000..22d0050e --- /dev/null +++ b/cmake/sanitizers/FindMSan.cmake @@ -0,0 +1,57 @@ +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +option(SANITIZE_MEMORY "Enable MemorySanitizer for sanitized targets." Off) + +set(FLAG_CANDIDATES + "-g -fsanitize=memory" +) + + +include(sanitize-helpers) + +if (SANITIZE_MEMORY) + if (NOT ${CMAKE_SYSTEM_NAME} STREQUAL "Linux") + message(WARNING "MemorySanitizer disabled for target ${TARGET} because " + "MemorySanitizer is supported for Linux systems only.") + set(SANITIZE_MEMORY Off CACHE BOOL + "Enable MemorySanitizer for sanitized targets." FORCE) + elseif (NOT ${CMAKE_SIZEOF_VOID_P} EQUAL 8) + message(WARNING "MemorySanitizer disabled for target ${TARGET} because " + "MemorySanitizer is supported for 64bit systems only.") + set(SANITIZE_MEMORY Off CACHE BOOL + "Enable MemorySanitizer for sanitized targets." 
FORCE) + else () + sanitizer_check_compiler_flags("${FLAG_CANDIDATES}" "MemorySanitizer" + "MSan") + endif () +endif () + +function (add_sanitize_memory TARGET) + if (NOT SANITIZE_MEMORY) + return() + endif () + + sanitizer_add_flags(${TARGET} "MemorySanitizer" "MSan") +endfunction () diff --git a/cmake/sanitizers/FindSanitizers.cmake b/cmake/sanitizers/FindSanitizers.cmake new file mode 100755 index 00000000..101bab84 --- /dev/null +++ b/cmake/sanitizers/FindSanitizers.cmake @@ -0,0 +1,94 @@ +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# If any of the used compiler is a GNU compiler, add a second option to static +# link against the sanitizers. +option(SANITIZE_LINK_STATIC "Try to link static against sanitizers." Off) + + + + +set(FIND_QUIETLY_FLAG "") +if (DEFINED Sanitizers_FIND_QUIETLY) + set(FIND_QUIETLY_FLAG "QUIET") +endif () + +find_package(ASan ${FIND_QUIETLY_FLAG}) +find_package(TSan ${FIND_QUIETLY_FLAG}) +find_package(MSan ${FIND_QUIETLY_FLAG}) +find_package(UBSan ${FIND_QUIETLY_FLAG}) + + + + +function(sanitizer_add_blacklist_file FILE) + if(NOT IS_ABSOLUTE ${FILE}) + set(FILE "${CMAKE_CURRENT_SOURCE_DIR}/${FILE}") + endif() + get_filename_component(FILE "${FILE}" REALPATH) + + sanitizer_check_compiler_flags("-fsanitize-blacklist=${FILE}" + "SanitizerBlacklist" "SanBlist") +endfunction() + +function(add_sanitizers ...) + # If no sanitizer is enabled, return immediately. + if (NOT (SANITIZE_ADDRESS OR SANITIZE_MEMORY OR SANITIZE_THREAD OR + SANITIZE_UNDEFINED)) + return() + endif () + + foreach (TARGET ${ARGV}) + # Check if this target will be compiled by exactly one compiler. Other- + # wise sanitizers can't be used and a warning should be printed once. + get_target_property(TARGET_TYPE ${TARGET} TYPE) + if (TARGET_TYPE STREQUAL "INTERFACE_LIBRARY") + message(WARNING "Can't use any sanitizers for target ${TARGET}, " + "because it is an interface library and cannot be " + "compiled directly.") + return() + endif () + sanitizer_target_compilers(${TARGET} TARGET_COMPILER) + list(LENGTH TARGET_COMPILER NUM_COMPILERS) + if (NUM_COMPILERS GREATER 1) + message(WARNING "Can't use any sanitizers for target ${TARGET}, " + "because it will be compiled by incompatible compilers. " + "Target will be compiled without sanitizers.") + return() + + # If the target is compiled by no or no known compiler, give a warning. 
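+        # (Editor's note: NUM_COMPILERS can be 0 when every source is pulled in
+        #  via a $<TARGET_OBJECTS:...> generator expression, which
+        #  sanitizer_target_compilers deliberately skips.)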
+ elseif (NUM_COMPILERS EQUAL 0) + message(WARNING "Sanitizers for target ${TARGET} may not be" + " usable, because it uses no or an unknown compiler. " + "This is a false warning for targets using only " + "object lib(s) as input.") + endif () + + # Add sanitizers for target. + add_sanitize_address(${TARGET}) + add_sanitize_thread(${TARGET}) + add_sanitize_memory(${TARGET}) + add_sanitize_undefined(${TARGET}) + endforeach () +endfunction(add_sanitizers) diff --git a/cmake/sanitizers/FindTSan.cmake b/cmake/sanitizers/FindTSan.cmake new file mode 100644 index 00000000..3cba3c03 --- /dev/null +++ b/cmake/sanitizers/FindTSan.cmake @@ -0,0 +1,65 @@ +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +option(SANITIZE_THREAD "Enable ThreadSanitizer for sanitized targets." Off) + +set(FLAG_CANDIDATES + "-g -fsanitize=thread" +) + + +# ThreadSanitizer is not compatible with MemorySanitizer. +if (SANITIZE_THREAD AND SANITIZE_MEMORY) + message(FATAL_ERROR "ThreadSanitizer is not compatible with " + "MemorySanitizer.") +endif () + + +include(sanitize-helpers) + +if (SANITIZE_THREAD) + if (NOT ${CMAKE_SYSTEM_NAME} STREQUAL "Linux" AND + NOT ${CMAKE_SYSTEM_NAME} STREQUAL "Darwin") + message(WARNING "ThreadSanitizer disabled for target ${TARGET} because " + "ThreadSanitizer is supported for Linux systems and macOS only.") + set(SANITIZE_THREAD Off CACHE BOOL + "Enable ThreadSanitizer for sanitized targets." FORCE) + elseif (NOT ${CMAKE_SIZEOF_VOID_P} EQUAL 8) + message(WARNING "ThreadSanitizer disabled for target ${TARGET} because " + "ThreadSanitizer is supported for 64bit systems only.") + set(SANITIZE_THREAD Off CACHE BOOL + "Enable ThreadSanitizer for sanitized targets." 
FORCE) + else () + sanitizer_check_compiler_flags("${FLAG_CANDIDATES}" "ThreadSanitizer" + "TSan") + endif () +endif () + +function (add_sanitize_thread TARGET) + if (NOT SANITIZE_THREAD) + return() + endif () + + sanitizer_add_flags(${TARGET} "ThreadSanitizer" "TSan") +endfunction () diff --git a/cmake/sanitizers/FindUBSan.cmake b/cmake/sanitizers/FindUBSan.cmake new file mode 100644 index 00000000..ae103f71 --- /dev/null +++ b/cmake/sanitizers/FindUBSan.cmake @@ -0,0 +1,46 @@ +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +option(SANITIZE_UNDEFINED + "Enable UndefinedBehaviorSanitizer for sanitized targets." Off) + +set(FLAG_CANDIDATES + "-g -fsanitize=undefined" +) + + +include(sanitize-helpers) + +if (SANITIZE_UNDEFINED) + sanitizer_check_compiler_flags("${FLAG_CANDIDATES}" + "UndefinedBehaviorSanitizer" "UBSan") +endif () + +function (add_sanitize_undefined TARGET) + if (NOT SANITIZE_UNDEFINED) + return() + endif () + + sanitizer_add_flags(${TARGET} "UndefinedBehaviorSanitizer" "UBSan") +endfunction () diff --git a/cmake/sanitizers/asan-wrapper b/cmake/sanitizers/asan-wrapper new file mode 100755 index 00000000..5d541033 --- /dev/null +++ b/cmake/sanitizers/asan-wrapper @@ -0,0 +1,55 @@ +#!/bin/sh + +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# This script is a wrapper for AddressSanitizer. In some special cases you need +# to preload AddressSanitizer to avoid error messages - e.g. if you're +# preloading another library to your application. At the moment this script will +# only do something, if we're running on a Linux platform. OSX might not be +# affected. + + +# Exit immediately, if platform is not Linux. +if [ "$(uname)" != "Linux" ] +then + exec $@ +fi + + +# Get the used libasan of the application ($1). If a libasan was found, it will +# be prepended to LD_PRELOAD. +libasan=$(ldd $1 | grep libasan | sed "s/^[[:space:]]//" | cut -d' ' -f1) +if [ -n "$libasan" ] +then + if [ -n "$LD_PRELOAD" ] + then + export LD_PRELOAD="$libasan:$LD_PRELOAD" + else + export LD_PRELOAD="$libasan" + fi +fi + +# Execute the application. +exec $@ diff --git a/cmake/sanitizers/sanitize-helpers.cmake b/cmake/sanitizers/sanitize-helpers.cmake new file mode 100755 index 00000000..3649b074 --- /dev/null +++ b/cmake/sanitizers/sanitize-helpers.cmake @@ -0,0 +1,177 @@ +# The MIT License (MIT) +# +# Copyright (c) +# 2013 Matthew Arsenault +# 2015-2016 RWTH Aachen University, Federal Republic of Germany +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +# Helper function to get the language of a source file. +function (sanitizer_lang_of_source FILE RETURN_VAR) + get_filename_component(LONGEST_EXT "${FILE}" EXT) + # If extension is empty return. This can happen for extensionless headers + if("${LONGEST_EXT}" STREQUAL "") + set(${RETURN_VAR} "" PARENT_SCOPE) + return() + endif() + # Get shortest extension as some files can have dot in their names + string(REGEX REPLACE "^.*(\\.[^.]+)$" "\\1" FILE_EXT ${LONGEST_EXT}) + string(TOLOWER "${FILE_EXT}" FILE_EXT) + string(SUBSTRING "${FILE_EXT}" 1 -1 FILE_EXT) + + get_property(ENABLED_LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES) + foreach (LANG ${ENABLED_LANGUAGES}) + list(FIND CMAKE_${LANG}_SOURCE_FILE_EXTENSIONS "${FILE_EXT}" TEMP) + if (NOT ${TEMP} EQUAL -1) + set(${RETURN_VAR} "${LANG}" PARENT_SCOPE) + return() + endif () + endforeach() + + set(${RETURN_VAR} "" PARENT_SCOPE) +endfunction () + + +# Helper function to get compilers used by a target. 
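+# Illustrative behaviour (editor's example, not from the original module): a
+# target whose sources are all C++ built with Clang yields RETURN_VAR == "Clang";
+# a mixed C/Fortran target built with gcc and gfortran yields just "GNU",
+# because compiler IDs are de-duplicated before being returned.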
+function (sanitizer_target_compilers TARGET RETURN_VAR) + # Check if all sources for target use the same compiler. If a target uses + # e.g. C and Fortran mixed and uses different compilers (e.g. clang and + # gfortran) this can trigger huge problems, because different compilers may + # use different implementations for sanitizers. + set(BUFFER "") + get_target_property(TSOURCES ${TARGET} SOURCES) + foreach (FILE ${TSOURCES}) + # If expression was found, FILE is a generator-expression for an object + # library. Object libraries will be ignored. + string(REGEX MATCH "TARGET_OBJECTS:([^ >]+)" _file ${FILE}) + if ("${_file}" STREQUAL "") + sanitizer_lang_of_source(${FILE} LANG) + if (LANG) + list(APPEND BUFFER ${CMAKE_${LANG}_COMPILER_ID}) + endif () + endif () + endforeach () + + list(REMOVE_DUPLICATES BUFFER) + set(${RETURN_VAR} "${BUFFER}" PARENT_SCOPE) +endfunction () + + +# Helper function to check compiler flags for language compiler. +function (sanitizer_check_compiler_flag FLAG LANG VARIABLE) + if (${LANG} STREQUAL "C") + include(CheckCCompilerFlag) + check_c_compiler_flag("${FLAG}" ${VARIABLE}) + + elseif (${LANG} STREQUAL "CXX") + include(CheckCXXCompilerFlag) + check_cxx_compiler_flag("${FLAG}" ${VARIABLE}) + + elseif (${LANG} STREQUAL "Fortran") + # CheckFortranCompilerFlag was introduced in CMake 3.x. To be compatible + # with older Cmake versions, we will check if this module is present + # before we use it. Otherwise we will define Fortran coverage support as + # not available. + include(CheckFortranCompilerFlag OPTIONAL RESULT_VARIABLE INCLUDED) + if (INCLUDED) + check_fortran_compiler_flag("${FLAG}" ${VARIABLE}) + elseif (NOT CMAKE_REQUIRED_QUIET) + message(STATUS "Performing Test ${VARIABLE}") + message(STATUS "Performing Test ${VARIABLE}" + " - Failed (Check not supported)") + endif () + endif() +endfunction () + + +# Helper function to test compiler flags. +function (sanitizer_check_compiler_flags FLAG_CANDIDATES NAME PREFIX) + set(CMAKE_REQUIRED_QUIET ${${PREFIX}_FIND_QUIETLY}) + + get_property(ENABLED_LANGUAGES GLOBAL PROPERTY ENABLED_LANGUAGES) + foreach (LANG ${ENABLED_LANGUAGES}) + # Sanitizer flags are not dependend on language, but the used compiler. + # So instead of searching flags foreach language, search flags foreach + # compiler used. + set(COMPILER ${CMAKE_${LANG}_COMPILER_ID}) + if (NOT DEFINED ${PREFIX}_${COMPILER}_FLAGS) + foreach (FLAG ${FLAG_CANDIDATES}) + if(NOT CMAKE_REQUIRED_QUIET) + message(STATUS "Try ${COMPILER} ${NAME} flag = [${FLAG}]") + endif() + + set(CMAKE_REQUIRED_FLAGS "${FLAG}") + unset(${PREFIX}_FLAG_DETECTED CACHE) + sanitizer_check_compiler_flag("${FLAG}" ${LANG} + ${PREFIX}_FLAG_DETECTED) + + if (${PREFIX}_FLAG_DETECTED) + # If compiler is a GNU compiler, search for static flag, if + # SANITIZE_LINK_STATIC is enabled. 
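+                # (For GCC this prepends e.g. "-static-libasan", turning
+                #  "-g -fsanitize=address" into
+                #  "-static-libasan -g -fsanitize=address".)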
+ if (SANITIZE_LINK_STATIC AND (${COMPILER} STREQUAL "GNU")) + string(TOLOWER ${PREFIX} PREFIX_lower) + sanitizer_check_compiler_flag( + "-static-lib${PREFIX_lower}" ${LANG} + ${PREFIX}_STATIC_FLAG_DETECTED) + + if (${PREFIX}_STATIC_FLAG_DETECTED) + set(FLAG "-static-lib${PREFIX_lower} ${FLAG}") + endif () + endif () + + set(${PREFIX}_${COMPILER}_FLAGS "${FLAG}" CACHE STRING + "${NAME} flags for ${COMPILER} compiler.") + mark_as_advanced(${PREFIX}_${COMPILER}_FLAGS) + break() + endif () + endforeach () + + if (NOT ${PREFIX}_FLAG_DETECTED) + set(${PREFIX}_${COMPILER}_FLAGS "" CACHE STRING + "${NAME} flags for ${COMPILER} compiler.") + mark_as_advanced(${PREFIX}_${COMPILER}_FLAGS) + + message(WARNING "${NAME} is not available for ${COMPILER} " + "compiler. Targets using this compiler will be " + "compiled without ${NAME}.") + endif () + endif () + endforeach () +endfunction () + + +# Helper to assign sanitizer flags for TARGET. +function (sanitizer_add_flags TARGET NAME PREFIX) + # Get list of compilers used by target and check, if sanitizer is available + # for this target. Other compiler checks like check for conflicting + # compilers will be done in add_sanitizers function. + sanitizer_target_compilers(${TARGET} TARGET_COMPILER) + list(LENGTH TARGET_COMPILER NUM_COMPILERS) + if ("${${PREFIX}_${TARGET_COMPILER}_FLAGS}" STREQUAL "") + return() + endif() + + # Set compile- and link-flags for target. + set_property(TARGET ${TARGET} APPEND_STRING + PROPERTY COMPILE_FLAGS " ${${PREFIX}_${TARGET_COMPILER}_FLAGS}") + set_property(TARGET ${TARGET} APPEND_STRING + PROPERTY COMPILE_FLAGS " ${SanBlist_${TARGET_COMPILER}_FLAGS}") + set_property(TARGET ${TARGET} APPEND_STRING + PROPERTY LINK_FLAGS " ${${PREFIX}_${TARGET_COMPILER}_FLAGS}") +endfunction () diff --git a/examples/obj_sticher/obj_writer.cc b/examples/obj_sticher/obj_writer.cc index 9ea8d7c4..31a2c895 100644 --- a/examples/obj_sticher/obj_writer.cc +++ b/examples/obj_sticher/obj_writer.cc @@ -26,7 +26,7 @@ bool WriteMat(const std::string& filename, const std::vector& drawObjects, std::vector& materials, std::map& textures) { glPolygonMode(GL_FRONT, GL_FILL); - glPolygonMode(GL_BACK, GL_FILL); + if (g_cull_face) { + glPolygonMode(GL_BACK, GL_LINE); + } else { + glPolygonMode(GL_BACK, GL_FILL); + } glEnable(GL_POLYGON_OFFSET_FILL); glPolygonOffset(1.0, 1.0); @@ -933,29 +950,31 @@ static void Draw(const std::vector& drawObjects, } // draw wireframe - glDisable(GL_POLYGON_OFFSET_FILL); - glPolygonMode(GL_FRONT, GL_LINE); - glPolygonMode(GL_BACK, GL_LINE); + if (g_show_wire) { + glDisable(GL_POLYGON_OFFSET_FILL); + glPolygonMode(GL_FRONT, GL_LINE); + glPolygonMode(GL_BACK, GL_LINE); + + glColor3f(0.0f, 0.0f, 0.4f); + for (size_t i = 0; i < drawObjects.size(); i++) { + DrawObject o = drawObjects[i]; + if (o.vb_id < 1) { + continue; + } - glColor3f(0.0f, 0.0f, 0.4f); - for (size_t i = 0; i < drawObjects.size(); i++) { - DrawObject o = drawObjects[i]; - if (o.vb_id < 1) { - continue; + glBindBuffer(GL_ARRAY_BUFFER, o.vb_id); + glEnableClientState(GL_VERTEX_ARRAY); + glEnableClientState(GL_NORMAL_ARRAY); + glDisableClientState(GL_COLOR_ARRAY); + glDisableClientState(GL_TEXTURE_COORD_ARRAY); + glVertexPointer(3, GL_FLOAT, stride, (const void*)0); + glNormalPointer(GL_FLOAT, stride, (const void*)(sizeof(float) * 3)); + glColorPointer(3, GL_FLOAT, stride, (const void*)(sizeof(float) * 6)); + glTexCoordPointer(2, GL_FLOAT, stride, (const void*)(sizeof(float) * 9)); + + glDrawArrays(GL_TRIANGLES, 0, 3 * o.numTriangles); + CheckErrors("drawarrays"); } - - 
glBindBuffer(GL_ARRAY_BUFFER, o.vb_id); - glEnableClientState(GL_VERTEX_ARRAY); - glEnableClientState(GL_NORMAL_ARRAY); - glDisableClientState(GL_COLOR_ARRAY); - glDisableClientState(GL_TEXTURE_COORD_ARRAY); - glVertexPointer(3, GL_FLOAT, stride, (const void*)0); - glNormalPointer(GL_FLOAT, stride, (const void*)(sizeof(float) * 3)); - glColorPointer(3, GL_FLOAT, stride, (const void*)(sizeof(float) * 6)); - glTexCoordPointer(2, GL_FLOAT, stride, (const void*)(sizeof(float) * 9)); - - glDrawArrays(GL_TRIANGLES, 0, 3 * o.numTriangles); - CheckErrors("drawarrays"); } } @@ -995,6 +1014,11 @@ int main(int argc, char** argv) { return 1; } + std::cout << "W : Toggle wireframe\n"; + std::cout << "C : Toggle face culling\n"; + //std::cout << "K, J, H, L, P, N : Move camera\n"; + std::cout << "Q, Esc : quit\n"; + glfwMakeContextCurrent(window); glfwSwapInterval(1); diff --git a/loader_example.cc b/loader_example.cc index 8143bb79..21feb684 100644 --- a/loader_example.cc +++ b/loader_example.cc @@ -257,7 +257,7 @@ static void PrintInfo(const tinyobj::attrib_t& attrib, printf(" material.Pm = %f\n", static_cast(materials[i].metallic)); printf(" material.Ps = %f\n", static_cast(materials[i].sheen)); printf(" material.Pc = %f\n", static_cast(materials[i].clearcoat_thickness)); - printf(" material.Pcr = %f\n", static_cast(materials[i].clearcoat_thickness)); + printf(" material.Pcr = %f\n", static_cast(materials[i].clearcoat_roughness)); printf(" material.aniso = %f\n", static_cast(materials[i].anisotropy)); printf(" material.anisor = %f\n", static_cast(materials[i].anisotropy_rotation)); printf(" material.map_Ke = %s\n", materials[i].emissive_texname.c_str()); diff --git a/models/cube-vertex-w-component.obj b/models/cube-vertex-w-component.obj new file mode 100644 index 00000000..b909f26d --- /dev/null +++ b/models/cube-vertex-w-component.obj @@ -0,0 +1,31 @@ +mtllib cube.mtl + +v 0.000000 2.000000 2.000000 0.1 +v 0.000000 0.000000 2.000000 0.2 +v 2.000000 0.000000 2.000000 0.3 +v 2.000000 2.000000 2.000000 0.4 +v 0.000000 2.000000 0.000000 0.5 +v 0.000000 0.000000 0.000000 0.6 +v 2.000000 0.000000 0.000000 0.7 +v 2.000000 2.000000 0.000000 0.8 +# 8 vertices + +g front cube +usemtl white +f 1 2 3 4 +g back cube +# expects white material +f 8 7 6 5 +g right cube +usemtl red +f 4 3 7 8 +g top cube +usemtl white +f 5 1 4 8 +g left cube +usemtl green +f 5 6 2 1 +g bottom cube +usemtl white +f 2 6 7 3 +# 6 elements diff --git a/models/cube_w_BOM.mtl b/models/cube_w_BOM.mtl new file mode 100644 index 00000000..96255b54 --- /dev/null +++ b/models/cube_w_BOM.mtl @@ -0,0 +1,24 @@ +newmtl white +Ka 0 0 0 +Kd 1 1 1 +Ks 0 0 0 + +newmtl red +Ka 0 0 0 +Kd 1 0 0 +Ks 0 0 0 + +newmtl green +Ka 0 0 0 +Kd 0 1 0 +Ks 0 0 0 + +newmtl blue +Ka 0 0 0 +Kd 0 0 1 +Ks 0 0 0 + +newmtl light +Ka 20 20 20 +Kd 1 1 1 +Ks 0 0 0 diff --git a/models/cube_w_BOM.obj b/models/cube_w_BOM.obj new file mode 100644 index 00000000..3c395f04 --- /dev/null +++ b/models/cube_w_BOM.obj @@ -0,0 +1,32 @@ +mtllib cube_w_BOM.mtl + +v 0.000000 2.000000 2.000000 +v 0.000000 0.000000 2.000000 +v 2.000000 0.000000 2.000000 +v 2.000000 2.000000 2.000000 +v 0.000000 2.000000 0.000000 +v 0.000000 0.000000 0.000000 +v 2.000000 0.000000 0.000000 +v 2.000000 2.000000 0.000000 +# 8 vertices + +g front cube +usemtl white +f 1 2 3 4 +# two white spaces between 'back' and 'cube' +g back cube +# expects white material +f 8 7 6 5 +g right cube +usemtl red +f 4 3 7 8 +g top cube +usemtl white +f 5 1 4 8 +g left cube +usemtl green +f 5 6 2 1 +g bottom cube +usemtl white +f 2 
6 7 3 +# 6 elements diff --git a/models/invalid-relative-texture-index.obj b/models/invalid-relative-texture-index.obj new file mode 100644 index 00000000..ed3a571a --- /dev/null +++ b/models/invalid-relative-texture-index.obj @@ -0,0 +1,2 @@ +vt 0 0 +f 1/-1 1/-1 1/-2 \ No newline at end of file diff --git a/models/invalid-relative-vertex-index.obj b/models/invalid-relative-vertex-index.obj new file mode 100644 index 00000000..bddc54a9 --- /dev/null +++ b/models/invalid-relative-vertex-index.obj @@ -0,0 +1 @@ +f -4 -3 -2 \ No newline at end of file diff --git a/models/issue-319-002.obj b/models/issue-319-002.obj new file mode 100644 index 00000000..8e056fa0 --- /dev/null +++ b/models/issue-319-002.obj @@ -0,0 +1,39 @@ +### +# +# OBJ File Generated by Meshlab +# +#### +# Object ZH2_001.obj +# +# Vertices: 19 +# Faces: 3 +# +#### +v 8219.830078 6406.934082 9.603000 +v 8219.632812 6406.582031 9.603000 +v 8219.632812 6406.582031 9.139000 +v 8219.973633 6405.420898 9.139000 +v 8211.128906 6404.090820 9.139000 +v 8211.128906 6404.090820 9.603000 +v 8211.469727 6402.930176 9.139000 +v 8211.469727 6402.930176 9.603000 +v 8211.133789 6402.831055 9.603000 +v 8210.793945 6403.992188 9.603000 +v 8210.713867 6404.264160 9.603000 +v 8211.840820 6403.038086 9.139000 +v 8219.899414 6404.861816 9.139000 +v 8219.755859 6405.352051 9.139000 +v 8211.985352 6402.544922 9.139000 +v 8232.911133 6378.534180 55.848999 +v 8226.281250 6376.591797 55.848999 +v 8226.341797 6376.384766 55.848999 +v 8233.450195 6378.466797 55.852001 +v 8233.450195 6378.466797 55.852001 +# 19 vertices, 0 vertices normals + +f 2 1 11 10 9 8 6 +f 5 7 12 15 13 14 4 3 +f 18 19 20 16 17 +# 3 faces, 0 coords texture + +# End of File diff --git a/models/issue-319-003.obj b/models/issue-319-003.obj new file mode 100644 index 00000000..882a25c1 --- /dev/null +++ b/models/issue-319-003.obj @@ -0,0 +1,27 @@ +#### +# +# OBJ File Generated by Meshlab +# +#### +# Object new 1.obj +# +# Vertices: 10 +# Faces: 1 +# +#### +v 8434.808594 6083.654785 2.387000 +v 8434.808594 6083.654785 71.633003 +v 8432.309570 6092.206055 71.633003 +v 8432.309570 6092.206055 63.955002 +v 8432.309570 6092.206055 2.387000 +v 8433.083984 6089.560059 71.633003 +v 8433.161133 6089.293945 71.633003 +v 8432.309570 6092.206055 64.323997 +v 8432.309570 6092.206055 67.152000 +v 8432.309570 6092.206055 68.078003 +# 10 vertices, 0 vertices normals + +f 6 7 2 1 5 4 8 9 10 3 +# 1 faces, 0 coords texture + +# End of File diff --git a/models/issue-330.obj b/models/issue-330.obj new file mode 100644 index 00000000..aa46631c --- /dev/null +++ b/models/issue-330.obj @@ -0,0 +1,12 @@ +v -105.342712 40.184242 -16.056709 +v -105.463989 40.202003 -16.003181 +v -105.564941 40.207558 -15.934708 +v -105.722252 40.151146 -16.112091 +v -105.610237 40.191372 -16.176643 +v -105.667282 40.189800 -15.864197 +v -105.751717 40.125790 -15.794304 +# 7 vertices, 0 vertices normals + +f 2 5 4 3 +f 4 6 3 +# 2 faces, 0 coords texture diff --git a/models/issue-356-leading-spaces-newmtl.mtl b/models/issue-356-leading-spaces-newmtl.mtl new file mode 100644 index 00000000..f5a388e9 --- /dev/null +++ b/models/issue-356-leading-spaces-newmtl.mtl @@ -0,0 +1,2 @@ +newmtl aaa +Ka 1.000000 1.000000 1.000000 diff --git a/models/issue-356-leading-spaces-newmtl.obj b/models/issue-356-leading-spaces-newmtl.obj new file mode 100644 index 00000000..b41984a8 --- /dev/null +++ b/models/issue-356-leading-spaces-newmtl.obj @@ -0,0 +1,2 @@ +mtllib issue-356-leading-spaces-newmtl.mtl +usemtl aaa diff --git 
a/models/issue-389-comment.obj b/models/issue-389-comment.obj new file mode 100644 index 00000000..cf16d926 --- /dev/null +++ b/models/issue-389-comment.obj @@ -0,0 +1,44 @@ +g Part 1 +v 0.0576127 0.0488792 0.0423 +v 0.0576127 0.0488792 0 +v -0.0483158 0.0488792 0 +v -0.0483158 0.0488792 0.0423 +v -0.0483158 -0.0139454 0 +v -0.0483158 -0.0139454 0.0423 +v 0.0576127 -0.0139454 0 +v 0.0576127 -0.0139454 0.0423 +vn 0 1 0 +vn -1 0 0 +vn 0 -1 0 +vn 1 0 0 +vn 0 0 1 +vn 0 0 -1 +o mesh0 +f 1//1 2//1 3//1 +f 3//1 4//1 1//1 +o mesh1 +f 4//2 3//2 5//2 +f 5//2 6//2 4//2 +o mesh2 +f 6//3 5//3 7//3 +f 7//3 8//3 6//3 +o mesh3 +f 8//4 7//4 2//4 +f 2//4 1//4 8//4 +o mesh4 +f 8//5 1//5 4//5 +f 4//5 6//5 8//5 +o mesh5 +f 5//6 3//6 2//6 +f 2//6 7//6 5//6 + +# Zusätzliche Linien (aus der Oberseite) +o lines +v 0.0576127 0.0488792 0.0423 # Startpunkt Linie 1 (Ecke 1 Oberseite) +v 0.0576127 0.0488792 0.2423 # Endpunkt Linie 1 (2m Höhe) +v -0.0483158 -0.0139454 0.0423 # Startpunkt Linie 2 (Ecke 6 Oberseite) +v -0.0483158 -0.0139454 0.2423 # Endpunkt Linie 2 (2m Höhe) + +# Linien +l 1 9 # Linie 1 +l 6 10 # Linie 2 diff --git a/models/issue-391.mtl b/models/issue-391.mtl new file mode 100644 index 00000000..c23ced4b --- /dev/null +++ b/models/issue-391.mtl @@ -0,0 +1,4 @@ +newmtl has_kd +Kd 1 0 0 +newmtl has_map +map_Kd test.png \ No newline at end of file diff --git a/models/issue-391.obj b/models/issue-391.obj new file mode 100644 index 00000000..06d8774b --- /dev/null +++ b/models/issue-391.obj @@ -0,0 +1,9 @@ +mtllib issue-391.mtl +v 0 0 0 +v 1 0 0 +v 0 1 0 +vn 0 0 1 +usemtl has_map +f 1//1 2//1 3//1 +usemtl has_kd +f 1//1 2//1 3//1 \ No newline at end of file diff --git a/pbr-mtl.md b/pbr-mtl.md new file mode 100644 index 00000000..b5856216 --- /dev/null +++ b/pbr-mtl.md @@ -0,0 +1,29 @@ +## PBR material extension. + +The spec can be found in either + +https://benhouston3d.com/blog/extended-wavefront-obj-mtl-for-pbr/ + +or Internet Archive: https://web.archive.org/web/20230210121526/http://exocortex.com/blog/extending_wavefront_mtl_to_support_pbr + +* Kd/map_Kd (base/diffuse) // reuse +* Ks/map_Ks (specular) // reuse +* d or Tr (opacity) // reuse +* map_d/map_Tr (opacitymap) // reuse +* Tf (translucency) // reuse +* bump/-bm (bump map) // reuse +* disp (displacement map) // reuse + +PBR material parameters as defined by the Disney PBR. + +* Pr/map_Pr (roughness) // new +* Pm/map_Pm (metallic) // new +* Ps/map_Ps (sheen) // new +* Pc (clearcoat thickness) // new +* Pcr (clearcoat roughness) // new +* Ke/map_Ke (emissive) // new +* aniso (anisotropy) // new +* anisor (anisotropy rotation) // new +* norm (normal map) // new + +EoL. diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..d3ba7cf3 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,39 @@ +[build-system] + +requires = [ + # NOTE: setuptools_scm>=8 is not supported in py3.6 cibuildwheel env. + # so use older setuptools_scm for a while + #"setuptools>=64", + #"setuptools_scm>=8", + "setuptools>=45", + "setuptools_scm[toml]<8", + "wheel", + "pybind11>=2.10.0", +] +build-backend = "setuptools.build_meta" + +[tool.black] +line-length = 140 + +[project] +name = "tinyobjloader" + +# version: Use setuptools_scm +dynamic = ["version", "classifiers", "authors", "description"] + + +readme = {file = "README.md", content-type = "text/markdown"} + +# Project URLs in pyproject.toml is not mature. 
+# so write it to setup.py +# https://github.com/pypa/packaging-problems/issues/606 +# +# [project.urils] + + +[tool.setuptools_scm] +# setuptools_scm>=8 +#version_file = "python/_version.py" + +# setuptools_scm<8 +write_to = "python/_version.py" diff --git a/python/MANIFEST.in b/python/MANIFEST.in deleted file mode 100644 index 90ef93d6..00000000 --- a/python/MANIFEST.in +++ /dev/null @@ -1,6 +0,0 @@ -# Copy the header file into the python/ folder. -include ../tiny_obj_loader.h -# Include it in the source distribution. -include tiny_obj_loader.h - -include pyproject.toml diff --git a/python/Makefile b/python/Makefile index 06352eda..ede9c2d2 100644 --- a/python/Makefile +++ b/python/Makefile @@ -1,5 +1,5 @@ all: - python setup.py build + cd .. && python -m pip install . t: python sample.py diff --git a/python/README.md b/python/README.md index 913668f6..8f9aa5d2 100644 --- a/python/README.md +++ b/python/README.md @@ -5,8 +5,7 @@ ## Requirements -* python 3.x(3.6+ recommended) - * python 2.7 may work, but not officially supported. +* python 3.6+ ## Install @@ -67,18 +66,17 @@ https://github.com/syoyo/tinyobjloader/blob/master/python/sample.py ## How to build -Using `cibuildwheel` is an recommended way to build a python module. +Using `cibuildwheel` is a recommended way to build a python module. See $tinyobjloader/azure-pipelines.yml for details. ### Developer build -Edit `setup.py` and uncomment `Developer option` lines - Assume pip is installed. ``` -$ pip install pybind11 -$ python setup.py build +$ git clone https://github.com/tinyobjloader/tinyobjloader +$ cd tinyobjloader +$ python -m pip install . ``` ## License @@ -87,4 +85,3 @@ MIT(tinyobjloader) and ISC(mapbox earcut) license. ## TODO * [ ] Writer saver - diff --git a/python/bindings.cc b/python/bindings.cc index 08592d70..e7e6c951 100644 --- a/python/bindings.cc +++ b/python/bindings.cc @@ -8,7 +8,7 @@ // define some helper functions for pybind11 #define TINY_OBJ_LOADER_PYTHON_BINDING -#include "tiny_obj_loader.h" +#include "../tiny_obj_loader.h" namespace py = pybind11; @@ -38,15 +38,41 @@ PYBIND11_MODULE(tinyobjloader, tobj_module) py::class_(tobj_module, "attrib_t") .def(py::init<>()) .def_readonly("vertices", &attrib_t::vertices) + .def_readonly("vertex_weights", &attrib_t::vertex_weights) + .def_readonly("skin_weights", &attrib_t::skin_weights) + .def_readonly("normals", &attrib_t::normals) + .def_readonly("texcoords", &attrib_t::texcoords) + .def_readonly("colors", &attrib_t::colors) .def("numpy_vertices", [] (attrib_t &instance) { auto ret = py::array_t(instance.vertices.size()); py::buffer_info buf = ret.request(); memcpy(buf.ptr, instance.vertices.data(), instance.vertices.size() * sizeof(real_t)); return ret; }) - .def_readonly("normals", &attrib_t::normals) - .def_readonly("texcoords", &attrib_t::texcoords) - .def_readonly("colors", &attrib_t::colors) + .def("numpy_vertex_weights", [] (attrib_t &instance) { + auto ret = py::array_t(instance.vertex_weights.size()); + py::buffer_info buf = ret.request(); + memcpy(buf.ptr, instance.vertex_weights.data(), instance.vertex_weights.size() * sizeof(real_t)); + return ret; + }) + .def("numpy_normals", [] (attrib_t &instance) { + auto ret = py::array_t(instance.normals.size()); + py::buffer_info buf = ret.request(); + memcpy(buf.ptr, instance.normals.data(), instance.normals.size() * sizeof(real_t)); + return ret; + }) + .def("numpy_texcoords", [] (attrib_t &instance) { + auto ret = py::array_t(instance.texcoords.size()); + py::buffer_info buf = ret.request(); + memcpy(buf.ptr, 
instance.texcoords.data(), instance.texcoords.size() * sizeof(real_t)); + return ret; + }) + .def("numpy_colors", [] (attrib_t &instance) { + auto ret = py::array_t(instance.colors.size()); + py::buffer_info buf = ret.request(); + memcpy(buf.ptr, instance.colors.data(), instance.colors.size() * sizeof(real_t)); + return ret; + }) ; py::class_(tobj_module, "shape_t") @@ -119,7 +145,7 @@ PYBIND11_MODULE(tinyobjloader, tobj_module) .def("GetCustomParameter", &material_t::GetCustomParameter) ; - py::class_(tobj_module, "mesh_t") + py::class_(tobj_module, "mesh_t", py::buffer_protocol()) .def(py::init<>()) .def_readonly("num_face_vertices", &mesh_t::num_face_vertices) .def("numpy_num_face_vertices", [] (mesh_t &instance) { @@ -128,11 +154,46 @@ PYBIND11_MODULE(tinyobjloader, tobj_module) memcpy(buf.ptr, instance.num_face_vertices.data(), instance.num_face_vertices.size() * sizeof(unsigned char)); return ret; }) + .def("vertex_indices", [](mesh_t &self) { + // NOTE: we cannot use py::buffer_info and py:buffer as a return type. + // py::memoriview is not suited for vertex indices usecase, since indices data may be used after + // deleting C++ mesh_t object in Python world. + // + // So create a dedicated Python object(std::vector) + + std::vector indices; + indices.resize(self.indices.size()); + for (size_t i = 0; i < self.indices.size(); i++) { + indices[i] = self.indices[i].vertex_index; + } + + return indices; + }) + .def("normal_indices", [](mesh_t &self) { + + std::vector indices; + indices.resize(self.indices.size()); + for (size_t i = 0; i < self.indices.size(); i++) { + indices[i] = self.indices[i].normal_index; + } + + return indices; + }) + .def("texcoord_indices", [](mesh_t &self) { + + std::vector indices; + indices.resize(self.indices.size()); + for (size_t i = 0; i < self.indices.size(); i++) { + indices[i] = self.indices[i].texcoord_index; + } + + return indices; + }) .def_readonly("indices", &mesh_t::indices) .def("numpy_indices", [] (mesh_t &instance) { // Flatten indexes. index_t is composed of 3 ints(vertex_index, normal_index, texcoord_index). // numpy_indices = [0, -1, -1, 1, -1, -1, ...] - // C++11 or later should pack POD struct tightly and does not reorder variables, + // C++11 or later should pack POD struct tightly and does not reorder variables, // so we can memcpy to copy data. // Still, we check the size of struct and byte offsets of each variable just for sure. 
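        // (index_t holds three ints (vertex_index, normal_index, texcoord_index),
        // so a tightly packed layout is 3 * 4 = 12 bytes; that is exactly what
        // the size and offset checks below verify.)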
static_assert(sizeof(index_t) == 12, "sizeof(index_t) must be 12"); @@ -154,10 +215,34 @@ PYBIND11_MODULE(tinyobjloader, tobj_module) }); py::class_(tobj_module, "lines_t") - .def(py::init<>()); + .def(py::init<>()) + .def_readonly("indices", &lines_t::indices) + .def_readonly("num_line_vertices", &lines_t::num_line_vertices) + ; py::class_(tobj_module, "points_t") - .def(py::init<>()); + .def(py::init<>()) + .def_readonly("indices", &points_t::indices) + ; + py::class_(tobj_module, "joint_and_weight_t") + .def(py::init<>()) + .def_readonly("joint_id", &joint_and_weight_t::joint_id, "Joint index(NOTE: Joint info is provided externally, not from .obj") + .def_readonly("weight", &joint_and_weight_t::weight, "Weight value(NOTE: weight is not normalized)") + ; + + py::class_(tobj_module, "skin_weight_t") + .def(py::init<>()) + .def_readonly("vertex_id", &skin_weight_t::vertex_id) + .def_readonly("weightValues", &skin_weight_t::weightValues) + ; + + py::class_(tobj_module, "tag_t") + .def(py::init<>()) + .def_readonly("name", &tag_t::name) + .def_readonly("intValues", &tag_t::intValues) + .def_readonly("floatValues", &tag_t::floatValues) + .def_readonly("stringValues", &tag_t::stringValues) + ; } diff --git a/python/pyproject.toml b/python/pyproject.toml deleted file mode 100644 index 51e27c42..00000000 --- a/python/pyproject.toml +++ /dev/null @@ -1,2 +0,0 @@ -[build-system] -requires = ["setuptools", "wheel", "pybind11>=2.3"] diff --git a/python/sample.py b/python/sample.py index 2f30c006..45c97612 100644 --- a/python/sample.py +++ b/python/sample.py @@ -1,8 +1,21 @@ import sys import tinyobjloader +is_numpy_available = False +try: + import numpy + + is_numpy_available = True +except: + print( + "NumPy not installed. Do not use numpy_*** API. If you encounter slow performance, see a performance tips for non-numpy API https://github.com/tinyobjloader/tinyobjloader/issues/275" + ) + filename = "../models/cornell_box.obj" +if len(sys.argv) > 1: + filename = sys.argv[1] + reader = tinyobjloader.ObjReader() @@ -24,9 +37,12 @@ print("Warn:", reader.Warning()) attrib = reader.GetAttrib() -print("attrib.vertices = ", len(attrib.vertices)) -print("attrib.normals = ", len(attrib.normals)) -print("attrib.texcoords = ", len(attrib.texcoords)) +print("len(attrib.vertices) = ", len(attrib.vertices)) +print("len(attrib.vertex_weights) = ", len(attrib.vertex_weights)) +print("len(attrib.normals) = ", len(attrib.normals)) +print("len(attrib.texcoords) = ", len(attrib.texcoords)) +print("len(attrib.colors) = ", len(attrib.colors)) +print("len(attrib.skin_weights) = ", len(attrib.skin_weights)) # vertex data must be `xyzxyzxyz...` assert len(attrib.vertices) % 3 == 0 @@ -37,16 +53,53 @@ # texcoords data must be `uvuvuv...` assert len(attrib.texcoords) % 2 == 0 -for (i, v) in enumerate(attrib.vertices): +# colors data must be `rgbrgbrgb...` +assert len(attrib.texcoords) % 3 == 0 + +# Performance note +# (direct?) array access through member variable is quite slow. +# https://github.com/tinyobjloader/tinyobjloader/issues/275#issuecomment-753465833 +# +# We encourage first copy(?) varible to Python world: +# +# vertices = attrib.vertices +# +# for i in range(...) +# v = vertices[i] +# +# Or please consider using numpy_*** interface(e.g. 
numpy_vertices()) + +for i, v in enumerate(attrib.vertices): print("v[{}] = {}".format(i, v)) -for (i, v) in enumerate(attrib.normals): +# vw is filled with 1.0 if [w] component is not present in `v` line in .obj +for i, w in enumerate(attrib.vertex_weights): + print("vweight[{}] = {}".format(i, w)) + +for i, v in enumerate(attrib.normals): print("vn[{}] = {}".format(i, v)) -for (i, v) in enumerate(attrib.texcoords): - print("vt[{}] = {}".format(i, t)) +for i, v in enumerate(attrib.texcoords): + print("vt[{}] = {}".format(i, v)) + +for i, v in enumerate(attrib.colors): + print("vcol[{}] = {}".format(i, v)) -print("numpy_vertices = {}".format(attrib.numpy_vertices())) +if len(attrib.skin_weights): + print("num skin weights", len(attrib.skin_weights)) + + for i, skin in enumerate(attrib.skin_weights): + print("skin_weight[{}]".format(i)) + print(" vertex_id = ", skin.vertex_id) + print(" len(weights) = ", len(skin.weightValues)) + for k, w in enumerate(skin.weightValues): + print(" [{}] joint_id: {}, weight: {}".format(k, w.joint_id, w.weight)) + +if is_numpy_available: + print("numpy_v = {}".format(attrib.numpy_vertices())) + print("numpy_vn = {}".format(attrib.numpy_normals())) + print("numpy_vt = {}".format(attrib.numpy_texcoords())) + print("numpy_vcol = {}".format(attrib.numpy_colors())) materials = reader.GetMaterials() print("Num materials: ", len(materials)) @@ -69,10 +122,17 @@ for shape in shapes: print(shape.name) print("len(num_indices) = {}".format(len(shape.mesh.indices))) - for (i, idx) in enumerate(shape.mesh.indices): + for i, idx in enumerate(shape.mesh.indices): print("[{}] v_idx {}".format(i, idx.vertex_index)) print("[{}] vn_idx {}".format(i, idx.normal_index)) print("[{}] vt_idx {}".format(i, idx.texcoord_index)) - print("numpy_indices = {}".format(shape.mesh.numpy_indices())) - print("numpy_num_face_vertices = {}".format(shape.mesh.numpy_num_face_vertices())) - print("numpy_material_ids = {}".format(shape.mesh.numpy_material_ids())) + print("material_ids = {}".format(shape.mesh.material_ids)) + + # faster access to indices + a = shape.mesh.vertex_indices() + print("vertex_indices", shape.mesh.vertex_indices()) + + if is_numpy_available: + print("numpy_indices = {}".format(shape.mesh.numpy_indices())) + print("numpy_num_face_vertices = {}".format(shape.mesh.numpy_num_face_vertices())) + print("numpy_material_ids = {}".format(shape.mesh.numpy_material_ids())) diff --git a/python/setup.py b/python/setup.py deleted file mode 100644 index 578cd1ef..00000000 --- a/python/setup.py +++ /dev/null @@ -1,131 +0,0 @@ -import setuptools -import platform - -from distutils.command.build_ext import build_ext - -with open("README.md", "r") as fh: - long_description = fh.read() - -# Adapted from https://github.com/pybind/python_example/blob/master/setup.py -class get_pybind_include(object): - """Helper class to determine the pybind11 include path - The purpose of this class is to postpone importing pybind11 - until it is actually installed, so that the ``get_include()`` - method can be invoked. """ - - def __init__(self, user=False, pep517=False): - self.user = user - self.pep517 = pep517 - - def __str__(self): - import os - import pybind11 - - interpreter_include_path = pybind11.get_include(self.user) - - if self.pep517: - # When pybind11 is installed permanently in site packages, the headers - # will be in the interpreter include path above. PEP 517 provides an - # experimental feature for build system dependencies. 
When installing - # a package from a source distribvution, first its build dependencies - # are installed in a temporary location. pybind11 does not return the - # correct path for this condition, so we glom together a second path, - # and ultimately specify them _both_ in the include search path. - # https://github.com/pybind/pybind11/issues/1067 - return os.path.abspath( - os.path.join( - os.path.dirname(pybind11.__file__), - "..", - "..", - "..", - "..", - "include", - os.path.basename(interpreter_include_path), - ) - ) - else: - return interpreter_include_path - - -# unix = default compiler name? -copt = {"unix": ["-std=c++11"], "gcc": ["-std=c++11"], "clang": ["std=c++11"]} -# TODO: set C++ version for msvc? {'msvc': ["/std:c++14"] } - -# ext_compile_args = ["-std=c++11"] -# ext_link_args = [] - -# https://stackoverflow.com/questions/724664/python-distutils-how-to-get-a-compiler-that-is-going-to-be-used -class build_ext_subclass(build_ext): - def build_extensions(self): - c = self.compiler.compiler_type - if c in copt: - for e in self.extensions: - e.extra_compile_args = copt[c] - - # if lopt.has_key(c): - # for e in self.extensions: - # e.extra_link_args = lopt[ c ] - build_ext.build_extensions(self) - - -# Developer option -# -# if platform.system() == "Darwin": -# # XCode10 or later does not support libstdc++, so we need to use libc++. -# # macosx-version 10.6 does not support libc++, so we require min macosx version 10.9. -# ext_compile_args.append("-stdlib=libc++") -# ext_compile_args.append("-mmacosx-version-min=10.9") -# ext_link_args.append("-stdlib=libc++") -# ext_link_args.append("-mmacosx-version-min=10.9") - -# `tiny_obj_loader.cc` contains implementation of tiny_obj_loader. -m = setuptools.Extension( - "tinyobjloader", - # extra_compile_args=ext_compile_args, - # extra_link_args=ext_link_args, - sources=["bindings.cc", "tiny_obj_loader.cc"], - include_dirs=[ - # Support `build_ext` finding tinyobjloader (without first running - # `sdist`). - "..", - # Support `build_ext` finding pybind 11 (provided it's permanently - # installed). - get_pybind_include(), - get_pybind_include(user=True), - # Support building from a source distribution finding pybind11 from - # a PEP 517 temporary install. 
- get_pybind_include(pep517=True), - ], - language="c++", -) - - -setuptools.setup( - name="tinyobjloader", - version="2.0.0rc9", - description="Tiny but powerful Wavefront OBJ loader", - long_description=long_description, - long_description_content_type="text/markdown", - author="Syoyo Fujita", - author_email="syoyo@lighttransport.com", - url="https://github.com/tinyobjloader/tinyobjloader", - project_urls={ - "Issue Tracker": "https://github.com/tinyobjloader/tinyobjloader/issues", - }, - classifiers=[ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "Intended Audience :: Science/Research", - "Intended Audience :: Manufacturing", - "Topic :: Artistic Software", - "Topic :: Multimedia :: Graphics :: 3D Modeling", - "Topic :: Scientific/Engineering :: Visualization", - "License :: OSI Approved :: MIT License", - "License :: OSI Approved :: ISC License (ISCL)", - "Operating System :: OS Independent", - "Programming Language :: Python :: 3", - ], - packages=setuptools.find_packages(), - ext_modules=[m], - cmdclass={"build_ext": build_ext_subclass}, -) diff --git a/python/tiny_obj_loader.cc b/python/tiny_obj_loader.cc index a0b8bc60..821542e7 100644 --- a/python/tiny_obj_loader.cc +++ b/python/tiny_obj_loader.cc @@ -6,4 +6,4 @@ #define TINYOBJLOADER_USE_MAPBOX_EARCUT #define TINYOBJLOADER_IMPLEMENTATION -#include "tiny_obj_loader.h" +#include "../tiny_obj_loader.h" diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..cb950871 --- /dev/null +++ b/setup.py @@ -0,0 +1,69 @@ +# Adapted from https://github.com/pybind/python_example/blob/master/setup.py +import sys + +#from pybind11 import get_cmake_dir +# Available at setup time due to pyproject.toml +from pybind11.setup_helpers import Pybind11Extension#, build_ext +from setuptools import setup + +try: + # try to read setuptools_scm generated _version.py + from .python import _version +except: + __version__ = "2.0.0rc10" + +with open("README.md", "r", encoding="utf8") as fh: + long_description = fh.read() + +# The main interface is through Pybind11Extension. +# * You can add cxx_std=11/14/17, and then build_ext can be removed. +# * You can set include_pybind11=false to add the include directory yourself, +# say from a submodule. 
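+# (Editor's note: this setup passes cxx_std=11 to Pybind11Extension below, which
+# is what lets the old custom build_ext subclass from python/setup.py go away.)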
+# +# Note: +# Sort input source files if you glob sources to ensure bit-for-bit +# reproducible builds (https://github.com/pybind/python_example/pull/53) + +ext_modules = [ + Pybind11Extension("tinyobjloader", + sorted(["python/bindings.cc", "python/tiny_obj_loader.cc"]), + # Example: passing in the version to the compiled code + define_macros = [('VERSION_INFO', __version__)], + cxx_std=11, + ), +] + +setup( + name="tinyobjloader", + packages=['python'], + #version=__version__, + author="Syoyo Fujita", + author_email="syoyo@lighttransport.com", + url="https://github.com/tinyobjloader/tinyobjloader", + #project_urls={ + # "Issue Tracker": "https://github.com/tinyobjloader/tinyobjloader/issues", + #}, + description="Tiny but powerful Wavefront OBJ loader", + long_description=long_description, + long_description_content_type='text/markdown', + classifiers=[ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "Intended Audience :: Manufacturing", + "Topic :: Artistic Software", + "Topic :: Multimedia :: Graphics :: 3D Modeling", + "Topic :: Scientific/Engineering :: Visualization", + "License :: OSI Approved :: MIT License", + "License :: OSI Approved :: ISC License (ISCL)", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + ], + ext_modules=ext_modules, + #extras_require={"test": "pytest"}, + # Currently, build_ext only provides an optional "highest supported C++ + # level" feature, but in the future it may provide more features. + # cmdclass={"build_ext": build_ext}, + #zip_safe=False, + #python_requires=">=3.6", +) diff --git a/tests/tester.cc b/tests/tester.cc index ebb5290c..b5c4d0db 100644 --- a/tests/tester.cc +++ b/tests/tester.cc @@ -858,6 +858,50 @@ void test_zero_face_idx_value_issue140() { TEST_CHECK(!err.empty()); } +void test_invalid_relative_vertex_index() { + tinyobj::attrib_t attrib; + std::vector shapes; + std::vector materials; + + std::string warn; + std::string err; + bool ret = + tinyobj::LoadObj(&attrib, &shapes, &materials, &warn, &err, + "../models/invalid-relative-vertex-index.obj", gMtlBasePath); + + if (!warn.empty()) { + std::cout << "WARN: " << warn << std::endl; + } + + if (!err.empty()) { + std::cerr << "ERR: " << err << std::endl; + } + TEST_CHECK(false == ret); + TEST_CHECK(!err.empty()); +} + +void test_invalid_texture_vertex_index() { + tinyobj::attrib_t attrib; + std::vector shapes; + std::vector materials; + + std::string warn; + std::string err; + bool ret = + tinyobj::LoadObj(&attrib, &shapes, &materials, &warn, &err, + "../models/invalid-relative-texture-vertex-index.obj", gMtlBasePath); + + if (!warn.empty()) { + std::cout << "WARN: " << warn << std::endl; + } + + if (!err.empty()) { + std::cerr << "ERR: " << err << std::endl; + } + TEST_CHECK(false == ret); + TEST_CHECK(!err.empty()); +} + void test_texture_name_whitespace_issue145() { tinyobj::attrib_t attrib; std::vector shapes; @@ -1364,6 +1408,118 @@ void test_face_missing_issue295() { TEST_CHECK((3 * 28) == shapes[0].mesh.indices.size()); // 28 triangle faces x 3 } +void test_comment_issue389() { + tinyobj::attrib_t attrib; + std::vector shapes; + std::vector materials; + + std::string warn; + std::string err; + bool ret = tinyobj::LoadObj( + &attrib, &shapes, &materials, &warn, &err, + "../models/issue-389-comment.obj", + gMtlBasePath, /* triangualte */false); + + TEST_CHECK(warn.empty()); + + if (!warn.empty()) { + std::cout << "WARN: " << warn << std::endl; + } + + if (!err.empty()) 
{ + std::cerr << "ERR: " << err << std::endl; + } + + TEST_CHECK(true == ret); +} + +void test_default_kd_for_multiple_materials_issue391() { + tinyobj::attrib_t attrib; + std::vector shapes; + std::vector materials; + + std::string warn; + std::string err; + bool ret = tinyobj::LoadObj(&attrib, &shapes, &materials, &warn, &err, + "../models/issue-391.obj", gMtlBasePath); + if (!warn.empty()) { + std::cout << "WARN: " << warn << std::endl; + } + + if (!err.empty()) { + std::cerr << "ERR: " << err << std::endl; + } + + const tinyobj::real_t kGrey[] = {0.6, 0.6, 0.6}; + const tinyobj::real_t kRed[] = {1.0, 0.0, 0.0}; + + TEST_CHECK(true == ret); + TEST_CHECK(2 == materials.size()); + for (size_t i = 0; i < materials.size(); ++i) { + const tinyobj::material_t& material = materials[i]; + if (material.name == "has_map") { + for (int i = 0; i < 3; ++i) TEST_CHECK(material.diffuse[i] == kGrey[i]); + } else if (material.name == "has_kd") { + for (int i = 0; i < 3; ++i) TEST_CHECK(material.diffuse[i] == kRed[i]); + } else { + std::cerr << "Unexpected material found!" << std::endl; + TEST_CHECK(false); + } + } +} + +void test_removeUtf8Bom() { + // Basic input with BOM + std::string withBOM = "\xEF\xBB\xBFhello world"; + TEST_CHECK(tinyobj::removeUtf8Bom(withBOM) == "hello world"); + + // Input without BOM + std::string noBOM = "hello world"; + TEST_CHECK(tinyobj::removeUtf8Bom(noBOM) == "hello world"); + + // Leaves short string unchanged + std::string shortStr = "\xEF"; + TEST_CHECK(tinyobj::removeUtf8Bom(shortStr) == shortStr); + + std::string shortStr2 = "\xEF\xBB"; + TEST_CHECK(tinyobj::removeUtf8Bom(shortStr2) == shortStr2); + + // BOM only returns empty string + std::string justBom = "\xEF\xBB\xBF"; + TEST_CHECK(tinyobj::removeUtf8Bom(justBom) == ""); + + // Empty string + std::string emptyStr = ""; + TEST_CHECK(tinyobj::removeUtf8Bom(emptyStr) == ""); +} + +void test_loadObj_with_BOM() { + tinyobj::attrib_t attrib; + std::vector shapes; + std::vector materials; + + std::string warn; + std::string err; + bool ret = tinyobj::LoadObj(&attrib, &shapes, &materials, &warn, &err, + "../models/cube_w_BOM.obj", gMtlBasePath); + + if (!warn.empty()) { + std::cout << "WARN: " << warn << std::endl; + } + + if (!err.empty()) { + std::cerr << "ERR: " << err << std::endl; + } + + TEST_CHECK(true == ret); + TEST_CHECK(6 == shapes.size()); + TEST_CHECK(0 == shapes[0].name.compare("front cube")); + TEST_CHECK(0 == shapes[1].name.compare("back cube")); // multiple whitespaces + // are aggregated as + // single white space. +} + + // Fuzzer test. // Just check if it does not crash. // Disable by default since Windows filesystem can't create filename of afl @@ -1467,4 +1623,14 @@ TEST_LIST = { test_mtl_filename_with_whitespace_issue46}, {"test_face_missing_issue295", test_face_missing_issue295}, + {"test_comment_issue389", + test_comment_issue389}, + {"test_invalid_relative_vertex_index", + test_invalid_relative_vertex_index}, + {"test_invalid_texture_vertex_index", + test_invalid_texture_vertex_index}, + {"default_kd_for_multiple_materials_issue391", + test_default_kd_for_multiple_materials_issue391}, + {"test_removeUtf8Bom", test_removeUtf8Bom}, + {"test_loadObj_with_BOM", test_loadObj_with_BOM}, {NULL, NULL}}; diff --git a/tiny_obj_loader.h b/tiny_obj_loader.h index 7d0c3844..a927864e 100644 --- a/tiny_obj_loader.h +++ b/tiny_obj_loader.h @@ -24,12 +24,16 @@ THE SOFTWARE. // // version 2.0.0 : Add new object oriented API. 1.x API is still provided. +// * Add python binding. // * Support line primitive. 
 //                 * Support points primitive.
 //                 * Support multiple search path for .mtl(v1 API).
-//                 * Support vertex weight `vw`(as an tinyobj extension)
+//                 * Support vertex skinning weight `vw`(as a tinyobj
+//                   extension). Note that this differs from the vertex
+//                   weight([w] component in `v` line)
 //                 * Support escaped whitespece in mtllib
-//                 * Add robust triangulation using Mapbox earcut(TINYOBJLOADER_USE_MAPBOX_EARCUT).
+//                 * Add robust triangulation using Mapbox
+//                   earcut(TINYOBJLOADER_USE_MAPBOX_EARCUT).
 // version 1.4.0 : Modifed ParseTextureNameAndOption API
 // version 1.3.1 : Make ParseTextureNameAndOption API public
 // version 1.3.0 : Separate warning and error message(breaking API of LoadObj)
@@ -194,9 +198,9 @@ struct material_t {
   int dummy;  // Suppress padding warning.
 
-  std::string ambient_texname;   // map_Ka
-  std::string diffuse_texname;   // map_Kd
-  std::string specular_texname;  // map_Ks
+  std::string ambient_texname;   // map_Ka. For ambient or ambient occlusion.
+  std::string diffuse_texname;   // map_Kd
+  std::string specular_texname;  // map_Ks
   std::string specular_highlight_texname;  // map_Ns
   std::string bump_texname;                // map_bump, map_Bump, bump
   std::string displacement_texname;        // disp
@@ -352,10 +356,9 @@ struct index_t {
 struct mesh_t {
   std::vector<index_t> indices;
-  std::vector<unsigned char>
+  std::vector<unsigned int>
       num_face_vertices;  // The number of vertices per
-                          // face. 3 = triangle, 4 = quad,
-                          // ... Up to 255 vertices per face.
+                          // face. 3 = triangle, 4 = quad, ...
   std::vector<int> material_ids;                  // per-face material ID
   std::vector<unsigned int> smoothing_group_ids;  // per-face smoothing group
                                                   // ID(0 = off. positive value
@@ -421,6 +424,8 @@ struct attrib_t {
 struct callback_t {
   // W is optional and set to 1 if there is no `w` item in `v` line
   void (*vertex_cb)(void *user_data, real_t x, real_t y, real_t z, real_t w);
+  void (*vertex_color_cb)(void *user_data, real_t x, real_t y, real_t z,
+                          real_t r, real_t g, real_t b, bool has_color);
   void (*normal_cb)(void *user_data, real_t x, real_t y, real_t z);
 
   // y and z are optional and set to 0 if there is no `y` and/or `z` item(s) in
@@ -444,6 +449,7 @@ struct callback_t {
   callback_t()
       : vertex_cb(NULL),
+        vertex_color_cb(NULL),
         normal_cb(NULL),
         texcoord_cb(NULL),
         index_cb(NULL),
@@ -673,6 +679,7 @@ bool ParseTextureNameAndOption(std::string *texname, texture_option_t *texopt,
 #endif
 
 #include <array>
+
 #include "mapbox/earcut.hpp"
 
 #ifdef __clang__
@@ -796,8 +803,32 @@ static std::istream &safeGetline(std::istream &is, std::string &t) {
   (static_cast<unsigned int>((x) - '0') < static_cast<unsigned int>(10))
 #define IS_NEW_LINE(x) (((x) == '\r') || ((x) == '\n') || ((x) == '\0'))
 
+template <typename T>
+static inline std::string toString(const T &t) {
+  std::stringstream ss;
+  ss << t;
+  return ss.str();
+}
+
+static inline std::string removeUtf8Bom(const std::string& input) {
+  // UTF-8 BOM = 0xEF,0xBB,0xBF
+  if (input.size() >= 3 &&
+      static_cast<unsigned char>(input[0]) == 0xEF &&
+      static_cast<unsigned char>(input[1]) == 0xBB &&
+      static_cast<unsigned char>(input[2]) == 0xBF) {
+    return input.substr(3);  // Skip BOM
+  }
+  return input;
+}
+
+struct warning_context {
+  std::string *warn;
+  size_t line_number;
+};
+
 // Make index zero-base, and also support relative index.
-static inline bool fixIndex(int idx, int n, int *ret) {
+static inline bool fixIndex(int idx, int n, int *ret, bool allow_zero,
+                            const warning_context &context) {
   if (!ret) {
     return false;
   }
@@ -809,11 +840,22 @@
 
   if (idx == 0) {
     // zero is not allowed according to the spec.
- return false; + if (context.warn) { + (*context.warn) += + "A zero value index found (will have a value of -1 for normal and " + "tex indices. Line " + + toString(context.line_number) + ").\n"; + } + + (*ret) = idx - 1; + return allow_zero; } if (idx < 0) { (*ret) = n + idx; // negative value = relative + if ((*ret) < 0) { + return false; // invalid relative index + } return true; } @@ -972,7 +1014,8 @@ static bool tryParseDouble(const char *s, const char *s_end, double *result) { while (end_not_reached && IS_DIGIT(*curr)) { // To avoid annoying MSVC's min/max macro definiton, // Use hardcoded int max value - if (exponent > (2147483647/10)) { // 2147483647 = std::numeric_limits::max() + if (exponent > + (2147483647 / 10)) { // 2147483647 = std::numeric_limits::max() // Integer overflow goto fail; } @@ -1034,6 +1077,7 @@ static inline void parseReal3(real_t *x, real_t *y, real_t *z, (*z) = parseReal(token, default_z); } +#if 0 // not used static inline void parseV(real_t *x, real_t *y, real_t *z, real_t *w, const char **token, const double default_x = 0.0, const double default_y = 0.0, @@ -1044,26 +1088,45 @@ static inline void parseV(real_t *x, real_t *y, real_t *z, real_t *w, (*z) = parseReal(token, default_z); (*w) = parseReal(token, default_w); } +#endif // Extension: parse vertex with colors(6 items) -static inline bool parseVertexWithColor(real_t *x, real_t *y, real_t *z, - real_t *r, real_t *g, real_t *b, - const char **token, - const double default_x = 0.0, - const double default_y = 0.0, - const double default_z = 0.0) { +// Return 3: xyz, 4: xyzw, 6: xyzrgb +// `r`: red(case 6) or [w](case 4) +static inline int parseVertexWithColor(real_t *x, real_t *y, real_t *z, + real_t *r, real_t *g, real_t *b, + const char **token, + const double default_x = 0.0, + const double default_y = 0.0, + const double default_z = 0.0) { + // TODO: Check error (*x) = parseReal(token, default_x); (*y) = parseReal(token, default_y); (*z) = parseReal(token, default_z); - const bool found_color = - parseReal(token, r) && parseReal(token, g) && parseReal(token, b); + // - 4 components(x, y, z, w) ot 6 components + bool has_r = parseReal(token, r); + + if (!has_r) { + (*r) = (*g) = (*b) = 1.0; + return 3; + } + + bool has_g = parseReal(token, g); + + if (!has_g) { + (*g) = (*b) = 1.0; + return 4; + } + + bool has_b = parseReal(token, b); - if (!found_color) { + if (!has_b) { (*r) = (*g) = (*b) = 1.0; + return 3; // treated as xyz } - return found_color; + return 6; } static inline bool parseOnOff(const char **token, bool default_value = true) { @@ -1134,14 +1197,14 @@ static tag_sizes parseTagTriple(const char **token) { // Parse triples with index offsets: i, i/j/k, i//k, i/j static bool parseTriple(const char **token, int vsize, int vnsize, int vtsize, - vertex_index_t *ret) { + vertex_index_t *ret, const warning_context &context) { if (!ret) { return false; } vertex_index_t vi(-1); - if (!fixIndex(atoi((*token)), vsize, &(vi.v_idx))) { + if (!fixIndex(atoi((*token)), vsize, &vi.v_idx, false, context)) { return false; } @@ -1155,7 +1218,7 @@ static bool parseTriple(const char **token, int vsize, int vnsize, int vtsize, // i//k if ((*token)[0] == '/') { (*token)++; - if (!fixIndex(atoi((*token)), vnsize, &(vi.vn_idx))) { + if (!fixIndex(atoi((*token)), vnsize, &vi.vn_idx, true, context)) { return false; } (*token) += strcspn((*token), "/ \t\r"); @@ -1164,7 +1227,7 @@ static bool parseTriple(const char **token, int vsize, int vnsize, int vtsize, } // i/j/k or i/j - if (!fixIndex(atoi((*token)), vtsize, 
&(vi.vt_idx))) { + if (!fixIndex(atoi((*token)), vtsize, &vi.vt_idx, true, context)) { return false; } @@ -1176,7 +1239,7 @@ static bool parseTriple(const char **token, int vsize, int vnsize, int vtsize, // i/j/k (*token)++; // skip '/' - if (!fixIndex(atoi((*token)), vnsize, &(vi.vn_idx))) { + if (!fixIndex(atoi((*token)), vnsize, &vi.vn_idx, true, context)) { return false; } (*token) += strcspn((*token), "/ \t\r"); @@ -1397,6 +1460,35 @@ static int pnpoly(int nvert, T *vertx, T *verty, T testx, T testy) { return c; } +struct TinyObjPoint { + real_t x, y, z; + TinyObjPoint() : x(0), y(0), z(0) {} + TinyObjPoint(real_t x_, real_t y_, real_t z_) : x(x_), y(y_), z(z_) {} +}; + +inline TinyObjPoint cross(const TinyObjPoint &v1, const TinyObjPoint &v2) { + return TinyObjPoint(v1.y * v2.z - v1.z * v2.y, v1.z * v2.x - v1.x * v2.z, + v1.x * v2.y - v1.y * v2.x); +} + +inline real_t dot(const TinyObjPoint &v1, const TinyObjPoint &v2) { + return (v1.x * v2.x + v1.y * v2.y + v1.z * v2.z); +} + +inline real_t GetLength(TinyObjPoint &e) { + return std::sqrt(e.x * e.x + e.y * e.y + e.z * e.z); +} + +inline TinyObjPoint Normalize(TinyObjPoint e) { + real_t inv_length = real_t(1) / GetLength(e); + return TinyObjPoint(e.x * inv_length, e.y * inv_length, e.z * inv_length); +} + +inline TinyObjPoint WorldToLocal(const TinyObjPoint &a, const TinyObjPoint &u, + const TinyObjPoint &v, const TinyObjPoint &w) { + return TinyObjPoint(dot(a, u), dot(a, v), dot(a, w)); +} + // TODO(syoyo): refactor function. static bool exportGroupsToShape(shape_t *shape, const PrimGroup &prim_group, const std::vector &tags, @@ -1425,7 +1517,7 @@ static bool exportGroupsToShape(shape_t *shape, const PrimGroup &prim_group, continue; } - if (triangulate) { + if (triangulate && npolys != 3) { if (npolys == 4) { vertex_index_t i0 = face.vertex_indices[0]; vertex_index_t i1 = face.vertex_indices[1]; @@ -1534,65 +1626,62 @@ static bool exportGroupsToShape(shape_t *shape, const PrimGroup &prim_group, shape->mesh.smoothing_group_ids.push_back(face.smoothing_group_id); } else { +#ifdef TINYOBJLOADER_USE_MAPBOX_EARCUT vertex_index_t i0 = face.vertex_indices[0]; - vertex_index_t i1(-1); - vertex_index_t i2 = face.vertex_indices[1]; + vertex_index_t i0_2 = i0; - // find the two axes to work in - size_t axes[2] = {1, 2}; + // TMW change: Find the normal axis of the polygon using Newell's + // method + TinyObjPoint n; for (size_t k = 0; k < npolys; ++k) { - i0 = face.vertex_indices[(k + 0) % npolys]; - i1 = face.vertex_indices[(k + 1) % npolys]; - i2 = face.vertex_indices[(k + 2) % npolys]; + i0 = face.vertex_indices[k % npolys]; size_t vi0 = size_t(i0.v_idx); - size_t vi1 = size_t(i1.v_idx); - size_t vi2 = size_t(i2.v_idx); - if (((3 * vi0 + 2) >= v.size()) || ((3 * vi1 + 2) >= v.size()) || - ((3 * vi2 + 2) >= v.size())) { - // Invalid triangle. - // FIXME(syoyo): Is it ok to simply skip this invalid triangle? 
- continue; - } + size_t j = (k + 1) % npolys; + i0_2 = face.vertex_indices[j]; + size_t vi0_2 = size_t(i0_2.v_idx); + real_t v0x = v[vi0 * 3 + 0]; real_t v0y = v[vi0 * 3 + 1]; real_t v0z = v[vi0 * 3 + 2]; - real_t v1x = v[vi1 * 3 + 0]; - real_t v1y = v[vi1 * 3 + 1]; - real_t v1z = v[vi1 * 3 + 2]; - real_t v2x = v[vi2 * 3 + 0]; - real_t v2y = v[vi2 * 3 + 1]; - real_t v2z = v[vi2 * 3 + 2]; - real_t e0x = v1x - v0x; - real_t e0y = v1y - v0y; - real_t e0z = v1z - v0z; - real_t e1x = v2x - v1x; - real_t e1y = v2y - v1y; - real_t e1z = v2z - v1z; - real_t cx = std::fabs(e0y * e1z - e0z * e1y); - real_t cy = std::fabs(e0z * e1x - e0x * e1z); - real_t cz = std::fabs(e0x * e1y - e0y * e1x); - const real_t epsilon = std::numeric_limits::epsilon(); - // std::cout << "cx " << cx << ", cy " << cy << ", cz " << cz << - // "\n"; - if (cx > epsilon || cy > epsilon || cz > epsilon) { - // std::cout << "corner\n"; - // found a corner - if (cx > cy && cx > cz) { - // std::cout << "pattern0\n"; - } else { - // std::cout << "axes[0] = 0\n"; - axes[0] = 0; - if (cz > cx && cz > cy) { - // std::cout << "axes[1] = 1\n"; - axes[1] = 1; - } - } - break; - } - } -#ifdef TINYOBJLOADER_USE_MAPBOX_EARCUT + real_t v0x_2 = v[vi0_2 * 3 + 0]; + real_t v0y_2 = v[vi0_2 * 3 + 1]; + real_t v0z_2 = v[vi0_2 * 3 + 2]; + + const TinyObjPoint point1(v0x, v0y, v0z); + const TinyObjPoint point2(v0x_2, v0y_2, v0z_2); + + TinyObjPoint a(point1.x - point2.x, point1.y - point2.y, + point1.z - point2.z); + TinyObjPoint b(point1.x + point2.x, point1.y + point2.y, + point1.z + point2.z); + + n.x += (a.y * b.z); + n.y += (a.z * b.x); + n.z += (a.x * b.y); + } + real_t length_n = GetLength(n); + // Check if zero length normal + if (length_n <= 0) { + continue; + } + // Negative is to flip the normal to the correct direction + real_t inv_length = -real_t(1.0) / length_n; + n.x *= inv_length; + n.y *= inv_length; + n.z *= inv_length; + + TinyObjPoint axis_w, axis_v, axis_u; + axis_w = n; + TinyObjPoint a; + if (std::fabs(axis_w.x) > real_t(0.9999999)) { + a = TinyObjPoint(0, 1, 0); + } else { + a = TinyObjPoint(1, 0, 0); + } + axis_v = Normalize(cross(axis_w, a)); + axis_u = cross(axis_w, axis_v); using Point = std::array; // first polyline define the main polygon. @@ -1601,6 +1690,10 @@ static bool exportGroupsToShape(shape_t *shape, const PrimGroup &prim_group, std::vector polyline; + // TMW change: Find best normal and project v0x and v0y to those + // coordinates, instead of picking a plane aligned with an axis (which + // can flip polygons). + // Fill polygon data(facevarying vertices). 
for (size_t k = 0; k < npolys; k++) { i0 = face.vertex_indices[k]; @@ -1608,10 +1701,14 @@ static bool exportGroupsToShape(shape_t *shape, const PrimGroup &prim_group, assert(((3 * vi0 + 2) < v.size())); - real_t v0x = v[vi0 * 3 + axes[0]]; - real_t v0y = v[vi0 * 3 + axes[1]]; + real_t v0x = v[vi0 * 3 + 0]; + real_t v0y = v[vi0 * 3 + 1]; + real_t v0z = v[vi0 * 3 + 2]; - polyline.push_back({v0x, v0y}); + TinyObjPoint polypoint(v0x, v0y, v0z); + TinyObjPoint loc = WorldToLocal(polypoint, axis_u, axis_v, axis_w); + + polyline.push_back({loc.x, loc.y}); } polygon.push_back(polyline); @@ -1652,7 +1749,63 @@ static bool exportGroupsToShape(shape_t *shape, const PrimGroup &prim_group, } #else // Built-in ear clipping triangulation + vertex_index_t i0 = face.vertex_indices[0]; + vertex_index_t i1(-1); + vertex_index_t i2 = face.vertex_indices[1]; + // find the two axes to work in + size_t axes[2] = {1, 2}; + for (size_t k = 0; k < npolys; ++k) { + i0 = face.vertex_indices[(k + 0) % npolys]; + i1 = face.vertex_indices[(k + 1) % npolys]; + i2 = face.vertex_indices[(k + 2) % npolys]; + size_t vi0 = size_t(i0.v_idx); + size_t vi1 = size_t(i1.v_idx); + size_t vi2 = size_t(i2.v_idx); + + if (((3 * vi0 + 2) >= v.size()) || ((3 * vi1 + 2) >= v.size()) || + ((3 * vi2 + 2) >= v.size())) { + // Invalid triangle. + // FIXME(syoyo): Is it ok to simply skip this invalid triangle? + continue; + } + real_t v0x = v[vi0 * 3 + 0]; + real_t v0y = v[vi0 * 3 + 1]; + real_t v0z = v[vi0 * 3 + 2]; + real_t v1x = v[vi1 * 3 + 0]; + real_t v1y = v[vi1 * 3 + 1]; + real_t v1z = v[vi1 * 3 + 2]; + real_t v2x = v[vi2 * 3 + 0]; + real_t v2y = v[vi2 * 3 + 1]; + real_t v2z = v[vi2 * 3 + 2]; + real_t e0x = v1x - v0x; + real_t e0y = v1y - v0y; + real_t e0z = v1z - v0z; + real_t e1x = v2x - v1x; + real_t e1y = v2y - v1y; + real_t e1z = v2z - v1z; + real_t cx = std::fabs(e0y * e1z - e0z * e1y); + real_t cy = std::fabs(e0z * e1x - e0x * e1z); + real_t cz = std::fabs(e0x * e1y - e0y * e1x); + const real_t epsilon = std::numeric_limits::epsilon(); + // std::cout << "cx " << cx << ", cy " << cy << ", cz " << cz << + // "\n"; + if (cx > epsilon || cy > epsilon || cz > epsilon) { + // std::cout << "corner\n"; + // found a corner + if (cx > cy && cx > cz) { + // std::cout << "pattern0\n"; + } else { + // std::cout << "axes[0] = 0\n"; + axes[0] = 0; + if (cz > cx && cz > cy) { + // std::cout << "axes[1] = 1\n"; + axes[1] = 1; + } + } + break; + } + } face_t remainingFace = face; // copy size_t guess_vert = 0; @@ -1712,7 +1865,8 @@ static bool exportGroupsToShape(shape_t *shape, const PrimGroup &prim_group, // std::cout << "e0x, e0y, e1x, e1y " << e0x << ", " << e0y << ", " // << e1x << ", " << e1y << "\n"; - real_t area = (vx[0] * vy[1] - vy[0] * vx[1]) * static_cast(0.5); + real_t area = + (vx[0] * vy[1] - vy[0] * vx[1]) * static_cast(0.5); // std::cout << "cross " << cross << ", area " << area << "\n"; // if an internal angle if (cross * area < static_cast(0.0)) { @@ -1829,7 +1983,7 @@ static bool exportGroupsToShape(shape_t *shape, const PrimGroup &prim_group, } shape->mesh.num_face_vertices.push_back( - static_cast(npolys)); + static_cast(npolys)); shape->mesh.material_ids.push_back(material_id); // per face shape->mesh.smoothing_group_ids.push_back( face.smoothing_group_id); // per face @@ -1967,6 +2121,9 @@ void LoadMtl(std::map *material_map, if (linebuf.empty()) { continue; } + if (line_no == 1) { + linebuf = removeUtf8Bom(linebuf); + } // Skip leading space. 
     const char *token = linebuf.c_str();
@@ -1991,13 +2148,19 @@ void LoadMtl(std::map<std::string, int> *material_map,
 
       has_d = false;
       has_tr = false;
+      has_kd = false;
 
       // set new mtl name
       token += 7;
       {
-        std::stringstream sstr;
-        sstr << token;
-        material.name = sstr.str();
+        std::string namebuf = parseString(&token);
+        // TODO: empty name check?
+        if (namebuf.empty()) {
+          if (warning) {
+            (*warning) += "empty material name in `newmtl`\n";
+          }
+        }
+        material.name = namebuf;
       }
       continue;
     }
@@ -2161,7 +2324,7 @@ void LoadMtl(std::map<std::string, int> *material_map,
       continue;
     }
 
-    // ambient texture
+    // ambient or ambient occlusion texture
     if ((0 == strncmp(token, "map_Ka", 6)) && IS_SPACE(token[6])) {
       token += 7;
       ParseTextureNameAndOption(&(material.ambient_texname),
@@ -2203,15 +2366,9 @@ void LoadMtl(std::map<std::string, int> *material_map,
     }
 
     // bump texture
-    if ((0 == strncmp(token, "map_bump", 8)) && IS_SPACE(token[8])) {
-      token += 9;
-      ParseTextureNameAndOption(&(material.bump_texname),
-                                &(material.bump_texopt), token);
-      continue;
-    }
-
-    // bump texture
-    if ((0 == strncmp(token, "map_Bump", 8)) && IS_SPACE(token[8])) {
+    if (((0 == strncmp(token, "map_bump", 8)) ||
+         (0 == strncmp(token, "map_Bump", 8))) &&
+        IS_SPACE(token[8])) {
       token += 9;
       ParseTextureNameAndOption(&(material.bump_texname),
                                 &(material.bump_texopt), token);
@@ -2235,6 +2392,16 @@ void LoadMtl(std::map<std::string, int> *material_map,
       continue;
     }
 
+    // displacement texture
+    if (((0 == strncmp(token, "map_disp", 8)) ||
+         (0 == strncmp(token, "map_Disp", 8))) &&
+        IS_SPACE(token[8])) {
+      token += 9;
+      ParseTextureNameAndOption(&(material.displacement_texname),
+                                &(material.displacement_texopt), token);
+      continue;
+    }
+
     // displacement texture
     if ((0 == strncmp(token, "disp", 4)) && IS_SPACE(token[4])) {
       token += 5;
@@ -2437,10 +2604,11 @@ bool LoadObj(attrib_t *attrib, std::vector<shape_t> *shapes,
   std::stringstream errss;
 
   std::vector<real_t> v;
+  std::vector<real_t> vertex_weights;  // optional [w] component in `v`
   std::vector<real_t> vn;
   std::vector<real_t> vt;
   std::vector<real_t> vc;
-  std::vector<skin_weight_t> vw;
+  std::vector<skin_weight_t> vw;  // tinyobj extension: vertex skin weights
   std::vector<tag_t> tags;
   PrimGroup prim_group;
   std::string name;
@@ -2460,7 +2628,7 @@ bool LoadObj(attrib_t *attrib, std::vector<shape_t> *shapes,
 
   shape_t shape;
 
-  bool found_all_colors = true;
+  bool found_all_colors = true;  // check if all `v` lines have color info
 
   size_t line_num = 0;
   std::string linebuf;
@@ -2483,6 +2651,9 @@ bool LoadObj(attrib_t *attrib, std::vector<shape_t> *shapes,
     if (linebuf.empty()) {
       continue;
     }
+    if (line_num == 1) {
+      linebuf = removeUtf8Bom(linebuf);
+    }
 
     // Skip leading space.
     const char *token = linebuf.c_str();
@@ -2499,13 +2670,17 @@ bool LoadObj(attrib_t *attrib, std::vector<shape_t> *shapes,
       real_t x, y, z;
      real_t r, g, b;
 
-      found_all_colors &= parseVertexWithColor(&x, &y, &z, &r, &g, &b, &token);
+      int num_components = parseVertexWithColor(&x, &y, &z, &r, &g, &b, &token);
+      found_all_colors &= (num_components == 6);
 
       v.push_back(x);
       v.push_back(y);
       v.push_back(z);
 
-      if (found_all_colors || default_vcols_fallback) {
+      vertex_weights.push_back(
+          r);  // r = w, and initialized to 1.0 when `w` component is not found.
+ + if ((num_components == 6) || default_vcols_fallback) { vc.push_back(r); vc.push_back(g); vc.push_back(b); @@ -2551,7 +2726,7 @@ bool LoadObj(attrib_t *attrib, std::vector *shapes, sw.vertex_id = vid; - while (!IS_NEW_LINE(token[0])) { + while (!IS_NEW_LINE(token[0]) && token[0] != '#') { real_t j, w; // joint_id should not be negative, weight may be negative // TODO(syoyo): # of elements check @@ -2582,23 +2757,26 @@ bool LoadObj(attrib_t *attrib, std::vector *shapes, vw.push_back(sw); } + warning_context context; + context.warn = warn; + context.line_number = line_num; + // line if (token[0] == 'l' && IS_SPACE((token[1]))) { token += 2; __line_t line; - while (!IS_NEW_LINE(token[0])) { + while (!IS_NEW_LINE(token[0]) && token[0] != '#') { vertex_index_t vi; if (!parseTriple(&token, static_cast(v.size() / 3), static_cast(vn.size() / 3), - static_cast(vt.size() / 2), &vi)) { + static_cast(vt.size() / 2), &vi, context)) { if (err) { - std::stringstream ss; - ss << "Failed parse `l' line(e.g. zero value for vertex index. " - "line " - << line_num << ".)\n"; - (*err) += ss.str(); + (*err) += + "Failed to parse `l' line (e.g. a zero value for vertex index. " + "Line " + + toString(line_num) + ").\n"; } return false; } @@ -2620,17 +2798,16 @@ bool LoadObj(attrib_t *attrib, std::vector *shapes, __points_t pts; - while (!IS_NEW_LINE(token[0])) { + while (!IS_NEW_LINE(token[0]) && token[0] != '#') { vertex_index_t vi; if (!parseTriple(&token, static_cast(v.size() / 3), static_cast(vn.size() / 3), - static_cast(vt.size() / 2), &vi)) { + static_cast(vt.size() / 2), &vi, context)) { if (err) { - std::stringstream ss; - ss << "Failed parse `p' line(e.g. zero value for vertex index. " - "line " - << line_num << ".)\n"; - (*err) += ss.str(); + (*err) += + "Failed to parse `p' line (e.g. a zero value for vertex index. " + "Line " + + toString(line_num) + ").\n"; } return false; } @@ -2656,16 +2833,16 @@ bool LoadObj(attrib_t *attrib, std::vector *shapes, face.smoothing_group_id = current_smoothing_id; face.vertex_indices.reserve(3); - while (!IS_NEW_LINE(token[0])) { + while (!IS_NEW_LINE(token[0]) && token[0] != '#') { vertex_index_t vi; if (!parseTriple(&token, static_cast(v.size() / 3), static_cast(vn.size() / 3), - static_cast(vt.size() / 2), &vi)) { + static_cast(vt.size() / 2), &vi, context)) { if (err) { - std::stringstream ss; - ss << "Failed parse `f' line(e.g. zero value for face index. line " - << line_num << ".)\n"; - (*err) += ss.str(); + (*err) += + "Failed to parse `f' line (e.g. a zero value for vertex index " + "or invalid relative vertex index). 
Line " + + toString(line_num) + ").\n"; } return false; } @@ -2792,7 +2969,7 @@ bool LoadObj(attrib_t *attrib, std::vector *shapes, std::vector names; - while (!IS_NEW_LINE(token[0])) { + while (!IS_NEW_LINE(token[0]) && token[0] != '#') { std::string str = parseString(&token); names.push_back(str); token += strspn(token, " \t\r"); // skip tag @@ -2954,14 +3131,16 @@ bool LoadObj(attrib_t *attrib, std::vector *shapes, if (greatest_vn_idx >= static_cast(vn.size() / 3)) { if (warn) { std::stringstream ss; - ss << "Vertex normal indices out of bounds (line " << line_num << ".)\n\n"; + ss << "Vertex normal indices out of bounds (line " << line_num + << ".)\n\n"; (*warn) += ss.str(); } } if (greatest_vt_idx >= static_cast(vt.size() / 2)) { if (warn) { std::stringstream ss; - ss << "Vertex texcoord indices out of bounds (line " << line_num << ".)\n\n"; + ss << "Vertex texcoord indices out of bounds (line " << line_num + << ".)\n\n"; (*warn) += ss.str(); } } @@ -2983,7 +3162,7 @@ bool LoadObj(attrib_t *attrib, std::vector *shapes, } attrib->vertices.swap(v); - attrib->vertex_weights.swap(v); + attrib->vertex_weights.swap(vertex_weights); attrib->normals.swap(vn); attrib->texcoords.swap(vt); attrib->texcoord_ws.swap(vt); @@ -3042,11 +3221,16 @@ bool LoadObjWithCallback(std::istream &inStream, const callback_t &callback, // vertex if (token[0] == 'v' && IS_SPACE((token[1]))) { token += 2; - // TODO(syoyo): Support parsing vertex color extension. - real_t x, y, z, w; // w is optional. default = 1.0 - parseV(&x, &y, &z, &w, &token); + real_t x, y, z; + real_t r, g, b; + + int num_components = parseVertexWithColor(&x, &y, &z, &r, &g, &b, &token); if (callback.vertex_cb) { - callback.vertex_cb(user_data, x, y, z, w); + callback.vertex_cb(user_data, x, y, z, r); // r=w is optional + } + if (callback.vertex_color_cb) { + bool found_color = (num_components == 6); + callback.vertex_color_cb(user_data, x, y, z, r, g, b, found_color); } continue; } @@ -3079,7 +3263,7 @@ bool LoadObjWithCallback(std::istream &inStream, const callback_t &callback, token += strspn(token, " \t"); indices.clear(); - while (!IS_NEW_LINE(token[0])) { + while (!IS_NEW_LINE(token[0]) && token[0] != '#') { vertex_index_t vi = parseRawTriple(&token); index_t idx; @@ -3194,7 +3378,7 @@ bool LoadObjWithCallback(std::istream &inStream, const callback_t &callback, if (token[0] == 'g' && IS_SPACE((token[1]))) { names.clear(); - while (!IS_NEW_LINE(token[0])) { + while (!IS_NEW_LINE(token[0]) && token[0] != '#') { std::string str = parseString(&token); names.push_back(str); token += strspn(token, " \t\r"); // skip tag
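
For reference, here is a minimal sketch (not part of the patch itself) of how a caller might consume the vertex_color_cb callback introduced above. The file name, the ColorDump struct, and the VertexColorCb function are illustrative inventions, and the trailing LoadObjWithCallback parameters (user_data, readMatFn, warn, err) are assumed from the existing v1 callback API rather than taken from this diff.

// Hypothetical example: collect per-vertex RGB from `v x y z r g b` lines.
#define TINYOBJLOADER_IMPLEMENTATION
#include "tiny_obj_loader.h"

#include <cstdio>
#include <fstream>
#include <string>
#include <vector>

struct ColorDump {
  std::vector<tinyobj::real_t> rgb;  // appended only when a color was present
};

static void VertexColorCb(void *user_data, tinyobj::real_t /*x*/,
                          tinyobj::real_t /*y*/, tinyobj::real_t /*z*/,
                          tinyobj::real_t r, tinyobj::real_t g,
                          tinyobj::real_t b, bool has_color) {
  // has_color is false for plain `v x y z` / `v x y z w` lines; in that case
  // r may carry the optional w component and g/b the 1.0 fallback.
  if (!has_color) return;
  ColorDump *dump = static_cast<ColorDump *>(user_data);
  dump->rgb.push_back(r);
  dump->rgb.push_back(g);
  dump->rgb.push_back(b);
}

int main() {
  std::ifstream ifs("colored.obj");  // hypothetical input file
  if (!ifs) return 1;

  tinyobj::callback_t cb;  // all callbacks default to NULL and are skipped
  cb.vertex_color_cb = VertexColorCb;

  ColorDump dump;
  std::string warn, err;
  bool ok = tinyobj::LoadObjWithCallback(ifs, cb, &dump, /*readMatFn=*/NULL,
                                         &warn, &err);
  if (!warn.empty()) std::fprintf(stderr, "WARN: %s", warn.c_str());
  if (!err.empty()) std::fprintf(stderr, "ERR: %s", err.c_str());
  std::printf("%zu colored vertices\n", dump.rgb.size() / 3);
  return ok ? 0 : 1;
}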