diff --git a/.clang-format b/.clang-format index d912fdc66..80f4f718b 100644 --- a/.clang-format +++ b/.clang-format @@ -1,5 +1,5 @@ --- -BasedOnStyle: Google +BasedOnStyle: Google AccessModifierOffset: -2 ConstructorInitializerIndentWidth: 2 AlignEscapedNewlinesLeft: false @@ -8,15 +8,13 @@ AllowAllParametersOfDeclarationOnNextLine: false AllowShortIfStatementsOnASingleLine: false AllowShortLoopsOnASingleLine: false AllowShortFunctionsOnASingleLine: None -AllowShortLoopsOnASingleLine: false AlwaysBreakTemplateDeclarations: true AlwaysBreakBeforeMultilineStrings: false BreakBeforeBinaryOperators: false BreakBeforeTernaryOperators: false -BreakConstructorInitializersBeforeComma: false -BreakConstructorInitializers: AfterColon +BreakConstructorInitializers: BeforeComma BinPackParameters: true -ColumnLimit: 90 +ColumnLimit: 90 ConstructorInitializerAllOnOneLineOrOnePerLine: true DerivePointerBinding: false PointerBindsToType: true @@ -31,19 +29,20 @@ PenaltyBreakString: 1 PenaltyBreakFirstLessLess: 1000 PenaltyExcessCharacter: 1000 PenaltyReturnTypeOnItsOwnLine: 90 -SpacesBeforeTrailingComments: 3 -Cpp11BracedListStyle: true -Standard: Auto -IndentWidth: 2 -TabWidth: 2 -UseTab: Never +SpacesBeforeTrailingComments: 2 +Cpp11BracedListStyle: false +Standard: Auto +IndentWidth: 2 +TabWidth: 2 +UseTab: Never IndentFunctionDeclarationAfterType: false SpacesInParentheses: false -SpacesInAngles: false +SpacesInAngles: false SpaceInEmptyParentheses: false SpacesInCStyleCastParentheses: false SpaceAfterControlStatementKeyword: true SpaceBeforeAssignmentOperators: true +SpaceBeforeParens: Never ContinuationIndentWidth: 4 SortIncludes: false SpaceAfterCStyleCast: false @@ -54,17 +53,16 @@ BreakBeforeBraces: Custom # Control of individual brace wrapping cases BraceWrapping: { - AfterClass: 'true' - AfterControlStatement: 'true' - AfterEnum : 'true' - AfterFunction : 'true' - AfterNamespace : 'true' - AfterStruct : 'true' - AfterUnion : 'true' - BeforeCatch : 'true' - BeforeElse : 
'true' - IndentBraces : 'false' + AfterClass: 'true', + AfterControlStatement: 'true', + AfterEnum : 'true', + AfterFunction : 'true', + AfterNamespace : 'true', + AfterStruct : 'true', + AfterUnion : 'true', + BeforeCatch : 'true', + BeforeElse : 'true', + IndentBraces : 'false', SplitEmptyFunction: 'false' } ... - diff --git a/.codespell_ignore_words b/.codespell_ignore_words new file mode 100644 index 000000000..ab09b3c2f --- /dev/null +++ b/.codespell_ignore_words @@ -0,0 +1,7 @@ +INOUT +InOut +delimeter +Succesful +worl +valu +Exeption diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md new file mode 100644 index 000000000..9eb010608 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -0,0 +1,19 @@ +--- +name: Bug report +about: Help me help you... +title: '' +labels: '' +assignees: '' + +--- + +**Describe the bug** +A clear and concise description of what the bug is. + +If you are experiencing a crash, provide a backtrace (GDB or similar). + +**How to Reproduce** + +Please provide a specific description of how to reproduce the issue or source code that can be compiled and executed. Please attach a file/project that is easy to compile, don't copy and paste code snippets! + +Even better, create a Pull Request with a failing unit test. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md new file mode 100644 index 000000000..021458556 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -0,0 +1,20 @@ +--- +name: Feature request +about: Suggest an idea for this project +title: '' +labels: '' +assignees: '' + +--- + +**Is your feature request related to a problem? Please describe.** +A clear and concise description of what the problem is. + +**Describe the solution you'd like** +A clear and concise description of what you want to happen. 
+ +**Describe alternatives you've considered** +A clear and concise description of any alternative solutions or features you've considered. + +**Additional context** +Add any other context or screenshots about the feature request here. diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 000000000..27c6ae66a --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,11 @@ + diff --git a/.github/workflows/cmake_ubuntu.yml b/.github/workflows/cmake_ubuntu.yml index 3c0889ba7..41ed9a196 100644 --- a/.github/workflows/cmake_ubuntu.yml +++ b/.github/workflows/cmake_ubuntu.yml @@ -1,6 +1,11 @@ name: cmake Ubuntu -on: [push, pull_request] +on: + push: + branches: + - master + pull_request: + types: [opened, synchronize, reopened] env: # Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.) @@ -15,22 +20,17 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-20.04] + os: [ubuntu-22.04] steps: - uses: actions/checkout@v2 - + - name: Install Conan id: conan uses: turtlebrowser/get-conan@main - with: - version: 1.59.0 - - - name: Create default profile - run: conan profile new default --detect - - name: Update profile - run: conan profile update settings.compiler.libcxx=libstdc++11 default + - name: Create default profile + run: conan profile detect - name: Create Build Environment # Some projects don't allow in-source building, so create a separate build directory @@ -44,17 +44,16 @@ jobs: - name: Configure CMake shell: bash working-directory: ${{github.workspace}}/build - run: cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -DCMAKE_TOOLCHAIN_FILE=conan_toolchain.cmake + run: cmake ${{github.workspace}} -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}} -DCMAKE_TOOLCHAIN_FILE=conan_toolchain.cmake - name: Build shell: bash working-directory: ${{github.workspace}}/build run: cmake --build . 
--config ${{env.BUILD_TYPE}} - + - name: run test (Linux) - working-directory: ${{github.workspace}}/build - run: ./tests/behaviortree_cpp_test - + working-directory: ${{github.workspace}}/build/tests + run: ctest + - name: Upload coverage reports to Codecov uses: codecov/codecov-action@v3 - diff --git a/.github/workflows/cmake_windows.yml b/.github/workflows/cmake_windows.yml index aed23392f..34f4f97ce 100644 --- a/.github/workflows/cmake_windows.yml +++ b/.github/workflows/cmake_windows.yml @@ -1,6 +1,11 @@ name: cmake Windows -on: [push, pull_request] +on: + push: + branches: + - master + pull_request: + types: [opened, synchronize, reopened] env: # Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.) @@ -18,16 +23,14 @@ jobs: os: [windows-latest] steps: - - uses: actions/checkout@v2 - + - uses: actions/checkout@v4 + - name: Install Conan id: conan uses: turtlebrowser/get-conan@main - with: - version: 1.59.0 - + - name: Create default profile - run: conan profile new default --detect + run: conan profile detect - name: Create Build Environment # Some projects don't allow in-source building, so create a separate build directory @@ -47,8 +50,7 @@ jobs: working-directory: ${{github.workspace}}/build shell: bash run: cmake --build . 
--config ${{env.BUILD_TYPE}} - + - name: run test (Windows) working-directory: ${{github.workspace}}/build run: $env:PATH+=";${{env.BUILD_TYPE}}"; tests/${{env.BUILD_TYPE}}/behaviortree_cpp_test.exe - diff --git a/.github/workflows/doxygen-gh-pages.yml b/.github/workflows/doxygen-gh-pages.yml new file mode 100644 index 000000000..b8fb8f46b --- /dev/null +++ b/.github/workflows/doxygen-gh-pages.yml @@ -0,0 +1,18 @@ +name: Doxygen GitHub Pages Deploy Action + +on: + push: + branches: + - main + - master + +jobs: + deploy: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - uses: DenverCoder1/doxygen-github-pages-action@v2.0.0 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + folder: doc/html diff --git a/.github/workflows/pixi.yaml b/.github/workflows/pixi.yaml index f6269ee7b..ddd1cbfb8 100644 --- a/.github/workflows/pixi.yaml +++ b/.github/workflows/pixi.yaml @@ -1,58 +1,27 @@ name: Pixi (conda) -on: [push, pull_request] - -env: - # Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.) 
- # Note if this value is changed, has to be manually updated in the `windows-latest` tests_command - BUILD_TYPE: Release +on: + push: + branches: + - master + pull_request: + types: [opened, synchronize, reopened] jobs: pixi_conda_build: strategy: matrix: - include: - - os: windows-latest - build_depend: vs2022_win-64=19.* - tests_command: "'PATH=\\\"$PATH;build/Release\\\" build/tests/Release/behaviortree_cpp_test.exe'" - - os: ubuntu-latest - build_depend: "gxx=12.2.*" - tests_command: "./build/tests/behaviortree_cpp_test" + os: + - windows-latest + - ubuntu-latest runs-on: ${{ matrix.os }} steps: # Pixi is the tool used to create/manage conda environment - - uses: prefix-dev/setup-pixi@v0.4.1 - with: - pixi-version: v0.7.0 - locked: false - frozen: false - run-install: false - manifest-path: build-env/pixi.yaml - - name: Make pixi workspace - run: | - pixi init build-env - - name: Install dependencies - working-directory: ${{github.workspace}}/build-env - run: | - pixi add cmake zeromq=4.3.4 gtest=1.12.* gmock=1.12.* sqlite=3.40.* ${{ matrix.build-depend }} - pixi install - - name: Create Build Directory - working-directory: ${{github.workspace}}/build-env - run: mkdir build - uses: actions/checkout@v3 + - uses: prefix-dev/setup-pixi@v0.8.1 with: - path: build-env/BehaviorTree.CPP + pixi-version: v0.40.3 - name: Build - working-directory: ${{github.workspace}}/build-env - run: | - pixi task add build "cd build; cmake ../BehaviorTree.CPP -DCMAKE_BUILD_TYPE=${{env.BUILD_TYPE}}; cmake --build . 
--parallel --config ${{env.BUILD_TYPE}}" - pixi run build + run: pixi run build - name: Run tests - working-directory: ${{github.workspace}}/build-env - run: | - pixi task add tests ${{ matrix.tests_command }} - pixi run tests - - - - \ No newline at end of file + run: pixi run test diff --git a/.github/workflows/pre-commit.yaml b/.github/workflows/pre-commit.yaml new file mode 100644 index 000000000..ee7fa9229 --- /dev/null +++ b/.github/workflows/pre-commit.yaml @@ -0,0 +1,16 @@ +name: pre-commit + +on: + push: + branches: + - master + pull_request: + types: [opened, synchronize, reopened] + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/ros1.yaml b/.github/workflows/ros1.yaml deleted file mode 100644 index 9d244f60f..000000000 --- a/.github/workflows/ros1.yaml +++ /dev/null @@ -1,17 +0,0 @@ -name: ros1 - -on: [push, pull_request] - -jobs: - industrial_ci: - strategy: - matrix: - env: - - {ROS_DISTRO: noetic, ROS_REPO: main} - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v1 - - uses: 'ros-industrial/industrial_ci@master' - env: ${{matrix.env}} - with: - package-name: behaviortree_cpp diff --git a/.github/workflows/ros2-rolling.yaml b/.github/workflows/ros2-rolling.yaml new file mode 100644 index 000000000..446c49879 --- /dev/null +++ b/.github/workflows/ros2-rolling.yaml @@ -0,0 +1,22 @@ +name: ros2-rolling + +on: + push: + branches: + - master + pull_request: + types: [opened, synchronize, reopened] + +jobs: + industrial_ci: + strategy: + matrix: + env: + - {ROS_DISTRO: rolling, ROS_REPO: main} + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: 'ros-industrial/industrial_ci@master' + env: ${{matrix.env}} + with: + package-name: behaviortree_cpp diff --git a/.github/workflows/ros2.yaml b/.github/workflows/ros2.yaml index 23422b835..099cc04f2 100644 --- a/.github/workflows/ros2.yaml +++ 
b/.github/workflows/ros2.yaml @@ -1,6 +1,11 @@ name: ros2 -on: [push, pull_request] +on: + push: + branches: + - master + pull_request: + types: [opened, synchronize, reopened] jobs: industrial_ci: @@ -8,11 +13,11 @@ jobs: matrix: env: - {ROS_DISTRO: humble, ROS_REPO: main} - - {ROS_DISTRO: rolling, ROS_REPO: main} + - {ROS_DISTRO: jazzy, ROS_REPO: main} runs-on: ubuntu-latest steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v3 - uses: 'ros-industrial/industrial_ci@master' env: ${{matrix.env}} with: - package-name: behaviortree_cpp + package-name: behaviortree_cpp diff --git a/.github/workflows/sonarcube.yml.bkp b/.github/workflows/sonarcube.yml.bkp new file mode 100644 index 000000000..926306125 --- /dev/null +++ b/.github/workflows/sonarcube.yml.bkp @@ -0,0 +1,42 @@ +name: Sonarcube Scan + +on: + push: + branches: + - master + pull_request: + types: [opened, synchronize, reopened] + +jobs: + build: + name: Build + runs-on: ubuntu-latest + env: + BUILD_WRAPPER_OUT_DIR: build_wrapper_output_directory # Directory where build-wrapper output will be placed + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis + - name: Install Build Wrapper + uses: SonarSource/sonarqube-scan-action/install-build-wrapper@v4 + + - name: Install Dependencies + run: | + sudo apt-get update + sudo apt-get install -y libzmq3-dev libsqlite3-dev + + - name: Install googletest + uses: Bacondish2023/setup-googletest@v1 + + - name: Run Build Wrapper + run: | + mkdir build + cmake -S . 
-B build + build-wrapper-linux-x86-64 --out-dir ${{ env.BUILD_WRAPPER_OUT_DIR }} cmake --build build/ --config Release + - name: SonarQube Scan + uses: SonarSource/sonarqube-scan-action@v4 + env: + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} # Put the name of your token here + with: + args: > + --define sonar.cfamily.compile-commands="${{ env.BUILD_WRAPPER_OUT_DIR }}/compile_commands.json" diff --git a/.gitignore b/.gitignore index 79bec0c7a..9d5bd4326 100644 --- a/.gitignore +++ b/.gitignore @@ -11,3 +11,10 @@ CMakeSettings.json # OSX junk .DS_Store + +# pixi environments +.pixi + +CMakeUserPresets.json + +tags diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..d491f36d9 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,54 @@ + +# To use: +# +# pre-commit run -a +# +# Or: +# +# pre-commit install # (runs every time you commit in git) +# +# To update this file: +# +# pre-commit autoupdate +# +# See https://github.com/pre-commit/pre-commit + +exclude: ^3rdparty/|3rdparty|^include/behaviortree_cpp/contrib/ +repos: + + # Standard hooks + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-added-large-files + - id: check-ast + - id: check-case-conflict + - id: check-docstring-first + - id: check-merge-conflict + - id: check-symlinks + - id: check-xml + - id: check-yaml + - id: debug-statements + - id: end-of-file-fixer + exclude_types: [svg] + - id: mixed-line-ending + - id: trailing-whitespace + exclude_types: [svg] + - id: fix-byte-order-marker + + # CPP hooks + - repo: https://github.com/pre-commit/mirrors-clang-format + rev: v17.0.6 + hooks: + - id: clang-format + args: ['-fallback-style=none', '-i'] + + # Spell check + - repo: https://github.com/codespell-project/codespell + rev: v2.4.1 + hooks: + - id: codespell + additional_dependencies: + - tomli + args: + [--toml=./pyproject.toml] diff --git a/3rdparty/cppzmq/LICENSE b/3rdparty/cppzmq/LICENSE new file mode 100644 index 
000000000..ae98bd859 --- /dev/null +++ b/3rdparty/cppzmq/LICENSE @@ -0,0 +1,17 @@ + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to + deal in the Software without restriction, including without limitation the + rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + IN THE SOFTWARE. diff --git a/3rdparty/cppzmq/README.md b/3rdparty/cppzmq/README.md new file mode 100644 index 000000000..e2bea0b63 --- /dev/null +++ b/3rdparty/cppzmq/README.md @@ -0,0 +1,196 @@ +[![CI](https://github.com/zeromq/cppzmq/actions/workflows/ci.yml/badge.svg)](https://github.com/zeromq/cppzmq/actions) +[![Coverage Status](https://coveralls.io/repos/github/zeromq/cppzmq/badge.svg?branch=master)](https://coveralls.io/github/zeromq/cppzmq?branch=master) +[![License](https://img.shields.io/github/license/zeromq/cppzmq.svg)](https://github.com/zeromq/cppzmq/blob/master/LICENSE) + +Introduction & Design Goals +=========================== + +cppzmq is a C++ binding for libzmq. It has the following design goals: + - cppzmq maps the libzmq C API to C++ concepts. 
In particular: + - it is type-safe (the libzmq C API exposes various class-like concepts as void*) + - it provides exception-based error handling (the libzmq C API provides errno-based error handling) + - it provides RAII-style classes that automate resource management (the libzmq C API requires the user to take care to free resources explicitly) + - cppzmq is a light-weight, header-only binding. You only need to include the header file zmq.hpp (and maybe zmq_addon.hpp) to use it. + - zmq.hpp is meant to contain direct mappings of the abstractions provided by the libzmq C API, while zmq_addon.hpp provides additional higher-level abstractions. + +There are other C++ bindings for ZeroMQ with different design goals. In particular, none of the following bindings are header-only: + - [zmqpp](https://github.com/zeromq/zmqpp) is a high-level binding to libzmq. + - [czmqpp](https://github.com/zeromq/czmqpp) is a binding based on the high-level czmq API. + - [fbzmq](https://github.com/facebook/fbzmq) is a binding that integrates with Apache Thrift and provides higher-level abstractions in addition. It requires C++14. + +Supported platforms +=================== + + - Only a subset of the platforms that are supported by libzmq itself are supported. Some features already require a compiler supporting C++11. In the future, probably all features will require C++11. To build and run the tests, CMake and Catch are required. + - Any libzmq 4.x version is expected to work. DRAFT features may only work for the most recent tested version. Currently explicitly tested libzmq versions are + - 4.2.0 (without DRAFT API) + - 4.3.4 (with and without DRAFT API) + - Platforms with full support (i.e. 
CI executing build and tests) + - Ubuntu 18.04 x64 (with gcc 4.8.5, 5.5.0, 7.5.0) + - Ubuntu 20.04 x64 (with gcc 9.3.0, 10.3.0 and clang 12) + - Visual Studio 2017 x64 + - Visual Studio 2019 x64 + - macOS 10.15 (with clang 12, without DRAFT API) + - Additional platforms that are known to work: + - We have no current reports on additional platforms that are known to work yet. Please add your platform here. If CI can be provided for them with a cloud-based CI service working with GitHub, you are invited to add CI, and make it possible to be included in the list above. + - Additional platforms that probably work: + - Any platform supported by libzmq that provides a sufficiently recent gcc (4.8.1 or newer) or clang (3.4.1 or newer) + - Visual Studio 2012+ x86/x64 + +Examples +======== +These examples require at least C++11. +```c++ +#include + +int main() +{ + zmq::context_t ctx; + zmq::socket_t sock(ctx, zmq::socket_type::push); + sock.bind("inproc://test"); + sock.send(zmq::str_buffer("Hello, world"), zmq::send_flags::dontwait); +} +``` +This a more complex example where we send and receive multi-part messages over TCP with a wildcard port. +```c++ +#include +#include + +int main() +{ + zmq::context_t ctx; + zmq::socket_t sock1(ctx, zmq::socket_type::push); + zmq::socket_t sock2(ctx, zmq::socket_type::pull); + sock1.bind("tcp://127.0.0.1:*"); + const std::string last_endpoint = + sock1.get(zmq::sockopt::last_endpoint); + std::cout << "Connecting to " + << last_endpoint << std::endl; + sock2.connect(last_endpoint); + + std::array send_msgs = { + zmq::str_buffer("foo"), + zmq::str_buffer("bar!") + }; + if (!zmq::send_multipart(sock1, send_msgs)) + return 1; + + std::vector recv_msgs; + const auto ret = zmq::recv_multipart( + sock2, std::back_inserter(recv_msgs)); + if (!ret) + return 1; + std::cout << "Got " << *ret + << " messages" << std::endl; + return 0; +} +``` + +See the `examples` directory for more examples. 
When the project is compiled with tests enabled, each example gets compiled to an executable. + + +API Overview +============ + +For an extensive overview of the `zmq.hpp` API in use, see this [Tour of CPPZMQ by @brettviren](https://brettviren.github.io/cppzmq-tour/index.html). + +Bindings for libzmq in `zmq.hpp`: + +Types: +* class `zmq::context_t` +* enum `zmq::ctxopt` +* class `zmq::socket_t` +* class `zmq::socket_ref` +* enum `zmq::socket_type` +* enum `zmq::sockopt` +* enum `zmq::send_flags` +* enum `zmq::recv_flags` +* class `zmq::message_t` +* class `zmq::const_buffer` +* class `zmq::mutable_buffer` +* struct `zmq::recv_buffer_size` +* alias `zmq::send_result_t` +* alias `zmq::recv_result_t` +* alias `zmq::recv_buffer_result_t` +* class `zmq::error_t` +* class `zmq::monitor_t` +* struct `zmq_event_t`, +* alias `zmq::free_fn`, +* alias `zmq::pollitem_t`, +* alias `zmq::fd_t` +* class `zmq::poller_t` DRAFT +* enum `zmq::event_flags` DRAFT +* enum `zmq::poller_event` DRAFT + +Functions: +* `zmq::version` +* `zmq::poll` +* `zmq::proxy` +* `zmq::proxy_steerable` +* `zmq::buffer` +* `zmq::str_buffer` + +Extra high-level types and functions `zmq_addon.hpp`: + +Types: +* class `zmq::multipart_t` +* class `zmq::active_poller_t` DRAFT + +Functions: +* `zmq::recv_multipart` +* `zmq::send_multipart` +* `zmq::send_multipart_n` +* `zmq::encode` +* `zmq::decode` + +Compatibility Guidelines +======================== + +The users of cppzmq are expected to follow the guidelines below to ensure not to break when upgrading cppzmq to newer versions (non-exhaustive list): + +* Do not depend on any macros defined in cppzmq unless explicitly declared public here. + +The following macros may be used by consumers of cppzmq: `CPPZMQ_VERSION`, `CPPZMQ_VERSION_MAJOR`, `CPPZMQ_VERSION_MINOR`, `CPPZMQ_VERSION_PATCH`. + +Contribution policy +=================== + +The contribution policy is at: http://rfc.zeromq.org/spec:22 + +Build instructions +================== + +Build steps: + +1. 
Build [libzmq](https://github.com/zeromq/libzmq) via cmake. This does an out of source build and installs the build files + - download and unzip the lib, cd to directory + - mkdir build + - cd build + - cmake .. + - sudo make -j4 install + +2. Build cppzmq via cmake. This does an out of source build and installs the build files + - download and unzip the lib, cd to directory + - mkdir build + - cd build + - cmake .. + - sudo make -j4 install + +3. Build cppzmq via [vcpkg](https://github.com/Microsoft/vcpkg/). This does an out of source build and installs the build files + - git clone https://github.com/Microsoft/vcpkg.git + - cd vcpkg + - ./bootstrap-vcpkg.sh # bootstrap-vcpkg.bat for Powershell + - ./vcpkg integrate install + - ./vcpkg install cppzmq + +Using this: + +A cmake find package scripts is provided for you to easily include this library. +Add these lines in your CMakeLists.txt to include the headers and library files of +cpp zmq (which will also include libzmq for you). + +``` +#find cppzmq wrapper, installed by make of cppzmq +find_package(cppzmq) +target_link_libraries(*Your Project Name* cppzmq) +``` diff --git a/3rdparty/cppzmq/zmq.hpp b/3rdparty/cppzmq/zmq.hpp index 979ac9a79..3fa484c6c 100644 --- a/3rdparty/cppzmq/zmq.hpp +++ b/3rdparty/cppzmq/zmq.hpp @@ -147,7 +147,7 @@ /* Version macros for compile-time API version detection */ #define CPPZMQ_VERSION_MAJOR 4 -#define CPPZMQ_VERSION_MINOR 9 +#define CPPZMQ_VERSION_MINOR 10 #define CPPZMQ_VERSION_PATCH 0 #define CPPZMQ_VERSION \ @@ -538,6 +538,11 @@ class message_t throw error_t(); memcpy(data(), data_, size_); } + + void rebuild(const std::string &str) + { + rebuild(str.data(), str.size()); + } void rebuild(void *data_, size_t size_, free_fn *ffn_, void *hint_ = ZMQ_NULLPTR) { @@ -1477,6 +1482,9 @@ ZMQ_DEFINE_INTEGRAL_BOOL_UNIT_OPT(ZMQ_CURVE_SERVER, curve_server, int); #ifdef ZMQ_CURVE_SERVERKEY ZMQ_DEFINE_ARRAY_OPT_BIN_OR_Z85(ZMQ_CURVE_SERVERKEY, curve_serverkey); #endif +#ifdef 
ZMQ_DISCONNECT_MSG +ZMQ_DEFINE_ARRAY_OPT_BINARY(ZMQ_DISCONNECT_MSG, disconnect_msg); +#endif #ifdef ZMQ_EVENTS ZMQ_DEFINE_INTEGRAL_OPT(ZMQ_EVENTS, events, int); #endif @@ -1517,6 +1525,9 @@ ZMQ_DEFINE_INTEGRAL_OPT(ZMQ_HEARTBEAT_TIMEOUT, heartbeat_timeout, int); #ifdef ZMQ_HEARTBEAT_TTL ZMQ_DEFINE_INTEGRAL_OPT(ZMQ_HEARTBEAT_TTL, heartbeat_ttl, int); #endif +#ifdef ZMQ_HELLO_MSG +ZMQ_DEFINE_ARRAY_OPT_BINARY(ZMQ_HELLO_MSG, hello_msg); +#endif #ifdef ZMQ_IMMEDIATE ZMQ_DEFINE_INTEGRAL_BOOL_UNIT_OPT(ZMQ_IMMEDIATE, immediate, int); #endif @@ -1562,6 +1573,9 @@ ZMQ_DEFINE_ARRAY_OPT(ZMQ_PLAIN_PASSWORD, plain_password); #ifdef ZMQ_PLAIN_USERNAME ZMQ_DEFINE_ARRAY_OPT(ZMQ_PLAIN_USERNAME, plain_username); #endif +#ifdef ZMQ_PRIORITY +ZMQ_DEFINE_INTEGRAL_OPT(ZMQ_PRIORITY, priority, int); +#endif #ifdef ZMQ_USE_FD ZMQ_DEFINE_INTEGRAL_OPT(ZMQ_USE_FD, use_fd, int); #endif @@ -1589,6 +1603,9 @@ ZMQ_DEFINE_INTEGRAL_OPT(ZMQ_RECONNECT_IVL, reconnect_ivl, int); #ifdef ZMQ_RECONNECT_IVL_MAX ZMQ_DEFINE_INTEGRAL_OPT(ZMQ_RECONNECT_IVL_MAX, reconnect_ivl_max, int); #endif +#ifdef ZMQ_RECONNECT_STOP +ZMQ_DEFINE_INTEGRAL_OPT(ZMQ_RECONNECT_STOP, reconnect_stop, int); +#endif #ifdef ZMQ_RECOVERY_IVL ZMQ_DEFINE_INTEGRAL_OPT(ZMQ_RECOVERY_IVL, recovery_ivl, int); #endif @@ -1619,9 +1636,15 @@ ZMQ_DEFINE_INTEGRAL_OPT(ZMQ_SNDHWM, sndhwm, int); #ifdef ZMQ_SNDTIMEO ZMQ_DEFINE_INTEGRAL_OPT(ZMQ_SNDTIMEO, sndtimeo, int); #endif +#ifdef ZMQ_SOCKS_PASSWORD +ZMQ_DEFINE_ARRAY_OPT(ZMQ_SOCKS_PASSWORD, socks_password); +#endif #ifdef ZMQ_SOCKS_PROXY ZMQ_DEFINE_ARRAY_OPT(ZMQ_SOCKS_PROXY, socks_proxy); #endif +#ifdef ZMQ_SOCKS_USERNAME +ZMQ_DEFINE_ARRAY_OPT(ZMQ_SOCKS_USERNAME, socks_username); +#endif #ifdef ZMQ_STREAM_NOTIFY ZMQ_DEFINE_INTEGRAL_BOOL_UNIT_OPT(ZMQ_STREAM_NOTIFY, stream_notify, int); #endif @@ -1679,6 +1702,9 @@ ZMQ_DEFINE_INTEGRAL_BOOL_UNIT_OPT(ZMQ_XPUB_VERBOSER, xpub_verboser, int); #ifdef ZMQ_XPUB_MANUAL ZMQ_DEFINE_INTEGRAL_BOOL_UNIT_OPT(ZMQ_XPUB_MANUAL, xpub_manual, int); #endif +#ifdef 
ZMQ_XPUB_MANUAL_LAST_VALUE +ZMQ_DEFINE_INTEGRAL_BOOL_UNIT_OPT(ZMQ_XPUB_MANUAL_LAST_VALUE, xpub_manual_last_value, int); +#endif #ifdef ZMQ_XPUB_NODROP ZMQ_DEFINE_INTEGRAL_BOOL_UNIT_OPT(ZMQ_XPUB_NODROP, xpub_nodrop, int); #endif @@ -2637,6 +2663,17 @@ template class poller_t add_impl(socket, events, nullptr); } + template< + typename Dummy = void, + typename = + typename std::enable_if::value, Dummy>::type> + void add(fd_t fd, event_flags events, T *user_data) + { + add_impl(fd, events, user_data); + } + + void add(fd_t fd, event_flags events) { add_impl(fd, events, nullptr); } + void remove(zmq::socket_ref socket) { if (0 != zmq_poller_remove(poller_ptr.get(), socket.handle())) { @@ -2703,6 +2740,15 @@ template class poller_t throw error_t(); } } + + void add_impl(fd_t fd, event_flags events, T *user_data) + { + if (0 + != zmq_poller_add_fd(poller_ptr.get(), fd, user_data, + static_cast(events))) { + throw error_t(); + } + } }; #endif // defined(ZMQ_BUILD_DRAFT_API) && defined(ZMQ_CPP11) && defined(ZMQ_HAVE_POLLER) diff --git a/3rdparty/cppzmq/zmq_addon.hpp b/3rdparty/cppzmq/zmq_addon.hpp index 147abe145..958eec56d 100644 --- a/3rdparty/cppzmq/zmq_addon.hpp +++ b/3rdparty/cppzmq/zmq_addon.hpp @@ -77,7 +77,7 @@ inline bool is_little_endian() inline void write_network_order(unsigned char *buf, const uint32_t value) { if (is_little_endian()) { - ZMQ_CONSTEXPR_VAR uint32_t mask = std::numeric_limits::max(); + ZMQ_CONSTEXPR_VAR uint32_t mask = (std::numeric_limits::max)(); *buf++ = static_cast((value >> 24) & mask); *buf++ = static_cast((value >> 16) & mask); *buf++ = static_cast((value >> 8) & mask); @@ -224,12 +224,12 @@ message_t encode(const Range &parts) // First pass check sizes for (const auto &part : parts) { const size_t part_size = part.size(); - if (part_size > std::numeric_limits::max()) { + if (part_size > (std::numeric_limits::max)()) { // Size value must fit into uint32_t. 
throw std::range_error("Invalid size, message part too large"); } const size_t count_size = - part_size < std::numeric_limits::max() ? 1 : 5; + part_size < (std::numeric_limits::max)() ? 1 : 5; mmsg_size += part_size + count_size; } @@ -240,12 +240,12 @@ message_t encode(const Range &parts) const unsigned char *part_data = static_cast(part.data()); - if (part_size < std::numeric_limits::max()) { + if (part_size < (std::numeric_limits::max)()) { // small part *buf++ = (unsigned char) part_size; } else { // big part - *buf++ = std::numeric_limits::max(); + *buf++ = (std::numeric_limits::max)(); detail::write_network_order(buf, part_size); buf += sizeof(part_size); } @@ -279,7 +279,7 @@ template OutputIt decode(const message_t &encoded, OutputIt out) while (source < limit) { size_t part_size = *source++; - if (part_size == std::numeric_limits::max()) { + if (part_size == (std::numeric_limits::max)()) { if (static_cast(limit - source) < sizeof(uint32_t)) { throw std::out_of_range( "Malformed encoding, overflow in reading size"); @@ -343,10 +343,10 @@ class multipart_t multipart_t(message_t &&message) { add(std::move(message)); } // Move constructor - multipart_t(multipart_t &&other) { m_parts = std::move(other.m_parts); } + multipart_t(multipart_t &&other) ZMQ_NOTHROW { m_parts = std::move(other.m_parts); } // Move assignment operator - multipart_t &operator=(multipart_t &&other) + multipart_t &operator=(multipart_t &&other) ZMQ_NOTHROW { m_parts = std::move(other.m_parts); return *this; diff --git a/include/behaviortree_cpp/flatbuffers/base.h b/3rdparty/flatbuffers/base.h similarity index 76% rename from include/behaviortree_cpp/flatbuffers/base.h rename to 3rdparty/flatbuffers/base.h index 54a51aacb..1c19dde98 100644 --- a/include/behaviortree_cpp/flatbuffers/base.h +++ b/3rdparty/flatbuffers/base.h @@ -32,7 +32,7 @@ #include #include -#if defined(ARDUINO) && !defined(ARDUINOSTL_M_H) +#if defined(ARDUINO) && !defined(ARDUINOSTL_M_H) && defined(__AVR__) #include #else 
#include @@ -43,6 +43,7 @@ #include #include #include +#include #include #include @@ -50,10 +51,6 @@ #include #endif -#ifdef _STLPORT_VERSION - #define FLATBUFFERS_CPP98_STL -#endif - #ifdef __ANDROID__ #include #endif @@ -142,9 +139,9 @@ #endif #endif // !defined(FLATBUFFERS_LITTLEENDIAN) -#define FLATBUFFERS_VERSION_MAJOR 1 -#define FLATBUFFERS_VERSION_MINOR 12 -#define FLATBUFFERS_VERSION_REVISION 0 +#define FLATBUFFERS_VERSION_MAJOR 24 +#define FLATBUFFERS_VERSION_MINOR 3 +#define FLATBUFFERS_VERSION_REVISION 25 #define FLATBUFFERS_STRING_EXPAND(X) #X #define FLATBUFFERS_STRING(X) FLATBUFFERS_STRING_EXPAND(X) namespace flatbuffers { @@ -158,7 +155,7 @@ namespace flatbuffers { #define FLATBUFFERS_FINAL_CLASS final #define FLATBUFFERS_OVERRIDE override #define FLATBUFFERS_EXPLICIT_CPP11 explicit - #define FLATBUFFERS_VTABLE_UNDERLYING_TYPE : flatbuffers::voffset_t + #define FLATBUFFERS_VTABLE_UNDERLYING_TYPE : ::flatbuffers::voffset_t #else #define FLATBUFFERS_FINAL_CLASS #define FLATBUFFERS_OVERRIDE @@ -237,16 +234,26 @@ namespace flatbuffers { } #define FLATBUFFERS_HAS_STRING_VIEW 1 // Check for absl::string_view - #elif __has_include("absl/strings/string_view.h") - #include "absl/strings/string_view.h" - namespace flatbuffers { - typedef absl::string_view string_view; - } - #define FLATBUFFERS_HAS_STRING_VIEW 1 + #elif __has_include("absl/strings/string_view.h") && \ + __has_include("absl/base/config.h") && \ + (__cplusplus >= 201411) + #include "absl/base/config.h" + #if !defined(ABSL_USES_STD_STRING_VIEW) + #include "absl/strings/string_view.h" + namespace flatbuffers { + typedef absl::string_view string_view; + } + #define FLATBUFFERS_HAS_STRING_VIEW 1 + #endif #endif #endif // __has_include #endif // !FLATBUFFERS_HAS_STRING_VIEW +#ifndef FLATBUFFERS_GENERAL_HEAP_ALLOC_OK + // Allow heap allocations to be used + #define FLATBUFFERS_GENERAL_HEAP_ALLOC_OK 1 +#endif // !FLATBUFFERS_GENERAL_HEAP_ALLOC_OK + #ifndef FLATBUFFERS_HAS_NEW_STRTOD // Modern (C++11) 
strtod and strtof functions are available for use. // 1) nan/inf strings as argument of strtod; @@ -259,9 +266,12 @@ namespace flatbuffers { #endif // !FLATBUFFERS_HAS_NEW_STRTOD #ifndef FLATBUFFERS_LOCALE_INDEPENDENT - // Enable locale independent functions {strtof_l, strtod_l,strtoll_l, strtoull_l}. - #if ((defined(_MSC_VER) && _MSC_VER >= 1800) || \ - (defined(_XOPEN_VERSION) && (_XOPEN_VERSION>=700)) && (!defined(__ANDROID_API__) || (defined(__ANDROID_API__) && (__ANDROID_API__>=21)))) + // Enable locale independent functions {strtof_l, strtod_l,strtoll_l, + // strtoull_l}. + #if (defined(_MSC_VER) && _MSC_VER >= 1800) || \ + (defined(__ANDROID_API__) && __ANDROID_API__>= 21) || \ + (defined(_XOPEN_VERSION) && (_XOPEN_VERSION >= 700)) && \ + (!defined(__Fuchsia__) && !defined(__ANDROID_API__)) #define FLATBUFFERS_LOCALE_INDEPENDENT 1 #else #define FLATBUFFERS_LOCALE_INDEPENDENT 0 @@ -269,27 +279,29 @@ namespace flatbuffers { #endif // !FLATBUFFERS_LOCALE_INDEPENDENT // Suppress Undefined Behavior Sanitizer (recoverable only). Usage: -// - __supress_ubsan__("undefined") -// - __supress_ubsan__("signed-integer-overflow") +// - FLATBUFFERS_SUPPRESS_UBSAN("undefined") +// - FLATBUFFERS_SUPPRESS_UBSAN("signed-integer-overflow") #if defined(__clang__) && (__clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >=7)) - #define __supress_ubsan__(type) __attribute__((no_sanitize(type))) + #define FLATBUFFERS_SUPPRESS_UBSAN(type) __attribute__((no_sanitize(type))) #elif defined(__GNUC__) && (__GNUC__ * 100 + __GNUC_MINOR__ >= 409) - #define __supress_ubsan__(type) __attribute__((no_sanitize_undefined)) + #define FLATBUFFERS_SUPPRESS_UBSAN(type) __attribute__((no_sanitize_undefined)) #else - #define __supress_ubsan__(type) + #define FLATBUFFERS_SUPPRESS_UBSAN(type) #endif -// This is constexpr function used for checking compile-time constants. -// Avoid `#pragma warning(disable: 4127) // C4127: expression is constant`. 
-template FLATBUFFERS_CONSTEXPR inline bool IsConstTrue(T t) { - return !!t; +namespace flatbuffers { + // This is constexpr function used for checking compile-time constants. + // Avoid `#pragma warning(disable: 4127) // C4127: expression is constant`. + template FLATBUFFERS_CONSTEXPR inline bool IsConstTrue(T t) { + return !!t; + } } // Enable C++ attribute [[]] if std:c++17 or higher. #if ((__cplusplus >= 201703L) \ || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201703L))) // All attributes unknown to an implementation are ignored without causing an error. - #define FLATBUFFERS_ATTRIBUTE(attr) [[attr]] + #define FLATBUFFERS_ATTRIBUTE(attr) attr #define FLATBUFFERS_FALLTHROUGH() [[fallthrough]] #else @@ -314,9 +326,11 @@ namespace flatbuffers { // Also, using a consistent offset type maintains compatibility of serialized // offset values between 32bit and 64bit systems. typedef uint32_t uoffset_t; +typedef uint64_t uoffset64_t; // Signed offsets for references that can go in both directions. typedef int32_t soffset_t; +typedef int64_t soffset64_t; // Offset/index used in v-tables, can be changed to uint8_t in // format forks to save a bit of space if desired. @@ -325,10 +339,23 @@ typedef uint16_t voffset_t; typedef uintmax_t largest_scalar_t; // In 32bits, this evaluates to 2GB - 1 -#define FLATBUFFERS_MAX_BUFFER_SIZE ((1ULL << (sizeof(::flatbuffers::soffset_t) * 8 - 1)) - 1) +#define FLATBUFFERS_MAX_BUFFER_SIZE std::numeric_limits<::flatbuffers::soffset_t>::max() +#define FLATBUFFERS_MAX_64_BUFFER_SIZE std::numeric_limits<::flatbuffers::soffset64_t>::max() + +// The minimum size buffer that can be a valid flatbuffer. +// Includes the offset to the root table (uoffset_t), the offset to the vtable +// of the root table (soffset_t), the size of the vtable (uint16_t), and the +// size of the referring table (uint16_t). 
+#define FLATBUFFERS_MIN_BUFFER_SIZE sizeof(uoffset_t) + sizeof(soffset_t) + \ + sizeof(uint16_t) + sizeof(uint16_t) // We support aligning the contents of buffers up to this size. -#define FLATBUFFERS_MAX_ALIGNMENT 16 +#ifndef FLATBUFFERS_MAX_ALIGNMENT + #define FLATBUFFERS_MAX_ALIGNMENT 32 +#endif + +/// @brief The length of a FlatBuffer file header. +static const size_t kFileIdentifierLength = 4; inline bool VerifyAlignmentRequirements(size_t align, size_t min_align = 1) { return (min_align <= align) && (align <= (FLATBUFFERS_MAX_ALIGNMENT)) && @@ -336,7 +363,6 @@ inline bool VerifyAlignmentRequirements(size_t align, size_t min_align = 1) { } #if defined(_MSC_VER) - #pragma warning(disable: 4351) // C4351: new behavior: elements of array ... will be default initialized #pragma warning(push) #pragma warning(disable: 4127) // C4127: conditional expression is constant #endif @@ -397,7 +423,7 @@ template T EndianScalar(T t) { template // UBSAN: C++ aliasing type rules, see std::bit_cast<> for details. -__supress_ubsan__("alignment") +FLATBUFFERS_SUPPRESS_UBSAN("alignment") T ReadScalar(const void *p) { return EndianScalar(*reinterpret_cast(p)); } @@ -411,13 +437,13 @@ T ReadScalar(const void *p) { template // UBSAN: C++ aliasing type rules, see std::bit_cast<> for details. -__supress_ubsan__("alignment") +FLATBUFFERS_SUPPRESS_UBSAN("alignment") void WriteScalar(void *p, T t) { *reinterpret_cast(p) = EndianScalar(t); } template struct Offset; -template __supress_ubsan__("alignment") void WriteScalar(void *p, Offset t) { +template FLATBUFFERS_SUPPRESS_UBSAN("alignment") void WriteScalar(void *p, Offset t) { *reinterpret_cast(p) = EndianScalar(t.o); } @@ -428,10 +454,43 @@ template __supress_ubsan__("alignment") void WriteScalar(void *p, Of // Computes how many bytes you'd have to pad to be able to write an // "scalar_size" scalar if the buffer had grown to "buf_size" (downwards in // memory). 
-__supress_ubsan__("unsigned-integer-overflow") +FLATBUFFERS_SUPPRESS_UBSAN("unsigned-integer-overflow") inline size_t PaddingBytes(size_t buf_size, size_t scalar_size) { return ((~buf_size) + 1) & (scalar_size - 1); } +// Generic 'operator==' with conditional specialisations. +// T e - new value of a scalar field. +// T def - default of scalar (is known at compile-time). +template inline bool IsTheSameAs(T e, T def) { return e == def; } + +#if defined(FLATBUFFERS_NAN_DEFAULTS) && \ + defined(FLATBUFFERS_HAS_NEW_STRTOD) && (FLATBUFFERS_HAS_NEW_STRTOD > 0) +// Like `operator==(e, def)` with weak NaN if T=(float|double). +template inline bool IsFloatTheSameAs(T e, T def) { + return (e == def) || ((def != def) && (e != e)); +} +template<> inline bool IsTheSameAs(float e, float def) { + return IsFloatTheSameAs(e, def); +} +template<> inline bool IsTheSameAs(double e, double def) { + return IsFloatTheSameAs(e, def); +} +#endif + +// Check 'v' is out of closed range [low; high]. +// Workaround for GCC warning [-Werror=type-limits]: +// comparison is always true due to limited range of data type. +template +inline bool IsOutRange(const T &v, const T &low, const T &high) { + return (v < low) || (high < v); +} + +// Check 'v' is in closed range [low; high]. 
+template +inline bool IsInRange(const T &v, const T &low, const T &high) { + return !IsOutRange(v, low, high); +} + } // namespace flatbuffers #endif // FLATBUFFERS_BASE_H_ diff --git a/3rdparty/lexy/CMakeLists.txt b/3rdparty/lexy/CMakeLists.txt index 1d8cf7964..a76693a9e 100644 --- a/3rdparty/lexy/CMakeLists.txt +++ b/3rdparty/lexy/CMakeLists.txt @@ -1,4 +1,4 @@ -# Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +# Copyright (C) 2020-2024 Jonathan Müller and lexy contributors # SPDX-License-Identifier: BSL-1.0 cmake_minimum_required(VERSION 3.8) @@ -9,6 +9,42 @@ option(LEXY_FORCE_CPP17 "Whether or not lexy should use C++17 even if compil add_subdirectory(src) +option(LEXY_ENABLE_INSTALL "whether or not to enable the install rule" ON) +if(LEXY_ENABLE_INSTALL) + include(CMakePackageConfigHelpers) + include(GNUInstallDirs) + + install(TARGETS lexy lexy_core lexy_file lexy_unicode lexy_ext _lexy_base lexy_dev + EXPORT ${PROJECT_NAME}Targets + RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} + LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} + ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}) + + install(EXPORT ${PROJECT_NAME}Targets + NAMESPACE foonathan:: + DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}") + + configure_package_config_file( + cmake/lexyConfig.cmake.in + "${PROJECT_BINARY_DIR}/${PROJECT_NAME}Config.cmake" + INSTALL_DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}") + install(FILES "${PROJECT_BINARY_DIR}/${PROJECT_NAME}Config.cmake" + DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}") + + # YYYY.MM.N1 is compatible with YYYY.MM.N2. 
+ write_basic_package_version_file( + "${PROJECT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake" + COMPATIBILITY SameMinorVersion) + + install(FILES "${PROJECT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake" + DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}") + + install(DIRECTORY include/lexy include/lexy_ext + DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} + FILES_MATCHING + PATTERN "*.hpp") +endif() + if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR) cmake_minimum_required(VERSION 3.18) option(LEXY_BUILD_BENCHMARKS "whether or not benchmarks should be built" OFF) @@ -16,7 +52,6 @@ if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR) option(LEXY_BUILD_TESTS "whether or not tests should be built" ON) option(LEXY_BUILD_DOCS "whether or not docs should be built" OFF) option(LEXY_BUILD_PACKAGE "whether or not the package should be built" ON) - option(LEXY_ENABLE_INSTALL "whether or not to enable the install rule" ON) if(LEXY_BUILD_PACKAGE) set(package_files include/ src/ cmake/ CMakeLists.txt LICENSE) @@ -41,39 +76,4 @@ if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_SOURCE_DIR) if(LEXY_BUILD_DOCS) add_subdirectory(docs EXCLUDE_FROM_ALL) endif() - - if(LEXY_ENABLE_INSTALL) - include(CMakePackageConfigHelpers) - include(GNUInstallDirs) - - install(TARGETS lexy lexy_core lexy_file lexy_unicode lexy_ext _lexy_base lexy_dev - EXPORT ${PROJECT_NAME}Targets - RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} - LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} - ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}) - - install(EXPORT ${PROJECT_NAME}Targets - NAMESPACE foonathan:: - DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}") - - configure_package_config_file( - cmake/lexyConfig.cmake.in - "${PROJECT_BINARY_DIR}/${PROJECT_NAME}Config.cmake" - INSTALL_DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}") - install(FILES "${PROJECT_BINARY_DIR}/${PROJECT_NAME}Config.cmake" - DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}") - - # YYYY.MM.N1 is compatible 
with YYYY.MM.N2. - write_basic_package_version_file( - "${PROJECT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake" - COMPATIBILITY SameMinorVersion) - - install(FILES "${PROJECT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake" - DESTINATION "${CMAKE_INSTALL_LIBDIR}/cmake/${PROJECT_NAME}") - - install(DIRECTORY include/lexy include/lexy_ext - DESTINATION ${CMAKE_INSTALL_INCLUDEDIR} - FILES_MATCHING - PATTERN "*.hpp") - endif() endif() diff --git a/3rdparty/lexy/README.adoc b/3rdparty/lexy/README.adoc index 069ab5216..6bc88487f 100644 --- a/3rdparty/lexy/README.adoc +++ b/3rdparty/lexy/README.adoc @@ -113,8 +113,9 @@ Why should I use lexy over XYZ?:: http://boost-spirit.com/home/[Boost.Spirit]::: The main difference: it is not a Boost library. - Otherwise, it is just a different implementation with a different flavor. - Use lexy if you like lexy more. + In addition, Boost.Spirit is quite old and doesn't support e.g. non-common ranges as input. + Boost.Spirit also eagerly creates attributes from the rules, which can lead to nested tuples/variants while lexy uses callbacks which enables zero-copy parsing directly into your own data structure. + However, lexy's grammar is more verbose and designed to parser bigger grammars instead of the small one-off rules that Boost.Spirit is good at. https://github.com/taocpp/PEGTL[PEGTL]::: PEGTL is very similar and was a big inspiration. The biggest difference is that lexy uses an operator based DSL instead of inheriting from templated classes as PEGTL does; diff --git a/3rdparty/lexy/cmake/lexyConfig.cmake.in b/3rdparty/lexy/cmake/lexyConfig.cmake.in index a10ef13fc..e6dc89d30 100644 --- a/3rdparty/lexy/cmake/lexyConfig.cmake.in +++ b/3rdparty/lexy/cmake/lexyConfig.cmake.in @@ -1,4 +1,4 @@ -# Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +# Copyright (C) 2020-2024 Jonathan Müller and lexy contributors # SPDX-License-Identifier: BSL-1.0 # lexy CMake configuration file. 
diff --git a/3rdparty/lexy/include/lexy/_detail/any_ref.hpp b/3rdparty/lexy/include/lexy/_detail/any_ref.hpp index 11e9c0d31..9eca714b2 100644 --- a/3rdparty/lexy/include/lexy/_detail/any_ref.hpp +++ b/3rdparty/lexy/include/lexy/_detail/any_ref.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_ANY_REF_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/_detail/assert.hpp b/3rdparty/lexy/include/lexy/_detail/assert.hpp index 01ccbf6ce..52aa115de 100644 --- a/3rdparty/lexy/include/lexy/_detail/assert.hpp +++ b/3rdparty/lexy/include/lexy/_detail/assert.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_ASSERT_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/_detail/buffer_builder.hpp b/3rdparty/lexy/include/lexy/_detail/buffer_builder.hpp index ede3bd324..94ba1fd27 100644 --- a/3rdparty/lexy/include/lexy/_detail/buffer_builder.hpp +++ b/3rdparty/lexy/include/lexy/_detail/buffer_builder.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_BUFFER_BUILDER_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/_detail/code_point.hpp b/3rdparty/lexy/include/lexy/_detail/code_point.hpp index 969fba5d3..bc805b11e 100644 --- a/3rdparty/lexy/include/lexy/_detail/code_point.hpp +++ b/3rdparty/lexy/include/lexy/_detail/code_point.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_CODE_POINT_HPP_INCLUDED @@ -133,9 +133,9 @@ enum class cp_error template struct 
cp_result { - char32_t cp; - cp_error error; - typename Reader::iterator end; + char32_t cp; + cp_error error; + typename Reader::marker end; }; template @@ -144,16 +144,16 @@ constexpr cp_result parse_code_point(Reader reader) if constexpr (std::is_same_v) { if (reader.peek() == Reader::encoding::eof()) - return {{}, cp_error::eof, reader.position()}; + return {{}, cp_error::eof, reader.current()}; auto cur = reader.peek(); reader.bump(); auto cp = static_cast(cur); if (cp <= 0x7F) - return {cp, cp_error::success, reader.position()}; + return {cp, cp_error::success, reader.current()}; else - return {cp, cp_error::out_of_range, reader.position()}; + return {cp, cp_error::out_of_range, reader.current()}; } else if constexpr (std::is_same_v // || std::is_same_v) @@ -176,11 +176,11 @@ constexpr cp_result parse_code_point(Reader reader) { // ASCII character. reader.bump(); - return {first, cp_error::success, reader.position()}; + return {first, cp_error::success, reader.current()}; } else if ((first & ~payload_cont) == pattern_cont) { - return {{}, cp_error::leads_with_trailing, reader.position()}; + return {{}, cp_error::leads_with_trailing, reader.current()}; } else if ((first & ~payload_lead2) == pattern_lead2) { @@ -188,7 +188,7 @@ constexpr cp_result parse_code_point(Reader reader) auto second = uchar_t(reader.peek()); if ((second & ~payload_cont) != pattern_cont) - return {{}, cp_error::missing_trailing, reader.position()}; + return {{}, cp_error::missing_trailing, reader.current()}; reader.bump(); auto result = char32_t(first & payload_lead2); @@ -197,9 +197,9 @@ constexpr cp_result parse_code_point(Reader reader) // C0 and C1 are overlong ASCII. 
if (first == 0xC0 || first == 0xC1) - return {result, cp_error::overlong_sequence, reader.position()}; + return {result, cp_error::overlong_sequence, reader.current()}; else - return {result, cp_error::success, reader.position()}; + return {result, cp_error::success, reader.current()}; } else if ((first & ~payload_lead3) == pattern_lead3) { @@ -207,12 +207,12 @@ constexpr cp_result parse_code_point(Reader reader) auto second = uchar_t(reader.peek()); if ((second & ~payload_cont) != pattern_cont) - return {{}, cp_error::missing_trailing, reader.position()}; + return {{}, cp_error::missing_trailing, reader.current()}; reader.bump(); auto third = uchar_t(reader.peek()); if ((third & ~payload_cont) != pattern_cont) - return {{}, cp_error::missing_trailing, reader.position()}; + return {{}, cp_error::missing_trailing, reader.current()}; reader.bump(); auto result = char32_t(first & payload_lead3); @@ -223,11 +223,11 @@ constexpr cp_result parse_code_point(Reader reader) auto cp = result; if (0xD800 <= cp && cp <= 0xDFFF) - return {cp, cp_error::surrogate, reader.position()}; + return {cp, cp_error::surrogate, reader.current()}; else if (first == 0xE0 && second < 0xA0) - return {cp, cp_error::overlong_sequence, reader.position()}; + return {cp, cp_error::overlong_sequence, reader.current()}; else - return {cp, cp_error::success, reader.position()}; + return {cp, cp_error::success, reader.current()}; } else if ((first & ~payload_lead4) == pattern_lead4) { @@ -235,17 +235,17 @@ constexpr cp_result parse_code_point(Reader reader) auto second = uchar_t(reader.peek()); if ((second & ~payload_cont) != pattern_cont) - return {{}, cp_error::missing_trailing, reader.position()}; + return {{}, cp_error::missing_trailing, reader.current()}; reader.bump(); auto third = uchar_t(reader.peek()); if ((third & ~payload_cont) != pattern_cont) - return {{}, cp_error::missing_trailing, reader.position()}; + return {{}, cp_error::missing_trailing, reader.current()}; reader.bump(); auto 
fourth = uchar_t(reader.peek()); if ((fourth & ~payload_cont) != pattern_cont) - return {{}, cp_error::missing_trailing, reader.position()}; + return {{}, cp_error::missing_trailing, reader.current()}; reader.bump(); auto result = char32_t(first & payload_lead4); @@ -258,15 +258,15 @@ constexpr cp_result parse_code_point(Reader reader) auto cp = result; if (cp > 0x10'FFFF) - return {cp, cp_error::out_of_range, reader.position()}; + return {cp, cp_error::out_of_range, reader.current()}; else if (first == 0xF0 && second < 0x90) - return {cp, cp_error::overlong_sequence, reader.position()}; + return {cp, cp_error::overlong_sequence, reader.current()}; else - return {cp, cp_error::success, reader.position()}; + return {cp, cp_error::success, reader.current()}; } else // FE or FF { - return {{}, cp_error::eof, reader.position()}; + return {{}, cp_error::eof, reader.current()}; } } else if constexpr (std::is_same_v) @@ -278,18 +278,18 @@ constexpr cp_result parse_code_point(Reader reader) constexpr auto pattern2 = 0b110111 << 10; if (reader.peek() == Reader::encoding::eof()) - return {{}, cp_error::eof, reader.position()}; + return {{}, cp_error::eof, reader.current()}; auto first = char16_t(reader.peek()); if ((first & ~payload1) == pattern1) { reader.bump(); if (reader.peek() == Reader::encoding::eof()) - return {{}, cp_error::missing_trailing, reader.position()}; + return {{}, cp_error::missing_trailing, reader.current()}; auto second = char16_t(reader.peek()); if ((second & ~payload2) != pattern2) - return {{}, cp_error::missing_trailing, reader.position()}; + return {{}, cp_error::missing_trailing, reader.current()}; reader.bump(); // We've got a valid code point. 
@@ -297,34 +297,34 @@ constexpr cp_result parse_code_point(Reader reader) result <<= 10; result |= char32_t(second & payload2); result |= 0x10000; - return {result, cp_error::success, reader.position()}; + return {result, cp_error::success, reader.current()}; } else if ((first & ~payload2) == pattern2) { - return {{}, cp_error::leads_with_trailing, reader.position()}; + return {{}, cp_error::leads_with_trailing, reader.current()}; } else { // Single code unit code point; always valid. reader.bump(); - return {first, cp_error::success, reader.position()}; + return {first, cp_error::success, reader.current()}; } } else if constexpr (std::is_same_v) { if (reader.peek() == Reader::encoding::eof()) - return {{}, cp_error::eof, reader.position()}; + return {{}, cp_error::eof, reader.current()}; auto cur = reader.peek(); reader.bump(); auto cp = cur; if (cp > 0x10'FFFF) - return {cp, cp_error::out_of_range, reader.position()}; + return {cp, cp_error::out_of_range, reader.current()}; else if (0xD800 <= cp && cp <= 0xDFFF) - return {cp, cp_error::surrogate, reader.position()}; + return {cp, cp_error::surrogate, reader.current()}; else - return {cp, cp_error::success, reader.position()}; + return {cp, cp_error::success, reader.current()}; } else { @@ -341,7 +341,7 @@ constexpr void recover_code_point(Reader& reader, cp_result result) { case cp_error::success: // Consume the entire code point. - reader.set_position(result.end); + reader.reset(result.end); break; case cp_error::eof: // We don't need to do anything to "recover" from EOF. @@ -349,7 +349,7 @@ constexpr void recover_code_point(Reader& reader, cp_result result) case cp_error::leads_with_trailing: // Invalid code unit, consume to recover. 
- LEXY_PRECONDITION(result.end == reader.position()); + LEXY_PRECONDITION(result.end.position() == reader.position()); reader.bump(); break; @@ -358,7 +358,7 @@ constexpr void recover_code_point(Reader& reader, cp_result result) case cp_error::out_of_range: case cp_error::overlong_sequence: // Consume all the invalid code units to recover. - reader.set_position(result.end); + reader.reset(result.end); break; } } diff --git a/3rdparty/lexy/include/lexy/_detail/config.hpp b/3rdparty/lexy/include/lexy/_detail/config.hpp index 0b57dfc5a..4aa40135b 100644 --- a/3rdparty/lexy/include/lexy/_detail/config.hpp +++ b/3rdparty/lexy/include/lexy/_detail/config.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_CONFIG_HPP_INCLUDED @@ -17,6 +17,14 @@ # endif #endif +#ifndef LEXY_HAS_UNICODE_DATABASE +# define LEXY_HAS_UNICODE_DATABASE 0 +#endif + +#ifndef LEXY_EXPERIMENTAL +# define LEXY_EXPERIMENTAL 0 +#endif + //=== utility traits===// #define LEXY_MOV(...) static_cast&&>(__VA_ARGS__) #define LEXY_FWD(...) 
static_cast(__VA_ARGS__) @@ -90,6 +98,21 @@ using type_or = std::conditional_t, Fallback, T>; # define LEXY_CONSTEVAL constexpr #endif +//=== constexpr ===// +#ifndef LEXY_HAS_CONSTEXPR_DTOR +# if __cpp_constexpr_dynamic_alloc +# define LEXY_HAS_CONSTEXPR_DTOR 1 +# else +# define LEXY_HAS_CONSTEXPR_DTOR 0 +# endif +#endif + +#if LEXY_HAS_CONSTEXPR_DTOR +# define LEXY_CONSTEXPR_DTOR constexpr +#else +# define LEXY_CONSTEXPR_DTOR +#endif + //=== char8_t ===// #ifndef LEXY_HAS_CHAR8_T # if __cpp_char8_t diff --git a/3rdparty/lexy/include/lexy/_detail/detect.hpp b/3rdparty/lexy/include/lexy/_detail/detect.hpp index 8174805c4..7534c44c4 100644 --- a/3rdparty/lexy/include/lexy/_detail/detect.hpp +++ b/3rdparty/lexy/include/lexy/_detail/detect.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_DETECT_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/_detail/integer_sequence.hpp b/3rdparty/lexy/include/lexy/_detail/integer_sequence.hpp index bc8cba6e1..36e3cb08c 100644 --- a/3rdparty/lexy/include/lexy/_detail/integer_sequence.hpp +++ b/3rdparty/lexy/include/lexy/_detail/integer_sequence.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_INTEGER_SEQUENCE_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/_detail/invoke.hpp b/3rdparty/lexy/include/lexy/_detail/invoke.hpp index bc683c9bc..8604e582e 100644 --- a/3rdparty/lexy/include/lexy/_detail/invoke.hpp +++ b/3rdparty/lexy/include/lexy/_detail/invoke.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_INVOKE_HPP_INCLUDED diff --git 
a/3rdparty/lexy/include/lexy/_detail/iterator.hpp b/3rdparty/lexy/include/lexy/_detail/iterator.hpp index 9d3f46dd9..42ec995a4 100644 --- a/3rdparty/lexy/include/lexy/_detail/iterator.hpp +++ b/3rdparty/lexy/include/lexy/_detail/iterator.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_ITERATOR_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/_detail/lazy_init.hpp b/3rdparty/lexy/include/lexy/_detail/lazy_init.hpp index 0a08ebcbb..29fcfa308 100644 --- a/3rdparty/lexy/include/lexy/_detail/lazy_init.hpp +++ b/3rdparty/lexy/include/lexy/_detail/lazy_init.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_LAZY_INIT_HPP_INCLUDED @@ -6,6 +6,7 @@ #include #include +#include namespace lexy::_detail { @@ -25,6 +26,12 @@ struct _lazy_init_storage_trivial constexpr _lazy_init_storage_trivial(int, Args&&... args) : _init(true), _value(LEXY_FWD(args)...) {} + + template + constexpr void _construct(Args&&... args) + { + *this = _lazy_init_storage_trivial(0, LEXY_FWD(args)...); + } }; template @@ -40,24 +47,29 @@ struct _lazy_init_storage_non_trivial constexpr _lazy_init_storage_non_trivial() noexcept : _init(false), _empty() {} template - constexpr _lazy_init_storage_non_trivial(int, Args&&... args) - : _init(true), _value(LEXY_FWD(args)...) - {} + LEXY_CONSTEXPR_DTOR void _construct(Args&&... args) + { + _detail::construct_at(&_value, LEXY_FWD(args)...); + _init = true; + } - ~_lazy_init_storage_non_trivial() noexcept + // Cannot add noexcept due to https://github.com/llvm/llvm-project/issues/59854. 
+ LEXY_CONSTEXPR_DTOR ~_lazy_init_storage_non_trivial() /* noexcept */ { if (_init) _value.~T(); } - _lazy_init_storage_non_trivial(_lazy_init_storage_non_trivial&& other) noexcept + LEXY_CONSTEXPR_DTOR _lazy_init_storage_non_trivial( + _lazy_init_storage_non_trivial&& other) noexcept : _init(other._init), _empty() { if (_init) - ::new (static_cast(&_value)) T(LEXY_MOV(other._value)); + _detail::construct_at(&_value, LEXY_MOV(other._value)); } - _lazy_init_storage_non_trivial& operator=(_lazy_init_storage_non_trivial&& other) noexcept + LEXY_CONSTEXPR_DTOR _lazy_init_storage_non_trivial& operator=( + _lazy_init_storage_non_trivial&& other) noexcept { if (_init && other._init) _value = LEXY_MOV(other._value); @@ -68,7 +80,7 @@ struct _lazy_init_storage_non_trivial } else if (!_init && other._init) { - ::new (static_cast(&_value)) T(LEXY_MOV(other._value)); + _detail::construct_at(&_value, LEXY_MOV(other._value)); _init = true; } else @@ -104,9 +116,11 @@ class lazy_init : _lazy_init_storage template constexpr T& emplace(Args&&... 
args) { - LEXY_PRECONDITION(!*this); + if (*this) + this->_value = T(LEXY_FWD(args)...); + else + this->_construct(LEXY_FWD(args)...); - *this = lazy_init(0, LEXY_FWD(args)...); return this->_value; } @@ -169,7 +183,6 @@ class lazy_init constexpr T& emplace(T& ref) { - LEXY_PRECONDITION(!*this); _ptr = &ref; return ref; } @@ -210,7 +223,6 @@ class lazy_init constexpr void emplace() { - LEXY_PRECONDITION(!*this); _init = true; } template diff --git a/3rdparty/lexy/include/lexy/_detail/memory_resource.hpp b/3rdparty/lexy/include/lexy/_detail/memory_resource.hpp index bc651d208..324a96c84 100644 --- a/3rdparty/lexy/include/lexy/_detail/memory_resource.hpp +++ b/3rdparty/lexy/include/lexy/_detail/memory_resource.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_MEMORY_RESOURCE_HPP_INCLUDED @@ -29,7 +29,7 @@ class default_memory_resource static void* allocate(std::size_t bytes, std::size_t alignment) { if (alignment > __STDCPP_DEFAULT_NEW_ALIGNMENT__) - return ::operator new (bytes, std::align_val_t{alignment}); + return ::operator new(bytes, std::align_val_t{alignment}); else return ::operator new(bytes); } @@ -47,14 +47,14 @@ class default_memory_resource #ifdef __cpp_sized_deallocation if (alignment > __STDCPP_DEFAULT_NEW_ALIGNMENT__) - ::operator delete (ptr, bytes, std::align_val_t{alignment}); + ::operator delete(ptr, bytes, std::align_val_t{alignment}); else ::operator delete(ptr, bytes); #else (void)bytes; if (alignment > __STDCPP_DEFAULT_NEW_ALIGNMENT__) - ::operator delete (ptr, std::align_val_t{alignment}); + ::operator delete(ptr, std::align_val_t{alignment}); else ::operator delete(ptr); #endif @@ -140,9 +140,8 @@ using memory_resource_ptr _memory_resource_ptr>>; // clang-format on -template || std::is_empty_v>> +template + || std::is_empty_v>> constexpr MemoryResource* get_memory_resource() { return 
nullptr; diff --git a/3rdparty/lexy/include/lexy/_detail/nttp_string.hpp b/3rdparty/lexy/include/lexy/_detail/nttp_string.hpp index 5ef586cd2..7301914a8 100644 --- a/3rdparty/lexy/include/lexy/_detail/nttp_string.hpp +++ b/3rdparty/lexy/include/lexy/_detail/nttp_string.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_NTTP_STRING_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/_detail/stateless_lambda.hpp b/3rdparty/lexy/include/lexy/_detail/stateless_lambda.hpp index 75ac77fe1..63c8dc7e5 100644 --- a/3rdparty/lexy/include/lexy/_detail/stateless_lambda.hpp +++ b/3rdparty/lexy/include/lexy/_detail/stateless_lambda.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_STATELESS_LAMBDA_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/_detail/std.hpp b/3rdparty/lexy/include/lexy/_detail/std.hpp index f6525bd0b..bb3381f08 100644 --- a/3rdparty/lexy/include/lexy/_detail/std.hpp +++ b/3rdparty/lexy/include/lexy/_detail/std.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_STD_HPP_INCLUDED @@ -6,6 +6,7 @@ #include +//=== iterator tags ===// #if defined(__GLIBCXX__) namespace std @@ -35,5 +36,63 @@ struct bidirectional_iterator_tag; #endif +//=== (constexpr) construct_at ===// +#if !LEXY_HAS_CONSTEXPR_DTOR + +namespace lexy::_detail +{ +// We don't have constexpr dtor's, so this is just a regular function. +template +T* construct_at(T* ptr, Args&&... 
args) +{ + return ::new ((void*)ptr) T(LEXY_FWD(args)...); +} +} // namespace lexy::_detail + +#elif defined(_MSC_VER) + +namespace lexy::_detail +{ +// MSVC can make it constexpr if marked with an attribute given by a macro. +template +constexpr T* construct_at(T* ptr, Args&&... args) +{ +# if defined(_MSVC_CONSTEXPR) + _MSVC_CONSTEXPR +# endif + return ::new ((void*)ptr) T(LEXY_FWD(args)...); +} +} // namespace lexy::_detail + +#else + +namespace lexy::_detail +{ +struct _construct_at_tag +{}; +} // namespace lexy::_detail + +namespace std +{ +// GCC only allows constexpr placement new inside a function called `std::construct_at`. +// So we write our own. +template +constexpr T* construct_at(lexy::_detail::_construct_at_tag, T* ptr, Args&&... args) +{ + return ::new ((void*)ptr) T(LEXY_FWD(args)...); +} +} // namespace std + +namespace lexy::_detail +{ +template +constexpr T* construct_at(T* ptr, Args&&... args) +{ + return std::construct_at(lexy::_detail::_construct_at_tag{}, ptr, LEXY_FWD(args)...); +} +} // namespace lexy::_detail + +#endif + #endif // LEXY_DETAIL_STD_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/_detail/string_view.hpp b/3rdparty/lexy/include/lexy/_detail/string_view.hpp index 1ee3fa015..41d42bc42 100644 --- a/3rdparty/lexy/include/lexy/_detail/string_view.hpp +++ b/3rdparty/lexy/include/lexy/_detail/string_view.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_STRING_VIEW_HPP_INCLUDED @@ -136,10 +136,18 @@ class basic_string_view } } - constexpr bool starts_with(basic_string_view prefix) const + constexpr bool starts_with(basic_string_view prefix) const noexcept { return substr(0, prefix.size()) == prefix; } + constexpr bool try_remove_prefix(basic_string_view prefix) noexcept + { + if (!starts_with(prefix)) + return false; + + remove_prefix(prefix.length()); + return true; + } 
constexpr std::size_t find(basic_string_view str, std::size_t pos = 0) const noexcept { diff --git a/3rdparty/lexy/include/lexy/_detail/swar.hpp b/3rdparty/lexy/include/lexy/_detail/swar.hpp index 6d8b5e6d6..d7734d0f4 100644 --- a/3rdparty/lexy/include/lexy/_detail/swar.hpp +++ b/3rdparty/lexy/include/lexy/_detail/swar.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_SWAR_HPP_INCLUDED @@ -222,13 +222,13 @@ class swar_reader_base : _swar_base { auto ptr = static_cast(*this).position(); ptr += swar_length; - static_cast(*this).set_position(ptr); + static_cast(*this).reset({ptr}); } void bump_swar(std::size_t char_count) { auto ptr = static_cast(*this).position(); ptr += char_count; - static_cast(*this).set_position(ptr); + static_cast(*this).reset({ptr}); } }; diff --git a/3rdparty/lexy/include/lexy/_detail/tuple.hpp b/3rdparty/lexy/include/lexy/_detail/tuple.hpp index bb8eb86d2..b9c7ebf55 100644 --- a/3rdparty/lexy/include/lexy/_detail/tuple.hpp +++ b/3rdparty/lexy/include/lexy/_detail/tuple.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_TUPLE_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/_detail/type_name.hpp b/3rdparty/lexy/include/lexy/_detail/type_name.hpp index 0dd6f652c..8e7ffe250 100644 --- a/3rdparty/lexy/include/lexy/_detail/type_name.hpp +++ b/3rdparty/lexy/include/lexy/_detail/type_name.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DETAIL_TYPE_NAME_HPP_INCLUDED @@ -29,6 +29,7 @@ constexpr auto _full_type_name() auto function = string_view(__PRETTY_FUNCTION__); 
function.remove_prefix(prefix.length()); function.remove_suffix(suffix.length()); + function.try_remove_prefix("(anonymous namespace)::"); return function; #elif defined(__GNUC__) @@ -46,6 +47,7 @@ constexpr auto _full_type_name() auto function = string_view(__PRETTY_FUNCTION__); function.remove_prefix(prefix.length()); function.remove_suffix(suffix.length()); + function.try_remove_prefix("{anonymous}::"); return function; #elif defined(_MSC_VER) @@ -58,12 +60,8 @@ constexpr auto _full_type_name() auto function = string_view(__FUNCSIG__); function.remove_prefix(prefix.length()); function.remove_suffix(suffix.length()); - - if (auto s = string_view("struct "); function.starts_with(s)) - function.remove_prefix(s.length()); - else if (auto c = string_view("class "); function.starts_with(c)) - function.remove_prefix(c.length()); - + function.try_remove_prefix("struct ") || function.try_remove_prefix("class "); + function.try_remove_prefix("`anonymous-namespace'::"); return function; #else diff --git a/3rdparty/lexy/include/lexy/action/base.hpp b/3rdparty/lexy/include/lexy/action/base.hpp index b74f9e1ce..84cc15f2d 100644 --- a/3rdparty/lexy/include/lexy/action/base.hpp +++ b/3rdparty/lexy/include/lexy/action/base.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_ACTION_BASE_HPP_INCLUDED @@ -200,6 +200,7 @@ constexpr void* no_parse_state = nullptr; template constexpr auto _do_action(_pc& context, Reader& reader) { + context.on(parse_events::grammar_start{}, reader.position()); context.on(parse_events::production_start{}, reader.position()); // We parse whitespace, theen the rule, then finish. 
@@ -209,9 +210,15 @@ constexpr auto _do_action(_pc& context, Reader& read auto rule_result = parser::parse(context, reader); if (rule_result) + { context.on(parse_events::production_finish{}, reader.position()); + context.on(parse_events::grammar_finish{}, reader); + } else + { context.on(parse_events::production_cancel{}, reader.position()); + context.on(parse_events::grammar_cancel{}, reader); + } return rule_result; } diff --git a/3rdparty/lexy/include/lexy/action/match.hpp b/3rdparty/lexy/include/lexy/action/match.hpp index ca885848b..dade7042c 100644 --- a/3rdparty/lexy/include/lexy/action/match.hpp +++ b/3rdparty/lexy/include/lexy/action/match.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_ACTION_MATCH_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/action/parse.hpp b/3rdparty/lexy/include/lexy/action/parse.hpp index a5cd9a2ac..ec4241590 100644 --- a/3rdparty/lexy/include/lexy/action/parse.hpp +++ b/3rdparty/lexy/include/lexy/action/parse.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_ACTION_PARSE_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/action/parse_as_tree.hpp b/3rdparty/lexy/include/lexy/action/parse_as_tree.hpp index dbf865330..b2f54d2c0 100644 --- a/3rdparty/lexy/include/lexy/action/parse_as_tree.hpp +++ b/3rdparty/lexy/include/lexy/action/parse_as_tree.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_ACTION_PARSE_AS_TREE_HPP_INCLUDED @@ -18,7 +18,7 @@ class _pth template explicit _pth(Tree& tree, const _detail::any_holder& input, _detail::any_holder& sink) - : _tree(&tree), _depth(0), 
_validate(input, sink), _reader(input.get()->reader()) + : _tree(&tree), _depth(0), _validate(input, sink) {} class event_handler @@ -28,40 +28,53 @@ class _pth public: event_handler(production_info info) : _validate(info) {} + void on(_pth& handler, parse_events::grammar_start, iterator) + { + LEXY_PRECONDITION(handler._depth == 0); + + handler._builder.emplace(LEXY_MOV(*handler._tree), _validate.get_info()); + } + void on(_pth& handler, parse_events::grammar_finish, Reader& reader) + { + LEXY_PRECONDITION(handler._depth == 0); + + auto begin = reader.position(); + lexy::try_match_token(dsl::any, reader); + auto end = reader.position(); + + *handler._tree = LEXY_MOV(*handler._builder).finish({begin, end}); + } + void on(_pth& handler, parse_events::grammar_cancel, Reader&) + { + LEXY_PRECONDITION(handler._depth == 0); + + handler._tree->clear(); + } + void on(_pth& handler, parse_events::production_start ev, iterator pos) { - if (handler._depth++ == 0) - handler._builder.emplace(LEXY_MOV(*handler._tree), _validate.get_info()); - else + if (handler._depth++ > 0) _marker = handler._builder->start_production(_validate.get_info()); _validate.on(handler._validate, ev, pos); } - void on(_pth& handler, parse_events::production_finish, iterator pos) + void on(_pth& handler, parse_events::production_finish ev, iterator pos) { - if (--handler._depth == 0) - { - auto reader = handler._reader; - reader.set_position(pos); - lexy::try_match_token(dsl::any, reader); - auto end = reader.position(); - - *handler._tree = LEXY_MOV(*handler._builder).finish({pos, end}); - } - else + if (--handler._depth > 0) { + if (handler._builder->current_child_count() == 0) + handler._builder->token(lexy::position_token_kind, _validate.production_begin(), + _validate.production_begin()); handler._builder->finish_production(LEXY_MOV(_marker)); } + + _validate.on(handler._validate, ev, pos); } - void on(_pth& handler, parse_events::production_cancel, iterator pos) + void on(_pth& handler, 
parse_events::production_cancel ev, iterator pos) { - if (--handler._depth == 0) - { - handler._tree->clear(); - } - else + if (--handler._depth > 0) { // Cancelling the production removes all nodes from the tree. // To ensure that the parse tree remains lossless, we add everything consumed by it @@ -69,6 +82,8 @@ class _pth handler._builder->cancel_production(LEXY_MOV(_marker)); handler._builder->token(lexy::error_token_kind, _validate.production_begin(), pos); } + + _validate.on(handler._validate, ev, pos); } auto on(_pth& handler, lexy::parse_events::operation_chain_start, iterator) @@ -130,7 +145,6 @@ class _pth int _depth; _vh _validate; - Reader _reader; }; template _input; - _detail::any_holder<_error_sink_t> _sink; - _detail::parse_context_control_block<_handler> _cb; - _pc<_handler, State, _production> _context; + _detail::any_holder _input; + _detail::any_holder<_error_sink_t> _sink; + _detail::parse_context_control_block<_handler, State> _cb; + _pc<_handler, State, _production> _context; friend _impl; }; diff --git a/3rdparty/lexy/include/lexy/action/trace.hpp b/3rdparty/lexy/include/lexy/action/trace.hpp index 84417d854..1a38fac64 100644 --- a/3rdparty/lexy/include/lexy/action/trace.hpp +++ b/3rdparty/lexy/include/lexy/action/trace.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_ACTION_TRACE_HPP_INCLUDED @@ -319,6 +319,10 @@ class _th public: constexpr event_handler(production_info info) : _info(info) {} + void on(_th&, parse_events::grammar_start, iterator) {} + void on(_th&, parse_events::grammar_finish, lexy::input_reader&) {} + void on(_th&, parse_events::grammar_cancel, lexy::input_reader&) {} + void on(_th& handler, parse_events::production_start, iterator pos) { auto loc = handler.get_location(pos); diff --git a/3rdparty/lexy/include/lexy/action/validate.hpp 
b/3rdparty/lexy/include/lexy/action/validate.hpp index f3644fa30..ac6b48f0e 100644 --- a/3rdparty/lexy/include/lexy/action/validate.hpp +++ b/3rdparty/lexy/include/lexy/action/validate.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_ACTION_VALIDATE_HPP_INCLUDED @@ -196,27 +196,31 @@ class _vh handler._top = _prev; } - template - constexpr void on(_vh& handler, parse_events::error, const error& error) + template + constexpr void on(_vh& handler, parse_events::error, const error& error) { handler._cb.generic(handler._cb.sink, get_info(), handler._cb.input, _begin, error); } - constexpr void on(_vh& handler, parse_events::error, const error& error) + template + constexpr void on(_vh& handler, parse_events::error, const error& error) { handler._cb.generic(handler._cb.sink, get_info(), handler._cb.input, _begin, error); } - constexpr void on(_vh& handler, parse_events::error, - const error& error) + template + constexpr void on(_vh& handler, parse_events::error, + const error& error) { handler._cb.literal(handler._cb.sink, get_info(), handler._cb.input, _begin, error); } - constexpr void on(_vh& handler, parse_events::error, - const error& error) + template + constexpr void on(_vh& handler, parse_events::error, + const error& error) { handler._cb.keyword(handler._cb.sink, get_info(), handler._cb.input, _begin, error); } - constexpr void on(_vh& handler, parse_events::error, - const error& error) + template + constexpr void on(_vh& handler, parse_events::error, + const error& error) { handler._cb.char_class(handler._cb.sink, get_info(), handler._cb.input, _begin, error); } diff --git a/3rdparty/lexy/include/lexy/callback.hpp b/3rdparty/lexy/include/lexy/callback.hpp index 237ebe316..fa65f3ebb 100644 --- a/3rdparty/lexy/include/lexy/callback.hpp +++ b/3rdparty/lexy/include/lexy/callback.hpp @@ -1,4 +1,4 @@ -// Copyright 
(C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/callback/adapter.hpp b/3rdparty/lexy/include/lexy/callback/adapter.hpp index 29b2750d6..b212fd2ab 100644 --- a/3rdparty/lexy/include/lexy/callback/adapter.hpp +++ b/3rdparty/lexy/include/lexy/callback/adapter.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_ADAPTER_HPP_INCLUDED @@ -16,6 +16,33 @@ struct _callback : _overloaded constexpr explicit _callback(Fns... fns) : _overloaded(LEXY_MOV(fns)...) {} }; +template +struct _callback_with_state : _overloaded +{ + using return_type = ReturnType; + + template + struct _with_state + { + const _callback_with_state& _cb; + State& _state; + + template + constexpr return_type operator()(Args&&... args) const&& + { + return _cb(_state, LEXY_FWD(args)...); + } + }; + + constexpr explicit _callback_with_state(Fns... fns) : _overloaded(LEXY_MOV(fns)...) {} + + template + constexpr auto operator[](State& state) const + { + return _with_state{*this, state}; + } +}; + /// Creates a callback. template constexpr auto callback(Fns&&... fns) @@ -26,14 +53,28 @@ constexpr auto callback(Fns&&... fns) else return _callback...>(LEXY_FWD(fns)...); } - -/// Creates a callback. template constexpr auto callback(Fns&&... fns) { return _callback...>(LEXY_FWD(fns)...); } +/// Creates a callback that also receives the parse state. +template +constexpr auto callback_with_state(Fns&&... fns) +{ + if constexpr ((lexy::is_callback> && ...)) + return _callback_with_state::return_type...>, + std::decay_t...>(LEXY_FWD(fns)...); + else + return _callback_with_state...>(LEXY_FWD(fns)...); +} +template +constexpr auto callback_with_state(Fns&&... 
fns) +{ + return _callback_with_state...>(LEXY_FWD(fns)...); +} + template struct _cb_from_sink { diff --git a/3rdparty/lexy/include/lexy/callback/aggregate.hpp b/3rdparty/lexy/include/lexy/callback/aggregate.hpp index fbb04688d..97420f146 100644 --- a/3rdparty/lexy/include/lexy/callback/aggregate.hpp +++ b/3rdparty/lexy/include/lexy/callback/aggregate.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_AGGREGATE_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/callback/base.hpp b/3rdparty/lexy/include/lexy/callback/base.hpp index 8edaa86a5..915afdd7d 100644 --- a/3rdparty/lexy/include/lexy/callback/base.hpp +++ b/3rdparty/lexy/include/lexy/callback/base.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_BASE_HPP_INCLUDED @@ -27,6 +27,13 @@ template constexpr bool is_callback_state = _detail::is_detected<_detect_callback_state, T, std::decay_t>; +template +using _detect_callback_with_state_for + = decltype(LEXY_DECLVAL(const T)[LEXY_DECLVAL(State&)](LEXY_DECLVAL(Args)...)); +template +constexpr bool is_callback_with_state_for + = _detail::is_detected<_detect_callback_with_state_for, std::decay_t, State, Args...>; + /// Returns the type of the `.sink()` function. template using sink_callback = decltype(LEXY_DECLVAL(Sink).sink(LEXY_DECLVAL(Args)...)); @@ -82,4 +89,3 @@ constexpr auto _make_overloaded(Op&&... 
op) } // namespace lexy #endif // LEXY_CALLBACK_BASE_HPP_INCLUDED - diff --git a/3rdparty/lexy/include/lexy/callback/bind.hpp b/3rdparty/lexy/include/lexy/callback/bind.hpp index 520e62853..a73851ad4 100644 --- a/3rdparty/lexy/include/lexy/callback/bind.hpp +++ b/3rdparty/lexy/include/lexy/callback/bind.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_BIND_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/callback/bit_cast.hpp b/3rdparty/lexy/include/lexy/callback/bit_cast.hpp index dbb488f01..9401c00f5 100644 --- a/3rdparty/lexy/include/lexy/callback/bit_cast.hpp +++ b/3rdparty/lexy/include/lexy/callback/bit_cast.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_BIT_CAST_HPP_INCLUDED @@ -16,7 +16,7 @@ #ifndef LEXY_HAS_BITCAST # if defined(__has_include) -# if __has_include() +# if __has_include() && __cplusplus >= 202002L # include # ifdef __cpp_lib_bit_cast # define LEXY_HAS_BITCAST 1 diff --git a/3rdparty/lexy/include/lexy/callback/composition.hpp b/3rdparty/lexy/include/lexy/callback/composition.hpp index b072b4dd6..0936c541b 100644 --- a/3rdparty/lexy/include/lexy/callback/composition.hpp +++ b/3rdparty/lexy/include/lexy/callback/composition.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_COMPOSITION_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/callback/constant.hpp b/3rdparty/lexy/include/lexy/callback/constant.hpp index 0421edc78..4ffbd61ca 100644 --- a/3rdparty/lexy/include/lexy/callback/constant.hpp +++ b/3rdparty/lexy/include/lexy/callback/constant.hpp @@ -1,4 +1,4 @@ 
-// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_CONSTANT_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/callback/container.hpp b/3rdparty/lexy/include/lexy/callback/container.hpp index d35dbf32c..a96c64bab 100644 --- a/3rdparty/lexy/include/lexy/callback/container.hpp +++ b/3rdparty/lexy/include/lexy/callback/container.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_CONTAINER_HPP_INCLUDED @@ -16,7 +16,7 @@ template constexpr auto _has_reserve = _detail::is_detected<_detect_reserve, Container>; template -using _detect_append = decltype(LEXY_DECLVAL(Container&).append(LEXY_DECLVAL(Container &&))); +using _detect_append = decltype(LEXY_DECLVAL(Container&).append(LEXY_DECLVAL(Container&&))); template constexpr auto _has_append = _detail::is_detected<_detect_append, Container>; } // namespace lexy @@ -32,18 +32,19 @@ struct _list_sink using return_type = Container; template - auto operator()(U&& obj) -> decltype(LEXY_DECLVAL(C&).push_back(LEXY_FWD(obj))) + constexpr auto operator()(U&& obj) -> decltype(LEXY_DECLVAL(C&).push_back(LEXY_FWD(obj))) { return _result.push_back(LEXY_FWD(obj)); } template - auto operator()(Args&&... args) -> decltype(LEXY_DECLVAL(C&).emplace_back(LEXY_FWD(args)...)) + constexpr auto operator()(Args&&... 
args) + -> decltype(LEXY_DECLVAL(C&).emplace_back(LEXY_FWD(args)...)) { return _result.emplace_back(LEXY_FWD(args)...); } - Container&& finish() && + constexpr Container&& finish() && { return LEXY_MOV(_result); } @@ -171,18 +172,19 @@ struct _collection_sink using return_type = Container; template - auto operator()(U&& obj) -> decltype(LEXY_DECLVAL(C&).insert(LEXY_FWD(obj))) + constexpr auto operator()(U&& obj) -> decltype(LEXY_DECLVAL(C&).insert(LEXY_FWD(obj))) { return _result.insert(LEXY_FWD(obj)); } template - auto operator()(Args&&... args) -> decltype(LEXY_DECLVAL(C&).emplace(LEXY_FWD(args)...)) + constexpr auto operator()(Args&&... args) + -> decltype(LEXY_DECLVAL(C&).emplace(LEXY_FWD(args)...)) { return _result.emplace(LEXY_FWD(args)...); } - Container&& finish() && + constexpr Container&& finish() && { return LEXY_MOV(_result); } @@ -355,7 +357,7 @@ struct _concat using return_type = Container; - void operator()(Container&& container) + constexpr void operator()(Container&& container) { if (_result.empty()) { @@ -389,7 +391,7 @@ struct _concat } } - Container&& finish() && + constexpr Container&& finish() && { return LEXY_MOV(_result); } diff --git a/3rdparty/lexy/include/lexy/callback/fold.hpp b/3rdparty/lexy/include/lexy/callback/fold.hpp index 478bc5482..b58e80dc7 100644 --- a/3rdparty/lexy/include/lexy/callback/fold.hpp +++ b/3rdparty/lexy/include/lexy/callback/fold.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_FOLD_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/callback/forward.hpp b/3rdparty/lexy/include/lexy/callback/forward.hpp index f4342816c..2655af45b 100644 --- a/3rdparty/lexy/include/lexy/callback/forward.hpp +++ b/3rdparty/lexy/include/lexy/callback/forward.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 
Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_FORWARD_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/callback/integer.hpp b/3rdparty/lexy/include/lexy/callback/integer.hpp index de2f94cdc..ab7500210 100644 --- a/3rdparty/lexy/include/lexy/callback/integer.hpp +++ b/3rdparty/lexy/include/lexy/callback/integer.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_INTEGER_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/callback/noop.hpp b/3rdparty/lexy/include/lexy/callback/noop.hpp index 03485d9e3..a36e0dc30 100644 --- a/3rdparty/lexy/include/lexy/callback/noop.hpp +++ b/3rdparty/lexy/include/lexy/callback/noop.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_NOOP_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/callback/object.hpp b/3rdparty/lexy/include/lexy/callback/object.hpp index a99dfbf3f..eb87e841d 100644 --- a/3rdparty/lexy/include/lexy/callback/object.hpp +++ b/3rdparty/lexy/include/lexy/callback/object.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CALLBACK_OBJECT_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/callback/string.hpp b/3rdparty/lexy/include/lexy/callback/string.hpp index 954c56e14..984f1e1ba 100644 --- a/3rdparty/lexy/include/lexy/callback/string.hpp +++ b/3rdparty/lexy/include/lexy/callback/string.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef 
LEXY_CALLBACK_STRING_HPP_INCLUDED @@ -149,39 +149,39 @@ struct _as_string using return_type = String; template - void operator()(CharT c) + constexpr void operator()(CharT c) { _result.push_back(c); } - void operator()(String&& str) + constexpr void operator()(String&& str) { _result.append(LEXY_MOV(str)); } template - auto operator()(Iterator begin, Iterator end) + constexpr auto operator()(Iterator begin, Iterator end) -> decltype(void(LEXY_DECLVAL(Str).append(begin, end))) { _result.append(begin, end); } template - void operator()(lexeme lex) + constexpr void operator()(lexeme lex) { static_assert(lexy::char_type_compatible_with_reader, "cannot convert lexeme to this string type"); _result.append(lex.begin(), lex.end()); } - void operator()(code_point cp) + constexpr void operator()(code_point cp) { typename Encoding::char_type buffer[4] = {}; auto size = _detail::encode_code_point(cp.value(), buffer, 4); _result.append(buffer, buffer + size); } - String&& finish() && + constexpr String&& finish() && { return _case_folding(LEXY_MOV(_result)); } diff --git a/3rdparty/lexy/include/lexy/code_point.hpp b/3rdparty/lexy/include/lexy/code_point.hpp index d77d73fbd..a67b3bdbe 100644 --- a/3rdparty/lexy/include/lexy/code_point.hpp +++ b/3rdparty/lexy/include/lexy/code_point.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_CODE_POINT_HPP_INCLUDED @@ -8,10 +8,6 @@ #include #include -#ifndef LEXY_HAS_UNICODE_DATABASE -# define LEXY_HAS_UNICODE_DATABASE 0 -#endif - #if LEXY_HAS_UNICODE_DATABASE # define LEXY_UNICODE_CONSTEXPR constexpr #else @@ -89,7 +85,7 @@ class code_point LEXY_UNICODE_CATEGORY(Lo, other_letter), LEXY_UNICODE_CATEGORY(Mn, nonspacing_mark), - LEXY_UNICODE_CATEGORY(Mc, spaing_mark), + LEXY_UNICODE_CATEGORY(Mc, spacing_mark), LEXY_UNICODE_CATEGORY(Me, enclosing_mark), LEXY_UNICODE_CATEGORY(Nd, 
decimal_number), diff --git a/3rdparty/lexy/include/lexy/dsl.hpp b/3rdparty/lexy/include/lexy/dsl.hpp index 2cc8a21cc..b41eeceb1 100644 --- a/3rdparty/lexy/include/lexy/dsl.hpp +++ b/3rdparty/lexy/include/lexy/dsl.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_HPP_INCLUDED @@ -60,5 +60,9 @@ #include #include +#if LEXY_EXPERIMENTAL +# include +#endif + #endif // LEXY_DSL_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/any.hpp b/3rdparty/lexy/include/lexy/dsl/any.hpp index 734ec611c..2b30e39ef 100644 --- a/3rdparty/lexy/include/lexy/dsl/any.hpp +++ b/3rdparty/lexy/include/lexy/dsl/any.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_ANY_HPP_INCLUDED @@ -15,9 +15,9 @@ struct _any : token_base<_any, unconditional_branch_base> template struct tp { - typename Reader::iterator end; + typename Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr std::true_type try_parse(Reader reader) { @@ -32,7 +32,7 @@ struct _any : token_base<_any, unconditional_branch_base> while (reader.peek() != encoding::eof()) reader.bump(); - end = reader.position(); + end = reader.current(); return {}; } }; diff --git a/3rdparty/lexy/include/lexy/dsl/ascii.hpp b/3rdparty/lexy/include/lexy/dsl/ascii.hpp index 16613f8db..7bc237034 100644 --- a/3rdparty/lexy/include/lexy/dsl/ascii.hpp +++ b/3rdparty/lexy/include/lexy/dsl/ascii.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_ASCII_HPP_INCLUDED diff --git 
a/3rdparty/lexy/include/lexy/dsl/base.hpp b/3rdparty/lexy/include/lexy/dsl/base.hpp index 918e447a8..ea965b0b6 100644 --- a/3rdparty/lexy/include/lexy/dsl/base.hpp +++ b/3rdparty/lexy/include/lexy/dsl/base.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_BASE_HPP_INCLUDED @@ -12,6 +12,19 @@ //=== parse_events ===// namespace lexy::parse_events { +/// Parsing started. +/// Arguments: position +struct grammar_start +{}; +/// Parsing finished successfully. +/// Arguments: the reader at the final parse position. +struct grammar_finish +{}; +/// Parsing finished unsuccessfully. +/// Arguments: the reader at the final parse position. +struct grammar_cancel +{}; + /// Start of the current production. /// Arguments: position struct production_start @@ -237,7 +250,7 @@ LEXY_FORCE_INLINE constexpr auto try_match_token(TokenRule, Reader& reader) if constexpr (std::is_same_v) { parser.try_parse(reader); - reader.set_position(parser.end); + reader.reset(parser.end); return std::true_type{}; } else if constexpr (std::is_same_v) @@ -250,7 +263,7 @@ LEXY_FORCE_INLINE constexpr auto try_match_token(TokenRule, Reader& reader) if (!parser.try_parse(reader)) return false; - reader.set_position(parser.end); + reader.reset(parser.end); return true; } } diff --git a/3rdparty/lexy/include/lexy/dsl/bits.hpp b/3rdparty/lexy/include/lexy/dsl/bits.hpp index adce85bf4..ab6d484c0 100644 --- a/3rdparty/lexy/include/lexy/dsl/bits.hpp +++ b/3rdparty/lexy/include/lexy/dsl/bits.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_BITS_HPP_INCLUDED @@ -104,13 +104,13 @@ struct _bits : token_base<_bits> template struct tp { - typename Reader::iterator end; + typename Reader::marker end; - constexpr
explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr bool try_parse(Reader reader) { - static_assert(std::is_same_v); + static_assert(lexy::is_byte_encoding); auto byte = reader.peek(); if (byte == Reader::encoding::eof() @@ -118,14 +118,14 @@ struct _bits : token_base<_bits> return false; reader.bump(); - end = reader.position(); + end = reader.current(); return true; } template constexpr void report_error(Context& context, const Reader&) { - auto err = lexy::error(end, "bits"); + auto err = lexy::error(end.position(), "bits"); context.on(_ev::error{}, err); } }; diff --git a/3rdparty/lexy/include/lexy/dsl/bom.hpp b/3rdparty/lexy/include/lexy/dsl/bom.hpp index e19fa7e55..a3010cc37 100644 --- a/3rdparty/lexy/include/lexy/dsl/bom.hpp +++ b/3rdparty/lexy/include/lexy/dsl/bom.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_BOM_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/brackets.hpp b/3rdparty/lexy/include/lexy/dsl/brackets.hpp index cde68ffee..7e461c929 100644 --- a/3rdparty/lexy/include/lexy/dsl/brackets.hpp +++ b/3rdparty/lexy/include/lexy/dsl/brackets.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_BRACKETS_HPP_INCLUDED @@ -100,7 +100,8 @@ struct _brackets template constexpr auto brackets(Open, Close) { - static_assert(lexy::is_branch_rule && lexy::is_branch_rule); + LEXY_REQUIRE_BRANCH_RULE(Open, "brackets()"); + LEXY_REQUIRE_BRANCH_RULE(Close, "brackets()"); return _brackets{}; } diff --git a/3rdparty/lexy/include/lexy/dsl/branch.hpp b/3rdparty/lexy/include/lexy/dsl/branch.hpp index 2156c9d75..1bbd7fd18 100644 --- 
a/3rdparty/lexy/include/lexy/dsl/branch.hpp +++ b/3rdparty/lexy/include/lexy/dsl/branch.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_BRANCH_HPP_INCLUDED @@ -31,19 +31,19 @@ struct _br : _copy_base template constexpr auto operator>>(Condition, Then) { - static_assert(lexy::is_branch_rule, "condition must be a branch"); + LEXY_REQUIRE_BRANCH_RULE(Condition, "Left-hand-side of >>"); return _br{}; } template constexpr auto operator>>(Condition, _seq) { - static_assert(lexy::is_branch_rule, "condition must be a branch"); + LEXY_REQUIRE_BRANCH_RULE(Condition, "Left-hand-side of >>"); return _br{}; } template constexpr auto operator>>(Condition, _br) { - static_assert(lexy::is_branch_rule, "condition must be a branch"); + LEXY_REQUIRE_BRANCH_RULE(Condition, "Left-hand-side of >>"); return _br{}; } diff --git a/3rdparty/lexy/include/lexy/dsl/byte.hpp b/3rdparty/lexy/include/lexy/dsl/byte.hpp index 804dba1be..32df8597b 100644 --- a/3rdparty/lexy/include/lexy/dsl/byte.hpp +++ b/3rdparty/lexy/include/lexy/dsl/byte.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_BYTE_HPP_INCLUDED @@ -7,58 +7,140 @@ #include #include #include +#include #include //=== byte ===// namespace lexyd { -template -struct _b : token_base<_b> +template +struct _b : token_base<_b> { static_assert(N > 0); + static constexpr bool _match(lexy::byte_encoding::int_type cur) + { + if (cur == lexy::byte_encoding::eof()) + return false; + + if constexpr (!std::is_void_v) + { + constexpr auto predicate = Predicate{}; + return predicate(static_cast(cur)); + } + else + { + return true; + } + } + template > struct tp; + template struct tp> { - typename Reader::iterator end; + 
static_assert(lexy::is_byte_encoding); + typename Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr bool try_parse(Reader reader) { - static_assert(std::is_same_v); - // Bump N times. - auto result = ((reader.peek() == Reader::encoding::eof() ? ((void)Idx, false) - : (reader.bump(), true)) - && ...); - end = reader.position(); + auto result + = ((_match(reader.peek()) ? (reader.bump(), true) : ((void)Idx, false)) && ...); + end = reader.current(); return result; } template constexpr void report_error(Context& context, const Reader&) { - auto err = lexy::error(end, "byte"); + constexpr auto name + = std::is_void_v ? "byte" : lexy::_detail::type_name(); + auto err = lexy::error(end.position(), name); context.on(_ev::error{}, err); } }; + + //=== dsl ===// + template + constexpr auto if_() const + { + static_assert(std::is_void_v); + return _b{}; + } + + template + constexpr auto range() const + { + struct predicate + { + static LEXY_CONSTEVAL auto name() + { + return "byte.range"; + } + + constexpr bool operator()(unsigned char byte) const + { + return Low <= byte && byte <= High; + } + }; + + return if_(); + } + + template + constexpr auto set() const + { + struct predicate + { + static LEXY_CONSTEVAL auto name() + { + return "byte.set"; + } + + constexpr bool operator()(unsigned char byte) const + { + return ((byte == Bytes) || ...); + } + }; + + return if_(); + } + + constexpr auto ascii() const + { + struct predicate + { + static LEXY_CONSTEVAL auto name() + { + return "byte.ASCII"; + } + + constexpr bool operator()(unsigned char byte) const + { + return byte <= 0x7F; + } + }; + + return if_(); + } }; /// Matches an arbitrary byte. -constexpr auto byte = _b<1>{}; +constexpr auto byte = _b<1, void>{}; /// Matches N arbitrary bytes. 
template -constexpr auto bytes = _b{}; +constexpr auto bytes = _b{}; } // namespace lexyd namespace lexy { template -constexpr auto token_kind_of> = lexy::any_token_kind; +constexpr auto token_kind_of> = lexy::any_token_kind; } // namespace lexy //=== padding bytes ===// @@ -84,13 +166,14 @@ struct _pb : branch_base template struct bp { - typename Reader::iterator end; + static_assert(lexy::is_byte_encoding); + typename Reader::marker end; constexpr auto try_parse(const void*, const Reader& reader) { - lexy::token_parser_for<_b, Reader> parser(reader); - auto result = parser.try_parse(reader); - end = parser.end; + lexy::token_parser_for<_b, Reader> parser(reader); + auto result = parser.try_parse(reader); + end = parser.end; return result; } @@ -102,10 +185,10 @@ struct _pb : branch_base LEXY_PARSER_FUNC auto finish(Context& context, Reader& reader, Args&&... args) { auto begin = reader.position(); - context.on(_ev::token{}, lexy::any_token_kind, begin, end); - reader.set_position(end); + context.on(_ev::token{}, lexy::any_token_kind, begin, end.position()); + reader.reset(end); - _validate(context, reader, begin, end); + _validate(context, reader, begin, end.position()); return lexy::whitespace_parser::parse(context, reader, LEXY_FWD(args)...); } @@ -117,8 +200,9 @@ struct _pb : branch_base template LEXY_PARSER_FUNC static bool parse(Context& context, Reader& reader, Args&&... 
args) { + static_assert(lexy::is_byte_encoding); auto begin = reader.position(); - if (!_b::token_parse(context, reader)) + if (!_b::token_parse(context, reader)) return false; auto end = reader.position(); @@ -167,6 +251,7 @@ auto _bint() return 0; } } + template using bint = decltype(_bint()); } // namespace lexy::_detail @@ -187,10 +272,11 @@ namespace lexyd template struct _bint : branch_base { - using _rule = lexy::_detail::type_or>; + using _rule = lexy::_detail::type_or>; template > struct _pc; + template struct _pc> { @@ -238,7 +324,8 @@ struct _bint : branch_base template struct bp { - typename Reader::iterator end; + static_assert(lexy::is_byte_encoding); + typename Reader::marker end; constexpr auto try_parse(const void*, const Reader& reader) { @@ -256,10 +343,11 @@ struct _bint : branch_base LEXY_PARSER_FUNC auto finish(Context& context, Reader& reader, Args&&... args) { auto begin = reader.position(); - context.on(_ev::token{}, _rule{}, begin, end); - reader.set_position(end); + context.on(_ev::token{}, _rule{}, begin, end.position()); + reader.reset(end); - return _pc::parse(context, reader, begin, end, LEXY_FWD(args)...); + return _pc::parse(context, reader, begin, end.position(), + LEXY_FWD(args)...); } }; @@ -269,6 +357,7 @@ struct _bint : branch_base template LEXY_PARSER_FUNC static bool parse(Context& context, Reader& reader, Args&&... 
args) { + static_assert(lexy::is_byte_encoding); auto begin = reader.position(); if (!_rule::token_parse(context, reader)) return false; @@ -308,4 +397,3 @@ inline constexpr auto big_bint64 = _bint<8, lexy::_detail::bint_big>{}; } // namespace lexyd #endif // LEXY_DSL_BYTE_HPP_INCLUDED - diff --git a/3rdparty/lexy/include/lexy/dsl/capture.hpp b/3rdparty/lexy/include/lexy/dsl/capture.hpp index e6fa59520..92d4d8de2 100644 --- a/3rdparty/lexy/include/lexy/dsl/capture.hpp +++ b/3rdparty/lexy/include/lexy/dsl/capture.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_CAPTURE_HPP_INCLUDED @@ -16,7 +16,7 @@ struct _cap : _copy_base template struct bp { - typename Reader::iterator end; + typename Reader::marker end; constexpr auto try_parse(const void*, const Reader& reader) { @@ -35,12 +35,12 @@ struct _cap : _copy_base { auto begin = reader.position(); - context.on(_ev::token{}, Token{}, begin, end); - reader.set_position(end); + context.on(_ev::token{}, Token{}, begin, end.position()); + reader.reset(end); using continuation = lexy::whitespace_parser; return continuation::parse(context, reader, LEXY_FWD(args)..., - lexy::lexeme(begin, end)); + lexy::lexeme(begin, end.position())); } }; diff --git a/3rdparty/lexy/include/lexy/dsl/case_folding.hpp b/3rdparty/lexy/include/lexy/dsl/case_folding.hpp index d51647a62..c4beb55dd 100644 --- a/3rdparty/lexy/include/lexy/dsl/case_folding.hpp +++ b/3rdparty/lexy/include/lexy/dsl/case_folding.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_CASE_FOLDING_HPP_INCLUDED @@ -45,10 +45,10 @@ struct _cfl : token_base<_cfl>, _lit_base struct tp { lexy::token_parser_for> impl; - typename Reader::iterator end; + typename Reader::marker 
end; constexpr explicit tp(const Reader& reader) - : impl(CaseFolding{reader}), end(reader.position()) + : impl(CaseFolding{reader}), end(reader.current()) {} constexpr bool try_parse(Reader _reader) @@ -84,6 +84,7 @@ struct _acfr // ascii case folding reader using encoding = typename Reader::encoding; using iterator = typename Reader::iterator; + using marker = typename Reader::marker; constexpr auto peek() const -> typename encoding::int_type { @@ -104,9 +105,13 @@ struct _acfr // ascii case folding reader return _impl.position(); } - constexpr void set_position(iterator new_pos) + constexpr marker current() const noexcept { - _impl.set_position(new_pos); + return _impl.current(); + } + constexpr void reset(marker m) noexcept + { + _impl.reset(m); } }; } // namespace lexy @@ -146,6 +151,7 @@ struct _sucfr32 // simple unicode case folding reader, UTF-32 using encoding = typename Reader::encoding; using iterator = typename Reader::iterator; + using marker = typename Reader::marker; constexpr auto peek() const -> typename encoding::int_type { @@ -163,9 +169,13 @@ struct _sucfr32 // simple unicode case folding reader, UTF-32 return _impl.position(); } - constexpr void set_position(iterator new_pos) + constexpr marker current() const noexcept { - _impl.set_position(new_pos); + return _impl.current(); + } + constexpr void reset(marker m) noexcept + { + _impl.reset(m); } }; @@ -174,22 +184,23 @@ struct _sucfrm // simple unicode case folding reader, UTF-8 and UTF-16 { using encoding = typename Reader::encoding; using iterator = typename Reader::iterator; + using marker = typename Reader::marker; Reader _impl; - typename Reader::iterator _cur_pos; + typename Reader::marker _cur_pos; typename encoding::char_type _buffer[4]; unsigned char _buffer_size; unsigned char _buffer_cur; constexpr explicit _sucfrm(Reader impl) - : _impl(impl), _cur_pos(_impl.position()), _buffer{}, _buffer_size(0), _buffer_cur(0) + : _impl(impl), _cur_pos(_impl.current()), _buffer{}, 
_buffer_size(0), _buffer_cur(0) { _fill(); } constexpr void _fill() { - _cur_pos = _impl.position(); + _cur_pos = _impl.current(); // We need to read the next code point at this point. auto result = lexy::_detail::parse_code_point(_impl); @@ -200,13 +211,13 @@ struct _sucfrm // simple unicode case folding reader, UTF-8 and UTF-16 _buffer_size = static_cast( lexy::_detail::encode_code_point(folded.value(), _buffer, 4)); _buffer_cur = 0; - _impl.set_position(result.end); + _impl.reset(result.end); } else { // Fill the buffer with the partial code point. _buffer_cur = _buffer_size = 0; - while (_impl.position() != result.end) + while (_impl.position() != result.end.position()) { _buffer[_buffer_size] = static_cast(_impl.peek()); ++_buffer_size; @@ -233,22 +244,23 @@ struct _sucfrm // simple unicode case folding reader, UTF-8 and UTF-16 constexpr iterator position() const { - // We only report the position at a code point boundary. + return current().position(); + } + + constexpr marker current() const noexcept + { + // We only report a marker at a code point boundary. // This has two consequences: // 1. If we don't match a rule, the error token does not include any common start code - // units. - // That's actually nice, and makes it unnecessary to handle that situation in the error - // reporting. The only relevant difference is in the error token. + // units. That's actually nice, and makes it unnecessary to handle that situation in the + // error reporting. The only relevant difference is in the error token. // 2. If the user wants to match partial code unit sequences, the behavior can become buggy. // However, that's not really something we should worry about. return _cur_pos; } - - constexpr void set_position(iterator new_pos) + constexpr void reset(marker m) noexcept { - // It's a code point boundary, so reset. 
- _impl.set_position(new_pos); - _fill(); + _impl.reset(m); } }; diff --git a/3rdparty/lexy/include/lexy/dsl/char_class.hpp b/3rdparty/lexy/include/lexy/dsl/char_class.hpp index c8e7516f7..cc09ab31e 100644 --- a/3rdparty/lexy/include/lexy/dsl/char_class.hpp +++ b/3rdparty/lexy/include/lexy/dsl/char_class.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_CHAR_CLASS_HPP_INCLUDED @@ -204,17 +204,19 @@ struct char_class_base : token_base, _char_class_base template struct tp { - typename Reader::iterator end; + typename Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr bool try_parse(Reader reader) { + static_assert(lexy::is_char_encoding); + using matcher = lexy::_detail::ascii_set_matcher<_cas>; if (matcher::template match(reader.peek())) { reader.bump(); - end = reader.position(); + end = reader.current(); return true; } @@ -223,38 +225,37 @@ struct char_class_base : token_base, _char_class_base { return false; } - else if constexpr (std::is_same_v // - || std::is_same_v) + else if constexpr (lexy::is_unicode_encoding) { - static_assert(!Derived::char_class_unicode(), - "cannot use this character class with default/byte_encoding"); + static_assert(Derived::char_class_unicode(), + "cannot use this character class with Unicode encoding"); - if (reader.peek() == Reader::encoding::eof()) + // Parse one code point. 
+ auto result = lexy::_detail::parse_code_point(reader); + if (result.error != lexy::_detail::cp_error::success) return false; - auto cp = static_cast(reader.peek()); - reader.bump(); - - if (!Derived::char_class_match_cp(cp)) + if (!Derived::char_class_match_cp(result.cp)) return false; - end = reader.position(); + end = result.end; return true; } else { - static_assert(Derived::char_class_unicode(), - "cannot use this character class with Unicode encoding"); + static_assert(!Derived::char_class_unicode(), + "cannot use this character class with non-Unicode char encodings"); - // Parse one code point. - auto result = lexy::_detail::parse_code_point(reader); - if (result.error != lexy::_detail::cp_error::success) + if (reader.peek() == Reader::encoding::eof()) return false; - if (!Derived::char_class_match_cp(result.cp)) + auto cp = static_cast(reader.peek()); + reader.bump(); + + if (!Derived::char_class_match_cp(cp)) return false; - end = result.end; + end = reader.current(); return true; } } @@ -361,8 +362,8 @@ constexpr auto _make_char_class(C c) return c; } template || std::is_same_v>> + typename = std::enable_if_t + || std::is_same_v>> constexpr auto _make_char_class(_lit) { if constexpr (std::is_same_v) @@ -411,8 +412,8 @@ struct _calt : char_class_base<_calt> static constexpr auto char_class_match_cp(char32_t cp) { - if constexpr ((std::is_same_v && ...)) + if constexpr ((std::is_same_v + && ...)) return std::false_type{}; else return (Cs::char_class_match_cp(cp) || ...); @@ -572,8 +573,8 @@ struct _cand : char_class_base<_cand> static constexpr auto char_class_match_cp(char32_t cp) { - if constexpr ((std::is_same_v && ...)) + if constexpr ((std::is_same_v + && ...)) return std::false_type{}; else return (Cs::char_class_match_cp(cp) && ...); diff --git a/3rdparty/lexy/include/lexy/dsl/choice.hpp b/3rdparty/lexy/include/lexy/dsl/choice.hpp index 22672147a..ed0fbd952 100644 --- a/3rdparty/lexy/include/lexy/dsl/choice.hpp +++ 
b/3rdparty/lexy/include/lexy/dsl/choice.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_CHOICE_HPP_INCLUDED @@ -134,20 +134,20 @@ struct _chc template constexpr auto operator|(R, S) { - static_assert(lexy::is_branch_rule, "choice requires a branch condition"); - static_assert(lexy::is_branch_rule, "choice requires a branch condition"); + LEXY_REQUIRE_BRANCH_RULE(R, "choice"); + LEXY_REQUIRE_BRANCH_RULE(S, "choice"); return _chc{}; } template constexpr auto operator|(_chc, S) { - static_assert(lexy::is_branch_rule, "choice requires a branch condition"); + LEXY_REQUIRE_BRANCH_RULE(S, "choice"); return _chc{}; } template constexpr auto operator|(R, _chc) { - static_assert(lexy::is_branch_rule, "choice requires a branch condition"); + LEXY_REQUIRE_BRANCH_RULE(R, "choice"); return _chc{}; } template diff --git a/3rdparty/lexy/include/lexy/dsl/code_point.hpp b/3rdparty/lexy/include/lexy/dsl/code_point.hpp index 2658be22a..bc09666aa 100644 --- a/3rdparty/lexy/include/lexy/dsl/code_point.hpp +++ b/3rdparty/lexy/include/lexy/dsl/code_point.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_CODE_POINT_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/combination.hpp b/3rdparty/lexy/include/lexy/dsl/combination.hpp index 0d6b96c71..73db67b97 100644 --- a/3rdparty/lexy/include/lexy/dsl/combination.hpp +++ b/3rdparty/lexy/include/lexy/dsl/combination.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_COMBINATION_HPP_INCLUDED @@ -130,7 +130,7 @@ struct _comb : rule_base template constexpr auto combination(R...) 
{ - static_assert((lexy::is_branch_rule && ...), "combination() requires a branch rule"); + LEXY_REQUIRE_BRANCH_RULE(R..., "combination()"); static_assert((!lexy::is_unconditional_branch_rule && ...), "combination() does not support unconditional branches"); return _comb{}; @@ -141,7 +141,7 @@ constexpr auto combination(R...) template constexpr auto partial_combination(R...) { - static_assert((lexy::is_branch_rule && ...), "partial_combination() requires a branch rule"); + LEXY_REQUIRE_BRANCH_RULE(R..., "partial_combination()"); static_assert((!lexy::is_unconditional_branch_rule && ...), "partial_combination() does not support unconditional branches"); // If the choice no longer matches, we just break. diff --git a/3rdparty/lexy/include/lexy/dsl/context_counter.hpp b/3rdparty/lexy/include/lexy/dsl/context_counter.hpp index 5efe371bc..411ca1ad9 100644 --- a/3rdparty/lexy/include/lexy/dsl/context_counter.hpp +++ b/3rdparty/lexy/include/lexy/dsl/context_counter.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_CONTEXT_COUNTER_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/context_flag.hpp b/3rdparty/lexy/include/lexy/dsl/context_flag.hpp index 20d0e6307..6782c5322 100644 --- a/3rdparty/lexy/include/lexy/dsl/context_flag.hpp +++ b/3rdparty/lexy/include/lexy/dsl/context_flag.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_CONTEXT_FLAG_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/context_identifier.hpp b/3rdparty/lexy/include/lexy/dsl/context_identifier.hpp index b797c0293..6850904f9 100644 --- a/3rdparty/lexy/include/lexy/dsl/context_identifier.hpp +++ b/3rdparty/lexy/include/lexy/dsl/context_identifier.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 
2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_CONTEXT_IDENTIFIER_HPP_INCLUDED @@ -73,7 +73,7 @@ struct _ctx_irem : branch_base template struct bp { - typename Reader::iterator end; + typename Reader::marker end; template constexpr bool try_parse(const ControlBlock* cb, const Reader& reader) @@ -85,7 +85,7 @@ struct _ctx_irem : branch_base end = parser.end; // The two lexemes need to be equal. - auto lexeme = lexy::lexeme(reader.position(), end); + auto lexeme = lexy::lexeme(reader.position(), end.position()); return lexy::_detail::equal_lexemes(_ctx_id::get(cb), lexeme); } @@ -97,8 +97,9 @@ struct _ctx_irem : branch_base LEXY_PARSER_FUNC bool finish(Context& context, Reader& reader, Args&&... args) { // Finish parsing the token. - context.on(_ev::token{}, lexy::identifier_token_kind, reader.position(), end); - reader.set_position(end); + context.on(_ev::token{}, lexy::identifier_token_kind, reader.position(), + end.position()); + reader.reset(end); return lexy::whitespace_parser::parse(context, reader, LEXY_FWD(args)...); } diff --git a/3rdparty/lexy/include/lexy/dsl/delimited.hpp b/3rdparty/lexy/include/lexy/dsl/delimited.hpp index d72dc502f..0b6684d94 100644 --- a/3rdparty/lexy/include/lexy/dsl/delimited.hpp +++ b/3rdparty/lexy/include/lexy/dsl/delimited.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_DELIMITED_HPP_INCLUDED @@ -110,12 +110,33 @@ struct _del_chars else if constexpr (!std::is_same_v) { - // Try to match any code point in default_encoding or byte_encoding. 
- if constexpr (std::is_same_v // - || std::is_same_v) + if constexpr (lexy::is_unicode_encoding) + { + static_assert(CharClass::char_class_unicode(), + "cannot use this character class with Unicode encoding"); + + auto result = lexy::_detail::parse_code_point(reader); + if (result.error == lexy::_detail::cp_error::success + && CharClass::char_class_match_cp(result.cp)) + { + reader.reset(result.end); + } + else + { + finish(context, sink, reader.position()); + + auto recover_begin = reader.position(); + if (recover_begin == result.end.position()) + reader.bump(); + else + reader.reset(result.end); + _recover(context, recover_begin, reader.position()); + } + } + else { static_assert(!CharClass::char_class_unicode(), - "cannot use this character class with default/byte_encoding"); + "cannot use this character class with non-Unicode char encoding"); LEXY_ASSERT(reader.peek() != encoding::eof(), "EOF should be checked before calling this"); @@ -129,25 +150,6 @@ struct _del_chars _recover(context, recover_begin, reader.position()); } } - // Otherwise, try to match Unicode characters. - else - { - static_assert(CharClass::char_class_unicode(), - "cannot use this character class with Unicode encoding"); - - auto result = lexy::_detail::parse_code_point(reader); - if (result.error == lexy::_detail::cp_error::success - && CharClass::char_class_match_cp(result.cp)) - { - reader.set_position(result.end); - } - else - { - finish(context, sink, reader.position()); - _recover(context, reader.position(), result.end); - reader.set_position(result.end); - } - } } // It doesn't match Unicode characters. else @@ -253,6 +255,7 @@ struct _del : rule_base template LEXY_PARSER_FUNC static bool parse(Context& context, Reader& reader, Args&&... args) { + static_assert(lexy::is_char_encoding); auto sink = context.value_callback().sink(); // Parse characters until we have the closing delimiter. 
@@ -325,7 +328,8 @@ struct _delim_dsl template constexpr auto delimited(Open, Close) { - static_assert(lexy::is_branch_rule && lexy::is_branch_rule); + LEXY_REQUIRE_BRANCH_RULE(Open, "delimited()"); + LEXY_REQUIRE_BRANCH_RULE(Close, "delimited()"); return _delim_dsl{}; } @@ -333,7 +337,7 @@ constexpr auto delimited(Open, Close) template constexpr auto delimited(Delim) { - static_assert(lexy::is_branch_rule); + LEXY_REQUIRE_BRANCH_RULE(Delim, "delimited()"); return _delim_dsl{}; } @@ -424,7 +428,7 @@ struct _escape : _escape_base template constexpr auto rule(Branch) const { - static_assert(lexy::is_branch_rule); + LEXY_REQUIRE_BRANCH_RULE(Branch, "escape()"); return _escape{}; } @@ -432,7 +436,7 @@ struct _escape : _escape_base template constexpr auto capture(Branch branch) const { - static_assert(lexy::is_branch_rule); + LEXY_REQUIRE_BRANCH_RULE(Branch, "escape()"); return this->rule(lexy::dsl::capture(branch)); } diff --git a/3rdparty/lexy/include/lexy/dsl/digit.hpp b/3rdparty/lexy/include/lexy/dsl/digit.hpp index 33232cfc2..a3f1dc61d 100644 --- a/3rdparty/lexy/include/lexy/dsl/digit.hpp +++ b/3rdparty/lexy/include/lexy/dsl/digit.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_DIGIT_HPP_INCLUDED @@ -310,7 +310,8 @@ constexpr bool _match_digits(Reader& reader) // ... then manually to get any trailing digits. 
while (lexy::try_match_token(digit, reader)) - {} + { + } return true; } @@ -355,24 +356,27 @@ struct _digits_st : token_base<_digits_st> template struct tp { - typename Reader::iterator end; - bool forbidden_leading_zero; + typename Reader::marker end; + bool forbidden_leading_zero; constexpr explicit tp(const Reader& reader) - : end(reader.position()), forbidden_leading_zero(false) + : end(reader.current()), forbidden_leading_zero(false) {} constexpr bool try_parse(Reader reader) { using char_type = typename Reader::encoding::char_type; - auto begin = reader.position(); + auto begin = reader.current(); auto result = _match_digits_sep(reader); - end = reader.position(); + end = reader.current(); - if (result && lexy::_detail::next(begin) != end - && *begin == lexy::_detail::transcode_char('0')) + if (result && lexy::_detail::next(begin.position()) != end.position() + && *begin.position() == lexy::_detail::transcode_char('0')) { - end = lexy::_detail::next(begin); + reader.reset(begin); + reader.bump(); + end = reader.current(); + forbidden_leading_zero = true; return false; } @@ -385,14 +389,14 @@ struct _digits_st : token_base<_digits_st> { if (forbidden_leading_zero) { - auto err - = lexy::error(reader.position(), end); + auto err = lexy::error(reader.position(), + end.position()); context.on(_ev::error{}, err); } else { - auto err - = lexy::error(end, Base::char_class_name()); + auto err = lexy::error(end.position(), + Base::char_class_name()); context.on(_ev::error{}, err); } } @@ -405,21 +409,22 @@ struct _digits_s : token_base<_digits_s> template struct tp { - typename Reader::iterator end; + typename Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr bool try_parse(Reader reader) { auto result = _match_digits_sep(reader); - end = reader.position(); + end = reader.current(); return result; } template constexpr void 
report_error(Context& context, const Reader&) { - auto err = lexy::error(end, Base::char_class_name()); + auto err = lexy::error(end.position(), + Base::char_class_name()); context.on(_ev::error{}, err); } }; @@ -436,24 +441,27 @@ struct _digits_t : token_base<_digits_t> template struct tp { - typename Reader::iterator end; - bool forbidden_leading_zero; + typename Reader::marker end; + bool forbidden_leading_zero; constexpr explicit tp(const Reader& reader) - : end(reader.position()), forbidden_leading_zero(false) + : end(reader.current()), forbidden_leading_zero(false) {} constexpr bool try_parse(Reader reader) { using char_type = typename Reader::encoding::char_type; - auto begin = reader.position(); + auto begin = reader.current(); auto result = _match_digits(reader); - end = reader.position(); + end = reader.current(); - if (result && lexy::_detail::next(begin) != end - && *begin == lexy::_detail::transcode_char('0')) + if (result && lexy::_detail::next(begin.position()) != end.position() + && *begin.position() == lexy::_detail::transcode_char('0')) { - end = lexy::_detail::next(begin); + reader.reset(begin); + reader.bump(); + end = reader.current(); + forbidden_leading_zero = true; return false; } @@ -467,7 +475,7 @@ struct _digits_t : token_base<_digits_t> if (forbidden_leading_zero) { auto err = lexy::error(reader.position(), - this->end); + end.position()); context.on(_ev::error{}, err); } else @@ -493,14 +501,14 @@ struct _digits : token_base<_digits> template struct tp { - typename Reader::iterator end; + typename Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr bool try_parse(Reader reader) { auto result = _match_digits(reader); - end = reader.position(); + end = reader.current(); return result; } @@ -557,16 +565,16 @@ struct _ndigits_s : token_base<_ndigits_s> template struct tp> { - typename Reader::iterator end; + typename 
Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr bool try_parse(Reader reader) { // Match the Base one time. if (!lexy::try_match_token(digit, reader)) { - end = reader.position(); + end = reader.current(); return false; } @@ -574,14 +582,15 @@ struct _ndigits_s : token_base<_ndigits_s> auto success = (((void)Idx, lexy::try_match_token(Sep{}, reader), lexy::try_match_token(digit, reader)) && ...); - end = reader.position(); + end = reader.current(); return success; } template constexpr void report_error(Context& context, const Reader&) { - auto err = lexy::error(end, Base::char_class_name()); + auto err = lexy::error(end.position(), + Base::char_class_name()); context.on(_ev::error{}, err); } }; @@ -597,22 +606,23 @@ struct _ndigits : token_base<_ndigits> template struct tp> { - typename Reader::iterator end; + typename Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr bool try_parse(Reader reader) { // Match the Base N times. 
auto success = (((void)Idx, lexy::try_match_token(digit, reader)) && ...); - end = reader.position(); + end = reader.current(); return success; } template constexpr void report_error(Context& context, const Reader&) { - auto err = lexy::error(end, Base::char_class_name()); + auto err = lexy::error(end.position(), + Base::char_class_name()); context.on(_ev::error{}, err); } }; diff --git a/3rdparty/lexy/include/lexy/dsl/eof.hpp b/3rdparty/lexy/include/lexy/dsl/eof.hpp index 3c934357e..beb96cb97 100644 --- a/3rdparty/lexy/include/lexy/dsl/eof.hpp +++ b/3rdparty/lexy/include/lexy/dsl/eof.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_EOF_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/error.hpp b/3rdparty/lexy/include/lexy/dsl/error.hpp index 341e26f7d..7be4fac65 100644 --- a/3rdparty/lexy/include/lexy/dsl/error.hpp +++ b/3rdparty/lexy/include/lexy/dsl/error.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_ERROR_HPP_INCLUDED @@ -10,6 +10,14 @@ namespace lexyd { +template +struct _err_production +{ + static constexpr auto name = ""; + static constexpr auto max_recursion_depth = 0; + static constexpr auto rule = Rule{}; +}; + template struct _err : unconditional_branch_base { @@ -23,9 +31,17 @@ struct _err : unconditional_branch_base auto end = reader.position(); if constexpr (!std::is_same_v) { - lexy::token_parser_for parser(reader); - parser.try_parse(reader); - end = parser.end; + auto backtrack = reader.current(); + + // We match a dummy production that only consists of the rule. 
+ lexy::do_action< + _err_production, + lexy::match_action::template result_type>(lexy::_mh(), + context.control_block + ->parse_state, + reader); + end = reader.position(); + reader.reset(LEXY_MOV(backtrack)); } auto err = lexy::error(begin, end); @@ -102,7 +118,7 @@ struct _must_dsl template constexpr auto must(Branch) { - static_assert(lexy::is_branch_rule); + LEXY_REQUIRE_BRANCH_RULE(Branch, "must()"); static_assert(!lexy::is_unconditional_branch_rule); return _must_dsl{}; } diff --git a/3rdparty/lexy/include/lexy/dsl/expression.hpp b/3rdparty/lexy/include/lexy/dsl/expression.hpp index 0b8fe505e..a7d4f94fa 100644 --- a/3rdparty/lexy/include/lexy/dsl/expression.hpp +++ b/3rdparty/lexy/include/lexy/dsl/expression.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_EXPRESSION_HPP_INCLUDED @@ -197,7 +197,7 @@ struct operation_list (void)((cur_idx <= op.idx && op.idx < cur_idx + op_of::op_literals::size ? (result = Continuation::parse(context, reader, - parsed_operator{op.pos, + parsed_operator{op.cur, op.idx - cur_idx}, LEXY_FWD(args)...), true) @@ -309,7 +309,7 @@ struct _expr : rule_base if (op.idx >= op_rule::op_literals::size) { // The list ends at this point. 
- reader.set_position(op.pos); + reader.reset(op.cur); break; } @@ -381,10 +381,11 @@ struct _expr : rule_base if (op.idx < op_rule::op_literals::size) { using tag = typename Context::production::operator_chain_error; - auto err = lexy::error(op.pos, reader.position()); + auto err + = lexy::error(op.cur.position(), reader.position()); context.on(_ev::error{}, err); } - reader.set_position(op.pos); + reader.reset(op.cur); } } else if constexpr (binding_power.is_postfix()) @@ -416,11 +417,11 @@ struct _expr : rule_base if (state.cur_nesting_level++ >= production::max_operator_nesting) { using tag = typename production::operator_nesting_error; - auto err = lexy::error(op.pos, reader.position()); + auto err = lexy::error(op.cur.position(), reader.position()); context.on(_ev::error{}, err); // We do not recover, to prevent stack overflow. - reader.set_position(op.pos); + reader.reset(op.cur); return false; } @@ -437,7 +438,7 @@ struct _expr : rule_base { // Operators can't be grouped. using tag = typename production::operator_group_error; - auto err = lexy::error(op.pos, reader.position()); + auto err = lexy::error(op.cur.position(), reader.position()); context.on(_ev::error{}, err); // Trivially recover, but don't update group: // let the first one stick. @@ -470,11 +471,11 @@ struct _expr : rule_base if (op.idx >= op_list::ops::size) { // We don't have a prefix operator, so it must be an atom. 
- reader.set_position(op.pos); + reader.reset(op.cur); return atom_parser::parse(context, reader); } - auto start_event = context.on(_ev::operation_chain_start{}, op.pos); + auto start_event = context.on(_ev::operation_chain_start{}, op.cur.position()); auto result = op_list::template apply<_continuation>(context, reader, op, state); context.on(_ev::operation_chain_finish{}, LEXY_MOV(start_event), reader.position()); return result; @@ -507,7 +508,7 @@ struct _expr : rule_base auto op = parse_operator(reader); if (op.idx >= op_list::ops::size) { - reader.set_position(op.pos); + reader.reset(op.cur); break; } diff --git a/3rdparty/lexy/include/lexy/dsl/flags.hpp b/3rdparty/lexy/include/lexy/dsl/flags.hpp index a86f7410a..51cd1f948 100644 --- a/3rdparty/lexy/include/lexy/dsl/flags.hpp +++ b/3rdparty/lexy/include/lexy/dsl/flags.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_FLAGS_HPP_INCLUDED @@ -126,9 +126,15 @@ struct _flag : rule_base template constexpr auto flag(Rule) { - static_assert(lexy::is_branch_rule); + LEXY_REQUIRE_BRANCH_RULE(Rule, "flag()"); return _flag{}; } + +template +constexpr auto flag(Rule rule) +{ + return flag(rule); +} } // namespace lexyd #endif // LEXY_DSL_FLAGS_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/follow.hpp b/3rdparty/lexy/include/lexy/dsl/follow.hpp index 38abd3539..9d47e09dd 100644 --- a/3rdparty/lexy/include/lexy/dsl/follow.hpp +++ b/3rdparty/lexy/include/lexy/dsl/follow.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_FOLLOW_HPP_INCLUDED @@ -49,11 +49,11 @@ struct _nf : token_base<_nf>, _lit_base struct tp { lexy::token_parser_for impl; - typename Reader::iterator end; + typename Reader::marker end; bool 
literal_success; constexpr explicit tp(const Reader& reader) - : impl(reader), end(reader.position()), literal_success(false) + : impl(reader), end(reader.current()), literal_success(false) {} constexpr bool try_parse(Reader reader) @@ -67,7 +67,7 @@ struct _nf : token_base<_nf>, _lit_base literal_success = true; // To match, we must not match the char class now. - reader.set_position(end); + reader.reset(end); if constexpr (std::is_void_v) { return !lexy::try_match_token(CharClass{}, reader); @@ -88,7 +88,8 @@ struct _nf : token_base<_nf>, _lit_base } else { - auto err = lexy::error(end, end); + auto err + = lexy::error(end.position(), end.position()); context.on(_ev::error{}, err); } } diff --git a/3rdparty/lexy/include/lexy/dsl/identifier.hpp b/3rdparty/lexy/include/lexy/dsl/identifier.hpp index b388a7bc3..1b780f00f 100644 --- a/3rdparty/lexy/include/lexy/dsl/identifier.hpp +++ b/3rdparty/lexy/include/lexy/dsl/identifier.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_IDENTIFIER_HPP_INCLUDED @@ -36,12 +36,14 @@ struct _idp : token_base<_idp> template struct tp { - typename Reader::iterator end; + typename Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr bool try_parse(Reader reader) { + static_assert(lexy::is_char_encoding); + // Need to match Leading character. 
if (!lexy::try_match_token(Leading{}, reader)) return false; @@ -61,7 +63,7 @@ struct _idp : token_base<_idp> break; } - end = reader.position(); + end = reader.current(); return true; } @@ -170,7 +172,7 @@ struct _id : branch_base template struct bp { - typename Reader::iterator end; + typename Reader::marker end; constexpr bool try_parse(const void*, const Reader& reader) { @@ -181,7 +183,8 @@ struct _id : branch_base end = parser.end; // We only succeed if it's not a reserved identifier. - [[maybe_unused]] auto input = lexy::partial_input(reader, reader.position(), end); + [[maybe_unused]] auto input + = lexy::partial_input(reader, reader.position(), end.position()); return !(ReservedPredicate::is_reserved(input) || ...); } @@ -194,12 +197,12 @@ struct _id : branch_base { auto begin = reader.position(); - context.on(_ev::token{}, lexy::identifier_token_kind, begin, end); - reader.set_position(end); + context.on(_ev::token{}, lexy::identifier_token_kind, begin, end.position()); + reader.reset(end); using continuation = lexy::whitespace_parser; return continuation::parse(context, reader, LEXY_FWD(args)..., - lexy::lexeme(begin, end)); + lexy::lexeme(begin, end.position())); } }; @@ -341,16 +344,16 @@ struct _kw : token_base<_kw>, _lit_base template struct tp { - typename Reader::iterator end; + typename Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr bool try_parse(Reader reader) { // Need to match the literal. if (!lexy::_detail::match_literal<0, CharT, C...>(reader)) return false; - end = reader.position(); + end = reader.current(); // To qualify as a keyword, and not just the prefix of an identifier, // we must not have a trailing identifier character. 
diff --git a/3rdparty/lexy/include/lexy/dsl/if.hpp b/3rdparty/lexy/include/lexy/dsl/if.hpp index 48889a679..ff4f1d4a0 100644 --- a/3rdparty/lexy/include/lexy/dsl/if.hpp +++ b/3rdparty/lexy/include/lexy/dsl/if.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_IF_HPP_INCLUDED @@ -36,7 +36,7 @@ struct _if : rule_base template constexpr auto if_(Branch) { - static_assert(lexy::is_branch_rule, "if_() requires a branch condition"); + LEXY_REQUIRE_BRANCH_RULE(Branch, "if()"); if constexpr (lexy::is_unconditional_branch_rule) // Branch is always taken, so don't wrap in if_(). return Branch{}; diff --git a/3rdparty/lexy/include/lexy/dsl/integer.hpp b/3rdparty/lexy/include/lexy/dsl/integer.hpp index 435f2adec..95475fa0c 100644 --- a/3rdparty/lexy/include/lexy/dsl/integer.hpp +++ b/3rdparty/lexy/include/lexy/dsl/integer.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_INTEGER_HPP_INCLUDED @@ -414,7 +414,7 @@ struct _int : _copy_base template struct bp { - typename Reader::iterator end; + typename Reader::marker end; constexpr auto try_parse(const void*, const Reader& reader) { @@ -432,10 +432,11 @@ struct _int : _copy_base LEXY_PARSER_FUNC bool finish(Context& context, Reader& reader, Args&&... 
args) { auto begin = reader.position(); - context.on(_ev::token{}, Token{}, begin, end); - reader.set_position(end); + context.on(_ev::token{}, Token{}, begin, end.position()); + reader.reset(end); - return _pc::parse(context, reader, begin, end, LEXY_FWD(args)...); + return _pc::parse(context, reader, begin, end.position(), + LEXY_FWD(args)...); } }; @@ -448,17 +449,19 @@ struct _int : _copy_base auto begin = reader.position(); if (lexy::token_parser_for parser(reader); parser.try_parse(reader)) { - context.on(_ev::token{}, typename Token::token_type{}, begin, parser.end); - reader.set_position(parser.end); + context.on(_ev::token{}, typename Token::token_type{}, begin, + parser.end.position()); + reader.reset(parser.end); } else { parser.report_error(context, reader); - reader.set_position(parser.end); + reader.reset(parser.end); // To recover we try and skip additional digits. while (lexy::try_match_token(digit, reader)) - {} + { + } auto recovery_end = reader.position(); if (begin == recovery_end) diff --git a/3rdparty/lexy/include/lexy/dsl/list.hpp b/3rdparty/lexy/include/lexy/dsl/list.hpp index 861d93177..25b85076d 100644 --- a/3rdparty/lexy/include/lexy/dsl/list.hpp +++ b/3rdparty/lexy/include/lexy/dsl/list.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_LIST_HPP_INCLUDED @@ -132,8 +132,7 @@ struct _lst : _copy_base template constexpr auto list(Item) { - static_assert(lexy::is_branch_rule, - "list() without a separator requires a branch condition"); + LEXY_REQUIRE_BRANCH_RULE(Item, "list() without a separator"); return _lst{}; } @@ -148,8 +147,7 @@ constexpr auto list(Item, _sep) template constexpr auto list(Item, _tsep) { - static_assert(lexy::is_branch_rule, - "list() without a trailing separator requires a branch condition"); + LEXY_REQUIRE_BRANCH_RULE(Item, "list() with a trailing separator"); return 
_lst>{}; } diff --git a/3rdparty/lexy/include/lexy/dsl/literal.hpp b/3rdparty/lexy/include/lexy/dsl/literal.hpp index 4fc1b6066..adc2cf599 100644 --- a/3rdparty/lexy/include/lexy/dsl/literal.hpp +++ b/3rdparty/lexy/include/lexy/dsl/literal.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_LITERAL_HPP_INCLUDED @@ -18,6 +18,7 @@ namespace lexy::_detail template constexpr auto match_literal(Reader& reader) { + static_assert(lexy::is_char_encoding); using char_type = typename Reader::encoding::char_type; if constexpr (CurCharIndex >= sizeof...(Cs)) { @@ -184,11 +185,11 @@ struct _merge_case_folding typename H::lit_case_folding, CurrentCaseFolding>, T...> { - static_assert( - std::is_same_v // - || std::is_void_v || std::is_void_v, - "cannot mix literals with different case foldings in a literal_set"); + static_assert(std::is_same_v // + || std::is_void_v + || std::is_void_v, + "cannot mix literals with different case foldings in a literal_set"); }; template @@ -272,7 +273,7 @@ struct lit_trie_matcher if constexpr (sizeof...(Idx) > 0) { - auto cur_pos = reader.position(); + auto cur = reader.current(); auto cur_char = reader.peek(); auto next_value = Trie.node_no_match; @@ -283,7 +284,7 @@ struct lit_trie_matcher return next_value; // We haven't found a longer match, return our match. - reader.set_position(cur_pos); + reader.reset(cur); } // But first, we might need to check that we don't match that nodes char class. 
@@ -305,6 +306,7 @@ struct lit_trie_matcher template LEXY_FORCE_INLINE static constexpr std::size_t try_match(Reader& _reader) { + static_assert(lexy::is_char_encoding); if constexpr (std::is_same_v, Reader>) { return _impl<>::try_match(_reader); @@ -313,7 +315,7 @@ struct lit_trie_matcher { CaseFolding reader{_reader}; auto result = _impl<>::try_match(reader); - _reader.set_position(reader.position()); + _reader.reset(reader.current()); return result; } } @@ -350,14 +352,14 @@ struct _lit template struct tp { - typename Reader::iterator end; + typename Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr auto try_parse(Reader reader) { auto result = lexy::_detail::match_literal<0, CharT, C...>(reader); - end = reader.position(); + end = reader.current(); return result; } @@ -368,7 +370,7 @@ struct _lit constexpr auto str = lexy::_detail::type_string::template c_str; auto begin = reader.position(); - auto index = lexy::_detail::range_size(begin, this->end); + auto index = lexy::_detail::range_size(begin, end.position()); auto err = lexy::error(begin, str, index, sizeof...(C)); context.on(_ev::error{}, err); } @@ -445,9 +447,9 @@ struct _lcp : token_base<_lcp>, _lit_base template struct tp> { - typename Reader::iterator end; + typename Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr bool try_parse(Reader reader) { @@ -455,7 +457,7 @@ struct _lcp : token_base<_lcp>, _lit_base auto result = lexy::_detail::match_literal<0, typename encoding::char_type, _string.data[Idx]...>(reader); - end = reader.position(); + end = reader.current(); return result; } @@ -465,7 +467,7 @@ struct _lcp : token_base<_lcp>, _lit_base using encoding = typename Reader::encoding; auto begin = reader.position(); - auto index = 
lexy::_detail::range_size(begin, end); + auto index = lexy::_detail::range_size(begin, end.position()); auto err = lexy::error(begin, _string.data, index, _string.length); context.on(_ev::error{}, err); @@ -537,9 +539,9 @@ struct _lset : token_base<_lset>, _lset_base template struct tp { - typename Reader::iterator end; + typename Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr bool try_parse(Reader reader) { @@ -547,7 +549,7 @@ struct _lset : token_base<_lset>, _lset_base using matcher = lexy::_detail::lit_trie_matcher<_t, 0>; auto result = matcher::try_match(reader); - end = reader.position(); + end = reader.current(); return result != _t.node_no_match; } diff --git a/3rdparty/lexy/include/lexy/dsl/lookahead.hpp b/3rdparty/lexy/include/lexy/dsl/lookahead.hpp index 37c69d611..3ee995a18 100644 --- a/3rdparty/lexy/include/lexy/dsl/lookahead.hpp +++ b/3rdparty/lexy/include/lexy/dsl/lookahead.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_LOOKAHEAD_HPP_INCLUDED @@ -50,6 +50,8 @@ struct _look : branch_base template struct bp { + static_assert(lexy::is_char_encoding); + typename Reader::iterator begin; typename Reader::iterator end; @@ -102,6 +104,7 @@ struct _look : branch_base template LEXY_PARSER_FUNC static bool parse(Context& context, Reader& reader, Args&&... 
args) { + static_assert(lexy::is_char_encoding); bp impl{}; if (!impl.try_parse(context.control_block, reader)) { diff --git a/3rdparty/lexy/include/lexy/dsl/loop.hpp b/3rdparty/lexy/include/lexy/dsl/loop.hpp index 68a433ff2..e7988d270 100644 --- a/3rdparty/lexy/include/lexy/dsl/loop.hpp +++ b/3rdparty/lexy/include/lexy/dsl/loop.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_LOOP_HPP_INCLUDED @@ -94,7 +94,7 @@ struct _whl : rule_base template constexpr auto while_(Rule) { - static_assert(lexy::is_branch_rule, "while() requires a branch condition"); + LEXY_REQUIRE_BRANCH_RULE(Rule, "while()"); return _whl{}; } } // namespace lexyd @@ -105,7 +105,7 @@ namespace lexyd template constexpr auto while_one(Rule rule) { - static_assert(lexy::is_branch_rule, "while_one() requires a branch condition"); + LEXY_REQUIRE_BRANCH_RULE(Rule, "while_one()"); return rule >> while_(rule); } } // namespace lexyd diff --git a/3rdparty/lexy/include/lexy/dsl/member.hpp b/3rdparty/lexy/include/lexy/dsl/member.hpp index afc6bf3e5..a478ef2dd 100644 --- a/3rdparty/lexy/include/lexy/dsl/member.hpp +++ b/3rdparty/lexy/include/lexy/dsl/member.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_MEMBER_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/newline.hpp b/3rdparty/lexy/include/lexy/dsl/newline.hpp index fc34556c7..24078f032 100644 --- a/3rdparty/lexy/include/lexy/dsl/newline.hpp +++ b/3rdparty/lexy/include/lexy/dsl/newline.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_NEWLINE_HPP_INCLUDED @@ -36,6 +36,8 @@ struct _eol : 
branch_base template struct bp { + static_assert(lexy::is_char_encoding); + constexpr bool try_parse(const void*, Reader reader) { return reader.peek() == Reader::encoding::eof() @@ -70,6 +72,7 @@ struct _eol : branch_base template LEXY_PARSER_FUNC static bool parse(Context& context, Reader& reader, Args&&... args) { + static_assert(lexy::is_char_encoding); return bp{}.template finish(context, reader, LEXY_FWD(args)...); } }; diff --git a/3rdparty/lexy/include/lexy/dsl/operator.hpp b/3rdparty/lexy/include/lexy/dsl/operator.hpp index b992e31df..188ebf19a 100644 --- a/3rdparty/lexy/include/lexy/dsl/operator.hpp +++ b/3rdparty/lexy/include/lexy/dsl/operator.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_OPERATOR_HPP_INCLUDED @@ -75,8 +75,8 @@ struct op_lit_list template struct parsed_operator { - typename Reader::iterator pos; - std::size_t idx; + typename Reader::marker cur; + std::size_t idx; }; template @@ -85,7 +85,7 @@ constexpr auto parse_operator(Reader& reader) using encoding = typename Reader::encoding; using op_matcher = lexy::_detail::lit_trie_matcher, 0>; - auto begin = reader.position(); + auto begin = reader.current(); auto op = op_matcher::try_match(reader); return parsed_operator{begin, op}; } @@ -96,6 +96,11 @@ namespace lexyd template using _detect_op_tag_ctor = decltype(Tag(LEXY_DECLVAL(Reader).position())); +template +using _detect_op_tag_ctor_with_state + = decltype(Tag(*LEXY_DECLVAL(Context).control_block->parse_state, + LEXY_DECLVAL(Reader).position())); + template struct _op : branch_base { @@ -107,15 +112,20 @@ struct _op : branch_base lexy::_detail::parsed_operator op, Args&&... 
args) { - context.on(_ev::token{}, typename Literal::token_type{}, op.pos, reader.position()); + context.on(_ev::token{}, typename Literal::token_type{}, op.cur.position(), + reader.position()); using continuation = lexy::whitespace_parser, NextParser>>; if constexpr (std::is_void_v) return continuation::parse(context, reader, LEXY_FWD(args)...); + else if constexpr (lexy::_detail::is_detected<_detect_op_tag_ctor_with_state, op_tag_type, + Reader, Context>) + return continuation::parse(context, reader, LEXY_FWD(args)..., + op_tag_type(*context.control_block->parse_state, op.pos)); else if constexpr (lexy::_detail::is_detected<_detect_op_tag_ctor, op_tag_type, Reader>) return continuation::parse(context, reader, LEXY_FWD(args)..., - op_tag_type(reader.position())); + op_tag_type(op.cur.position())); else return continuation::parse(context, reader, LEXY_FWD(args)..., op_tag_type{}); } @@ -144,6 +154,12 @@ struct _op : branch_base if constexpr (std::is_void_v) return impl.template finish(context, reader, LEXY_FWD(args)...); + else if constexpr (lexy::_detail::is_detected<_detect_op_tag_ctor_with_state, + op_tag_type, Reader, Context>) + return impl + .template finish(context, reader, LEXY_FWD(args)..., + op_tag_type(*context.control_block->parse_state, + reader.position())); else if constexpr (lexy::_detail::is_detected<_detect_op_tag_ctor, op_tag_type, Reader>) return impl.template finish(context, reader, LEXY_FWD(args)..., op_tag_type(reader.position())); @@ -165,6 +181,10 @@ struct _op : branch_base = lexy::parser_for, NextParser>>; if constexpr (std::is_void_v) return continuation::parse(context, reader, LEXY_FWD(args)...); + else if constexpr (lexy::_detail::is_detected<_detect_op_tag_ctor_with_state, + op_tag_type, Reader, Context>) + return continuation::parse(context, reader, LEXY_FWD(args)..., + op_tag_type(*context.control_block->parse_state, pos)); else if constexpr (lexy::_detail::is_detected<_detect_op_tag_ctor, op_tag_type, Reader>) return 
continuation::parse(context, reader, LEXY_FWD(args)..., op_tag_type(pos)); else @@ -246,12 +266,12 @@ struct _opc : branch_base struct bp { lexy::_detail::parsed_operator op; - typename Reader::iterator end; + typename Reader::marker end; constexpr auto try_parse(const void*, Reader reader) { op = lexy::_detail::parse_operator(reader); - end = reader.position(); + end = reader.current(); return op.idx < op_literals::size; } @@ -262,7 +282,7 @@ struct _opc : branch_base template LEXY_PARSER_FUNC bool finish(Context& context, Reader& reader, Args&&... args) { - reader.set_position(end); + reader.reset(end); return op_finish(context, reader, op, LEXY_FWD(args)...); } }; @@ -276,7 +296,7 @@ struct _opc : branch_base bp impl{}; if (!impl.try_parse(context.control_block, reader)) { - auto err = lexy::error(impl.op.pos); + auto err = lexy::error(impl.op.cur.position()); context.on(_ev::error{}, err); return false; } @@ -311,4 +331,3 @@ constexpr auto operator/(_opc, _opc) } // namespace lexyd #endif // LEXY_DSL_OPERATOR_HPP_INCLUDED - diff --git a/3rdparty/lexy/include/lexy/dsl/option.hpp b/3rdparty/lexy/include/lexy/dsl/option.hpp index 9ef1f7cc0..0ba0c7404 100644 --- a/3rdparty/lexy/include/lexy/dsl/option.hpp +++ b/3rdparty/lexy/include/lexy/dsl/option.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_OPTION_HPP_INCLUDED @@ -80,7 +80,7 @@ struct _opt : rule_base template constexpr auto opt(Rule) { - static_assert(lexy::is_branch_rule, "opt() requires a branch condition"); + LEXY_REQUIRE_BRANCH_RULE(Rule, "opt()"); if constexpr (lexy::is_unconditional_branch_rule) // Branch is always taken, so don't wrap in opt(). 
return Rule{}; diff --git a/3rdparty/lexy/include/lexy/dsl/parse_as.hpp b/3rdparty/lexy/include/lexy/dsl/parse_as.hpp index 27e8e161a..4efdde15a 100644 --- a/3rdparty/lexy/include/lexy/dsl/parse_as.hpp +++ b/3rdparty/lexy/include/lexy/dsl/parse_as.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_PARSE_AS_HPP_INCLUDED @@ -11,7 +11,7 @@ namespace lexyd { // Custom handler that forwards events but overrides the value callback. -template +template struct _pas_handler { Handler& _handler; @@ -26,31 +26,38 @@ struct _pas_handler return static_cast(_handler); } - // For child productions, use ::value to get a value. + // We use ::value to get a value. + // We can't use it unconditionally, as the initial production that contains the parse_as might + // not have one. So we silently fallback if that's the case - this might cause worse errors if + // the value is missing. template - struct value_callback : lexy::production_value_callback - { - using lexy::production_value_callback::production_value_callback; - }; - // For the production that contains parse_as, use lexy::construct. - template - struct value_callback : lexy::_construct + using value_callback + = std::conditional_t, + lexy::production_value_callback, + lexy::_detail::void_value_callback>; +}; + +struct _pas_final_parser +{ + template + LEXY_PARSER_FUNC static bool parse(Context&, Reader&, lexy::_detail::lazy_init& value, + Args&&... args) { - constexpr value_callback() = default; - constexpr value_callback(State*) {} - }; + value.emplace_result(lexy::construct, LEXY_FWD(args)...); + return true; + } }; -template +template constexpr auto _make_pas_handler(Handler& handler) { - return _pas_handler{handler}; + return _pas_handler{handler}; } // Prevent infinite nesting when parse_as itself is recursive. 
-template -constexpr auto _make_pas_handler(_pas_handler& handler) +template +constexpr auto _make_pas_handler(_pas_handler& handler) { - return _pas_handler{handler._handler}; + return handler; } template @@ -77,18 +84,20 @@ struct _pas : _copy_base template LEXY_PARSER_FUNC bool finish(Context& context, Reader& reader, Args&&... args) { - auto handler = _make_pas_handler( - context.control_block->parse_handler); + auto handler = _make_pas_handler(context.control_block->parse_handler); lexy::_detail::parse_context_control_block cb(LEXY_MOV(handler), context.control_block); using context_type = lexy::_pc; context_type sub_context(&cb); + sub_context.handler = LEXY_MOV(context).handler; - auto result - = rule_parser.template finish(sub_context, reader); + lexy::_detail::lazy_init value; + auto result + = rule_parser.template finish<_pas_final_parser>(sub_context, reader, value); context.control_block->copy_vars_from(&cb); + context.handler = LEXY_MOV(sub_context).handler; if (!result) return false; @@ -96,11 +105,9 @@ struct _pas : _copy_base // NOLINTNEXTLINE: clang-tidy wrongly thinks the branch is repeated. return NextParser::parse(context, reader, LEXY_FWD(args)...); else if constexpr (Front) - return NextParser::parse(context, reader, *LEXY_MOV(sub_context.value), - LEXY_FWD(args)...); + return NextParser::parse(context, reader, *LEXY_MOV(value), LEXY_FWD(args)...); else - return NextParser::parse(context, reader, LEXY_FWD(args)..., - *LEXY_MOV(sub_context.value)); + return NextParser::parse(context, reader, LEXY_FWD(args)..., *LEXY_MOV(value)); } }; @@ -110,18 +117,20 @@ struct _pas : _copy_base template LEXY_PARSER_FUNC static bool parse(Context& context, Reader& reader, Args&&... 
args) { - auto handler = _make_pas_handler( - context.control_block->parse_handler); + auto handler = _make_pas_handler(context.control_block->parse_handler); lexy::_detail::parse_context_control_block cb(LEXY_MOV(handler), context.control_block); using context_type = lexy::_pc; context_type sub_context(&cb); + sub_context.handler = LEXY_MOV(context).handler; - auto result - = lexy::parser_for::parse(sub_context, reader); + lexy::_detail::lazy_init value; + auto result + = lexy::parser_for::parse(sub_context, reader, value); context.control_block->copy_vars_from(&cb); + context.handler = LEXY_MOV(sub_context).handler; if (!result) return false; @@ -129,11 +138,9 @@ struct _pas : _copy_base // NOLINTNEXTLINE: clang-tidy wrongly thinks the branch is repeated. return NextParser::parse(context, reader, LEXY_FWD(args)...); else if constexpr (Front) - return NextParser::parse(context, reader, *LEXY_MOV(sub_context.value), - LEXY_FWD(args)...); + return NextParser::parse(context, reader, *LEXY_MOV(value), LEXY_FWD(args)...); else - return NextParser::parse(context, reader, LEXY_FWD(args)..., - *LEXY_MOV(sub_context.value)); + return NextParser::parse(context, reader, LEXY_FWD(args)..., *LEXY_MOV(value)); } }; }; diff --git a/3rdparty/lexy/include/lexy/dsl/parse_tree_node.hpp b/3rdparty/lexy/include/lexy/dsl/parse_tree_node.hpp new file mode 100644 index 000000000..c8aff2505 --- /dev/null +++ b/3rdparty/lexy/include/lexy/dsl/parse_tree_node.hpp @@ -0,0 +1,251 @@ +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors +// SPDX-License-Identifier: BSL-1.0 + +#ifndef LEXY_DSL_PARSE_TREE_NODE_HPP_INCLUDED +#define LEXY_DSL_PARSE_TREE_NODE_HPP_INCLUDED + +#include +#include + +#if !LEXY_EXPERIMENTAL +# error "lexy::dsl::tnode/pnode are experimental" +#endif + +//=== impl ===// +namespace lexyd +{ +template +struct _n; + +template +struct _nr : branch_base +{ + template + struct _cont + { + template + LEXY_PARSER_FUNC static bool parse(Context& context, ChildReader& 
child_reader, + bool& rule_succeded, Reader& reader, Args&&... args) + { + rule_succeded = true; + + if (child_reader.peek() != ChildReader::encoding::eof()) + { + auto begin = child_reader.position(); + auto end = reader.position(); + context.on(_ev::token{}, lexy::error_token_kind, begin, end); + + auto err = lexy::error(begin, end); + context.on(_ev::error{}, err); + } + + return lexy::whitespace_parser::parse(context, reader, + LEXY_FWD(args)...); + } + }; + + template + LEXY_PARSER_FUNC static bool _parse_rule(Context& context, Reader& reader, + typename Reader::marker end, Args&&... args) + { + auto child_reader = Derived::node_child_reader(reader); + reader.reset(end); + + using rule_parser + = lexy::whitespace_parser>>; + if (auto rule_succeded = false; + rule_parser::parse(context, child_reader, rule_succeded, reader, LEXY_FWD(args)...)) + { + return true; + } + else + { + if (!rule_succeded) + // Report an error token for the child span that wasn't able to be parsed. + context.on(_ev::token{}, lexy::error_token_kind, child_reader.position(), + end.position()); + return false; + } + } + + template + struct bp + { + typename Reader::marker end; + + constexpr bool try_parse(const void*, const Reader& reader) + { + lexy::token_parser_for<_n, Reader> parser(reader); + auto result = parser.try_parse(reader); + end = parser.end; + return result; + } + + template + constexpr void cancel(Context&) + {} + + template + LEXY_PARSER_FUNC bool finish(Context& context, Reader& reader, Args&&... args) + { + return _parse_rule(context, reader, end, LEXY_FWD(args)...); + } + }; + + template + struct p + { + template + LEXY_PARSER_FUNC static bool parse(Context& context, Reader& reader, Args&&... 
args) + { + lexy::token_parser_for<_n, Reader> parser(reader); + if (!parser.try_parse(reader)) + { + LEXY_ASSERT(parser.end.position() == reader.position(), "impl should be LL(1)"); + parser.report_error(context, reader); + return false; + } + + return _parse_rule(context, reader, parser.end, LEXY_FWD(args)...); + } + }; +}; + +template +struct _n : token_base +{ + template + struct tp + { + typename Reader::marker end; + + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} + + constexpr auto try_parse(Reader reader) + { + if constexpr (lexy::is_node_encoding) + { + if (!Reader::encoding::match(reader.peek(), Derived::node_kind())) + return false; + + reader.bump(); + end = reader.current(); + return true; + } + else + { + // This happens when it is used as whitespace, which is inherited while parsing the + // token lexeme, we don't match anything in that case. + return std::false_type{}; + } + } + + template + constexpr void report_error(Context& context, Reader reader) + { + constexpr auto name = Derived::node_kind_name(); + + auto err = lexy::error(reader.position(), name); + context.on(_ev::error{}, err); + } + }; + + template + constexpr auto operator()(Rule) const + { + return _nr{}; + } +}; +} // namespace lexyd + +//=== dsl::tnode ===// +namespace lexy +{ +struct expected_token_end +{ + static LEXY_CONSTEVAL auto name() + { + return "expected token end"; + } +}; +} // namespace lexy + +namespace lexyd +{ +template +struct _tn : _n<_tn> +{ + static LEXY_CONSTEVAL auto node_kind() + { + return Kind; + } + + static LEXY_CONSTEVAL auto node_kind_name() + { + using lexy::token_kind_name; + return token_kind_name(Kind); + } + + using node_end_error = lexy::expected_token_end; + + template + static constexpr auto node_child_reader(Reader& reader) + { + return reader.lexeme_reader(); + } +}; + +template +constexpr auto tnode = _tn{}; +} // namespace lexyd + +namespace lexy +{ +template +constexpr auto token_kind_of> = Kind; +} // namespace lexy 
+ +//=== dsl::pnode ===// +namespace lexy +{ +struct expected_production_end +{ + static LEXY_CONSTEVAL auto name() + { + return "expected production end"; + } +}; +} // namespace lexy + +namespace lexyd +{ +template +struct _pn : _n<_pn> +{ + static_assert(lexy::is_production); + + static LEXY_CONSTEVAL auto node_kind() + { + return Production{}; + } + + static LEXY_CONSTEVAL auto node_kind_name() + { + return lexy::production_name(); + } + + using node_end_error = lexy::expected_production_end; + + template + static constexpr auto node_child_reader(Reader& reader) + { + return reader.child_reader(); + } +}; + +template +constexpr auto pnode = _pn{}; +} // namespace lexyd + +#endif // LEXY_DSL_PARSE_TREE_NODE_HPP_INCLUDED + diff --git a/3rdparty/lexy/include/lexy/dsl/peek.hpp b/3rdparty/lexy/include/lexy/dsl/peek.hpp index 933521f8a..57b3576b8 100644 --- a/3rdparty/lexy/include/lexy/dsl/peek.hpp +++ b/3rdparty/lexy/include/lexy/dsl/peek.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_PEEK_HPP_INCLUDED @@ -37,7 +37,7 @@ struct _peek : branch_base struct bp { typename Reader::iterator begin; - typename Reader::iterator end; + typename Reader::marker end; constexpr bool try_parse(const void*, Reader reader) { @@ -54,13 +54,13 @@ struct _peek : branch_base template constexpr void cancel(Context& context) { - context.on(_ev::backtracked{}, begin, end); + context.on(_ev::backtracked{}, begin, end.position()); } template LEXY_PARSER_FUNC bool finish(Context& context, Reader& reader, Args&&... args) { - context.on(_ev::backtracked{}, begin, end); + context.on(_ev::backtracked{}, begin, end.position()); return NextParser::parse(context, reader, LEXY_FWD(args)...); } }; @@ -76,13 +76,13 @@ struct _peek : branch_base { // Report that we've failed. 
using tag = lexy::_detail::type_or; - auto err = lexy::error(impl.begin, impl.end); + auto err = lexy::error(impl.begin, impl.end.position()); context.on(_ev::error{}, err); // But recover immediately, as we wouldn't have consumed anything either way. } - context.on(_ev::backtracked{}, impl.begin, impl.end); + context.on(_ev::backtracked{}, impl.begin, impl.end.position()); return NextParser::parse(context, reader, LEXY_FWD(args)...); } }; @@ -98,7 +98,7 @@ struct _peekn : branch_base struct bp { typename Reader::iterator begin; - typename Reader::iterator end; + typename Reader::marker end; constexpr bool try_parse(const void*, Reader reader) { @@ -115,13 +115,13 @@ struct _peekn : branch_base template constexpr void cancel(Context& context) { - context.on(_ev::backtracked{}, begin, end); + context.on(_ev::backtracked{}, begin, end.position()); } template LEXY_PARSER_FUNC bool finish(Context& context, Reader& reader, Args&&... args) { - context.on(_ev::backtracked{}, begin, end); + context.on(_ev::backtracked{}, begin, end.position()); return NextParser::parse(context, reader, LEXY_FWD(args)...); } }; @@ -137,20 +137,20 @@ struct _peekn : branch_base { // Report that we've failed. using tag = lexy::_detail::type_or; - auto err = lexy::error(impl.begin, impl.end); + auto err = lexy::error(impl.begin, impl.end.position()); context.on(_ev::error{}, err); // And recover by consuming the input. 
context.on(_ev::recovery_start{}, impl.begin); - context.on(_ev::token{}, lexy::error_token_kind, impl.begin, impl.end); - context.on(_ev::recovery_finish{}, impl.end); + context.on(_ev::token{}, lexy::error_token_kind, impl.begin, impl.end.position()); + context.on(_ev::recovery_finish{}, impl.end.position()); - reader.set_position(impl.end); + reader.reset(impl.end); return NextParser::parse(context, reader, LEXY_FWD(args)...); } else { - context.on(_ev::backtracked{}, impl.begin, impl.end); + context.on(_ev::backtracked{}, impl.begin, impl.end.position()); return NextParser::parse(context, reader, LEXY_FWD(args)...); } } diff --git a/3rdparty/lexy/include/lexy/dsl/position.hpp b/3rdparty/lexy/include/lexy/dsl/position.hpp index b1ab70232..088187bf9 100644 --- a/3rdparty/lexy/include/lexy/dsl/position.hpp +++ b/3rdparty/lexy/include/lexy/dsl/position.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_POSITION_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/production.hpp b/3rdparty/lexy/include/lexy/dsl/production.hpp index 225b55bd2..ba74de2a7 100644 --- a/3rdparty/lexy/include/lexy/dsl/production.hpp +++ b/3rdparty/lexy/include/lexy/dsl/production.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_PRODUCTION_HPP_INCLUDED @@ -179,7 +179,7 @@ struct _recb : branch_base template struct bp { - static_assert(lexy::is_branch_rule>); + LEXY_REQUIRE_BRANCH_RULE(lexy::production_rule, "recurse_branch"); using impl = lexy::branch_parser_for<_prd, Reader>; impl _impl; diff --git a/3rdparty/lexy/include/lexy/dsl/punctuator.hpp b/3rdparty/lexy/include/lexy/dsl/punctuator.hpp index 9e1937a7c..e53f7b0f2 100644 --- a/3rdparty/lexy/include/lexy/dsl/punctuator.hpp +++ 
b/3rdparty/lexy/include/lexy/dsl/punctuator.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_PUNCTUATOR_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/recover.hpp b/3rdparty/lexy/include/lexy/dsl/recover.hpp index 0a31ae659..208d8c409 100644 --- a/3rdparty/lexy/include/lexy/dsl/recover.hpp +++ b/3rdparty/lexy/include/lexy/dsl/recover.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_RECOVER_HPP_INCLUDED @@ -38,17 +38,24 @@ struct _recovery_wrapper : _recovery_base LEXY_PARSER_FUNC static bool parse(Context& context, Reader& reader, Args&&... args) { context.on(_ev::recovery_start{}, reader.position()); - auto recovery_finished = false; - auto result - = lexy::parser_for::parse(context, reader, recovery_finished, - LEXY_FWD(args)...); + + // As part of the recovery, we parse the rule and whitespace. 
+ using parser = lexy::parser_for>; + auto result = parser::parse(context, reader, recovery_finished, LEXY_FWD(args)...); + if (!recovery_finished) context.on(_ev::recovery_cancel{}, reader.position()); return result; } }; }; + +struct _noop_recovery : rule_base +{ + template + using p = NextParser; +}; } // namespace lexyd namespace lexyd @@ -72,20 +79,20 @@ struct _find : _recovery_base context.on(_ev::recovery_start{}, begin); while (true) { - auto end = reader.position(); // *before* we've consumed Token/Limit + auto end = reader.current(); // *before* we've consumed Token/Limit auto result = matcher::try_match(reader); if (result == 0) { - context.on(_ev::token{}, lexy::error_token_kind, begin, end); - context.on(_ev::recovery_finish{}, end); - reader.set_position(end); // reset to before the token + context.on(_ev::token{}, lexy::error_token_kind, begin, end.position()); + context.on(_ev::recovery_finish{}, end.position()); + reader.reset(end); // reset to before the token return NextParser::parse(context, reader, LEXY_FWD(args)...); } else if (result == 1 || reader.peek() == Reader::encoding::eof()) { - context.on(_ev::token{}, lexy::error_token_kind, begin, end); - context.on(_ev::recovery_cancel{}, end); - reader.set_position(end); // reset to before the limit + context.on(_ev::token{}, lexy::error_token_kind, begin, end.position()); + context.on(_ev::recovery_cancel{}, end.position()); + reader.reset(end); // reset to before the limit return false; } else @@ -200,7 +207,7 @@ template constexpr auto recover(Branches...) { static_assert(sizeof...(Branches) > 0); - static_assert((lexy::is_branch_rule && ...)); + LEXY_REQUIRE_BRANCH_RULE(Branches..., "recover"); return _reco{}; } } // namespace lexyd @@ -231,13 +238,23 @@ struct _tryt : rule_base LEXY_PARSER_FUNC static bool recover(Context& context, Reader& reader, Args&&... 
args) { if constexpr (std::is_void_v) - return NextParser::parse(context, reader, LEXY_FWD(args)...); + { + using recovery_rule = _recovery_wrapper<_noop_recovery>; + return lexy::parser_for::parse(context, reader, + LEXY_FWD(args)...); + } else if constexpr (std::is_base_of_v<_recovery_base, Recover>) - return lexy::parser_for::parse(context, reader, - LEXY_FWD(args)...); + { + using recovery_rule = Recover; + return lexy::parser_for::parse(context, reader, + LEXY_FWD(args)...); + } else - return lexy::parser_for<_recovery_wrapper, - NextParser>::parse(context, reader, LEXY_FWD(args)...); + { + using recovery_rule = _recovery_wrapper; + return lexy::parser_for::parse(context, reader, + LEXY_FWD(args)...); + } } }; diff --git a/3rdparty/lexy/include/lexy/dsl/repeat.hpp b/3rdparty/lexy/include/lexy/dsl/repeat.hpp index 7321d54ad..bb41404b6 100644 --- a/3rdparty/lexy/include/lexy/dsl/repeat.hpp +++ b/3rdparty/lexy/include/lexy/dsl/repeat.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_REPEAT_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/return.hpp b/3rdparty/lexy/include/lexy/dsl/return.hpp index e3f7269ba..46477f6f0 100644 --- a/3rdparty/lexy/include/lexy/dsl/return.hpp +++ b/3rdparty/lexy/include/lexy/dsl/return.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_RETURN_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/scan.hpp b/3rdparty/lexy/include/lexy/dsl/scan.hpp index 313dd2d57..d630fd61c 100644 --- a/3rdparty/lexy/include/lexy/dsl/scan.hpp +++ b/3rdparty/lexy/include/lexy/dsl/scan.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // 
SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_SCAN_HPP_INCLUDED @@ -18,7 +18,9 @@ struct _prd; template struct _peek; template -struct _capt; +struct _cap; +template +struct _capr; template struct _int_dsl; @@ -184,6 +186,10 @@ class scanner { return _reader.position(); } + constexpr auto current() const noexcept -> typename Reader::marker + { + return _reader.current(); + } constexpr auto remaining_input() const noexcept { @@ -228,7 +234,7 @@ class scanner template >> constexpr bool branch(scan_result& result, Rule) { - static_assert(lexy::is_branch_rule); + LEXY_REQUIRE_BRANCH_RULE(Rule, "branch"); if (_state == _state_failed) return false; @@ -323,6 +329,13 @@ class scanner context.on(parse_events::error{}, lexy::error(LEXY_FWD(args)...)); } + template + constexpr void error(const char* msg, Args&&... args) + { + auto& context = static_cast(*this).context(); + context.on(parse_events::error{}, lexy::error(LEXY_FWD(args)..., msg)); + } + template constexpr void fatal_error(Tag tag, Args&&... args) { @@ -330,6 +343,13 @@ class scanner _state = _state_failed; } + template + constexpr void fatal_error(const char* msg, Args&&... 
args) + { + error(msg, LEXY_FWD(args)...); + _state = _state_failed; + } + //=== convenience ===// template >> constexpr auto parse(Rule rule) @@ -361,25 +381,18 @@ class scanner return result; } - template - constexpr auto capture(Rule rule) -> scan_result> + template + constexpr auto capture(Token) { - static_assert(lexy::is_rule); - - auto begin = _reader.position(); - parse(rule); - auto end = _reader.position(); - - if (*this) - return lexeme(begin, end); - else - return scan_failed; + scan_result> result; + parse(result, lexyd::_cap{}); + return result; } - template - constexpr auto capture_token(Token) + template + constexpr auto capture(lexyd::_prd) { scan_result> result; - parse(result, lexyd::_capt{}); + parse(result, lexyd::_capr>{}); return result; } @@ -446,8 +459,9 @@ class rule_scanner : public _detail::scanner, Read namespace lexyd { -template -using _detect_scan_state = decltype(Context::production::scan(LEXY_DECLVAL(Scanner&), *StatePtr())); +template +using _detect_scan_state = decltype(Context::production::scan(LEXY_DECLVAL(Scanner&), *StatePtr(), + LEXY_DECLVAL(Args)...)); struct _scan : rule_base { @@ -458,16 +472,16 @@ struct _scan : rule_base LEXY_PARSER_FUNC static bool _parse(Scanner& scanner, Context& context, Reader& reader, Args&&... 
args) { - lexy::scan_result result = [&] { + typename Context::production::scan_result result = [&] { if constexpr (lexy::_detail::is_detected< _detect_scan_state, Context, decltype(scanner), - decltype(context.control_block->parse_state)>) + decltype(context.control_block->parse_state), Args&&...>) return Context::production::scan(scanner, *context.control_block->parse_state, LEXY_FWD(args)...); else return Context::production::scan(scanner, LEXY_FWD(args)...); }(); - reader.set_position(scanner.position()); + reader.reset(scanner.current()); if (!result) return false; diff --git a/3rdparty/lexy/include/lexy/dsl/separator.hpp b/3rdparty/lexy/include/lexy/dsl/separator.hpp index 175f7bf92..f50744d66 100644 --- a/3rdparty/lexy/include/lexy/dsl/separator.hpp +++ b/3rdparty/lexy/include/lexy/dsl/separator.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_SEPARATOR_HPP_INCLUDED @@ -90,7 +90,7 @@ struct _sep : _sep_base template constexpr auto sep(Branch) { - static_assert(lexy::is_branch_rule); + LEXY_REQUIRE_BRANCH_RULE(Branch, "sep"); return _sep{}; } @@ -110,7 +110,7 @@ struct _tsep : _sep_base template constexpr auto trailing_sep(Branch) { - static_assert(lexy::is_branch_rule); + LEXY_REQUIRE_BRANCH_RULE(Branch, "trailing_sep"); return _tsep{}; } diff --git a/3rdparty/lexy/include/lexy/dsl/sequence.hpp b/3rdparty/lexy/include/lexy/dsl/sequence.hpp index 02a098fa3..6715e7ae3 100644 --- a/3rdparty/lexy/include/lexy/dsl/sequence.hpp +++ b/3rdparty/lexy/include/lexy/dsl/sequence.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_SEQUENCE_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/sign.hpp b/3rdparty/lexy/include/lexy/dsl/sign.hpp index 
c0178b275..5cf38de0d 100644 --- a/3rdparty/lexy/include/lexy/dsl/sign.hpp +++ b/3rdparty/lexy/include/lexy/dsl/sign.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_SIGN_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/subgrammar.hpp b/3rdparty/lexy/include/lexy/dsl/subgrammar.hpp index 9f4bbc97f..6a291dfc4 100644 --- a/3rdparty/lexy/include/lexy/dsl/subgrammar.hpp +++ b/3rdparty/lexy/include/lexy/dsl/subgrammar.hpp @@ -20,6 +20,9 @@ using _subgrammar_for = _subgrammar \ + constexpr auto production_has_value_callback = true; \ + \ template \ struct _subgrammar \ { \ @@ -104,4 +107,3 @@ constexpr auto subgrammar = _subg{}; } // namespace lexyd #endif // LEXY_DSL_SUBGRAMMAR_HPP_INCLUDED - diff --git a/3rdparty/lexy/include/lexy/dsl/symbol.hpp b/3rdparty/lexy/include/lexy/dsl/symbol.hpp index f8a58a9b1..f94511068 100644 --- a/3rdparty/lexy/include/lexy/dsl/symbol.hpp +++ b/3rdparty/lexy/include/lexy/dsl/symbol.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_SYMBOL_HPP_INCLUDED @@ -268,7 +268,8 @@ struct _sym : branch_base template struct bp { - typename Reader::iterator end; + static_assert(lexy::is_char_encoding); + typename Reader::marker end; typename LEXY_DECAY_DECLTYPE(Table)::key_index symbol; constexpr auto value() const @@ -286,7 +287,7 @@ struct _sym : branch_base end = parser.end; // Check whether this is a symbol. - auto content = lexy::partial_input(reader, end); + auto content = lexy::partial_input(reader, end.position()); symbol = Table.parse(content); // Only succeed if it is a symbol. @@ -301,8 +302,8 @@ struct _sym : branch_base LEXY_PARSER_FUNC bool finish(Context& context, Reader& reader, Args&&... 
args) { // We need to consume and report the token. - context.on(_ev::token{}, Token{}, reader.position(), end); - reader.set_position(end); + context.on(_ev::token{}, Token{}, reader.position(), end.position()); + reader.reset(end); // And continue parsing with the symbol value after whitespace skipping. using continuation = lexy::whitespace_parser; @@ -340,6 +341,7 @@ struct _sym : branch_base template LEXY_PARSER_FUNC static bool parse(Context& context, Reader& reader, Args&&... args) { + static_assert(lexy::is_char_encoding); // Capture the token and continue with special continuation. return lexy::parser_for<_cap, _cont>::parse(context, reader, LEXY_FWD(args)...); @@ -360,8 +362,9 @@ struct _sym, Tag> : branch_base template struct bp { + static_assert(lexy::is_char_encoding); typename LEXY_DECAY_DECLTYPE(Table)::key_index symbol; - typename Reader::iterator end; + typename Reader::marker end; constexpr auto value() const { @@ -374,7 +377,7 @@ struct _sym, Tag> : branch_base symbol = Table.try_parse(reader); if (!symbol) return false; - end = reader.position(); + end = reader.current(); // We had a symbol, but it must not be the prefix of a valid identifier. return !lexy::try_match_token(T{}, reader); @@ -388,8 +391,8 @@ struct _sym, Tag> : branch_base LEXY_PARSER_FUNC bool finish(Context& context, Reader& reader, Args&&... args) { // We need to consume and report the identifier pattern. - context.on(_ev::token{}, _idp{}, reader.position(), end); - reader.set_position(end); + context.on(_ev::token{}, _idp{}, reader.position(), end.position()); + reader.reset(end); // And continue parsing with the symbol value after whitespace skipping. using continuation = lexy::whitespace_parser; @@ -403,6 +406,7 @@ struct _sym, Tag> : branch_base template LEXY_PARSER_FUNC static bool parse(Context& context, Reader& reader, Args&&... 
args) { + static_assert(lexy::is_char_encoding); auto begin = reader.position(); // Try to parse a symbol that is not the prefix of an identifier. @@ -427,9 +431,9 @@ struct _sym, Tag> : branch_base else { // We need to consume and report the identifier pattern. - auto end = symbol_reader.position(); - context.on(_ev::token{}, _idp{}, begin, end); - reader.set_position(end); + auto end = symbol_reader.current(); + context.on(_ev::token{}, _idp{}, begin, end.position()); + reader.reset(end); // And continue parsing with the symbol value after whitespace skipping. using continuation = lexy::whitespace_parser; @@ -449,8 +453,9 @@ struct _sym : branch_base template struct bp { + static_assert(lexy::is_char_encoding); typename LEXY_DECAY_DECLTYPE(Table)::key_index symbol; - typename Reader::iterator end; + typename Reader::marker end; constexpr auto value() const { @@ -461,7 +466,7 @@ struct _sym : branch_base { // Try to parse a symbol. symbol = Table.try_parse(reader); - end = reader.position(); + end = reader.current(); // Only succeed if it is a symbol. return static_cast(symbol); @@ -475,8 +480,9 @@ struct _sym : branch_base LEXY_PARSER_FUNC bool finish(Context& context, Reader& reader, Args&&... args) { // We need to consume and report the token. - context.on(_ev::token{}, lexy::identifier_token_kind, reader.position(), end); - reader.set_position(end); + context.on(_ev::token{}, lexy::identifier_token_kind, reader.position(), + end.position()); + reader.reset(end); // And continue parsing with the symbol value after whitespace skipping. using continuation = lexy::whitespace_parser; @@ -490,6 +496,7 @@ struct _sym : branch_base template LEXY_PARSER_FUNC static bool parse(Context& context, Reader& reader, Args&&... 
args) { + static_assert(lexy::is_char_encoding); bp impl{}; if (impl.try_parse(context.control_block, reader)) return impl.template finish(context, reader, LEXY_FWD(args)...); diff --git a/3rdparty/lexy/include/lexy/dsl/terminator.hpp b/3rdparty/lexy/include/lexy/dsl/terminator.hpp index 9535d01d9..56a4e53df 100644 --- a/3rdparty/lexy/include/lexy/dsl/terminator.hpp +++ b/3rdparty/lexy/include/lexy/dsl/terminator.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_TERMINATOR_HPP_INCLUDED @@ -105,7 +105,7 @@ struct _term template constexpr auto terminator(Branch) { - static_assert(lexy::is_branch_rule); + LEXY_REQUIRE_BRANCH_RULE(Branch, "terminator"); return _term{}; } } // namespace lexyd diff --git a/3rdparty/lexy/include/lexy/dsl/times.hpp b/3rdparty/lexy/include/lexy/dsl/times.hpp index 9aa1e9c2c..7ebf327cd 100644 --- a/3rdparty/lexy/include/lexy/dsl/times.hpp +++ b/3rdparty/lexy/include/lexy/dsl/times.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_TIMES_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/token.hpp b/3rdparty/lexy/include/lexy/dsl/token.hpp index 809fb8684..95c812c1c 100644 --- a/3rdparty/lexy/include/lexy/dsl/token.hpp +++ b/3rdparty/lexy/include/lexy/dsl/token.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_TOKEN_HPP_INCLUDED @@ -46,7 +46,7 @@ struct token_base : _token_inherit template struct bp { - typename Reader::iterator end; + typename Reader::marker end; constexpr auto try_parse(const void*, const Reader& reader) { @@ -63,8 +63,8 @@ struct token_base : _token_inherit 
template LEXY_PARSER_FUNC bool finish(Context& context, Reader& reader, Args&&... args) { - context.on(_ev::token{}, Derived{}, reader.position(), end); - reader.set_position(end); + context.on(_ev::token{}, Derived{}, reader.position(), end.position()); + reader.reset(end); return lexy::whitespace_parser::parse(context, reader, LEXY_FWD(args)...); } @@ -85,16 +85,17 @@ struct token_base : _token_inherit { if (!parser.try_parse(reader)) { - context.on(_ev::token{}, lexy::error_token_kind, reader.position(), parser.end); + context.on(_ev::token{}, lexy::error_token_kind, reader.position(), + parser.end.position()); parser.report_error(context, reader); - reader.set_position(parser.end); + reader.reset(parser.end); return false; } } - context.on(_ev::token{}, typename Derived::token_type{}, begin, parser.end); - reader.set_position(parser.end); + context.on(_ev::token{}, typename Derived::token_type{}, begin, parser.end.position()); + reader.reset(parser.end); return true; } @@ -153,7 +154,7 @@ struct _toke : token_base<_toke, Token> constexpr void report_error(Context& context, const Reader& reader) { // Report a different error. 
- auto err = lexy::error(reader.position(), this->end); + auto err = lexy::error(reader.position(), this->end.position()); context.on(_ev::error{}, err); } }; @@ -184,9 +185,9 @@ struct _token : token_base<_token> template struct tp { - typename Reader::iterator end; + typename Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr bool try_parse(Reader reader) { @@ -196,14 +197,14 @@ struct _token : token_base<_token> lexy::match_action::template result_type>(lexy::_mh(), lexy::no_parse_state, reader); - end = reader.position(); + end = reader.current(); return success; } template constexpr void report_error(Context& context, const Reader& reader) { - auto err = lexy::error(reader.position(), end); + auto err = lexy::error(reader.position(), end.position()); context.on(_ev::error{}, err); } }; diff --git a/3rdparty/lexy/include/lexy/dsl/unicode.hpp b/3rdparty/lexy/include/lexy/dsl/unicode.hpp index 1737b06fd..e895ae3c8 100644 --- a/3rdparty/lexy/include/lexy/dsl/unicode.hpp +++ b/3rdparty/lexy/include/lexy/dsl/unicode.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_UNICODE_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/dsl/until.hpp b/3rdparty/lexy/include/lexy/dsl/until.hpp index 6dc0b6346..626d0b4fd 100644 --- a/3rdparty/lexy/include/lexy/dsl/until.hpp +++ b/3rdparty/lexy/include/lexy/dsl/until.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_UNTIL_HPP_INCLUDED @@ -39,9 +39,9 @@ struct _until_eof : token_base<_until_eof, unconditional_branch_base> template struct tp { - typename Reader::iterator end; + typename Reader::marker 
end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr std::true_type try_parse(Reader reader) { @@ -64,7 +64,7 @@ struct _until_eof : token_base<_until_eof, unconditional_branch_base> reader.bump(); } - end = reader.position(); + end = reader.current(); return {}; } }; @@ -76,9 +76,9 @@ struct _until : token_base<_until> template struct tp { - typename Reader::iterator end; + typename Reader::marker end; - constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr bool try_parse(Reader reader) { @@ -90,7 +90,7 @@ struct _until : token_base<_until> if (lexy::try_match_token(Condition{}, reader)) { // It did match, we're done at that end. - end = reader.position(); + end = reader.current(); return true; } @@ -100,7 +100,7 @@ struct _until : token_base<_until> if (reader.peek() == Reader::encoding::eof()) { // It did, so we did not succeed. - end = reader.position(); + end = reader.current(); return false; } @@ -117,7 +117,7 @@ struct _until : token_base<_until> // We need to trigger the error `Condition` would. // As such, we try parsing it, which will report an error. 
- reader.set_position(end); + reader.reset(end); LEXY_ASSERT(reader.peek() == Reader::encoding::eof(), "forgot to set end in try_parse()"); diff --git a/3rdparty/lexy/include/lexy/dsl/whitespace.hpp b/3rdparty/lexy/include/lexy/dsl/whitespace.hpp index 5ffb87dbb..f80b30dcf 100644 --- a/3rdparty/lexy/include/lexy/dsl/whitespace.hpp +++ b/3rdparty/lexy/include/lexy/dsl/whitespace.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_DSL_WHITESPACE_HPP_INCLUDED @@ -137,7 +137,8 @@ constexpr auto skip_whitespace(ws_handler&& handler, Reader& reader) { // Without SWAR, we just repeatedly skip the whitespace rule. while (lexy::try_match_token(WhitespaceRule{}, reader)) - {} + { + } } handler.real_on(lexy::parse_events::token{}, lexy::whitespace_token_kind, begin, diff --git a/3rdparty/lexy/include/lexy/encoding.hpp b/3rdparty/lexy/include/lexy/encoding.hpp index 6c154d069..4529e67a1 100644 --- a/3rdparty/lexy/include/lexy/encoding.hpp +++ b/3rdparty/lexy/include/lexy/encoding.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_ENCODING_HPP_INCLUDED @@ -253,13 +253,35 @@ struct _deduce_encoding }; } // namespace lexy +//=== encoding traits ===// +namespace lexy +{ +template +constexpr auto is_unicode_encoding + = std::is_same_v || std::is_same_v + || std::is_same_v || std::is_same_v + || std::is_same_v; + +template +constexpr auto is_text_encoding + = is_unicode_encoding || std::is_same_v; + +template +constexpr auto is_byte_encoding = std::is_same_v; + +template +constexpr auto is_char_encoding = is_text_encoding || is_byte_encoding; + +template +constexpr auto is_node_encoding = false; +} // namespace lexy + //=== impls ===// namespace lexy::_detail { template -constexpr bool 
is_compatible_char_type - = std::is_same_v || Encoding::template is_secondary_char_type(); +constexpr bool is_compatible_char_type = std::is_same_v + || Encoding::template is_secondary_char_type(); template using require_secondary_char_type diff --git a/3rdparty/lexy/include/lexy/error.hpp b/3rdparty/lexy/include/lexy/error.hpp index 0a7645c68..77a40937d 100644 --- a/3rdparty/lexy/include/lexy/error.hpp +++ b/3rdparty/lexy/include/lexy/error.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_ERROR_HPP_INCLUDED @@ -26,6 +26,13 @@ class error : _pos(begin), _end(end), _msg(msg) {} + template >> + constexpr operator error() const noexcept + { + return error(_pos, _end, _msg); + } + constexpr auto position() const noexcept { return _pos; @@ -68,6 +75,13 @@ class error : public error typename Reader::iterator end) noexcept : error(begin, end, _detail::type_name()) {} + + template >> + constexpr operator error() const noexcept + { + return error(this->begin(), this->end()); + } }; /// Expected the literal character sequence. 
@@ -83,6 +97,13 @@ class error : _pos(pos), _str(str), _idx(index), _length(length) {} + template >> + constexpr operator error() const noexcept + { + return error(_pos, _str, _idx, _length); + } + constexpr auto position() const noexcept { return _pos; @@ -127,6 +148,13 @@ class error : _begin(begin), _end(end), _str(str), _length(length) {} + template >> + constexpr operator error() const noexcept + { + return error(_begin, _end, _str, _length); + } + constexpr auto position() const noexcept { return _begin; @@ -169,6 +197,13 @@ class error : _pos(pos), _name(name) {} + template >> + constexpr operator error() const noexcept + { + return error(_pos, _name); + } + constexpr auto position() const noexcept { return _pos; diff --git a/3rdparty/lexy/include/lexy/grammar.hpp b/3rdparty/lexy/include/lexy/grammar.hpp index 4c03b0aa9..b1986594e 100644 --- a/3rdparty/lexy/include/lexy/grammar.hpp +++ b/3rdparty/lexy/include/lexy/grammar.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_GRAMMAR_HPP_INCLUDED @@ -65,8 +65,17 @@ constexpr auto is_separator = std::is_base_of_v; template constexpr auto is_operation = std::is_base_of_v; + +template +constexpr bool _require_branch_rule = (is_branch_rule && ...); } // namespace lexy +#define LEXY_REQUIRE_BRANCH_RULE(Rule, Name) \ + static_assert(lexy::_require_branch_rule, Name \ + " requires a branch condition." \ + " You may need to use `>>` to specify the condition that is used for dispatch." 
\ + " See https://lexy.foonathan.net/learn/branching/ for more information.") + //=== predefined_token_kind ===// namespace lexy { @@ -88,7 +97,12 @@ enum predefined_token_kind : std::uint_least16_t _smallest_predefined_token_kind = digits_token_kind, }; -constexpr const char* _kind_name(predefined_token_kind kind) noexcept +template +constexpr const char* token_kind_name(const T&) noexcept +{ + return "token"; +} +constexpr const char* token_kind_name(predefined_token_kind kind) noexcept { switch (kind) { @@ -244,6 +258,9 @@ using _detect_value_of = // qualify value_of() (it causes a hard error instead of going to ::value). typename decltype(LEXY_DECLVAL(ParseState&).value_of(Production{}))::return_type; +template +using _detect_value = decltype(Production::value); + template struct _sfinae_sink { @@ -270,6 +287,11 @@ struct _sfinae_sink } }; +template +constexpr bool production_has_value_callback + = lexy::_detail::is_detected<_detect_value_of, ParseState, Production> + || lexy::_detail::is_detected<_detect_value, Production>; + template class production_value_callback { @@ -323,12 +345,14 @@ class production_value_callback template constexpr return_type operator()(Args&&... 
args) const { - if constexpr (lexy::is_callback_for<_type, Args&&...>) + if constexpr (lexy::is_callback_with_state_for<_type, ParseState, Args&&...> + && !std::is_void_v) { - if constexpr (!std::is_void_v && lexy::is_callback_state<_type, ParseState>) - return _get_value(_state)[*_state](LEXY_FWD(args)...); - else - return _get_value(_state)(LEXY_FWD(args)...); + return _get_value(_state)[*_state](LEXY_FWD(args)...); + } + else if constexpr (lexy::is_callback_for<_type, Args&&...>) + { + return _get_value(_state)(LEXY_FWD(args)...); } else if constexpr ((lexy::is_sink<_type> // || lexy::is_sink<_type, std::add_lvalue_reference_t>) // @@ -355,4 +379,3 @@ class production_value_callback } // namespace lexy #endif // LEXY_GRAMMAR_HPP_INCLUDED - diff --git a/3rdparty/lexy/include/lexy/input/argv_input.hpp b/3rdparty/lexy/include/lexy/input/argv_input.hpp index f60ee4e8f..f4b4f1d70 100644 --- a/3rdparty/lexy/include/lexy/input/argv_input.hpp +++ b/3rdparty/lexy/include/lexy/input/argv_input.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_INPUT_ARGV_INPUT_HPP_INCLUDED @@ -121,9 +121,9 @@ class argv_input public: using encoding = Encoding; using char_type = typename encoding::char_type; - static_assert( - std::is_same_v || Encoding::template is_secondary_char_type(), - "invalid encoding for argv"); + static_assert(std::is_same_v + || Encoding::template is_secondary_char_type(), + "invalid encoding for argv"); //=== constructors ===// constexpr argv_input() = default; @@ -147,7 +147,7 @@ class argv_input argv_iterator _begin, _end; }; -argv_input(int argc, char* argv[])->argv_input<>; +argv_input(int argc, char* argv[]) -> argv_input<>; } // namespace lexy namespace lexy @@ -169,9 +169,9 @@ struct _argvsep : token_base<_argvsep> template struct tp { - typename Reader::iterator end; + typename Reader::marker end; - 
constexpr explicit tp(const Reader& reader) : end(reader.position()) {} + constexpr explicit tp(const Reader& reader) : end(reader.current()) {} constexpr auto try_parse([[maybe_unused]] Reader reader) { @@ -182,7 +182,7 @@ struct _argvsep : token_base<_argvsep> return false; reader.bump(); - end = reader.position(); + end = reader.current(); return true; } else diff --git a/3rdparty/lexy/include/lexy/input/base.hpp b/3rdparty/lexy/include/lexy/input/base.hpp index ebd35f319..2bc260e2f 100644 --- a/3rdparty/lexy/include/lexy/input/base.hpp +++ b/3rdparty/lexy/include/lexy/input/base.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_INPUT_BASE_HPP_INCLUDED @@ -18,6 +18,16 @@ class _rr using encoding = Encoding; using iterator = Iterator; + struct marker + { + iterator _it; + + constexpr iterator position() const noexcept + { + return _it; + } + }; + constexpr explicit _rr(Iterator begin, Sentinel end) noexcept : _cur(begin), _end(end) { LEXY_PRECONDITION(lexy::_detail::precedes(begin, end)); @@ -42,10 +52,14 @@ class _rr return _cur; } - constexpr void set_position(iterator new_pos) noexcept + constexpr marker current() const noexcept + { + return {_cur}; + } + constexpr void reset(marker m) noexcept { - LEXY_PRECONDITION(lexy::_detail::precedes(new_pos, _end)); - _cur = new_pos; + LEXY_PRECONDITION(lexy::_detail::precedes(m._it, _end)); + _cur = m._it; } private: diff --git a/3rdparty/lexy/include/lexy/input/buffer.hpp b/3rdparty/lexy/include/lexy/input/buffer.hpp index b6bfe758b..450ecf254 100644 --- a/3rdparty/lexy/include/lexy/input/buffer.hpp +++ b/3rdparty/lexy/include/lexy/input/buffer.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_INPUT_BUFFER_HPP_INCLUDED @@ 
-21,6 +21,16 @@ class _br : public _detail::swar_reader_base<_br> using encoding = Encoding; using iterator = const typename Encoding::char_type*; + struct marker + { + iterator _it; + + constexpr iterator position() const noexcept + { + return _it; + } + }; + explicit _br(iterator begin) noexcept : _cur(begin) {} auto peek() const noexcept @@ -39,9 +49,13 @@ class _br : public _detail::swar_reader_base<_br> return _cur; } - void set_position(iterator new_pos) noexcept + marker current() const noexcept + { + return {_cur}; + } + void reset(marker m) noexcept { - _cur = new_pos; + _cur = m._it; } private: @@ -80,13 +94,14 @@ namespace lexy template class buffer { + static_assert(lexy::is_char_encoding); static constexpr auto _has_sentinel = std::is_same_v; public: using encoding = Encoding; using char_type = typename encoding::char_type; - static_assert(std::is_trivial_v); + static_assert(std::is_trivially_copyable_v); //=== constructors ===// /// Allows the creation of an uninitialized buffer that is then filled by the user. @@ -119,6 +134,17 @@ class buffer buffer _buffer; }; + static buffer adopt(const char_type* data, std::size_t size, + MemoryResource* resource = _detail::get_memory_resource()) + { + buffer result(resource); + // We can cast away the const-ness, since we require that `data` came from a buffer + // origionally, where it wasn't const. 
+ result._data = const_cast(data); + result._size = size; + return result; + } + constexpr buffer() noexcept : buffer(_detail::get_memory_resource()) {} constexpr explicit buffer(MemoryResource* resource) noexcept @@ -227,6 +253,14 @@ class buffer return _size; } + const char_type* release() && noexcept + { + auto result = _data; + _data = nullptr; + _size = 0; + return result; + } + //=== input ===// auto reader() const& noexcept { diff --git a/3rdparty/lexy/include/lexy/input/file.hpp b/3rdparty/lexy/include/lexy/input/file.hpp index 9c47d2d6c..8c4a2d57d 100644 --- a/3rdparty/lexy/include/lexy/input/file.hpp +++ b/3rdparty/lexy/include/lexy/input/file.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_INPUT_FILE_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/input/parse_tree_input.hpp b/3rdparty/lexy/include/lexy/input/parse_tree_input.hpp new file mode 100644 index 000000000..fdc7572f9 --- /dev/null +++ b/3rdparty/lexy/include/lexy/input/parse_tree_input.hpp @@ -0,0 +1,184 @@ +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors +// SPDX-License-Identifier: BSL-1.0 + +#ifndef LEXY_INPUT_PARSE_TREE_INPUT_HPP_INCLUDED +#define LEXY_INPUT_PARSE_TREE_INPUT_HPP_INCLUDED + +#include +#include +#include +#include + +#if !LEXY_EXPERIMENTAL +# error "lexy::parse_tree_input is experimental" +#endif + +namespace lexy +{ +template +struct parse_tree_input_traits; + +struct _parse_tree_eof // not real EOF, just no more siblings +{ + template + friend constexpr bool operator==(const Node& node, _parse_tree_eof) noexcept + { + return parse_tree_input_traits::is_null(node); + } + template + friend constexpr bool operator==(_parse_tree_eof, const Node& node) noexcept + { + return parse_tree_input_traits::is_null(node); + } + + template + friend constexpr bool operator!=(const Node& node, _parse_tree_eof) noexcept 
+ { + return !parse_tree_input_traits::is_null(node); + } + template + friend constexpr bool operator!=(_parse_tree_eof, const Node& node) noexcept + { + return !parse_tree_input_traits::is_null(node); + } +}; + +template +class parse_tree_encoding +{ + using _traits = parse_tree_input_traits; + +public: + using char_encoding = typename _traits::char_encoding; + using char_type = typename char_encoding::char_type; + using value_type = Node; + + static LEXY_CONSTEVAL auto eof() + { + return _parse_tree_eof{}; + } + + template + static bool match(const Node& node, const NodeKind& node_kind) + { + return _traits::has_kind(node, node_kind); + } +}; +template +constexpr auto is_node_encoding> = true; + +template +class _ptr // parse tree reader +{ + using _traits = parse_tree_input_traits; + +public: + using encoding = parse_tree_encoding; + using iterator = typename _traits::iterator; + + struct marker + { + Node _parent = _traits::null(); + Node _cur = _traits::null(); + + constexpr iterator position() const noexcept + { + return _cur == _parse_tree_eof{} ? 
_traits::position_end(_parent) + : _traits::position_begin(_cur); + } + }; + + constexpr explicit _ptr(const Node& root) noexcept + : _parent(root), _cur(_traits::first_child(root)) + {} + + constexpr _ptr child_reader() const& noexcept + { + return _ptr(_cur); + } + constexpr auto lexeme_reader() const& noexcept + { + auto lexeme = _traits::lexeme(_cur); + return _range_reader(lexeme.begin(), lexeme.end()); + } + + constexpr const Node& peek() const noexcept + { + return _cur; + } + + constexpr void bump() noexcept + { + LEXY_PRECONDITION(_cur != _parse_tree_eof{}); + _cur = _traits::sibling(_cur); + } + + constexpr marker current() const noexcept + { + return {_parent, _cur}; + } + constexpr void reset(marker m) noexcept + { + _cur = m._cur; + } + + constexpr iterator position() const noexcept + { + return current().position(); + } + +private: + Node _parent; + Node _cur; +}; + +template +class parse_tree_input +{ +public: + using encoding = parse_tree_encoding; + using value_type = Node; + + //=== constructors ===// + constexpr parse_tree_input() noexcept : _root(nullptr) {} + + constexpr explicit parse_tree_input(Node root) noexcept : _root(LEXY_MOV(root)) {} + + template >> + constexpr explicit parse_tree_input(const ParseTree& tree) noexcept : _root(tree.root()) + {} + + //=== access ===// + constexpr const Node& root() const noexcept + { + return _root; + } + + //=== reader ===// + constexpr auto reader() const& noexcept + { + return _ptr(_root); + } + +private: + Node _root; +}; + +template +parse_tree_input(const ParseTree&) + -> parse_tree_input; + +//=== convenience typedefs ===// +template +using parse_tree_lexeme = lexeme_for>; + +template +using parse_tree_error = error_for, Tag>; + +template +using parse_tree_error_context = error_context>; +} // namespace lexy + +#endif // LEXY_INPUT_PARSE_TREE_INPUT_HPP_INCLUDED + diff --git a/3rdparty/lexy/include/lexy/input/range_input.hpp b/3rdparty/lexy/include/lexy/input/range_input.hpp index 
3ad00f189..c32c0d3a7 100644 --- a/3rdparty/lexy/include/lexy/input/range_input.hpp +++ b/3rdparty/lexy/include/lexy/input/range_input.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_INPUT_RANGE_INPUT_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/input/string_input.hpp b/3rdparty/lexy/include/lexy/input/string_input.hpp index 72568a715..a59a387da 100644 --- a/3rdparty/lexy/include/lexy/input/string_input.hpp +++ b/3rdparty/lexy/include/lexy/input/string_input.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_INPUT_STRING_INPUT_HPP_INCLUDED @@ -17,6 +17,8 @@ using _string_view_char_type = LEXY_DECAY_DECLTYPE(*LEXY_DECLVAL(View).data()); template class string_input { + static_assert(lexy::is_char_encoding); + public: using encoding = Encoding; using char_type = typename encoding::char_type; diff --git a/3rdparty/lexy/include/lexy/input_location.hpp b/3rdparty/lexy/include/lexy/input_location.hpp index e6c14cf2d..983d2aa02 100644 --- a/3rdparty/lexy/include/lexy/input_location.hpp +++ b/3rdparty/lexy/include/lexy/input_location.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_INPUT_LOCATION_HPP_INCLUDED @@ -16,18 +16,18 @@ namespace lexy template struct input_location_anchor { - using iterator = typename lexy::input_reader::iterator; + using marker = typename lexy::input_reader::marker; constexpr explicit input_location_anchor(const Input& input) - : _line_begin(input.reader().position()), _line_nr(1) + : _line_begin(input.reader().current()), _line_nr(1) {} // implementation detail - constexpr explicit 
input_location_anchor(iterator line_begin, unsigned line_nr) + constexpr explicit input_location_anchor(marker line_begin, unsigned line_nr) : _line_begin(line_begin), _line_nr(line_nr) {} - iterator _line_begin; + marker _line_begin; unsigned _line_nr; }; } // namespace lexy @@ -42,12 +42,14 @@ class code_unit_location_counting template constexpr bool try_match_newline(Reader& reader) { + static_assert(lexy::is_char_encoding); return lexy::try_match_token(lexy::dsl::newline, reader); } template constexpr void match_column(Reader& reader) { + static_assert(lexy::is_char_encoding); reader.bump(); } }; @@ -59,12 +61,14 @@ class code_point_location_counting template constexpr bool try_match_newline(Reader& reader) { + static_assert(lexy::is_char_encoding); return lexy::try_match_token(lexy::dsl::newline, reader); } template constexpr void match_column(Reader& reader) { + static_assert(lexy::is_char_encoding); if (!lexy::try_match_token(lexy::dsl::code_point, reader)) reader.bump(); } @@ -78,6 +82,7 @@ class byte_location_counting template constexpr bool try_match_newline(Reader& reader) { + static_assert(lexy::is_byte_encoding); LEXY_PRECONDITION(_cur_index <= LineWidth - 1); if (_cur_index == LineWidth - 1) { @@ -98,7 +103,7 @@ class byte_location_counting template constexpr void match_column(Reader& reader) { - static_assert(std::is_same_v); + static_assert(lexy::is_byte_encoding); reader.bump(); ++_cur_index; @@ -109,9 +114,20 @@ class byte_location_counting }; template -using _default_location_counting = std::conditional_t< - std::is_same_v::encoding, lexy::byte_encoding>, - byte_location_counting<>, code_unit_location_counting>; +auto _compute_default_location_counting() +{ + using encoding = typename lexy::input_reader::encoding; + if constexpr (lexy::is_byte_encoding) + return byte_location_counting{}; + else if constexpr (lexy::is_char_encoding) + return code_unit_location_counting{}; + else + static_assert(_detail::error, + "input encoding does not have a 
default location counting policy"); +} + +template +using _default_location_counting = decltype(_compute_default_location_counting()); } // namespace lexy //=== input_location ===// @@ -122,10 +138,12 @@ template > class input_location { using iterator = typename lexy::input_reader::iterator; + using marker = typename lexy::input_reader::marker; public: constexpr explicit input_location(const Input& input) - : _line_begin(input.reader().position()), _column_begin(_line_begin), _line_nr(1), _column_nr(1) + : _line_begin(input.reader().current()), _column_begin(_line_begin.position()), _line_nr(1), + _column_nr(1) {} /// The closest previous anchor. @@ -162,7 +180,7 @@ class input_location { if (lhs._line_nr != rhs._line_nr) return lhs._line_nr < rhs._line_nr; - return lhs._column_nr < rhs._colum_nr; + return lhs._column_nr < rhs._column_nr; } friend constexpr bool operator<=(const input_location& lhs, const input_location& rhs) { @@ -178,12 +196,13 @@ class input_location } private: - constexpr input_location(iterator line_begin, unsigned line_nr, iterator column_begin, + constexpr input_location(marker line_begin, unsigned line_nr, iterator column_begin, unsigned column_nr) : _line_begin(line_begin), _column_begin(column_begin), _line_nr(line_nr), _column_nr(column_nr) {} - iterator _line_begin, _column_begin; + marker _line_begin; + iterator _column_begin; unsigned _line_nr, _column_nr; template @@ -201,7 +220,7 @@ constexpr auto get_input_location(const Input& i -> input_location { auto reader = input.reader(); - reader.set_position(anchor._line_begin); + reader.reset(anchor._line_begin); auto line_begin = anchor._line_begin; auto line_nr = anchor._line_nr; @@ -227,8 +246,10 @@ constexpr auto get_input_location(const Input& i else if (counting.try_match_newline(reader)) { // [column_begin, newline_end) covers the newline. 
- auto newline_end = reader.position(); - if (lexy::_detail::min_range_end(column_begin, newline_end, position) != newline_end) + auto newline_end = reader.current(); + if (lexy::_detail::min_range_end(column_begin.position(), newline_end.position(), + position) + != newline_end.position()) break; // Advance to the next line. @@ -242,8 +263,10 @@ constexpr auto get_input_location(const Input& i counting.match_column(reader); // [column_begin, column_end) covers the column. - auto column_end = reader.position(); - if (lexy::_detail::min_range_end(column_begin, column_end, position) != column_end) + auto column_end = reader.current(); + if (lexy::_detail::min_range_end(column_begin.position(), column_end.position(), + position) + != column_end.position()) break; // Advance to the next column. @@ -252,7 +275,7 @@ constexpr auto get_input_location(const Input& i } } - return {line_begin, line_nr, column_begin, column_nr}; + return {line_begin, line_nr, column_begin.position(), column_nr}; } template @@ -281,11 +304,11 @@ constexpr auto get_input_location(const Input& i namespace lexy::_detail { template -constexpr auto get_input_line(const Input& input, - typename lexy::input_reader::iterator line_begin) +constexpr auto get_input_line(const Input& input, + typename lexy::input_reader::marker line_begin) { auto reader = input.reader(); - reader.set_position(line_begin); + reader.reset(line_begin); auto line_end = reader.position(); for (Counting counting; @@ -301,7 +324,7 @@ constexpr auto get_input_line(const Input& input lexy::lexeme_for line; lexy::lexeme_for newline; }; - return result_t{{line_begin, line_end}, {line_end, newline_end}}; + return result_t{{line_begin.position(), line_end}, {line_end, newline_end}}; } // Advances the iterator to the beginning of the next code point. 
diff --git a/3rdparty/lexy/include/lexy/lexeme.hpp b/3rdparty/lexy/include/lexy/lexeme.hpp index 09d274a27..69d609854 100644 --- a/3rdparty/lexy/include/lexy/lexeme.hpp +++ b/3rdparty/lexy/include/lexy/lexeme.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_LEXEME_HPP_INCLUDED @@ -17,8 +17,8 @@ class lexeme { public: using encoding = typename Reader::encoding; - using char_type = typename encoding::char_type; using iterator = typename Reader::iterator; + using char_type = LEXY_DECAY_DECLTYPE(*LEXY_DECLVAL(iterator&)); constexpr lexeme() noexcept : _begin(), _end() {} constexpr lexeme(iterator begin, iterator end) noexcept : _begin(begin), _end(end) {} @@ -30,6 +30,13 @@ class lexeme : _begin(begin), _end(reader.position()) {} + template >> + constexpr operator lexeme() const noexcept + { + return lexeme(this->begin(), this->end()); + } + constexpr bool empty() const noexcept { return _begin == _end; diff --git a/3rdparty/lexy/include/lexy/parse_tree.hpp b/3rdparty/lexy/include/lexy/parse_tree.hpp index 403c6b4d0..b82efb1bd 100644 --- a/3rdparty/lexy/include/lexy/parse_tree.hpp +++ b/3rdparty/lexy/include/lexy/parse_tree.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_PARSE_TREE_HPP_INCLUDED @@ -11,6 +11,12 @@ #include #include +namespace lexy +{ +template +struct parse_tree_input_traits; +} + //=== internal: pt_node ===// namespace lexy::_detail { @@ -125,7 +131,8 @@ struct pt_node_token : pt_node { if constexpr (_optimize_end) { - static_assert(sizeof(pt_node_token) == 3 * sizeof(void*)); + static_assert(!std::is_pointer_v + || sizeof(pt_node_token) == 3 * sizeof(void*)); auto size = std::size_t(end - begin); LEXY_PRECONDITION(size <= UINT_LEAST32_MAX); @@ -133,7 
+140,8 @@ struct pt_node_token : pt_node } else { - static_assert(sizeof(pt_node_token) <= 4 * sizeof(void*)); + static_assert(!std::is_pointer_v + || sizeof(pt_node_token) <= 4 * sizeof(void*)); end_impl = end; } @@ -327,9 +335,16 @@ class pt_buffer //=== parse_tree ===// namespace lexy { +template +class _pt_node_kind; +template +class _pt_node; + template class parse_tree { + static_assert(lexy::is_char_encoding); + public: //=== construction ===// class builder; @@ -363,8 +378,8 @@ class parse_tree } //=== node access ===// - class node; - class node_kind; + using node_kind = _pt_node_kind; + using node = _pt_node; node root() const noexcept { @@ -390,7 +405,11 @@ class parse_tree //=== remaining input ===// lexy::lexeme remaining_input() const noexcept { - return _remaining_input; + if (empty()) + return {}; + + auto token = _root->next_node()->as_token(); + return {token->begin, token->end()}; } private: @@ -398,7 +417,6 @@ class parse_tree _detail::pt_node_production* _root; std::size_t _size; std::size_t _depth; - lexy::lexeme _remaining_input; }; template @@ -542,12 +560,29 @@ class parse_tree::builder } explicit builder(production_info production) : builder(parse_tree(), production) {} - parse_tree&& finish(lexy::lexeme remaining_input = {}) && + [[deprecated("Pass the remaining input, or `input.end()` if there is none.")]] parse_tree&& + finish() && + { + return LEXY_MOV(*this).finish(lexy::lexeme()); + } + parse_tree&& finish(typename Reader::iterator end) && + { + return LEXY_MOV(*this).finish({end, end}); + } + parse_tree&& finish(lexy::lexeme remaining_input) && { LEXY_PRECONDITION(_cur.prod == _result._root); + _cur.insert_children_into(_cur.prod); _cur.update_size_depth(_result._size, _result._depth); - _result._remaining_input = remaining_input; + + _result._buffer.reserve(sizeof(_detail::pt_node_token)); + auto node = _result._buffer + .template allocate<_detail::pt_node_token>(lexy::eof_token_kind, + remaining_input.begin(), + 
remaining_input.end()); + _result._root->set_sibling(node); + return LEXY_MOV(_result); } @@ -712,13 +747,19 @@ class parse_tree::builder } } + //=== accessors ===// + std::size_t current_child_count() const noexcept + { + return _cur.child_count; + } + private: parse_tree _result; marker _cur; }; -template -class parse_tree::node_kind +template +class _pt_node_kind { public: bool is_token() const noexcept @@ -732,8 +773,10 @@ class parse_tree::node_kind bool is_root() const noexcept { - // Root node has no next node. - return _ptr->next_node() == nullptr; + // Root node has a next node (the remaining input node) which has no next node. + // We assume that _ptr is never the remaining input node, so we know that we have a next + // node. + return _ptr->next_node()->next_node() == nullptr; } bool is_token_production() const noexcept { @@ -753,65 +796,65 @@ class parse_tree::node_kind } } - friend bool operator==(node_kind lhs, node_kind rhs) + friend bool operator==(_pt_node_kind lhs, _pt_node_kind rhs) { if (lhs.is_token() && rhs.is_token()) return lhs._ptr->as_token()->kind == rhs._ptr->as_token()->kind; else return lhs._ptr->as_production()->id == rhs._ptr->as_production()->id; } - friend bool operator!=(node_kind lhs, node_kind rhs) + friend bool operator!=(_pt_node_kind lhs, _pt_node_kind rhs) { return !(lhs == rhs); } - friend bool operator==(node_kind nk, token_kind tk) + friend bool operator==(_pt_node_kind nk, token_kind tk) { if (auto token = nk._ptr->as_token()) return token_kind::from_raw(token->kind) == tk; else return false; } - friend bool operator==(token_kind tk, node_kind nk) + friend bool operator==(token_kind tk, _pt_node_kind nk) { return nk == tk; } - friend bool operator!=(node_kind nk, token_kind tk) + friend bool operator!=(_pt_node_kind nk, token_kind tk) { return !(nk == tk); } - friend bool operator!=(token_kind tk, node_kind nk) + friend bool operator!=(token_kind tk, _pt_node_kind nk) { return !(nk == tk); } - friend bool 
operator==(node_kind nk, production_info info) + friend bool operator==(_pt_node_kind nk, production_info info) { return nk.is_production() && nk._ptr->as_production()->id == info.id; } - friend bool operator==(production_info info, node_kind nk) + friend bool operator==(production_info info, _pt_node_kind nk) { return nk == info; } - friend bool operator!=(node_kind nk, production_info info) + friend bool operator!=(_pt_node_kind nk, production_info info) { return !(nk == info); } - friend bool operator!=(production_info info, node_kind nk) + friend bool operator!=(production_info info, _pt_node_kind nk) { return !(nk == info); } private: - explicit node_kind(_detail::pt_node* ptr) : _ptr(ptr) {} + explicit _pt_node_kind(_detail::pt_node* ptr) : _ptr(ptr) {} _detail::pt_node* _ptr; - friend parse_tree::node; + friend _pt_node; }; -template -class parse_tree::node +template +class _pt_node { public: void* address() const noexcept @@ -821,7 +864,7 @@ class parse_tree::node auto kind() const noexcept { - return node_kind(_ptr); + return _pt_node_kind(_ptr); } auto parent() const noexcept @@ -834,20 +877,20 @@ class parse_tree::node auto cur = _ptr; while (cur->next_role() == _detail::pt_node::role_sibling) cur = cur->next_node(); - return node(cur->next_node()); + return _pt_node(cur->next_node()); } class children_range { public: - class iterator : public _detail::forward_iterator_base + class iterator : public _detail::forward_iterator_base { public: iterator() noexcept : _cur(nullptr) {} - node deref() const noexcept + auto deref() const noexcept { - return node(_cur); + return _pt_node(_cur); } void increment() noexcept @@ -903,7 +946,7 @@ class parse_tree::node _detail::pt_node* _node; - friend node; + friend _pt_node; }; auto children() const noexcept @@ -914,14 +957,14 @@ class parse_tree::node class sibling_range { public: - class iterator : public _detail::forward_iterator_base + class iterator : public _detail::forward_iterator_base { public: iterator() 
noexcept : _cur() {} - node deref() const noexcept + auto deref() const noexcept { - return node(_cur); + return _pt_node(_cur); } void increment() noexcept @@ -969,7 +1012,7 @@ class parse_tree::node _detail::pt_node* _node; - friend node; + friend _pt_node; }; auto siblings() const noexcept @@ -983,6 +1026,19 @@ class parse_tree::node return _ptr->next_role() == _detail::pt_node::role_parent; } + auto position() const noexcept -> typename Reader::iterator + { + // Find the first descendant that is a token. + auto cur = _ptr; + while (cur->type() == _detail::pt_node::type_production) + { + cur = cur->as_production()->first_child(); + LEXY_PRECONDITION(cur); + } + + return cur->as_token()->begin; + } + auto lexeme() const noexcept { if (auto token = _ptr->as_token()) @@ -991,6 +1047,28 @@ class parse_tree::node return lexy::lexeme(); } + auto covering_lexeme() const noexcept + { + if (auto token = _ptr->as_token()) + return lexy::lexeme(token->begin, token->end()); + + auto begin = position(); + + auto sibling = _ptr; + while (true) + { + auto next_role = sibling->next_role(); + sibling = sibling->next_node(); + // If we went to parent, we need to continue finding siblings. 
+ if (next_role == _detail::pt_node::role_sibling) + break; + } + auto end = _pt_node(sibling).position(); + + LEXY_PRECONDITION(begin == end || end != typename Reader::iterator()); + return lexy::lexeme(begin, end); + } + auto token() const noexcept { LEXY_PRECONDITION(kind().is_token()); @@ -1000,21 +1078,22 @@ class parse_tree::node return lexy::token(kind, token->begin, token->end()); } - friend bool operator==(node lhs, node rhs) noexcept + friend bool operator==(_pt_node lhs, _pt_node rhs) noexcept { return lhs._ptr == rhs._ptr; } - friend bool operator!=(node lhs, node rhs) noexcept + friend bool operator!=(_pt_node lhs, _pt_node rhs) noexcept { return lhs._ptr != rhs._ptr; } private: - explicit node(_detail::pt_node* ptr) noexcept : _ptr(ptr) {} + explicit _pt_node(_detail::pt_node* ptr) noexcept : _ptr(ptr) {} _detail::pt_node* _ptr; - friend parse_tree; + friend parse_tree; + friend parse_tree_input_traits<_pt_node>; }; enum class traverse_event @@ -1143,5 +1222,70 @@ class parse_tree::traverse_range }; } // namespace lexy +#if LEXY_EXPERIMENTAL +namespace lexy +{ +template +struct parse_tree_input_traits<_pt_node> +{ + using _node = _pt_node; + + using char_encoding = typename Reader::encoding; + + static bool is_null(_node cur) noexcept + { + return cur._ptr == nullptr; + } + + static _node null() noexcept + { + return _node(nullptr); + } + + static _node first_child(_node cur) noexcept + { + LEXY_PRECONDITION(!is_null(cur)); + if (auto prod = cur._ptr->as_production()) + return _node(prod->first_child()); + else + return _node(nullptr); + } + + static _node sibling(_node cur) noexcept + { + LEXY_PRECONDITION(!is_null(cur)); + return cur._ptr->next_role() == _detail::pt_node::role_sibling + ? 
_node(cur._ptr->next_node()) + : _node(nullptr); + } + + template + static bool has_kind(_node cur, const Kind& kind) noexcept + { + return !is_null(cur) && cur.kind() == kind; + } + + using iterator = typename Reader::iterator; + + static iterator position_begin(_node cur) noexcept + { + LEXY_PRECONDITION(!is_null(cur)); + return cur.position(); + } + static iterator position_end(_node cur) noexcept + { + LEXY_PRECONDITION(!is_null(cur)); + return cur.covering_lexeme().end(); + } + + static auto lexeme(_node cur) noexcept + { + LEXY_PRECONDITION(!is_null(cur)); + return cur.lexeme(); + } +}; +} // namespace lexy +#endif + #endif // LEXY_PARSE_TREE_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy/token.hpp b/3rdparty/lexy/include/lexy/token.hpp index 3ea1532b8..a7b1442e5 100644 --- a/3rdparty/lexy/include/lexy/token.hpp +++ b/3rdparty/lexy/include/lexy/token.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_TOKEN_HPP_INCLUDED @@ -88,9 +88,6 @@ inline constexpr auto token_kind_map_for = token_kind_map; namespace lexy { -template -using _detect_token_kind_name = decltype(token_kind_name(TokenKind{})); - template constexpr auto _has_special_token_kind = [] { using kind = LEXY_DECAY_DECLTYPE(lexy::token_kind_of); @@ -174,12 +171,13 @@ class token_kind constexpr const char* name() const noexcept { if (is_predefined()) - return _kind_name(static_cast(_value)); - else if constexpr (lexy::_detail::is_detected<_detect_token_kind_name, TokenKind>) - return token_kind_name(get()); // ADL + return token_kind_name(static_cast(_value)); else - // We only have a generic name. 
- return "token"; + return token_kind_name(get()); // ADL + } + friend constexpr const char* token_kind_name(token_kind kind) noexcept + { + return kind.name(); } constexpr _underlying_type get() const noexcept diff --git a/3rdparty/lexy/include/lexy/visualize.hpp b/3rdparty/lexy/include/lexy/visualize.hpp index 8b0b6f467..42b3fb0be 100644 --- a/3rdparty/lexy/include/lexy/visualize.hpp +++ b/3rdparty/lexy/include/lexy/visualize.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_VISUALIZE_HPP_INCLUDED @@ -363,32 +363,7 @@ OutputIt visualize_to(OutputIt out, lexy::lexeme lexeme, }; using encoding = typename Reader::encoding; - if constexpr (std::is_same_v // - || std::is_same_v) - { - auto count = 0u; - for (char c : lexeme) - { - // If the character is in fact ASCII, visualize the code point. - // Otherwise, visualize as byte. - if (lexy::_detail::is_ascii(c)) - out = visualize_to(out, lexy::code_point(static_cast(c)), opts); - else - out = write_escaped_byte(out, static_cast(c)); - - ++count; - if (count == opts.max_lexeme_width) - { - out = _detail::write_ellipsis(out, opts); - break; - } - } - return out; - } - else if constexpr (std::is_same_v // - || std::is_same_v // - || std::is_same_v // - || std::is_same_v) + if constexpr (lexy::is_unicode_encoding) { // Parse the individual code points, and write them out. lexy::range_input input(lexeme.begin(), lexeme.end()); @@ -406,7 +381,7 @@ OutputIt visualize_to(OutputIt out, lexy::lexeme lexeme, else if (result.error == lexy::_detail::cp_error::success) { // Consume and visualize. 
- reader.set_position(result.end); + reader.reset(result.end); out = visualize_to(out, lexy::code_point(result.cp), opts); } else @@ -461,7 +436,28 @@ OutputIt visualize_to(OutputIt out, lexy::lexeme lexeme, } return out; } - else if constexpr (std::is_same_v) + else if constexpr (lexy::is_text_encoding) + { + auto count = 0u; + for (char c : lexeme) + { + // If the character is in fact ASCII, visualize the code point. + // Otherwise, visualize as byte. + if (lexy::_detail::is_ascii(c)) + out = visualize_to(out, lexy::code_point(static_cast(c)), opts); + else + out = write_escaped_byte(out, static_cast(c)); + + ++count; + if (count == opts.max_lexeme_width) + { + out = _detail::write_ellipsis(out, opts); + break; + } + } + return out; + } + else if constexpr (lexy::is_byte_encoding) { auto count = 0u; for (auto iter = lexeme.begin(); iter != lexeme.end(); ++iter) @@ -480,6 +476,13 @@ OutputIt visualize_to(OutputIt out, lexy::lexeme lexeme, } return out; } + else if constexpr (lexy::is_node_encoding) + { + // Visualize as an iterator range of characters. + lexy::range_input + input(lexeme.begin(), lexeme.end()); + return visualize_to(out, lexy::lexeme_for(input.begin(), input.end())); + } else { static_assert(lexy::_detail::error, "unknown encoding"); @@ -622,6 +625,41 @@ struct cfile_output_iterator } }; +struct stderr_output_iterator +{ + auto operator*() const noexcept + { + return *this; + } + auto operator++(int) const noexcept + { + return *this; + } + + stderr_output_iterator& operator=(char c) + { + std::fputc(c, stderr); + return *this; + } +}; +struct stdout_output_iterator +{ + auto operator*() const noexcept + { + return *this; + } + auto operator++(int) const noexcept + { + return *this; + } + + stdout_output_iterator& operator=(char c) + { + std::fputc(c, stdout); + return *this; + } +}; + /// Writes the visualization to the FILE. 
template void visualize(std::FILE* file, const T& obj, visualization_options opts = {}) diff --git a/3rdparty/lexy/include/lexy_ext/compiler_explorer.hpp b/3rdparty/lexy/include/lexy_ext/compiler_explorer.hpp index 588a456ea..220adf4ab 100644 --- a/3rdparty/lexy/include/lexy_ext/compiler_explorer.hpp +++ b/3rdparty/lexy/include/lexy_ext/compiler_explorer.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_EXT_COMPILER_EXPLORER_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy_ext/parse_tree_algorithm.hpp b/3rdparty/lexy/include/lexy_ext/parse_tree_algorithm.hpp index 9009ee08e..f8242fab6 100644 --- a/3rdparty/lexy/include/lexy_ext/parse_tree_algorithm.hpp +++ b/3rdparty/lexy/include/lexy_ext/parse_tree_algorithm.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_EXT_PARSE_TREE_ALGORITHM_HPP_INCLUDED @@ -216,7 +216,7 @@ class _filtered_node_range template _filtered_node_range(Predicate&& pred, Iterator begin, Sentinel end) noexcept - ->_filtered_node_range, Iterator, Sentinel>; + -> _filtered_node_range, Iterator, Sentinel>; /// Returns the children that of node that match the predicate. 
/// diff --git a/3rdparty/lexy/include/lexy_ext/parse_tree_doctest.hpp b/3rdparty/lexy/include/lexy_ext/parse_tree_doctest.hpp index 51c1d1f29..18b12dead 100644 --- a/3rdparty/lexy/include/lexy_ext/parse_tree_doctest.hpp +++ b/3rdparty/lexy/include/lexy_ext/parse_tree_doctest.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_EXT_PARSE_TREE_DOCTEST_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy_ext/report_error.hpp b/3rdparty/lexy/include/lexy_ext/report_error.hpp index fc04e10cf..0ee6c32a2 100644 --- a/3rdparty/lexy/include/lexy_ext/report_error.hpp +++ b/3rdparty/lexy/include/lexy_ext/report_error.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_EXT_REPORT_ERROR_HPP_INCLUDED @@ -317,7 +317,7 @@ OutputIt write_error(OutputIt out, const lexy::error_context& context, namespace lexy_ext { -template +template struct _report_error { OutputIterator _iter; @@ -337,18 +337,14 @@ struct _report_error void operator()(const lexy::error_context& context, const lexy::error& error) { - if constexpr (std::is_same_v) - _detail::write_error(lexy::cfile_output_iterator{stderr}, context, error, _opts, - _path); - else - _iter = _detail::write_error(_iter, context, error, _opts, _path); + _iter = _detail::write_error(_iter, context, error, _opts, _path); ++_count; } std::size_t finish() && { if (_count != 0) - std::fputs("\n", stderr); + *_iter++ = '\n'; return _count; } }; @@ -378,7 +374,7 @@ struct _report_error }; /// An error callback that uses diagnostic_writer to print to stderr (by default). 
-constexpr auto report_error = _report_error<>{}; +constexpr auto report_error = _report_error{}; } // namespace lexy_ext #endif // LEXY_EXT_REPORT_ERROR_HPP_INCLUDED diff --git a/3rdparty/lexy/include/lexy_ext/shell.hpp b/3rdparty/lexy/include/lexy_ext/shell.hpp index 6fe81ee01..66865bbda 100644 --- a/3rdparty/lexy/include/lexy_ext/shell.hpp +++ b/3rdparty/lexy/include/lexy_ext/shell.hpp @@ -1,4 +1,4 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 #ifndef LEXY_EXT_SHELL_HPP_INCLUDED @@ -168,6 +168,16 @@ class shell using encoding = typename Prompt::encoding; using iterator = typename lexy::_detail::buffer_builder::stable_iterator; + struct marker + { + iterator _it; + + constexpr iterator position() const noexcept + { + return _it; + } + }; + auto reader() const& { return *this; @@ -191,9 +201,13 @@ class shell return iterator(_shell->_buffer, _idx); } - void set_position(iterator new_pos) noexcept + marker current() const noexcept + { + return {position()}; + } + void reset(marker m) noexcept { - _idx = new_pos.index(); + _idx = m._it.index(); } private: diff --git a/3rdparty/lexy/src/CMakeLists.txt b/3rdparty/lexy/src/CMakeLists.txt index 6f07f43db..709cc831f 100644 --- a/3rdparty/lexy/src/CMakeLists.txt +++ b/3rdparty/lexy/src/CMakeLists.txt @@ -1,4 +1,4 @@ -# Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +# Copyright (C) 2020-2024 Jonathan Müller and lexy contributors # SPDX-License-Identifier: BSL-1.0 get_filename_component(include_dir ${CMAKE_CURRENT_SOURCE_DIR}/../include/lexy ABSOLUTE) @@ -80,6 +80,7 @@ set(header_files ${include_dir}/dsl/option.hpp ${include_dir}/dsl/operator.hpp ${include_dir}/dsl/parse_as.hpp + ${include_dir}/dsl/parse_tree_node.hpp ${include_dir}/dsl/peek.hpp ${include_dir}/dsl/position.hpp ${include_dir}/dsl/production.hpp @@ -105,6 +106,7 @@ set(header_files ${include_dir}/input/buffer.hpp 
${include_dir}/input/file.hpp ${include_dir}/input/lexeme_input.hpp + ${include_dir}/input/parse_tree_input.hpp ${include_dir}/input/range_input.hpp ${include_dir}/input/string_input.hpp @@ -191,7 +193,7 @@ elseif(${CMAKE_CXX_COMPILER_ID} STREQUAL "GNU") # GCC's arry bounds, maybe uninitialized, and restrict warning seems to have false positives. target_compile_options(lexy_dev INTERFACE -Wno-array-bounds -Wno-maybe-uninitialized -Wno-restrict) elseif(MSVC) - target_compile_options(lexy_dev INTERFACE /WX /W3 /D _CRT_SECURE_NO_WARNINGS /wd5105) + target_compile_options(lexy_dev INTERFACE /WX /W3 /D _CRT_SECURE_NO_WARNINGS /wd5105 /utf-8) endif() # Link to have FILE I/O. @@ -214,3 +216,9 @@ target_sources(lexy_ext INTERFACE ${ext_headers_files}) add_library(lexy INTERFACE) add_alias(lexy lexy) target_link_libraries(lexy INTERFACE foonathan::lexy::core foonathan::lexy::file foonathan::lexy::unicode foonathan::lexy::ext) + +# Link to enable experimental features. +add_library(lexy_experimental INTERFACE) +add_alias(lexy::experimental lexy_experimental) +target_compile_definitions(lexy_experimental INTERFACE LEXY_EXPERIMENTAL=1) + diff --git a/3rdparty/lexy/src/input/file.cpp b/3rdparty/lexy/src/input/file.cpp index 655473ebc..028fb6de3 100644 --- a/3rdparty/lexy/src/input/file.cpp +++ b/3rdparty/lexy/src/input/file.cpp @@ -1,6 +1,7 @@ -// Copyright (C) 2020-2023 Jonathan Müller and lexy contributors +// Copyright (C) 2020-2024 Jonathan Müller and lexy contributors // SPDX-License-Identifier: BSL-1.0 +#include #include #include @@ -56,8 +57,8 @@ lexy::file_error get_file_error() noexcept } } -constexpr std::size_t small_file_size = 4 * 1024; -constexpr std::size_t medium_file_size = 32 * 1024; +constexpr std::size_t small_file_size = std::size_t(4) * 1024; +constexpr std::size_t medium_file_size = std::size_t(32) * 1024; } // namespace lexy::file_error lexy::_detail::read_file(const char* path, file_callback cb, void* user_data) diff --git 
a/3rdparty/tinyxml2/tinyxml2.cpp b/3rdparty/tinyxml2/tinyxml2.cpp index 31925d964..c5c487010 100755 --- a/3rdparty/tinyxml2/tinyxml2.cpp +++ b/3rdparty/tinyxml2/tinyxml2.cpp @@ -103,12 +103,17 @@ distribution. #if defined(_WIN64) #define TIXML_FSEEK _fseeki64 #define TIXML_FTELL _ftelli64 -#elif defined(__APPLE__) || defined(__FreeBSD__) || defined(__ANDROID__) +#elif defined(__APPLE__) || defined(__FreeBSD__) || defined(__OpenBSD__) || defined(__NetBSD__) || defined(__DragonFly__) || defined(__CYGWIN__) #define TIXML_FSEEK fseeko #define TIXML_FTELL ftello -#elif defined(__unix__) && defined(__x86_64__) - #define TIXML_FSEEK fseeko64 - #define TIXML_FTELL ftello64 +#elif defined(__ANDROID__) + #if __ANDROID_API__ > 24 + #define TIXML_FSEEK fseeko64 + #define TIXML_FTELL ftello64 + #else + #define TIXML_FSEEK fseeko + #define TIXML_FTELL ftello + #endif #else #define TIXML_FSEEK fseek #define TIXML_FTELL ftell @@ -707,7 +712,7 @@ bool XMLUtil::ToUnsigned64(const char* str, uint64_t* value) { } -char* XMLDocument::Identify( char* p, XMLNode** node ) +char* XMLDocument::Identify( char* p, XMLNode** node, bool first ) { TIXMLASSERT( node ); TIXMLASSERT( p ); @@ -759,9 +764,19 @@ char* XMLDocument::Identify( char* p, XMLNode** node ) p += dtdHeaderLen; } else if ( XMLUtil::StringEqual( p, elementHeader, elementHeaderLen ) ) { - returnNode = CreateUnlinkedNode( _elementPool ); - returnNode->_parseLineNum = _parseCurLineNum; - p += elementHeaderLen; + + // Preserve whitespace pedantically before closing tag, when it's immediately after opening tag + if (WhitespaceMode() == PEDANTIC_WHITESPACE && first && p != start && *(p + elementHeaderLen) == '/') { + returnNode = CreateUnlinkedNode(_textPool); + returnNode->_parseLineNum = startLine; + p = start; // Back it up, all the text counts. 
+ _parseCurLineNum = startLine; + } + else { + returnNode = CreateUnlinkedNode(_elementPool); + returnNode->_parseLineNum = _parseCurLineNum; + p += elementHeaderLen; + } } else { returnNode = CreateUnlinkedNode( _textPool ); @@ -814,6 +829,34 @@ XMLNode::~XMLNode() } } +// ChildElementCount was originally suggested by msteiger on the sourceforge page for TinyXML and modified by KB1SPH for TinyXML-2. + +int XMLNode::ChildElementCount(const char *value) const { + int count = 0; + + const XMLElement *e = FirstChildElement(value); + + while (e) { + e = e->NextSiblingElement(value); + count++; + } + + return count; +} + +int XMLNode::ChildElementCount() const { + int count = 0; + + const XMLElement *e = FirstChildElement(); + + while (e) { + e = e->NextSiblingElement(); + count++; + } + + return count; +} + const char* XMLNode::Value() const { // Edge case: XMLDocuments don't have a Value. Return null. @@ -1062,21 +1105,23 @@ char* XMLNode::ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr ) if (_document->Error()) return 0; + bool first = true; while( p && *p ) { XMLNode* node = 0; - p = _document->Identify( p, &node ); + p = _document->Identify( p, &node, first ); TIXMLASSERT( p ); if ( node == 0 ) { break; } + first = false; const int initialLineNum = node->_parseLineNum; StrPair endTag; p = node->ParseDeep( p, &endTag, curLineNumPtr ); if ( !p ) { - DeleteNode( node ); + _document->DeleteNode( node ); if ( !_document->Error() ) { _document->SetError( XML_ERROR_PARSING, initialLineNum, 0); } @@ -1109,7 +1154,7 @@ char* XMLNode::ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr ) } if ( !wellLocated ) { _document->SetError( XML_ERROR_PARSING_DECLARATION, initialLineNum, "XMLDeclaration value=%s", decl->Value()); - DeleteNode( node ); + _document->DeleteNode( node ); break; } } @@ -1144,7 +1189,7 @@ char* XMLNode::ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr ) } if ( mismatch ) { _document->SetError( 
XML_ERROR_MISMATCHED_ELEMENT, initialLineNum, "XMLElement name=%s", ele->Name()); - DeleteNode( node ); + _document->DeleteNode( node ); break; } } @@ -1776,11 +1821,11 @@ XMLError XMLElement::QueryInt64Text(int64_t* ival) const } -XMLError XMLElement::QueryUnsigned64Text(uint64_t* ival) const +XMLError XMLElement::QueryUnsigned64Text(uint64_t* uval) const { if(FirstChild() && FirstChild()->ToText()) { const char* t = FirstChild()->Value(); - if(XMLUtil::ToUnsigned64(t, ival)) { + if(XMLUtil::ToUnsigned64(t, uval)) { return XML_SUCCESS; } return XML_CAN_NOT_CONVERT_TEXT; @@ -2412,21 +2457,21 @@ XMLError XMLDocument::SaveFile( FILE* fp, bool compact ) } -XMLError XMLDocument::Parse( const char* p, size_t len ) +XMLError XMLDocument::Parse( const char* xml, size_t nBytes ) { Clear(); - if ( len == 0 || !p || !*p ) { + if ( nBytes == 0 || !xml || !*xml ) { SetError( XML_ERROR_EMPTY_DOCUMENT, 0, 0 ); return _errorID; } - if ( len == static_cast(-1) ) { - len = strlen( p ); + if ( nBytes == static_cast(-1) ) { + nBytes = strlen( xml ); } TIXMLASSERT( _charBuffer == 0 ); - _charBuffer = new char[ len+1 ]; - memcpy( _charBuffer, p, len ); - _charBuffer[len] = 0; + _charBuffer = new char[ nBytes+1 ]; + memcpy( _charBuffer, xml, nBytes ); + _charBuffer[nBytes] = 0; Parse(); if ( Error() ) { diff --git a/3rdparty/tinyxml2/tinyxml2.h b/3rdparty/tinyxml2/tinyxml2.h index 452ae95bb..7586f7b8d 100755 --- a/3rdparty/tinyxml2/tinyxml2.h +++ b/3rdparty/tinyxml2/tinyxml2.h @@ -42,9 +42,6 @@ distribution. #endif #include -/* - TODO: intern strings instead of allocation. -*/ /* gcc: g++ -Wall -DTINYXML2_DEBUG tinyxml2.cpp xmltest.cpp -o gccxmltest.exe @@ -64,7 +61,7 @@ distribution. # pragma warning(disable: 4251) #endif -#ifdef _WIN32 +#ifdef _MSC_VER # ifdef TINYXML2_EXPORT # define TINYXML2_LIB __declspec(dllexport) # elif defined(TINYXML2_IMPORT) @@ -83,27 +80,27 @@ distribution. 
#if defined(TINYXML2_DEBUG) # if defined(_MSC_VER) # // "(void)0," is for suppressing C4127 warning in "assert(false)", "assert(true)" and the like -# define TIXMLASSERT( x ) if ( !((void)0,(x))) { __debugbreak(); } +# define TIXMLASSERT( x ) do { if ( !((void)0,(x))) { __debugbreak(); } } while(false) # elif defined (ANDROID_NDK) # include -# define TIXMLASSERT( x ) if ( !(x)) { __android_log_assert( "assert", "grinliz", "ASSERT in '%s' at %d.", __FILE__, __LINE__ ); } +# define TIXMLASSERT( x ) do { if ( !(x)) { __android_log_assert( "assert", "grinliz", "ASSERT in '%s' at %d.", __FILE__, __LINE__ ); } } while(false) # else # include # define TIXMLASSERT assert # endif #else -# define TIXMLASSERT( x ) {} +# define TIXMLASSERT( x ) do {} while(false) #endif #endif /* Versioning, past 1.0.14: http://semver.org/ */ -static const int TIXML2_MAJOR_VERSION = 9; +static const int TIXML2_MAJOR_VERSION = 10; static const int TIXML2_MINOR_VERSION = 0; static const int TIXML2_PATCH_VERSION = 0; -#define TINYXML2_MAJOR_VERSION 9 +#define TINYXML2_MAJOR_VERSION 10 #define TINYXML2_MINOR_VERSION 0 #define TINYXML2_PATCH_VERSION 0 @@ -112,7 +109,7 @@ static const int TIXML2_PATCH_VERSION = 0; // system, and the capacity of the stack. On the other hand, it's a trivial // attack that can result from ill, malicious, or even correctly formed XML, // so there needs to be a limit in place. 
-static const int TINYXML2_MAX_ELEMENT_DEPTH = 100; +static const int TINYXML2_MAX_ELEMENT_DEPTH = 500; namespace tinyxml2 { @@ -305,9 +302,9 @@ class DynArray if ( cap > _allocated ) { TIXMLASSERT( cap <= INT_MAX / 2 ); const int newAllocated = cap * 2; - T* newMem = new T[newAllocated]; + T* newMem = new T[static_cast(newAllocated)]; TIXMLASSERT( newAllocated >= _size ); - memcpy( newMem, _mem, sizeof(T)*_size ); // warning: not using constructors, only works for PODs + memcpy( newMem, _mem, sizeof(T)*static_cast(_size) ); // warning: not using constructors, only works for PODs if ( _mem != _pool ) { delete [] _mem; } @@ -317,7 +314,7 @@ class DynArray } T* _mem; - T _pool[INITIAL_SIZE]; + T _pool[static_cast(INITIAL_SIZE)]; int _allocated; // objects allocated int _size; // number objects in use }; @@ -365,17 +362,17 @@ class MemPoolT : public MemPool _nUntracked = 0; } - virtual int ItemSize() const { + virtual int ItemSize() const override{ return ITEM_SIZE; } int CurrentAllocs() const { return _currentAllocs; } - virtual void* Alloc() { + virtual void* Alloc() override{ if ( !_root ) { // Need a new block. 
- Block* block = new Block(); + Block* block = new Block; _blockPtrs.Push( block ); Item* blockItems = block->items; @@ -398,7 +395,7 @@ class MemPoolT : public MemPool return result; } - virtual void Free( void* mem ) { + virtual void Free( void* mem ) override { if ( !mem ) { return; } @@ -416,7 +413,7 @@ class MemPoolT : public MemPool ITEM_SIZE, _nAllocs, _blockPtrs.Size() ); } - void SetTracked() { + void SetTracked() override { --_nUntracked; } @@ -443,7 +440,7 @@ class MemPoolT : public MemPool union Item { Item* next; - char itemData[ITEM_SIZE]; + char itemData[static_cast(ITEM_SIZE)]; }; struct Block { Item items[ITEMS_PER_BLOCK]; @@ -603,7 +600,7 @@ class TINYXML2_LIB XMLUtil TIXMLASSERT( p ); TIXMLASSERT( q ); TIXMLASSERT( nChar >= 0 ); - return strncmp( p, q, nChar ) == 0; + return strncmp( p, q, static_cast(nChar) ) == 0; } inline static bool IsUTF8Continuation( const char p ) { @@ -732,6 +729,12 @@ class TINYXML2_LIB XMLNode return 0; } + // ChildElementCount was originally suggested by msteiger on the sourceforge page for TinyXML and modified by KB1SPH for TinyXML-2. + + int ChildElementCount(const char *value) const; + + int ChildElementCount() const; + /** The meaning of 'value' changes for the specific type. 
@verbatim Document: empty (NULL is returned, not an empty string) @@ -992,12 +995,12 @@ class TINYXML2_LIB XMLText : public XMLNode { friend class XMLDocument; public: - virtual bool Accept( XMLVisitor* visitor ) const; + virtual bool Accept( XMLVisitor* visitor ) const override; - virtual XMLText* ToText() { + virtual XMLText* ToText() override { return this; } - virtual const XMLText* ToText() const { + virtual const XMLText* ToText() const override { return this; } @@ -1010,14 +1013,14 @@ class TINYXML2_LIB XMLText : public XMLNode return _isCData; } - virtual XMLNode* ShallowClone( XMLDocument* document ) const; - virtual bool ShallowEqual( const XMLNode* compare ) const; + virtual XMLNode* ShallowClone( XMLDocument* document ) const override; + virtual bool ShallowEqual( const XMLNode* compare ) const override; protected: explicit XMLText( XMLDocument* doc ) : XMLNode( doc ), _isCData( false ) {} virtual ~XMLText() {} - char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr ); + char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr ) override; private: bool _isCData; @@ -1032,23 +1035,23 @@ class TINYXML2_LIB XMLComment : public XMLNode { friend class XMLDocument; public: - virtual XMLComment* ToComment() { + virtual XMLComment* ToComment() override { return this; } - virtual const XMLComment* ToComment() const { + virtual const XMLComment* ToComment() const override { return this; } - virtual bool Accept( XMLVisitor* visitor ) const; + virtual bool Accept( XMLVisitor* visitor ) const override; - virtual XMLNode* ShallowClone( XMLDocument* document ) const; - virtual bool ShallowEqual( const XMLNode* compare ) const; + virtual XMLNode* ShallowClone( XMLDocument* document ) const override; + virtual bool ShallowEqual( const XMLNode* compare ) const override; protected: explicit XMLComment( XMLDocument* doc ); virtual ~XMLComment(); - char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr); + char* ParseDeep( char* p, 
StrPair* parentEndTag, int* curLineNumPtr) override; private: XMLComment( const XMLComment& ); // not supported @@ -1071,23 +1074,23 @@ class TINYXML2_LIB XMLDeclaration : public XMLNode { friend class XMLDocument; public: - virtual XMLDeclaration* ToDeclaration() { + virtual XMLDeclaration* ToDeclaration() override { return this; } - virtual const XMLDeclaration* ToDeclaration() const { + virtual const XMLDeclaration* ToDeclaration() const override { return this; } - virtual bool Accept( XMLVisitor* visitor ) const; + virtual bool Accept( XMLVisitor* visitor ) const override; - virtual XMLNode* ShallowClone( XMLDocument* document ) const; - virtual bool ShallowEqual( const XMLNode* compare ) const; + virtual XMLNode* ShallowClone( XMLDocument* document ) const override; + virtual bool ShallowEqual( const XMLNode* compare ) const override; protected: explicit XMLDeclaration( XMLDocument* doc ); virtual ~XMLDeclaration(); - char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr ); + char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr ) override; private: XMLDeclaration( const XMLDeclaration& ); // not supported @@ -1106,23 +1109,23 @@ class TINYXML2_LIB XMLUnknown : public XMLNode { friend class XMLDocument; public: - virtual XMLUnknown* ToUnknown() { + virtual XMLUnknown* ToUnknown() override { return this; } - virtual const XMLUnknown* ToUnknown() const { + virtual const XMLUnknown* ToUnknown() const override { return this; } - virtual bool Accept( XMLVisitor* visitor ) const; + virtual bool Accept( XMLVisitor* visitor ) const override; - virtual XMLNode* ShallowClone( XMLDocument* document ) const; - virtual bool ShallowEqual( const XMLNode* compare ) const; + virtual XMLNode* ShallowClone( XMLDocument* document ) const override; + virtual bool ShallowEqual( const XMLNode* compare ) const override; protected: explicit XMLUnknown( XMLDocument* doc ); virtual ~XMLUnknown(); - char* ParseDeep( char* p, StrPair* parentEndTag, int* 
curLineNumPtr ); + char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr ) override; private: XMLUnknown( const XMLUnknown& ); // not supported @@ -1274,13 +1277,13 @@ class TINYXML2_LIB XMLElement : public XMLNode SetValue( str, staticMem ); } - virtual XMLElement* ToElement() { + virtual XMLElement* ToElement() override { return this; } - virtual const XMLElement* ToElement() const { + virtual const XMLElement* ToElement() const override { return this; } - virtual bool Accept( XMLVisitor* visitor ) const; + virtual bool Accept( XMLVisitor* visitor ) const override; /** Given an attribute name, Attribute() returns the value for the attribute of that name, or null if none @@ -1676,11 +1679,11 @@ class TINYXML2_LIB XMLElement : public XMLNode ElementClosingType ClosingType() const { return _closingType; } - virtual XMLNode* ShallowClone( XMLDocument* document ) const; - virtual bool ShallowEqual( const XMLNode* compare ) const; + virtual XMLNode* ShallowClone( XMLDocument* document ) const override; + virtual bool ShallowEqual( const XMLNode* compare ) const override; protected: - char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr ); + char* ParseDeep( char* p, StrPair* parentEndTag, int* curLineNumPtr ) override; private: XMLElement( XMLDocument* doc ); @@ -1704,7 +1707,8 @@ class TINYXML2_LIB XMLElement : public XMLNode enum Whitespace { PRESERVE_WHITESPACE, - COLLAPSE_WHITESPACE + COLLAPSE_WHITESPACE, + PEDANTIC_WHITESPACE }; @@ -1728,11 +1732,11 @@ class TINYXML2_LIB XMLDocument : public XMLNode XMLDocument( bool processEntities = true, Whitespace whitespaceMode = PRESERVE_WHITESPACE ); ~XMLDocument(); - virtual XMLDocument* ToDocument() { + virtual XMLDocument* ToDocument() override { TIXMLASSERT( this == _document ); return this; } - virtual const XMLDocument* ToDocument() const { + virtual const XMLDocument* ToDocument() const override { TIXMLASSERT( this == _document ); return this; } @@ -1829,7 +1833,7 @@ class TINYXML2_LIB 
XMLDocument : public XMLNode @endverbatim */ void Print( XMLPrinter* streamer=0 ) const; - virtual bool Accept( XMLVisitor* visitor ) const; + virtual bool Accept( XMLVisitor* visitor ) const override; /** Create a new Element associated with @@ -1915,15 +1919,15 @@ class TINYXML2_LIB XMLDocument : public XMLNode void DeepCopy(XMLDocument* target) const; // internal - char* Identify( char* p, XMLNode** node ); + char* Identify( char* p, XMLNode** node, bool first ); // internal void MarkInUse(const XMLNode* const); - virtual XMLNode* ShallowClone( XMLDocument* /*document*/ ) const { + virtual XMLNode* ShallowClone( XMLDocument* /*document*/ ) const override{ return 0; } - virtual bool ShallowEqual( const XMLNode* /*compare*/ ) const { + virtual bool ShallowEqual( const XMLNode* /*compare*/ ) const override{ return false; } @@ -2286,18 +2290,18 @@ class TINYXML2_LIB XMLPrinter : public XMLVisitor void PushDeclaration( const char* value ); void PushUnknown( const char* value ); - virtual bool VisitEnter( const XMLDocument& /*doc*/ ); - virtual bool VisitExit( const XMLDocument& /*doc*/ ) { + virtual bool VisitEnter( const XMLDocument& /*doc*/ ) override; + virtual bool VisitExit( const XMLDocument& /*doc*/ ) override { return true; } - virtual bool VisitEnter( const XMLElement& element, const XMLAttribute* attribute ); - virtual bool VisitExit( const XMLElement& element ); + virtual bool VisitEnter( const XMLElement& element, const XMLAttribute* attribute ) override; + virtual bool VisitExit( const XMLElement& element ) override; - virtual bool Visit( const XMLText& text ); - virtual bool Visit( const XMLComment& comment ); - virtual bool Visit( const XMLDeclaration& declaration ); - virtual bool Visit( const XMLUnknown& unknown ); + virtual bool Visit( const XMLText& text ) override; + virtual bool Visit( const XMLComment& comment ) override; + virtual bool Visit( const XMLDeclaration& declaration ) override; + virtual bool Visit( const XMLUnknown& unknown ) 
override; /** If in print to memory mode, return a pointer to diff --git a/CHANGELOG.rst b/CHANGELOG.rst index a9cb4cb0d..600abbaa3 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -2,6 +2,152 @@ Changelog for package behaviortree_cpp ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +4.7.2 (2025-05-29) +------------------ +* Fix issue `#978 `_ : skipped was not working properly +* Added codespell as a pre-commit hook. (`#977 `_) +* fix: Make impossible to accidentally copy JsonExporter singleton (`#975 `_) +* Contributors: Davide Faconti, Leander Stephen D'Souza, tony-p + +4.7.1 (2025-05-13) +------------------ +* fix ROS CI +* Add action to publish Doxygen documentation as GH Page (`#972 `_) +* Update Doxyfile +* Make BT::Any::copyInto const (`#970 `_) +* more changes related to TestNode +* Contributors: David Sobek, Davide Faconti, Marcus Ebner von Eschenbach + +4.7.0 (2025-04-24) +------------------ +* change TestNodeConfig preferred constructor +* Fix dangling‐capture in TestNodeConfig +* Fix Precondition to only check condition once (`#904 `_) +* fix issue 945 +* extend JSON conversion to include vectors (`#965 `_) +* Fix CI, add BUILD_TESTS and remove catkin support +* Fix testing CMake issue to resolve Rolling regression (`#961 `_) +* Bug fix/set blackboard (`#955 `_) +* feat: add fuzzing harnesses (`#925 `_) +* fix warnings +* Add const to applyVisitor (`#935 `_) +* try fix (`#941 `_) +* add workflow for sonarcube (`#936 `_) +* Fix issue `#909 `_: static queue in Loop +* apply changes suggested in `#893 `_ +* apply fix mentioned in `#916 `_ +* apply fixes suggested in `#919 `_ +* fix issue `#918 `_ (introduced in `#885 `_) +* add fix suggested in `#920 `_ +* add unit test related to `#931 `_ +* Fix compilation error when targeting C++23 (`#926 `_) ^~~~~~~~~~~~~ +* Fixes issue # `#929 `_ and `#921 `_ +* apply check suggested in `#924 `_ +* Fix ROS 2 build when ZeroMQ or SQlite3 include are not in the default include path (`#911 `_) + * Fix ROS 2 build when ZeroMQ or 
SQlite3 include are not in the default include path + * Update ament_build.cmake +* Fix/use correct compiler pixi/conda (`#914 `_) + * fix: Use the cxx-compiler package which will set the correct compiler for the platform, and setup the required environment for it to work as expected + * misc: update pixi versions in pipeline +* Add "other ports" to NodeConfig (`#910 `_) +* [retry_node] Refresh max_attempts\_ in case it changed (`#905 `_) + Co-authored-by: Guillaume Doisy +* use relative path in .Doxyfile (`#882 `_) +* Additional XML verification for ReactiveSequence nodes (`#885 `_) + Co-authored-by: AndyZe +* fix script parse error while 'A==-1' (`#896 `_) + Co-authored-by: wangzheng +* Expose return value of wait_for (`#887 `_) +* fix(examples): update t11_groot_howto log filename (`#886 `_) +* put minitrace in the build_interface link library (`#874 `_) + fixes the cmake export set when building behavior tree on standard cmake: CMake Error: install(EXPORT "behaviortree_cppTargets" ...) includes target "behaviortree_cpp" which requires target "minitrace" that is not in any export set. +* Improved XML parsing error message to say where in the XML the offending port is found. (`#876 `_) + Example output: + a port with name [ball_pose] is found in the XML (, line 7) but not in the providedPorts() of its registered node type. +* Refactored the TreeNode::executeTick() function to use a scoped timer for performance monitoring. (`#861 `_) (`#863 `_) + Update src/tree_node.cpp + Co-authored-by: wangzheng + Co-authored-by: Davide Faconti +* fix issue `#852 `_: thread safety in Loggers +* Lexy updated +* tinyXML updated to version 10.0 +* cppzmq updated to version 4.10 +* fix the "all_skipped" logic +* fixed: support utf-8 path xml-file (`#845 `_) + * fixed: 1. added compile version check to support Chinese path xml-file parsing 2. 
cmake add msvc /utf-8 options + * change cmake /utf-8 option add mode +* Export plugins to share directory & register CrossDoor plugin (`#804 `_) +* Contributors: Aglargil, AndyZe, Antoine Hoarau, David Sobek, Davide Faconti, Guillaume Doisy, Isar Meijer, Jake Keller, Marq Rasmussen, Michele Tartari, Silvio Traversaro, Tony Najjar, b-adkins, ckrah, devis12, kinly, tony-p, vincent-hui + +4.6.2 (2024-06-26) +------------------ +* Initialize template variable `T out` (`#839 `_) +* Building with a recent compiler fails due incompatible expected library (`#833 `_) + * nonstd::expected updated to 0.8 +* fix issue `#829 `_: support again custom JSON converters +* fix issue `#834 `_: enable minitrace +* allow multiple instances of the loggers +* fix issue `#827 `_ : verify name +* add TickMonitorCallback +* Fix typo in FallbackNode constructor parameter name (`#830 `_) +* fix segfault and throw instead when manifest is nullptr +* Add in call to ament_export_targets. (`#826 `_) +* Contributors: Davide Faconti, S. 
Messerschmidt, Sharmin Ramli, avikus-seonghyeon.kwon + +4.6.1 (2024-05-20) +------------------ +* remove flatbuffers from public API and old file_logger +* fix issue `#824 `_: use global in Blackboard::set +* Add test for setting a global blackboard entry using a node's output port `#823 `_ +* examples renamed +* Contributors: Davide Faconti, Robin Müller + +4.6.0 (2024-04-28) +------------------ +* add tutorial 19 about the global blackboard +* renamed examples to match website +* Update TestNode and the corresponding tutorial +* bug fixes related to sequence_id and unit tests added +* Add string concatenation operator to scripting (`#802 `_) +* Add library alias for BT::behaviortree_cpp (`#808 `_) +* add Time Stamped blackboard (`#805 `_) +* add additional information and functionality to SQLiteLogger +* add syntax for entries in the root blackboard ("@" prefix) +* Fix/pixi build (`#791 `_) +* fix unit tests in Windows +* fix windows compilation +* Update cmake_windows.yml +* Deprecate Balckboard::clear(). 
Issue `#794 `_ +* Support string vector conversion for ports (`#790 `_) +* add more convertToString for integers +* warn about overwritten enums +* fix ambiguous to_json +* Extend unit test for blackboard backup to run the second tree (`#789 `_) +* json conversion changed and +* issue `#755 `_ : add backchaining test and change reactive nodes checks (`#770 `_) +* Update switch_node.h +* test moved and port remapping fixed +* Create pull_request_template.md + +* adding pre-commit +* handle enums conversions is assignment +* Contributors: Davide Faconti, Sean Geles, Sebastian Castro, Victor Massagué Respall, avikus-seonghyeon.kwon, tony-p + +4.5.2 (2024-03-07) +------------------ +* bugfix: string to enum/integer/boolean in scripts +* bug fix in scripting comparison +* added more pretty-prints to demangler +* fixes and checks in default values, based on PR `#773 `_ +* Initialize std::atomic_bool (`#772 `_) +* Fix issue `#767 `_ and `#768 `_ +* updated default port syntax: "{=}" +* new default port capability: blackbard entries +* fix issue `#757 `_ : skipped nodes should not call post-condition ALWAYS +* Merge pull request `#756 `_ from imere/imere-patch-1 +* fix(test): Typo in gtest_blackboard.cpp +* Contributors: Davide Faconti, Lu Z, Marq Rasmussen + 4.5.1 (2024-01-23) ------------------ * Support enums and real numbers in Node Switch @@ -254,7 +400,7 @@ Changelog for package behaviortree_cpp * better include paths * Control node and Decorators RUNNING before first child * blackboard: update getKeys and add mutex to scripting -* add [[nodiscard]] and some othe minor changes +* add [[nodiscard]] and some other minor changes * add screenshot * change the behavior of tickOnce to actually loop is wake up signal is… (`#522 `_) * change the behavior of tickOnce to actually loop is wake up signal is received @@ -353,7 +499,7 @@ Changelog for package behaviortree_cpp dependency explicitly. * Change order of lock to prevent deadlock. (`#368 `_) Resolves `#367 `_. 
-* Fix `#320 `_ : forbit refrences in Any +* Fix `#320 `_ : forbid references in Any * Update action_node.h * Contributors: Adam Sasine, Davide Faconti, Fabian Schurig, Griswald Brooks, Hyeongsik Min, Robodrome, imgbot[bot], panwauu @@ -700,9 +846,9 @@ Changelog for package behaviortree_cpp * Conan package distribution (#39) * Non-functional refactoring of xml_parsing to clean up the code * cosmetic changes in the code of BehaviorTreeFactory -* XML schema. Related to enchancement #40 +* XML schema. Related to enhancement #40 * call setRegistrationName() for built-in Nodes - The methos is called by BehaviorTreefactory, therefore it + The method is called by BehaviorTreefactory, therefore it registrationName is empty if trees are created programmatically. * Reset reference count when destroying logger (issue #38) * Contributors: Davide Facont, Davide Faconti, Uilian Ries @@ -718,7 +864,7 @@ Changelog for package behaviortree_cpp ------------------ * adding virtual TreeNode::onInit() [issue #33] * fix issue #34 : if you don't implement convertFromString, it will compile but it may throw -* Pretty demangled names and obsolate comments removed +* Pretty demangled names and obsolete comments removed * bug fixes * more comments * [enhancement #32]: add CoroActionNode and rename ActionNode as "AsynActionNode" @@ -785,7 +931,7 @@ Changelog for package behaviortree_cpp * Fix: registerBuilder did not register the manifest. 
It was "broken" as public API method * Use the Pimpl idiom to hide zmq from the header file * move header of minitrace in the cpp file -* Fixed a crash occuring when you didn't initialized a Tree object (#20) +* Fixed a crash occurring when you didn't initialized a Tree object (#20) * Fix issue #16 * add ParallelNode to pre-registered entries in factory (issue #13) * removed M_PI diff --git a/CMakeLists.txt b/CMakeLists.txt index 1863b80de..e69c9e96c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,37 +1,62 @@ cmake_minimum_required(VERSION 3.16.3) # version on Ubuntu Focal -project(behaviortree_cpp VERSION 4.5.1 LANGUAGES C CXX) +project(behaviortree_cpp VERSION 4.7.2 LANGUAGES C CXX) -set(CMAKE_CONFIG_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_LIST_DIR}/cmake") +# create compile_commands.json +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +#---- project configuration ---- +option(BTCPP_SHARED_LIBS "Build shared libraries" ON) +option(BTCPP_BUILD_TOOLS "Build commandline tools" ON) +option(BTCPP_EXAMPLES "Build tutorials and examples" ON) +option(BUILD_TESTING "Build the unit tests" ON) +option(BTCPP_GROOT_INTERFACE "Add Groot2 connection. Requires ZeroMQ" ON) +option(BTCPP_SQLITE_LOGGING "Add SQLite logging." 
ON) + +option(USE_V3_COMPATIBLE_NAMES "Use some alias to compile more easily old 3.x code" OFF) +option(ENABLE_FUZZING "Enable fuzzing builds" OFF) +option(USE_AFLPLUSPLUS "Use AFL++ instead of libFuzzer" OFF) +option(ENABLE_DEBUG "Enable debug build with full symbols" OFF) +option(FORCE_STATIC_LINKING "Force static linking of all dependencies" OFF) + +set(BASE_FLAGS "") + +if(ENABLE_DEBUG) + list(APPEND BASE_FLAGS + -g3 + -ggdb3 + -O0 + -fno-omit-frame-pointer + ) +endif() + +# Include fuzzing configuration if enabled +if(ENABLE_FUZZING) + include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/fuzzing_build.cmake) +else() + # Apply base flags for non-fuzzing builds + add_compile_options(${BASE_FLAGS}) + add_link_options(${BASE_FLAGS}) +endif() + +set(CMAKE_CONFIG_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_LIST_DIR}/cmake") list(APPEND CMAKE_MODULE_PATH "${CMAKE_CONFIG_PATH}") set(BTCPP_LIBRARY ${PROJECT_NAME}) if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES) - message(STATUS "Setting build type to 'Release' as none was specified.") - set(CMAKE_BUILD_TYPE "Release" CACHE - STRING "Choose the type of build." FORCE) - # Set the possible values of build type for cmake-gui - set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS - "Debug" "Release" "MinSizeRel" "RelWithDebInfo") + message(STATUS "Setting build type to 'Release' as none was specified.") + set(CMAKE_BUILD_TYPE "Release" CACHE STRING "Choose the type of build." 
FORCE) + set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS + "Debug" "Release" "MinSizeRel" "RelWithDebInfo") endif() if(MSVC) add_definitions(-D_CRT_SECURE_NO_WARNINGS -DWIN32_LEAN_AND_MEAN) else() - add_definitions(-Wpedantic) + add_definitions(-Wpedantic -fno-omit-frame-pointer) endif() -#---- project configuration ---- -option(BTCPP_SHARED_LIBS "Build shared libraries" ON) -option(BTCPP_BUILD_TOOLS "Build commandline tools" ON) -option(BTCPP_EXAMPLES "Build tutorials and examples" ON) -option(BTCPP_UNIT_TESTS "Build the unit tests" ON) -option(BTCPP_GROOT_INTERFACE "Add Groot2 connection. Requires ZeroMQ" ON) -option(BTCPP_SQLITE_LOGGING "Add SQLite logging." ON) - -option(USE_V3_COMPATIBLE_NAMES "Use some alias to compile more easily old 3.x code" OFF) - if(USE_V3_COMPATIBLE_NAMES) add_definitions(-DUSE_BTCPP3_OLD_NAMES) endif() @@ -54,31 +79,25 @@ if ( ament_cmake_FOUND ) add_definitions( -DUSING_ROS2 ) message(STATUS "------------------------------------------") - message(STATUS "BehaviourTree is being built using AMENT.") + message(STATUS "BehaviorTree is being built using AMENT.") message(STATUS "------------------------------------------") include(cmake/ament_build.cmake) - -elseif( CATKIN_DEVEL_PREFIX OR CATKIN_BUILD_BINARY_PACKAGE) - - add_definitions( -DUSING_ROS ) - message(STATUS "------------------------------------------") - message(STATUS "BehaviourTree is being built using CATKIN.") - message(STATUS "------------------------------------------") - include(cmake/catkin_build.cmake) - set(catkin_FOUND TRUE) else() message(STATUS "------------------------------------------") - message(STATUS "BehaviourTree is being built with conan.") + message(STATUS "BehaviorTree is being built with conan.") message(STATUS "------------------------------------------") include(cmake/conan_build.cmake) endif() - ############################################################# # LIBRARY add_subdirectory(3rdparty/lexy) +add_library(minitrace STATIC 
3rdparty/minitrace/minitrace.cpp) +target_compile_definitions(minitrace PRIVATE MTR_ENABLED=True) +set_property(TARGET minitrace PROPERTY POSITION_INDEPENDENT_CODE ON) + list(APPEND BT_SOURCE src/action_node.cpp src/basic_types.cpp @@ -96,13 +115,15 @@ list(APPEND BT_SOURCE src/actions/test_node.cpp src/actions/sleep_node.cpp + src/actions/updated_action.cpp src/decorators/delay_node.cpp src/decorators/inverter_node.cpp src/decorators/repeat_node.cpp src/decorators/retry_node.cpp - src/decorators/timeout_node.cpp src/decorators/subtree_node.cpp + src/decorators/timeout_node.cpp + src/decorators/updated_decorator.cpp src/controls/if_then_else_node.cpp src/controls/fallback_node.cpp @@ -121,7 +142,6 @@ list(APPEND BT_SOURCE src/loggers/bt_observer.cpp 3rdparty/tinyxml2/tinyxml2.cpp - 3rdparty/minitrace/minitrace.cpp ) @@ -160,6 +180,8 @@ target_link_libraries(${BTCPP_LIBRARY} Threads::Threads ${CMAKE_DL_LIBS} $ + $ + PUBLIC ${BTCPP_EXTRA_LIBRARIES} ) @@ -179,20 +201,35 @@ target_compile_definitions(${BTCPP_LIBRARY} PUBLIC BTCPP_LIBRARY_VERSION="${CMAK target_compile_features(${BTCPP_LIBRARY} PUBLIC cxx_std_17) if(MSVC) + target_compile_options(${BTCPP_LIBRARY} PRIVATE "/source-charset:utf-8") else() - target_compile_options(${BTCPP_LIBRARY} PRIVATE -Wall -Wextra) + if(ENABLE_DEBUG) + target_compile_options(${BTCPP_LIBRARY} PRIVATE -Wall -Wextra -g3 -ggdb3 -O0 -fno-omit-frame-pointer) + else() + target_compile_options(${BTCPP_LIBRARY} PRIVATE -Wall -Wextra) + endif() +endif() + +add_library(BT::${BTCPP_LIBRARY} ALIAS ${BTCPP_LIBRARY}) + +# Add fuzzing targets +if(ENABLE_FUZZING) + add_fuzzing_targets() endif() ############################################################# message( STATUS "BTCPP_LIB_DESTINATION: ${BTCPP_LIB_DESTINATION} " ) message( STATUS "BTCPP_INCLUDE_DESTINATION: ${BTCPP_INCLUDE_DESTINATION} " ) -message( STATUS "BTCPP_UNIT_TESTS: ${BTCPP_UNIT_TESTS} " ) -add_subdirectory(sample_nodes) +if (BUILD_TESTING OR BTCPP_EXAMPLES) + 
add_subdirectory(sample_nodes) +endif() ###################################################### -if (BTCPP_UNIT_TESTS) +include(CTest) +message( STATUS "BUILD_TESTING: ${BUILD_TESTING} " ) +if (BUILD_TESTING) add_subdirectory(tests) endif() @@ -208,7 +245,7 @@ endif() # INSTALL INSTALL(TARGETS ${BTCPP_LIBRARY} - EXPORT ${PROJECT_NAME}Targets + EXPORT ${BTCPP_LIBRARY}Targets ARCHIVE DESTINATION ${BTCPP_LIB_DESTINATION} LIBRARY DESTINATION ${BTCPP_LIB_DESTINATION} RUNTIME DESTINATION ${BTCPP_BIN_DESTINATION} @@ -220,4 +257,3 @@ INSTALL( DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/include/ FILES_MATCHING PATTERN "*.h*") export_btcpp_package() - diff --git a/Doxyfile b/Doxyfile index 7c24f87b6..d06db140e 100644 --- a/Doxyfile +++ b/Doxyfile @@ -58,7 +58,7 @@ PROJECT_LOGO = # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. -OUTPUT_DIRECTORY = /home/davide.faconti/ws_behavior_tree/src/Behavior-Tree/doc +OUTPUT_DIRECTORY = ./doc # If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub- # directories (in 2 levels) under the output directory of each output format and @@ -781,7 +781,7 @@ WARN_LOGFILE = # spaces. See also FILE_PATTERNS and EXTENSION_MAPPING # Note: If this tag is empty the current directory is searched. -INPUT = /home/davide.faconti/ws_behavior_tree/src/Behavior-Tree/include +INPUT = ./include # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses @@ -863,8 +863,9 @@ RECURSIVE = YES # Note that relative paths are relative to the directory from which doxygen is # run. 
-EXCLUDE = /home/davide.faconti/ws_behavior_tree/src/Behavior-Tree/3rdparty \ - /home/davide.faconti/ws_behavior_tree/src/Behavior-Tree/gtest +EXCLUDE = ./3rdparty \ + ./gtest \ + ./include/behaviortree_cpp/contrib # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded diff --git a/LICENSE b/LICENSE index f60806f21..e55a7557f 100644 --- a/LICENSE +++ b/LICENSE @@ -19,4 +19,3 @@ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - diff --git a/README.md b/README.md index 48ad13537..6fda200c7 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,10 @@ ![License MIT](https://img.shields.io/github/license/BehaviorTree/BehaviorTree.CPP?color=blue) -![Version](https://img.shields.io/badge/version-4.5-blue.svg) [![conan Ubuntu](https://github.com/BehaviorTree/BehaviorTree.CPP/actions/workflows/cmake_ubuntu.yml/badge.svg)](https://github.com/BehaviorTree/BehaviorTree.CPP/actions/workflows/cmake_ubuntu.yml) [![conan Windows](https://github.com/BehaviorTree/BehaviorTree.CPP/actions/workflows/cmake_windows.yml/badge.svg)](https://github.com/BehaviorTree/BehaviorTree.CPP/actions/workflows/cmake_windows.yml) -[![ros1](https://github.com/BehaviorTree/BehaviorTree.CPP/workflows/ros1/badge.svg?branch=master)](https://github.com/BehaviorTree/BehaviorTree.CPP/actions?query=workflow%3Aros1) -[![ros2](https://github.com/BehaviorTree/BehaviorTree.CPP/workflows/ros2/badge.svg?branch=master)](https://github.com/BehaviorTree/BehaviorTree.CPP/actions?query=workflow%3Aros2) +[![ros2](https://github.com/BehaviorTree/BehaviorTree.CPP/actions/workflows/ros2.yaml/badge.svg)](https://github.com/BehaviorTree/BehaviorTree.CPP/actions/workflows/ros2.yaml) +[![pixi 
(Conda)](https://github.com/BehaviorTree/BehaviorTree.CPP/actions/workflows/pixi.yaml/badge.svg)](https://github.com/BehaviorTree/BehaviorTree.CPP/actions/workflows/pixi.yaml) -# BehaviorTree.CPP 4.5 +# BehaviorTree.CPP 4.7

@@ -36,17 +35,11 @@ to visualize, record, replay and analyze state transitions. You can learn about the main concepts, the API and the tutorials here: https://www.behaviortree.dev/ +An automatically generated API documentation can be found here: https://BehaviorTree.github.io/BehaviorTree.CPP/ + If the documentation doesn't answer your questions and/or you want to connect with the other **BT.CPP** users, visit [our forum](https://github.com/BehaviorTree/BehaviorTree.CPP/discussions) -## Previous version - -Version 3.8 of the software can be found in the branch -[v3.8](https://github.com/BehaviorTree/BehaviorTree.CPP/tree/v3.8). - -That branch might receive bug fixes, but the new features will be implemented -only in the master branch. - # GUI Editor Editing a BehaviorTree is as simple as editing an XML file in your favorite text editor. @@ -62,7 +55,6 @@ If you are looking for a more fancy graphical user interface (and I know you do) Three build systems are supported: -- **catkin**, if you use ROS - **colcon (ament)**, if you use ROS2 - **conan** otherwise (Linux/Windows). - **straight cmake** if you want to be personally responsible for dependencies :) @@ -87,6 +79,11 @@ cmake ../BehaviorTree.CPP cmake --build . --parallel ``` +If you want to build in a [pixi](https://pixi.sh/) project (conda virtual environment). +``` +pixi run build +``` + If you want to use BT.CPP in your application, please refer to the example here: https://github.com/BehaviorTree/btcpp_sample . @@ -99,6 +96,13 @@ You can contact the primary author, **dfaconti@aurynrobotics.com**, to discuss y [![Star History Chart](https://api.star-history.com/svg?repos=BehaviorTree/BehaviorTree.CPP&type=Date)](https://star-history.com/#BehaviorTree/BehaviorTree.CPP&Date) +## Previous version + +Version 3.8 of the software can be found in the branch +[v3.8](https://github.com/BehaviorTree/BehaviorTree.CPP/tree/v3.8). 
+ +That branch might receive bug fixes, but the new features will be implemented +only in the master branch. # License diff --git a/cmake/FindZeroMQ.cmake b/cmake/FindZeroMQ.cmake index 8553df95c..b11258812 100644 --- a/cmake/FindZeroMQ.cmake +++ b/cmake/FindZeroMQ.cmake @@ -29,8 +29,6 @@ else (ZeroMQ_LIBRARIES AND ZeroMQ_INCLUDE_DIRS) find_path(ZeroMQ_INCLUDE_DIR NAMES zmq.h - HINTS - "$ENV{CONDA_PREFIX}/include" PATHS /usr/include /usr/local/include diff --git a/cmake/ament_build.cmake b/cmake/ament_build.cmake index 9de2fd099..ec1e0a66b 100644 --- a/cmake/ament_build.cmake +++ b/cmake/ament_build.cmake @@ -12,6 +12,9 @@ endif() find_package(ament_index_cpp REQUIRED) +set(BTCPP_EXTRA_INCLUDE_DIRS ${ZeroMQ_INCLUDE_DIRS} + ${SQLite3_INCLUDE_DIRS}) + set( BTCPP_EXTRA_LIBRARIES $ $ @@ -26,6 +29,7 @@ set( BTCPP_BIN_DESTINATION bin ) mark_as_advanced( BTCPP_EXTRA_LIBRARIES + BTCPP_EXTRA_INCLUDE_DIRS BTCPP_LIB_DESTINATION BTCPP_INCLUDE_DESTINATION BTCPP_BIN_DESTINATION ) @@ -33,5 +37,6 @@ mark_as_advanced( macro(export_btcpp_package) ament_export_include_directories(include) ament_export_libraries(${BTCPP_LIBRARY}) + ament_export_targets(${BTCPP_LIBRARY}Targets) ament_package() endmacro() diff --git a/cmake/catkin_build.cmake b/cmake/catkin_build.cmake deleted file mode 100644 index 487d84773..000000000 --- a/cmake/catkin_build.cmake +++ /dev/null @@ -1,40 +0,0 @@ -#---- Add the subdirectory cmake ---- -set(CMAKE_CONFIG_PATH ${CMAKE_MODULE_PATH} "${PROJECT_SOURCE_DIR}/cmake") -list(APPEND CMAKE_MODULE_PATH "${CMAKE_CONFIG_PATH}") - -if(BTCPP_GROOT_INTERFACE) - find_package(ZeroMQ REQUIRED) -endif() - -if(BTCPP_SQLITE_LOGGING) - find_package(SQLite3 REQUIRED) -endif() - -find_package(catkin REQUIRED COMPONENTS roslib) - -catkin_package( - INCLUDE_DIRS include - LIBRARIES ${BTCPP_LIBRARY} - CATKIN_DEPENDS roslib ) - -set(BTCPP_EXTRA_INCLUDE_DIRS ${catkin_INCLUDE_DIRS} ) - -set( BTCPP_EXTRA_LIBRARIES - ${catkin_LIBRARIES} - ${ZeroMQ_LIBRARIES} - ${SQLite3_LIBRARIES}) - 
-set( BTCPP_LIB_DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} ) -set( BTCPP_INCLUDE_DESTINATION ${CATKIN_GLOBAL_INCLUDE_DESTINATION} ) -set( BTCPP_BIN_DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} ) - -mark_as_advanced( - BTCPP_EXTRA_LIBRARIES - BTCPP_EXTRA_INCLUDE_DIRS - BTCPP_LIB_DESTINATION - BTCPP_INCLUDE_DESTINATION - BTCPP_BIN_DESTINATION ) - -macro(export_btcpp_package) - # do nothing -endmacro() diff --git a/cmake/conan.cmake b/cmake/conan.cmake index 33512fcee..d36c5ed44 100644 --- a/cmake/conan.cmake +++ b/cmake/conan.cmake @@ -116,7 +116,7 @@ macro(_conan_check_language) set(LANGUAGE C) set(USING_CXX 0) else () - message(FATAL_ERROR "Conan: Neither C or C++ was detected as a language for the project. Unabled to detect compiler version.") + message(FATAL_ERROR "Conan: Neither C or C++ was detected as a language for the project. Unable to detect compiler version.") endif() endmacro() @@ -146,7 +146,7 @@ macro(_conan_detect_compiler) set(COMPILER_VERSION ${MAJOR}) else() set(COMPILER_VERSION ${MAJOR}.${MINOR}) - endif() + endif() elseif (${CMAKE_${LANGUAGE}_COMPILER_ID} STREQUAL QCC) set(_CONAN_SETTING_COMPILER qcc) set(COMPILER_VERSION ${MAJOR}.${MINOR}) @@ -180,7 +180,7 @@ macro(_conan_detect_compiler) set(COMPILER_VERSION ${MAJOR}) else() set(COMPILER_VERSION ${MAJOR}.${MINOR}) - endif() + endif() set(_CONAN_SETTING_COMPILER_VERSION ${COMPILER_VERSION}) @@ -190,7 +190,7 @@ macro(_conan_detect_compiler) set(_CONAN_SETTING_COMPILER_LIBCXX ${_LIBCXX}) endif () elseif (${CMAKE_${LANGUAGE}_COMPILER_ID} STREQUAL Clang - AND NOT "${CMAKE_${LANGUAGE}_COMPILER_FRONTEND_VARIANT}" STREQUAL "MSVC" + AND NOT "${CMAKE_${LANGUAGE}_COMPILER_FRONTEND_VARIANT}" STREQUAL "MSVC" AND NOT "${CMAKE_${LANGUAGE}_SIMULATE_ID}" STREQUAL "MSVC") string(REPLACE "." 
";" VERSION_LIST ${CMAKE_${LANGUAGE}_COMPILER_VERSION}) @@ -203,7 +203,7 @@ macro(_conan_detect_compiler) set(COMPILER_VERSION ${MAJOR}) else() set(COMPILER_VERSION ${MAJOR}.${MINOR}) - endif() + endif() set(_CONAN_SETTING_COMPILER_VERSION ${COMPILER_VERSION}) @@ -219,8 +219,8 @@ macro(_conan_detect_compiler) set(_CONAN_SETTING_COMPILER_LIBCXX ${_LIBCXX}) endif () elseif(${CMAKE_${LANGUAGE}_COMPILER_ID} STREQUAL MSVC - OR (${CMAKE_${LANGUAGE}_COMPILER_ID} STREQUAL Clang - AND "${CMAKE_${LANGUAGE}_COMPILER_FRONTEND_VARIANT}" STREQUAL "MSVC" + OR (${CMAKE_${LANGUAGE}_COMPILER_ID} STREQUAL Clang + AND "${CMAKE_${LANGUAGE}_COMPILER_FRONTEND_VARIANT}" STREQUAL "MSVC" AND "${CMAKE_${LANGUAGE}_SIMULATE_ID}" STREQUAL "MSVC")) set(_VISUAL "Visual Studio") @@ -475,7 +475,7 @@ function(conan_cmake_autodetect detected_settings) endfunction() macro(conan_parse_arguments) - set(options BASIC_SETUP CMAKE_TARGETS UPDATE KEEP_RPATHS NO_LOAD NO_OUTPUT_DIRS + set(options BASIC_SETUP CMAKE_TARGETS UPDATE KEEP_RPATHS NO_LOAD NO_OUTPUT_DIRS OUTPUT_QUIET NO_IMPORTS SKIP_STD) set(oneValueArgs CONANFILE ARCH BUILD_TYPE INSTALL_FOLDER OUTPUT_FOLDER CONAN_COMMAND) set(multiValueArgs DEBUG_PROFILE RELEASE_PROFILE RELWITHDEBINFO_PROFILE MINSIZEREL_PROFILE @@ -656,11 +656,11 @@ function(conan_cmake_install) if(DEFINED NO_IMPORTS) set(NO_IMPORTS --no-imports) endif() - set(install_args install ${PATH_OR_REFERENCE} ${REFERENCE} ${UPDATE} ${NO_IMPORTS} ${REMOTE} - ${LOCKFILE} ${LOCKFILE_OUT} ${LOCKFILE_NODE_ID} ${INSTALL_FOLDER} - ${OUTPUT_FOLDER} ${GENERATOR} ${BUILD} ${ENV} ${ENV_HOST} ${ENV_BUILD} - ${OPTIONS} ${OPTIONS_HOST} ${OPTIONS_BUILD} ${PROFILE} ${PROFILE_HOST} - ${PROFILE_BUILD} ${SETTINGS} ${SETTINGS_HOST} ${SETTINGS_BUILD} + set(install_args install ${PATH_OR_REFERENCE} ${REFERENCE} ${UPDATE} ${NO_IMPORTS} ${REMOTE} + ${LOCKFILE} ${LOCKFILE_OUT} ${LOCKFILE_NODE_ID} ${INSTALL_FOLDER} + ${OUTPUT_FOLDER} ${GENERATOR} ${BUILD} ${ENV} ${ENV_HOST} ${ENV_BUILD} + ${OPTIONS} ${OPTIONS_HOST} 
${OPTIONS_BUILD} ${PROFILE} ${PROFILE_HOST} + ${PROFILE_BUILD} ${SETTINGS} ${SETTINGS_HOST} ${SETTINGS_BUILD} ${CONF} ${CONF_HOST} ${CONF_BUILD}) string(REPLACE ";" " " _install_args "${install_args}") @@ -764,12 +764,12 @@ function(conan_cmake_lock_create) set(BASE --base) endif() set(lock_create_Args lock create ${PATH} ${REFERENCE} ${UPDATE} ${BASE} ${REMOTE} ${LOCKFILE} ${LOCKFILE_OUT} ${LOCKFILE_NODE_ID} ${INSTALL_FOLDER} - ${GENERATOR} ${BUILD} ${ENV} ${ENV_HOST} ${ENV_BUILD} ${OPTIONS} ${OPTIONS_HOST} ${OPTIONS_BUILD} + ${GENERATOR} ${BUILD} ${ENV} ${ENV_HOST} ${ENV_BUILD} ${OPTIONS} ${OPTIONS_HOST} ${OPTIONS_BUILD} ${PROFILE} ${PROFILE_HOST} ${PROFILE_BUILD} ${SETTINGS} ${SETTINGS_HOST} ${SETTINGS_BUILD}) string(REPLACE ";" " " _lock_create_Args "${lock_create_Args}") message(STATUS "Conan executing: ${CONAN_CMD} ${_lock_create_Args}") - + if(ARGS_OUTPUT_QUIET) set(OUTPUT_OPT OUTPUT_QUIET) endif() @@ -1050,7 +1050,7 @@ macro(conan_config_install) endif() if(DEFINED CONAN_ARGS) - # Convert ; seperated multi arg list into space seperated string + # Convert ; separated multi arg list into space separated string string(REPLACE ";" " " l_CONAN_ARGS "${CONAN_ARGS}") set(CONAN_ARGS_ARGS "--args=${l_CONAN_ARGS}") endif() @@ -1083,7 +1083,7 @@ function(conan_cmake_profile) set(profileMultiValueArgs SETTINGS OPTIONS CONF ENV BUILDENV RUNENV TOOL_REQUIRES) cmake_parse_arguments(ARGS "" "${profileOneValueArgs}" "${profileMultiValueArgs}" ${ARGN}) - if(DEFINED ARGS_FILEPATH) + if(DEFINED ARGS_FILEPATH) set(_FN "${ARGS_FILEPATH}") else() set(_FN "${CMAKE_CURRENT_BINARY_DIR}/profile") diff --git a/cmake/fuzzing_build.cmake b/cmake/fuzzing_build.cmake new file mode 100644 index 000000000..43f367001 --- /dev/null +++ b/cmake/fuzzing_build.cmake @@ -0,0 +1,153 @@ +# Fuzzing configuration +# Supports both local fuzzing and OSS-Fuzz integration + +# Detect if we're running in OSS-Fuzz environment +if(DEFINED ENV{LIB_FUZZING_ENGINE}) + set(OSS_FUZZ ON) + message(STATUS 
"OSS-Fuzz environment detected") +else() + set(OSS_FUZZ OFF) +endif() + +# Auto-detect AFL++ compiler if not in OSS-Fuzz mode +if(NOT OSS_FUZZ AND (CMAKE_C_COMPILER MATCHES ".*afl-.*" OR CMAKE_CXX_COMPILER MATCHES ".*afl-.*")) + set(USE_AFLPLUSPLUS ON CACHE BOOL "Use AFL++ instead of libFuzzer" FORCE) + message(STATUS "AFL++ compiler detected - automatically enabling AFL++ mode") +endif() + +# When building for fuzzing, we want static library by default +set(BTCPP_SHARED_LIBS OFF CACHE BOOL "Build static library for fuzzing" FORCE) + +# Only apply static linking settings if explicitly requested +if(FORCE_STATIC_LINKING) + set(CMAKE_FIND_LIBRARY_SUFFIXES .a ${CMAKE_FIND_LIBRARY_SUFFIXES}) + set(BUILD_SHARED_LIBS OFF) + + # Force static linking for dependencies + if(BTCPP_GROOT_INTERFACE) + set(ZeroMQ_USE_STATIC_LIBS ON) + set(ZEROMQ_STATIC_LIBRARY ON) + endif() + + if(BTCPP_SQLITE_LOGGING) + set(SQLite3_USE_STATIC_LIBS ON) + endif() +endif() + +# Set up flags for local fuzzing (not used for OSS-Fuzz) +if(NOT OSS_FUZZ) + list(APPEND BASE_FLAGS -O2) + + if(USE_AFLPLUSPLUS) + set(SANITIZER_FLAGS + -fsanitize=address,undefined + ) + else() + # For libFuzzer, use fuzzer-no-link for the library + set(SANITIZER_FLAGS + -fsanitize=address,undefined,fuzzer-no-link + ) + endif() + + # Apply sanitizer flags to the base library + list(APPEND BASE_FLAGS ${SANITIZER_FLAGS}) + + add_compile_options(${BASE_FLAGS}) + add_link_options(${BASE_FLAGS}) +endif() + +# Disable certain features during fuzzing +set(BTCPP_EXAMPLES OFF CACHE BOOL "Disable examples during fuzzing" FORCE) +set(BTCPP_BUILD_TOOLS OFF CACHE BOOL "Disable tools during fuzzing" FORCE) +set(BTCPP_UNIT_TESTS OFF CACHE BOOL "Disable tests during fuzzing" FORCE) +set(BTCPP_SHARED_LIBS OFF CACHE BOOL "Build static library for fuzzing" FORCE) + +# Function to apply fuzzing flags for local development builds +function(apply_local_fuzzing_flags target) + target_compile_options(${target} PRIVATE + ${BASE_FLAGS} + 
${SANITIZER_FLAGS} + ) + + if(FORCE_STATIC_LINKING) + if(USE_AFLPLUSPLUS) + target_link_options(${target} PRIVATE + ${BASE_FLAGS} + ${SANITIZER_FLAGS} + -static-libstdc++ + -static-libgcc + -fsanitize=fuzzer + ) + else() + target_link_options(${target} PRIVATE + ${BASE_FLAGS} + -fsanitize=fuzzer + ${SANITIZER_FLAGS} + -static-libstdc++ + -static-libgcc + ) + endif() + else() + if(USE_AFLPLUSPLUS) + target_link_options(${target} PRIVATE + ${BASE_FLAGS} + ${SANITIZER_FLAGS} + -fsanitize=fuzzer + ) + else() + target_link_options(${target} PRIVATE + ${BASE_FLAGS} + -fsanitize=fuzzer + ${SANITIZER_FLAGS} + ) + endif() + endif() +endfunction() + +# Function to add fuzzing targets - compatible with both local and OSS-Fuzz builds +function(add_fuzzing_targets) + set(FUZZERS bt_fuzzer script_fuzzer bb_fuzzer) + + foreach(fuzzer ${FUZZERS}) + add_executable(${fuzzer} fuzzing/${fuzzer}.cpp) + + if(OSS_FUZZ) + # For OSS-Fuzz environment, we rely on environment variables + # like $CC, $CXX, $CFLAGS, $CXXFLAGS, and $LIB_FUZZING_ENGINE + target_link_libraries(${fuzzer} PRIVATE + ${BTCPP_LIBRARY} + ${BTCPP_EXTRA_LIBRARIES} + $ENV{LIB_FUZZING_ENGINE} + ) + else() + # For local development, use our own flags + apply_local_fuzzing_flags(${fuzzer}) + target_link_libraries(${fuzzer} PRIVATE + ${BTCPP_LIBRARY} + ${BTCPP_EXTRA_LIBRARIES} + ) + endif() + + # Setup corpus directories (useful for both environments) + set(CORPUS_DIR ${CMAKE_BINARY_DIR}/corpus/${fuzzer}) + file(MAKE_DIRECTORY ${CORPUS_DIR}) + endforeach() + + # Copy corpus files if they exist (useful for local testing) + # OSS-Fuzz provides its own corpus handling + if(NOT OSS_FUZZ) + file(GLOB BT_CORPUS_FILES "${CMAKE_SOURCE_DIR}/fuzzing/corpus/bt_corpus/*") + file(GLOB SCRIPT_CORPUS_FILES "${CMAKE_SOURCE_DIR}/fuzzing/corpus/script_corpus/*") + file(GLOB BB_CORPUS_FILES "${CMAKE_SOURCE_DIR}/fuzzing/corpus/bb_corpus/*") + + if(BT_CORPUS_FILES) + file(COPY ${BT_CORPUS_FILES} DESTINATION ${CMAKE_BINARY_DIR}/corpus/bt_fuzzer) + 
endif() + if(SCRIPT_CORPUS_FILES) + file(COPY ${SCRIPT_CORPUS_FILES} DESTINATION ${CMAKE_BINARY_DIR}/corpus/script_fuzzer) + endif() + if(BB_CORPUS_FILES) + file(COPY ${BB_CORPUS_FILES} DESTINATION ${CMAKE_BINARY_DIR}/corpus/bb_fuzzer) + endif() + endif() +endfunction() diff --git a/conanfile.txt b/conanfile.txt index 56dd97006..7b81d1d6d 100644 --- a/conanfile.txt +++ b/conanfile.txt @@ -1,5 +1,5 @@ [requires] -gtest/1.12.1 +gtest/1.14.0 zeromq/4.3.4 sqlite3/3.40.1 diff --git a/contributors.txt b/contributors.txt index 578b30da4..83ef7a03d 100644 --- a/contributors.txt +++ b/contributors.txt @@ -1,4 +1,3 @@ Davide Faconti Michele Colledanchise Rocco Santomo - diff --git a/convert_v3_to_v4.py b/convert_v3_to_v4.py index 027eebbca..0866ffcde 100755 --- a/convert_v3_to_v4.py +++ b/convert_v3_to_v4.py @@ -170,4 +170,4 @@ class ArgsType(typing.NamedTuple): if __name__ == "__main__": - main() \ No newline at end of file + main() diff --git a/docs/substitution_sample.json b/docs/substitution_sample.json index b9d58de03..fd908e727 100644 --- a/docs/substitution_sample.json +++ b/docs/substitution_sample.json @@ -1,15 +1,15 @@ { - "TestNodeConfigs": { - "MyTest": { - "async_delay": 2000, - "return_status": "SUCCESS", - "post_script": "msg ='message SUBSTITUED'" - } - }, - - "SubstitutionRules": { - "mysub/action_*": "TestAction", - "talk": "TestSaySomething", - "last_action": "MyTest" + "TestNodeConfigs": { + "MyTest": { + "async_delay": 2000, + "return_status": "SUCCESS", + "post_script": "msg ='message SUBSTITUED'" } -} \ No newline at end of file + }, + + "SubstitutionRules": { + "mysub/action_*": "TestAction", + "talk": "TestSaySomething", + "last_action": "MyTest" + } +} diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 362afcc23..a42272367 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -26,27 +26,37 @@ CompileExample("t07_load_multiple_xml") CompileExample("t08_additional_node_args") CompileExample("t09_scripting") 
CompileExample("t10_observer") -CompileExample("t11_replace_rules") -if(BTCPP_GROOT_INTERFACE AND BTCPP_SQLITE_LOGGING) -CompileExample("t12_groot_howto") -CompileExample("generate_log") +if(BTCPP_GROOT_INTERFACE) + CompileExample("t11_groot_howto") endif() +CompileExample("t12_default_ports") +CompileExample("t13_access_by_ref") +CompileExample("t14_subtree_model") +CompileExample("t15_nodes_mocking") +CompileExample("t16_global_blackboard") +CompileExample("t17_blackboard_backup") +CompileExample("t18_waypoints") + CompileExample("ex01_wrap_legacy") CompileExample("ex02_runtime_ports") -CompileExample("ex04_waypoints") -CompileExample("ex05_subtree_model") -CompileExample("ex06_access_by_ptr") -CompileExample("t13_plugin_executor") +if(BTCPP_SQLITE_LOGGING) + CompileExample("ex03_sqlite_log") +endif() + + +############ Create plugin and executor in folder plugin_example ########## -############ Create plugin for tutorial 13 ########## # library must be SHARED -add_library(t13_plugin_action SHARED t13_plugin_action.cpp ) +add_library(test_plugin_action SHARED plugin_example/plugin_action.cpp ) # you must set the definition BT_PLUGIN_EXPORT -target_compile_definitions(t13_plugin_action PRIVATE BT_PLUGIN_EXPORT ) -# remove the "lib" prefix. Name of the file will be t13_plugin_action.so -set_target_properties(t13_plugin_action PROPERTIES PREFIX "") +target_compile_definitions(test_plugin_action PRIVATE BT_PLUGIN_EXPORT ) +# remove the "lib" prefix. 
Name of the file will be test_plugin_action.so +set_target_properties(test_plugin_action PROPERTIES PREFIX "") # link dependencies as usual -target_link_libraries(t13_plugin_action ${BTCPP_LIBRARY} ) +target_link_libraries(test_plugin_action ${BTCPP_LIBRARY} ) + +add_executable(test_plugin_executor plugin_example/plugin_executor.cpp ) +target_link_libraries(test_plugin_executor ${BTCPP_LIBRARY}) diff --git a/examples/broken_sequence.cpp b/examples/broken_sequence.cpp deleted file mode 100644 index 032d242f9..000000000 --- a/examples/broken_sequence.cpp +++ /dev/null @@ -1,82 +0,0 @@ -#include "Blackboard/blackboard_local.h" -#include "behaviortree_cpp/behavior_tree.h" -#include "behaviortree_cpp/bt_factory.h" - -using namespace BT; - -NodeStatus SayHello() -{ - printf("hello\n"); - return NodeStatus::SUCCESS; -} - -class ActionTestNode : public ActionNode -{ -public: - ActionTestNode(const std::string& name) : ActionNode(name) - {} - - NodeStatus tick() override - { - time_ = 5; - stop_loop_ = false; - int i = 0; - while (!stop_loop_ && i++ < time_) - { - std::this_thread::sleep_for(std::chrono::milliseconds(100)); - } - return NodeStatus::SUCCESS; - } - - virtual void halt() override - { - stop_loop_ = true; - } - -private: - int time_; - std::atomic_bool stop_loop_; -}; - -int main() -{ - BT::SequenceNode root("root"); - BT::SimpleActionNode action1("say_hello", std::bind(SayHello)); - ActionTestNode action2("async_action"); - - root.addChild(&action1); - root.addChild(&action2); - - int count = 0; - - NodeStatus status = NodeStatus::RUNNING; - - while (status == NodeStatus::RUNNING) - { - status = root.executeTick(); - - std::cout << count++ << " : " << root.status() << " / " << action1.status() << " / " - << action2.status() << std::endl; - - std::this_thread::sleep_for(std::chrono::milliseconds(100)); - } - - return 0; -} -// Output -/* - -hello -0 : RUNNING / SUCCESS / RUNNING -hello -1 : RUNNING / SUCCESS / RUNNING -hello -2 : RUNNING / SUCCESS / RUNNING 
-hello -3 : RUNNING / SUCCESS / RUNNING -hello -4 : RUNNING / SUCCESS / RUNNING -hello -5 : SUCCESS / IDLE / IDLE - -*/ diff --git a/examples/ex01_wrap_legacy.cpp b/examples/ex01_wrap_legacy.cpp index 80abf64f7..b6fb195da 100644 --- a/examples/ex01_wrap_legacy.cpp +++ b/examples/ex01_wrap_legacy.cpp @@ -20,7 +20,7 @@ class MyLegacyMoveTo bool go(Point3D goal) { printf("Going to: %f %f %f\n", goal.x, goal.y, goal.z); - return true; // true means success in my legacy code + return true; // true means success in my legacy code } }; @@ -33,7 +33,7 @@ Point3D convertFromString(StringView key) { // three real numbers separated by semicolons auto parts = BT::splitString(key, ';'); - if (parts.size() != 3) + if(parts.size() != 3) { throw RuntimeError("invalid input)"); } @@ -46,7 +46,7 @@ Point3D convertFromString(StringView key) return output; } } -} // namespace BT +} // namespace BT // clang-format off static const char* xml_text = R"( @@ -81,7 +81,7 @@ int main() // Register the lambda with BehaviorTreeFactory::registerSimpleAction - PortsList ports = {BT::InputPort("goal")}; + PortsList ports = { BT::InputPort("goal") }; factory.registerSimpleAction("MoveTo", MoveToWrapperWithLambda, ports); auto tree = factory.createTreeFromText(xml_text); diff --git a/examples/ex02_runtime_ports.cpp b/examples/ex02_runtime_ports.cpp index d6e645dc2..25a13beeb 100644 --- a/examples/ex02_runtime_ports.cpp +++ b/examples/ex02_runtime_ports.cpp @@ -17,8 +17,8 @@ static const char* xml_text = R"( class ThinkRuntimePort : public BT::SyncActionNode { public: - ThinkRuntimePort(const std::string& name, const BT::NodeConfig& config) : - BT::SyncActionNode(name, config) + ThinkRuntimePort(const std::string& name, const BT::NodeConfig& config) + : BT::SyncActionNode(name, config) {} BT::NodeStatus tick() override @@ -31,15 +31,15 @@ class ThinkRuntimePort : public BT::SyncActionNode class SayRuntimePort : public BT::SyncActionNode { public: - SayRuntimePort(const std::string& name, const 
BT::NodeConfig& config) : - BT::SyncActionNode(name, config) + SayRuntimePort(const std::string& name, const BT::NodeConfig& config) + : BT::SyncActionNode(name, config) {} // You must override the virtual function tick() BT::NodeStatus tick() override { auto msg = getInput("message"); - if (!msg) + if(!msg) { throw BT::RuntimeError("missing required input [message]: ", msg.error()); } @@ -54,12 +54,12 @@ int main() //-------- register ports that might be defined at runtime -------- // more verbose way - PortsList think_ports = {BT::OutputPort("text")}; + PortsList think_ports = { BT::OutputPort("text") }; factory.registerBuilder( CreateManifest("ThinkRuntimePort", think_ports), CreateBuilder()); // less verbose way - PortsList say_ports = {BT::InputPort("message")}; + PortsList say_ports = { BT::InputPort("message") }; factory.registerNodeType("SayRuntimePort", say_ports); factory.registerBehaviorTreeFromText(xml_text); diff --git a/examples/ex03_sqlite_log.cpp b/examples/ex03_sqlite_log.cpp new file mode 100644 index 000000000..9ea8c028b --- /dev/null +++ b/examples/ex03_sqlite_log.cpp @@ -0,0 +1,149 @@ +#include "dummy_nodes.h" +#include "behaviortree_cpp/bt_factory.h" +#include "behaviortree_cpp/loggers/bt_sqlite_logger.h" +#include "behaviortree_cpp/xml_parsing.h" + +struct TaskA +{ + int type; + std::string name; +}; + +struct TaskB +{ + double value; + std::string name; +}; + +using Command = std::variant; + +// Simple Action that updates an instance of Position2D in the blackboard +class SetTask : public BT::SyncActionNode +{ +public: + SetTask(const std::string& name, const BT::NodeConfig& config) + : BT::SyncActionNode(name, config) + {} + + BT::NodeStatus tick() override + { + auto type = getInput("type").value(); + if(type == "A") + { + setOutput("task", TaskA{ 43, type }); + } + else if(type == "B") + { + setOutput("task", TaskB{ 3.14, type }); + } + return BT::NodeStatus::SUCCESS; + } + + static BT::PortsList providedPorts() + { + return { 
BT::InputPort("type"), BT::OutputPort("task") }; + } + +private: +}; + +// clang-format off + +static const char* xml_text = R"( + + + + + +