From 7c58f5aff2168efbcb6120c81ce5d0739943de65 Mon Sep 17 00:00:00 2001 From: Foster Brereton Date: Mon, 5 May 2025 16:21:00 -0700 Subject: [PATCH 01/19] wip --- CMakeLists.txt | 18 ++++++++++++++++++ include/orc/dwarf_structs.hpp | 12 ++---------- src/dwarf.cpp | 20 ++++++++++---------- test/src/main.cpp | 10 ++++++++++ 4 files changed, 40 insertions(+), 20 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 6cb3250..995fd68 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -63,6 +63,23 @@ if (NOT TARGET nlohmann_json::nlohmann_json) FetchContent_MakeAvailable(json) endif() +#################################################################################################### +# +# Adds support for Google Test. +# + +if (NOT TARGET GTest::gtest) + message(STATUS "ORC third-party: creating target 'GTest::gtest'...") + FetchContent_Declare( + googletest + GIT_REPOSITORY https://github.com/google/googletest.git + GIT_TAG v1.14.0 + ) + # Prevent overriding the parent project's compiler/linker settings + set(gtest_force_shared_crt ON CACHE BOOL "" FORCE) + FetchContent_MakeAvailable(googletest) +endif() + #################################################################################################### # # Adds support for the Tracy profiler. @@ -185,6 +202,7 @@ target_link_libraries(orc_test tomlplusplus::tomlplusplus Tracy::TracyClient nlohmann_json::nlohmann_json + GTest::gtest ) if (PROJECT_IS_TOP_LEVEL) target_compile_options(orc_test PRIVATE -Wall -Werror) diff --git a/include/orc/dwarf_structs.hpp b/include/orc/dwarf_structs.hpp index 3d4b55a..4e75520 100644 --- a/include/orc/dwarf_structs.hpp +++ b/include/orc/dwarf_structs.hpp @@ -19,6 +19,7 @@ // application #include "orc/dwarf_constants.hpp" +#include "orc/fixed_vector.hpp" #include "orc/hash.hpp" #include "orc/string_pool.hpp" @@ -176,21 +177,12 @@ std::ostream& operator<<(std::ostream& s, const attribute& x); //-------------------------------------------------------------------------------------------------- // I'm not a fan of the name `attribute_sequence`. -// -// TODO: Consider using `std::array` instead of `std::vector` to avoid dynamic allocation. This -// would require we cap the max number of attributes at compile time, which should be okay as long -// as we pick a reasonable number. On the other hand, that would make DIEs with smaller sets of -// attributes less memory efficient. It's the classic space/time tradeoff. struct attribute_sequence { - using attributes_type = std::vector; + using attributes_type = orc::fixed_vector; using value_type = typename attributes_type::value_type; using iterator = typename attributes_type::iterator; using const_iterator = typename attributes_type::const_iterator; - void reserve(std::size_t size) { - _attributes.reserve(size); - } - bool has(dw::at name) const { auto [valid, iterator] = find(name); return valid; diff --git a/src/dwarf.cpp b/src/dwarf.cpp index 21faca2..11f1798 100644 --- a/src/dwarf.cpp +++ b/src/dwarf.cpp @@ -18,6 +18,7 @@ // application #include "orc/dwarf_structs.hpp" #include "orc/features.hpp" +#include "orc/fixed_vector.hpp" #include "orc/object_file_registry.hpp" #include "orc/orc.hpp" #include "orc/settings.hpp" @@ -572,7 +573,7 @@ void line_header::read(freader& s, bool needs_byteswap) { //-------------------------------------------------------------------------------------------------- // It is fixed to keep allocations from happening. 
constexpr std::size_t max_names_k{32}; -using fixed_attribute_array = std::array; +using fixed_attribute_array = orc::fixed_vector; /** * @brief Extracts fatal attributes from an attribute sequence @@ -592,15 +593,17 @@ using fixed_attribute_array = std::array; * @note The function is limited to processing `max_names_k` fatal attributes. */ fixed_attribute_array fatal_attributes_within(const attribute_sequence& attributes) { - fixed_attribute_array names{dw::at::none}; - std::size_t count{0}; + fixed_attribute_array names; for (const auto& attr : attributes) { - if (nonfatal_attribute(attr._name)) continue; - ADOBE_INVARIANT(count < (max_names_k - 1), "fatal_attribute_hash names overflow"); - names[count++] = attr._name; + if (nonfatal_attribute(attr._name)) { + continue; + } + + names.push_back(attr._name); } - std::sort(&names[0], &names[count]); + + std::sort(names.begin(), names.end()); return names; } @@ -1862,9 +1865,6 @@ die_pair dwarf::implementation::abbreviation_to_die(std::size_t die_address, pro die._tag = a._tag; die._has_children = a._has_children; - // Can we get rid of this memory allocation? This happens a lot... - attributes.reserve(a._attributes.size()); - std::transform(a._attributes.begin(), a._attributes.end(), std::back_inserter(attributes), [&](const auto& x) { // If the attribute is nonfatal, we'll pass over it in `process_attribute`. diff --git a/test/src/main.cpp b/test/src/main.cpp index 0a394de..aa8456d 100644 --- a/test/src/main.cpp +++ b/test/src/main.cpp @@ -19,6 +19,9 @@ #include #include +// Google Test +#include + //-------------------------------------------------------------------------------------------------- namespace { @@ -632,6 +635,13 @@ std::size_t traverse_directory_tree(const std::filesystem::path& directory) { int main(int argc, char** argv) try { orc::profiler::initialize(); + // Initialize and run Google Test + ::testing::InitGoogleTest(&argc, argv); + int gtest_result = RUN_ALL_TESTS(); + if (gtest_result != 0) { + return gtest_result; + } + if (argc < 2) { console_error() << "Usage: " << argv[0] << " /path/to/test/battery/ [--json_mode]\n"; throw std::runtime_error("no path to test battery given"); From 319c5df1b97fb5873d1e4bf25949b2afdf8fc56c Mon Sep 17 00:00:00 2001 From: Foster Brereton Date: Mon, 5 May 2025 16:23:07 -0700 Subject: [PATCH 02/19] wip --- include/orc/fixed_vector.hpp | 171 ++++++++++++++++++++++++++ test/src/fixed_vector_tests.cpp | 205 ++++++++++++++++++++++++++++++++ 2 files changed, 376 insertions(+) create mode 100644 include/orc/fixed_vector.hpp create mode 100644 test/src/fixed_vector_tests.cpp diff --git a/include/orc/fixed_vector.hpp b/include/orc/fixed_vector.hpp new file mode 100644 index 0000000..363b30e --- /dev/null +++ b/include/orc/fixed_vector.hpp @@ -0,0 +1,171 @@ +// Copyright 2025 Adobe +// All Rights Reserved. +// +// NOTICE: Adobe permits you to use, modify, and distribute this file in accordance with the terms +// of the Adobe license agreement accompanying it. 
+
+#pragma once
+
+// stdc++
+#include <algorithm>
+#include <array>
+#include <iterator>
+#include <stdexcept>
+#include <utility>
+
+// adobe contract checks
+#include "adobe/contract_checks.hpp"
+
+//--------------------------------------------------------------------------------------------------
+
+namespace orc {
+
+//--------------------------------------------------------------------------------------------------
+
+template <class T, std::size_t N>
+struct fixed_vector {
+    using value_type = T;
+    using array_type = std::array<T, N>;
+    using size_type = typename array_type::size_type;
+    using iterator = typename array_type::iterator;
+    using const_iterator = typename array_type::const_iterator;
+    using reverse_iterator = std::reverse_iterator<iterator>;
+    using const_reverse_iterator = std::reverse_iterator<const_iterator>;
+
+    // Constructors
+    fixed_vector() = default;
+    fixed_vector(const fixed_vector&) = default;
+    fixed_vector& operator=(const fixed_vector&) = default;
+
+    fixed_vector(fixed_vector&& rhs) : _a(std::move(rhs._a)), _n(rhs._n) {
+        rhs._n = 0;
+    }
+
+    fixed_vector& operator=(fixed_vector&& rhs) {
+        _a = std::move(rhs._a);
+        _n = rhs._n;
+        rhs._n = 0;
+        return *this;
+    }
+
+    fixed_vector(size_type count, const T& value) {
+        ADOBE_PRECONDITION(count <= N, "fixed_vector overflow");
+        for (size_type i = 0; i < count; ++i) {
+            push_back(value);
+        }
+    }
+
+    // Element access
+    T& at(size_type pos) {
+        if (pos >= _n) {
+            throw std::out_of_range("fixed_vector::at");
+        }
+        return _a[pos];
+    }
+
+    const T& at(size_type pos) const {
+        if (pos >= _n) {
+            throw std::out_of_range("fixed_vector::at");
+        }
+        return _a[pos];
+    }
+
+    T& operator[](size_type pos) { return _a[pos]; }
+    const T& operator[](size_type pos) const { return _a[pos]; }
+
+    T& front() { return _a[0]; }
+    const T& front() const { return _a[0]; }
+
+    T& back() { return _a[_n - 1]; }
+    const T& back() const { return _a[_n - 1]; }
+
+    // Capacity
+    size_type size() const { return _n; }
+    bool empty() const { return _n == 0; }
+    size_type max_size() const { return N; }
+    size_type capacity() const { return N; }
+
+    // Modifiers
+    void push_back(const T& x) {
+        ADOBE_PRECONDITION(_n < N, "fixed_vector overflow");
+        _a[_n++] = x;
+    }
+
+    void pop_back() {
+        ADOBE_PRECONDITION(_n > 0, "fixed_vector underflow");
+        back() = T();
+        --_n;
+    }
+
+    void clear() {
+        while (!empty()) {
+            pop_back();
+        }
+    }
+
+    iterator insert(iterator pos, const T& value) {
+        ADOBE_PRECONDITION(_n < N - 1, "fixed_vector overflow");
+        auto old_end = end();
+        push_back(value);
+        std::rotate(pos, old_end, end());
+        return pos;
+    }
+
+    template <class Iterator>
+    iterator insert(iterator pos, Iterator first, Iterator last) {
+        iterator old_end = end();
+        while (first != last) {
+            push_back(*first++);
+        }
+        std::rotate(pos, old_end, end());
+        return pos;
+    }
+
+    auto erase(iterator pos) {
+        ADOBE_PRECONDITION(_n > 0, "fixed_vector underflow");
+        std::rotate(pos, std::next(pos), end());
+        back() = T();
+        --_n;
+        return pos;
+    }
+
+    // Iterators
+    auto begin() { return _a.begin(); }
+    auto begin() const { return _a.begin(); }
+    auto cbegin() const { return _a.begin(); }
+
+    auto end() { return std::next(begin(), _n); }
+    auto end() const { return std::next(begin(), _n); }
+    auto cend() const { return std::next(cbegin(), _n); }
+
+    auto rbegin() { return reverse_iterator(end()); }
+    auto rbegin() const { return const_reverse_iterator(end()); }
+    auto crbegin() const { return const_reverse_iterator(cend()); }
+
+    auto rend() { return reverse_iterator(begin()); }
+    auto rend() const { return const_reverse_iterator(begin()); }
+    auto crend() const { return const_reverse_iterator(cbegin()); }
+
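+    // Friends
+    //
+    // Note: std::swap over std::array exchanges the two arrays element by
+    // element, so this swap is O(N), unlike the O(1) pointer swap of
+    // std::vector.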
+    friend void swap(fixed_vector& lhs, fixed_vector& rhs) {
+        std::swap(lhs._a, rhs._a);
+        std::swap(lhs._n, rhs._n);
+    }
+
+private:
+    array_type _a;
+    size_type _n{0};
+};
+
+template <class T, std::size_t N>
+bool operator==(const fixed_vector<T, N>& lhs, const fixed_vector<T, N>& rhs) {
+    return std::equal(lhs.begin(), lhs.end(), rhs.begin(), rhs.end());
+}
+
+template <class T, std::size_t N>
+bool operator!=(const fixed_vector<T, N>& lhs, const fixed_vector<T, N>& rhs) {
+    return !(lhs == rhs);
+}
+
+//--------------------------------------------------------------------------------------------------
+
+} // namespace orc
+
+//--------------------------------------------------------------------------------------------------
diff --git a/test/src/fixed_vector_tests.cpp b/test/src/fixed_vector_tests.cpp
new file mode 100644
index 0000000..7568050
--- /dev/null
+++ b/test/src/fixed_vector_tests.cpp
@@ -0,0 +1,205 @@
+// identity
+#include "orc/fixed_vector.hpp"
+
+// stdc++
+#include <iterator>
+#include <stdexcept>
+#include <string>
+#include <vector>
+
+// gtest
+#include <gtest/gtest.h>
+
+using namespace orc;
+
+// Constructor tests
+TEST(FixedVectorTest, DefaultConstructor) {
+    fixed_vector<int, 5> vec;
+    EXPECT_TRUE(vec.empty());
+    EXPECT_EQ(vec.size(), 0);
+}
+
+TEST(FixedVectorTest, FillConstructor) {
+    fixed_vector<int, 5> vec(3, 0);
+    EXPECT_EQ(vec.size(), 3);
+}
+
+// Element access tests
+TEST(FixedVectorTest, AtAccess) {
+    fixed_vector<int, 5> vec;
+    vec.push_back(0);
+    EXPECT_NO_THROW(vec.at(0));
+    EXPECT_THROW(vec.at(1), std::out_of_range);
+}
+
+TEST(FixedVectorTest, OperatorBracketAccess) {
+    fixed_vector<int, 5> vec;
+    vec.push_back(0);
+    EXPECT_NO_THROW(vec[0]);
+}
+
+TEST(FixedVectorTest, FrontBackAccess) {
+    fixed_vector<int, 5> vec;
+    vec.push_back(0);
+    vec.push_back(1);
+    EXPECT_NO_THROW(vec.front());
+    EXPECT_EQ(vec.front(), 0);
+    EXPECT_NO_THROW(vec.back());
+    EXPECT_EQ(vec.back(), 1);
+}
+
+// Iterator tests
+TEST(FixedVectorTest, IteratorOperations) {
+    fixed_vector<int, 5> vec;
+    vec.push_back(0);
+    vec.push_back(1);
+
+    EXPECT_EQ(std::distance(vec.begin(), vec.end()), 2);
+    EXPECT_EQ(std::distance(vec.cbegin(), vec.cend()), 2);
+    EXPECT_EQ(std::distance(vec.rbegin(), vec.rend()), 2);
+}
+
+// Capacity tests
+TEST(FixedVectorTest, CapacityOperations) {
+    fixed_vector<int, 5> vec;
+    EXPECT_EQ(vec.max_size(), 5);
+    EXPECT_EQ(vec.capacity(), 5);
+    EXPECT_TRUE(vec.empty());
+
+    vec.push_back(0);
+    EXPECT_FALSE(vec.empty());
+    EXPECT_EQ(vec.size(), 1);
+}
+
+// Modifier tests
+TEST(FixedVectorTest, PushBack) {
+    fixed_vector<int, 5> vec;
+    for (int i = 0; i < 5; ++i) {
+        EXPECT_NO_THROW(vec.push_back(i));
+        EXPECT_EQ(vec.back(), i);
+    }
+}
+
+TEST(FixedVectorTest, PopBack) {
+    fixed_vector<int, 5> vec;
+    vec.push_back(0);
+    EXPECT_EQ(vec.size(), 1);
+    EXPECT_NO_THROW(vec.pop_back());
+    EXPECT_EQ(vec.size(), 0);
+}
+
+TEST(FixedVectorTest, Clear) {
+    fixed_vector<int, 5> vec;
+    vec.push_back(0);
+    vec.push_back(1);
+    EXPECT_EQ(vec.size(), 2);
+    vec.clear();
+    EXPECT_TRUE(vec.empty());
+}
+
+TEST(FixedVectorTest, Insert) {
+    fixed_vector<int, 5> vec;
+    vec.push_back(1);
+    auto it = vec.insert(vec.begin(), 0);
+    EXPECT_EQ(it, vec.begin());
+    EXPECT_EQ(vec.size(), 2);
+    EXPECT_EQ(vec[0], 0);
+    EXPECT_EQ(vec[1], 1);
+}
+
+TEST(FixedVectorTest, Erase) {
+    fixed_vector<int, 5> vec;
+    vec.push_back(0);
+    vec.push_back(1);
+    EXPECT_EQ(vec[0], 0);
+    EXPECT_EQ(vec[1], 1);
+    auto it = vec.erase(vec.begin());
+    EXPECT_EQ(vec.size(), 1);
+    EXPECT_EQ(it, vec.begin());
+    EXPECT_EQ(vec[0], 1);
+}
+
+// Non-member function tests
+TEST(FixedVectorTest, ComparisonOperators) {
+    fixed_vector<int, 5> vec1;
+    fixed_vector<int, 5> vec2;
+
+    vec1.push_back(0);
+    vec2.push_back(0);
+
+    EXPECT_TRUE(vec1 == vec2);
+    EXPECT_FALSE(vec1 != vec2);
+}
+
+TEST(FixedVectorTest, Swap) {
+    fixed_vector<int, 5> vec1;
+    fixed_vector<int, 5> vec2;
+
+    vec1.push_back(0);
+    vec2.push_back(0);
+    vec2.push_back(1);
+
+    swap(vec1, vec2);
+    EXPECT_EQ(vec1.size(), 2);
+    EXPECT_EQ(vec2.size(), 1);
+}
+
+// Special test for string type
+TEST(FixedVectorTest, StringOperations) {
+    fixed_vector<std::string, 5> vec;
+    vec.push_back("hello");
+    vec.push_back("world");
+
+    EXPECT_EQ(vec[0], "hello");
+    EXPECT_EQ(vec[1], "world");
+    EXPECT_EQ(vec.size(), 2);
+}
+
+// Test for move semantics
+TEST(FixedVectorTest, MoveOperations) {
+    fixed_vector<std::string, 5> vec1;
+    vec1.push_back("hello");
+
+    fixed_vector<std::string, 5> vec2(std::move(vec1));
+    EXPECT_TRUE(vec1.empty());
+    EXPECT_EQ(vec2.size(), 1);
+    EXPECT_EQ(vec2[0], "hello");
+
+    fixed_vector<std::string, 5> vec3;
+    vec3 = std::move(vec2);
+    EXPECT_TRUE(vec2.empty());
+    EXPECT_EQ(vec3.size(), 1);
+    EXPECT_EQ(vec3[0], "hello");
+}
+
+// Test for range-based for loop
+TEST(FixedVectorTest, RangeBasedFor) {
+    fixed_vector<int, 5> vec;
+    vec.push_back(1);
+    vec.push_back(2);
+    vec.push_back(3);
+
+    int sum = 0;
+    for (const auto& x : vec) {
+        sum += x;
+    }
+    EXPECT_EQ(sum, 6);
+}
+
+// Test for reverse iterators
+TEST(FixedVectorTest, ReverseIterators) {
+    fixed_vector<int, 5> vec;
+    vec.push_back(1);
+    vec.push_back(2);
+    vec.push_back(3);
+
+    std::vector<int> reversed;
+    for (auto it = vec.rbegin(); it != vec.rend(); ++it) {
+        reversed.push_back(*it);
+    }
+
+    EXPECT_EQ(reversed.size(), 3);
+    EXPECT_EQ(reversed[0], 3);
+    EXPECT_EQ(reversed[1], 2);
+    EXPECT_EQ(reversed[2], 1);
+}

From d75cee9d696794c0237147d9d36fc494f087f73f Mon Sep 17 00:00:00 2001
From: Foster Brereton
Date: Mon, 5 May 2025 16:33:31 -0700
Subject: [PATCH 03/19] wip

---
 include/orc/fixed_vector.hpp | 180 +++++++++++++++++++++++++++++++++--
 1 file changed, 173 insertions(+), 7 deletions(-)

diff --git a/include/orc/fixed_vector.hpp b/include/orc/fixed_vector.hpp
index 363b30e..3a8650b 100644
--- a/include/orc/fixed_vector.hpp
+++ b/include/orc/fixed_vector.hpp
@@ -20,6 +20,16 @@ namespace orc {
 
 //--------------------------------------------------------------------------------------------------
 
+/**
+ * @brief A fixed-size vector container that provides a subset of `std::vector` functionality
+ *
+ * @tparam T The type of elements stored in the vector
+ * @tparam N The maximum number of elements the vector can hold
+ *
+ * This container provides a fixed-size alternative to `std::vector` with a similar interface.
+ * Its storage lives inline within the object, so it never allocates or reallocates.
+ * Operations that would exceed the fixed capacity `N` will terminate the program.
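+ *
+ * A brief usage sketch:
+ * @code
+ *     orc::fixed_vector<int, 4> v;
+ *     v.push_back(1);
+ *     v.push_back(2);
+ *     assert(v.size() == 2 && v.front() == 1 && v.back() == 2);
+ *     v.pop_back();   // size() == 1
+ *     // pushing more than 4 elements total would violate push_back's
+ *     // precondition and terminate the program
+ * @endcode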
+ */ template struct fixed_vector { using value_type = T; @@ -35,10 +45,25 @@ struct fixed_vector { fixed_vector(const fixed_vector&) = default; fixed_vector& operator=(const fixed_vector&) = default; + /** + * @brief Move constructor + * + * @param rhs The `fixed_vector` to move from + * + * @post `rhs` is left in an empty state + */ fixed_vector(fixed_vector&& rhs) : _a(std::move(rhs._a)), _n(rhs._n) { rhs._n = 0; } + /** + * @brief Move assignment operator + * + * @param rhs The `fixed_vector` to move from + * @return Reference to this `fixed_vector` + * + * @post `rhs` is left in an empty state + */ fixed_vector& operator=(fixed_vector&& rhs) { _a = std::move(rhs._a); _n = rhs._n; @@ -46,6 +71,15 @@ struct fixed_vector { return *this; } + /** + * @brief Constructs a fixed_vector with count copies of value + * + * @param count Number of elements to create + * @param value Value to initialize elements with + * + * @pre count <= N + * @note If count > N, the program will terminate. + */ fixed_vector(size_type count, const T& value) { ADOBE_PRECONDITION(count <= N, "fixed_vector overflow"); for (size_type i = 0; i < count; ++i) { @@ -53,7 +87,14 @@ struct fixed_vector { } } - // Element access + /** + * @brief Access element at specified position with bounds checking + * + * @param pos Position of the element to return + * @return Reference to the requested element + * + * @throw std::out_of_range if pos >= size() + */ T& at(size_type pos) { if (pos >= _n) { throw std::out_of_range("fixed_vector::at"); @@ -61,6 +102,14 @@ struct fixed_vector { return _a[pos]; } + /** + * @brief Access element at specified position with bounds checking (const version) + * + * @param pos Position of the element to return + * @return Const reference to the requested element + * + * @throw std::out_of_range if pos >= size() + */ const T& at(size_type pos) const { if (pos >= _n) { throw std::out_of_range("fixed_vector::at"); @@ -68,14 +117,73 @@ struct fixed_vector { return _a[pos]; } + /** + * @brief Access element at specified position without bounds checking + * + * @param pos Position of the element to return + * @return Reference to the requested element + * + * @pre pos < size() + */ T& operator[](size_type pos) { return _a[pos]; } + + /** + * @brief Access element at specified position without bounds checking (const version) + * + * @param pos Position of the element to return + * @return Const reference to the requested element + * + * @pre pos < size() + */ const T& operator[](size_type pos) const { return _a[pos]; } - T& front() { return _a[0]; } - const T& front() const { return _a[0]; } + /** + * @brief Returns reference to the first element + * + * @return Reference to the first element + * + * @pre !empty() + */ + T& front() { + ADOBE_PRECONDITION(!empty(), "fixed_vector is empty"); + return _a[0]; + } + + /** + * @brief Returns const reference to the first element + * + * @return Const reference to the first element + * + * @pre !empty() + */ + const T& front() const { + ADOBE_PRECONDITION(!empty(), "fixed_vector is empty"); + return _a[0]; + } - T& back() { return _a[_n - 1]; } - const T& back() const { return _a[_n - 1]; } + /** + * @brief Returns reference to the last element + * + * @return Reference to the last element + * + * @pre !empty() + */ + T& back() { + ADOBE_PRECONDITION(!empty(), "fixed_vector is empty"); + return _a[_n - 1]; + } + + /** + * @brief Returns const reference to the last element + * + * @return Const reference to the last element + * + * @pre !empty() + */ + const T& 
back() const { + ADOBE_PRECONDITION(!empty(), "fixed_vector is empty"); + return _a[_n - 1]; + } // Capacity size_type size() const { return _n; } @@ -83,32 +191,67 @@ struct fixed_vector { size_type max_size() const { return N; } size_type capacity() const { return N; } - // Modifiers + /** + * @brief Adds an element to the end + * + * @param x Value to append + * + * @pre size() < N, otherwise the program will terminate. + */ void push_back(const T& x) { ADOBE_PRECONDITION(_n < N, "fixed_vector overflow"); _a[_n++] = x; } + /** + * @brief Removes the last element + * + * @pre !empty(), otherwise the program will terminate. + * @post The last element is destroyed and size() is decremented by 1 + */ void pop_back() { ADOBE_PRECONDITION(_n > 0, "fixed_vector underflow"); back() = T(); --_n; } + /** + * @brief Removes all elements + * + * @post size() == 0 + */ void clear() { while (!empty()) { pop_back(); } } + /** + * @brief Inserts value before pos + * + * @param pos Iterator before which the content will be inserted + * @param value Element value to insert + * @return Iterator pointing to the inserted value + * + * @pre size() < N, otherwise the program will terminate. + */ iterator insert(iterator pos, const T& value) { - ADOBE_PRECONDITION(_n < N - 1, "fixed_vector overflow"); auto old_end = end(); push_back(value); std::rotate(pos, old_end, end()); return pos; } + /** + * @brief Inserts elements from range [first, last) before pos + * + * @param pos Iterator before which the content will be inserted + * @param first Iterator to the first element to insert + * @param last Iterator past the last element to insert + * @return Iterator pointing to the first inserted element + * + * @pre size() + std::distance(first, last) <= N, otherwise the program will terminate. + */ template iterator insert(iterator pos, Iterator first, Iterator last) { iterator old_end = end(); @@ -119,6 +262,15 @@ struct fixed_vector { return pos; } + /** + * @brief Removes element at pos + * + * @param pos Iterator to the element to remove + * @return Iterator following the last removed element + * + * @pre !empty(), otherwise the program will terminate. 
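+     * @note Removal is implemented with `std::rotate`, so the relative order
+     *       of the elements after `pos` is preserved (no swap-with-last trick).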
+ * @post size() is decremented by 1 + */ auto erase(iterator pos) { ADOBE_PRECONDITION(_n > 0, "fixed_vector underflow"); std::rotate(pos, std::next(pos), end()); @@ -154,11 +306,25 @@ struct fixed_vector { size_type _n{0}; }; +/** + * @brief Equality comparison operator + * + * @param lhs First fixed_vector to compare + * @param rhs Second fixed_vector to compare + * @return true if the vectors have the same size and elements, false otherwise + */ template bool operator==(const fixed_vector& lhs, const fixed_vector& rhs) { return std::equal(lhs.begin(), lhs.end(), rhs.begin(), rhs.end()); } +/** + * @brief Inequality comparison operator + * + * @param lhs First fixed_vector to compare + * @param rhs Second fixed_vector to compare + * @return true if the vectors are not equal, false otherwise + */ template bool operator!=(const fixed_vector& lhs, const fixed_vector& rhs) { return !(lhs == rhs); From 914c809ba3195ef349c3086a4ea08250b0465725 Mon Sep 17 00:00:00 2001 From: Foster Brereton Date: Thu, 8 May 2025 15:55:10 -0700 Subject: [PATCH 04/19] changing orc_test to add tests it finds to gtest for consistent output and gtest capabilities --- test/src/main.cpp | 277 +++++++++++++++++++++++++--------------------- 1 file changed, 148 insertions(+), 129 deletions(-) diff --git a/test/src/main.cpp b/test/src/main.cpp index aa8456d..4099c72 100644 --- a/test/src/main.cpp +++ b/test/src/main.cpp @@ -227,6 +227,7 @@ struct expected_odrv { const std::string& linkage_name() const { return (*this)["linkage_name"]; } }; +#if 0 std::ostream& operator<<(std::ostream& s, const expected_odrv& x) { // map is unordered, so we have to sort the keys... std::vector keys; @@ -239,32 +240,25 @@ std::ostream& operator<<(std::ostream& s, const expected_odrv& x) { } return s; } +#endif //-------------------------------------------------------------------------------------------------- const char* to_string(toml::node_type x) { + // clang-format off switch (x) { - case toml::node_type::none: - return "none"; - case toml::node_type::table: - return "table"; - case toml::node_type::array: - return "array"; - case toml::node_type::string: - return "string"; - case toml::node_type::integer: - return "integer"; - case toml::node_type::floating_point: - return "floating_point"; - case toml::node_type::boolean: - return "boolean"; - case toml::node_type::date: - return "date"; - case toml::node_type::time: - return "time"; - case toml::node_type::date_time: - return "date_time"; + case toml::node_type::none: return "none"; + case toml::node_type::table: return "table"; + case toml::node_type::array: return "array"; + case toml::node_type::string: return "string"; + case toml::node_type::integer: return "integer"; + case toml::node_type::floating_point: return "floating_point"; + case toml::node_type::boolean: return "boolean"; + case toml::node_type::date: return "date"; + case toml::node_type::time: return "time"; + case toml::node_type::date_time: return "date_time"; } + // clang-format on assert(false); } @@ -362,7 +356,7 @@ std::vector compile_compilation_units(const std::filesyst std::vector object_files; const bool preserve_object_files = settings["orc_test_flags"]["preserve_object_files"].value_or(false); - console() << "Compiling " << units.size() << " source file(s):\n"; + // console() << "Compiling " << units.size() << " source file(s):\n"; for (auto& unit : units) { auto temp_path = sanitize(object_file_path(home, unit)); if (preserve_object_files) { @@ -383,8 +377,8 @@ std::vector 
compile_compilation_units(const std::filesyst throw std::runtime_error("unexpected compilation failure"); } object_files.emplace_back(std::move(temp_path)); - console() << " " << unit._src.filename() << " -> " << object_files.back().filename() - << '\n'; + // console() << " " << unit._src.filename() << " -> " << object_files.back().filename() + // << '\n'; } return object_files; } @@ -442,7 +436,22 @@ bool odrv_report_match(const expected_odrv& odrv, const odrv_report& report) { } //-------------------------------------------------------------------------------------------------- -// return `false` if no error, or `true` on error. +/** + * @brief Validates runtime metrics against expected values defined in settings + * + * This function compares various metrics collected during an ORC test pass + * against expected values specified in the TOML configuration. It reports + * any mismatches to the error console. + * + * @param settings The TOML configuration table containing expected metric values + * + * @pre The settings parameter should contain a "metrics" table with integer values + * for the metrics to be validated + * @pre The globals singleton should be initialized with the actual metrics + * + * @return true if any validation failures occurred (metrics didn't match expected values) + * @return false if all metrics matched or if no metrics table was found in settings + */ bool metrics_validation(const toml::table& settings) { const toml::table* expected_ptr = settings["metrics"].as_table(); @@ -454,16 +463,13 @@ bool metrics_validation(const toml::table& settings) { const globals& metrics = globals::instance(); bool failure = false; - const auto compare_field = [&expected](const std::atomic_size_t& field, const char* key) -> bool { + const auto compare_field = [&expected](const std::atomic_size_t& field, + const char* key) -> bool { const toml::value* file_count_ptr = expected[key].as_integer(); if (!file_count_ptr) return false; int64_t expected = **file_count_ptr; if (expected == field) return false; - console_error() << key - << " mismatch (expected " - << expected - << "; calculated " - << field + console_error() << key << " mismatch (expected " << expected << "; calculated " << field << ")\n"; return true; }; @@ -482,22 +488,55 @@ bool metrics_validation(const toml::table& settings) { constexpr const char* tomlname_k = "odrv_test.toml"; //-------------------------------------------------------------------------------------------------- +/** + * @brief Test fixture for ORC tests + * + * This class represents a test fixture for running ORC tests on a specific test directory. 
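+ * One fixture instance is registered with Google Test for every battery
+ * directory discovered under the test root (see `create_test` below).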
+ * It handles: + * - Loading and parsing the test configuration from a TOML file + * - Compiling source files if needed + * - Processing object files to detect ODR violations + * - Validating metrics and ODRV reports against expected values + */ +class orc_test_instance : public ::testing::Test { + std::filesystem::path _path; -std::size_t run_battery_test(const std::filesystem::path& home) { - static bool first_s = false; +public: + explicit orc_test_instance(std::filesystem::path&& path) : _path(std::move(path)) {} - if (!first_s) { - console() << '\n'; - } else { - first_s = false; - } +protected: + void SetUp() override { orc_reset(); } - assume(is_directory(home), "\"" + home.string() + "\" is not a directory"); - std::filesystem::path tomlpath = home / tomlname_k; - assume(is_regular_file(tomlpath), "\"" + tomlpath.string() + "\" is not a regular file"); - toml::table settings; + void TestBody() override; +}; - console() << "-=-=- Test: " << home << "\n"; +//-------------------------------------------------------------------------------------------------- +/** + * @brief Implements the logic for an ORC test + * + * This method executes the main logic for an ORC test: + * 1. Validates and loads the test configuration from a TOML file + * 2. Compiles source files (if any are specified) + * 3. Collects object files for processing (if any are specified) + * 4. Processes all object files to detect ODR violations + * 5. Validates metrics against expected values + * 6. Validates detected ODR violations against expected violations + * + * The test will be skipped if the "disable" flag is set in the configuration. + * + * @pre The `_path` member must point to a valid directory containing a valid TOML configuration + * file + * @pre The TOML file must follow the expected format for ORC test configuration + * @post Test assertions are made to validate metrics and ODR violation reports + * @post If the test is disabled in configuration, it will be skipped + * @throws std::runtime_error If the TOML file cannot be parsed or other critical errors occur + */ +void orc_test_instance::TestBody() { + assume(std::filesystem::is_directory(_path), "\"" + _path.string() + "\" is not a directory"); + std::filesystem::path tomlpath = _path / tomlname_k; + assume(std::filesystem::is_regular_file(tomlpath), + "\"" + tomlpath.string() + "\" is not a regular file"); + toml::table settings; try { settings = toml::parse_file(tomlpath.string()); @@ -506,110 +545,88 @@ std::size_t run_battery_test(const std::filesystem::path& home) { throw std::runtime_error("settings file parsing error"); } - // Save this for debugging purposes. - // console_error() << toml::json_formatter{settings} << '\n'; - if (settings["orc_test_flags"]["disable"].value_or(false)) { logging::notice("test disabled"); - return 0; + GTEST_SKIP() << "Test disabled in configuration"; + return; } - auto test_name = home.stem().string(); + auto compilation_units = derive_compilation_units(_path, settings); std::vector object_files; - auto compilation_units = derive_compilation_units(home, settings); if (!compilation_units.empty()) { - object_files = compile_compilation_units(home, settings, compilation_units); + object_files = compile_compilation_units(_path, settings, compilation_units); } - std::vector direct_object_files = derive_object_files(home, settings); - object_files.insert(object_files.end(), std::move_iterator(direct_object_files.begin()), std::move_iterator(direct_object_files.end())); - - // we can have zero of these now, it's okay. 
- auto expected_odrvs = derive_expected_odrvs(home, settings); - - orc_reset(); - - // save for debugging. - // settings::instance()._parallel_processing = false; + std::vector direct_object_files = derive_object_files(_path, settings); + object_files.insert(object_files.end(), std::make_move_iterator(direct_object_files.begin()), + std::make_move_iterator(direct_object_files.end())); + auto expected_odrvs = derive_expected_odrvs(_path, settings); const std::vector reports = orc_process(std::move(object_files)); - const globals& metrics = globals::instance(); - - console() << "ODRVs expected: " << expected_odrvs.size() << "; reported: " << reports.size() - << '\n'; - - toml::table result; - result.insert("expected", static_cast(expected_odrvs.size())); - result.insert("reported", static_cast(reports.size())); - toml::table toml_metrics; - toml_metrics.insert("object_file_count", static_cast(metrics._object_file_count)); - toml_metrics.insert("odrv_count", static_cast(metrics._odrv_count)); - toml_metrics.insert("unique_symbol_count", static_cast(metrics._unique_symbol_count)); - toml_metrics.insert("die_processed_count", static_cast(metrics._die_processed_count)); - toml_metrics.insert("die_skipped_count", static_cast(metrics._die_skipped_count)); - result.insert("metrics", std::move(toml_metrics)); - - toml_out().insert(test_name, std::move(result)); - - // - // metrics validation - // + // Validate metrics bool metrics_failure = metrics_validation(settings); - - // - // ODRV report validation - // - // At this point, the reports.size() should match the expected_odrvs.size() - // - bool unexpected_result = expected_odrvs.size() != reports.size(); - - // If things are okay so far, make sure each ODRV reported is expected. - if (!unexpected_result) { - for (const auto& report : reports) { - auto found = - std::find_if(expected_odrvs.begin(), expected_odrvs.end(), - [&](const auto& odrv) { return odrv_report_match(odrv, report); }); - - if (found == expected_odrvs.end()) { - unexpected_result = true; - break; - } - - console() << " Found expected ODRV: " << report.reporting_categories() << "\n"; - } + EXPECT_FALSE(metrics_failure) << "Metrics validation failed for " << _path; + + // Validate ODRV reports + EXPECT_EQ(expected_odrvs.size(), reports.size()) << "ODRV count mismatch for " << _path; + + // Check each reported ODRV against expected ones + for (const auto& report : reports) { + auto found = + std::find_if(expected_odrvs.begin(), expected_odrvs.end(), + [&](const auto& odrv) { return odrv_report_match(odrv, report); }); + EXPECT_NE(found, expected_odrvs.end()) + << "Unexpected ODRV found: " << report << " in " << _path; } - - if (unexpected_result) { - console_error() << "Reported ODRV(s):\n"; - - // If there's an error in the test, dump what we've found to assist debugging. - for (const auto& report : reports) { - console() << report << '\n'; - } - - console_error() << "Expected ODRV(s):\n"; - std::size_t count{0}; - for (const auto& expected : expected_odrvs) { - console() << ++count << ":\n" << expected << '\n'; - } - - console_error() << "\nIn battery " << home << ": ODRV count mismatch"; - } - - return metrics_failure + unexpected_result; } //-------------------------------------------------------------------------------------------------- +/** + * Creates a Google Test case for a single orc_test test. + * + * This function registers a new test case with Google Test framework using the + * directory name as the test name. 
+ * Any hyphens in the directory name are
+ * replaced with underscores to conform to C++ identifier naming rules.
+ *
+ * @param home The filesystem path to the test battery directory
+ *
+ * @pre The path must exist and be a valid directory containing test files
+ * @post A new test case is registered with the Google Test framework that will
+ * create an `orc_test_instance` with the provided path when executed
+ */
+void create_test(const std::filesystem::path& home) {
+    std::string test_name = home.stem().string();
+    std::replace(test_name.begin(), test_name.end(), '-', '_');
+
+    ::testing::RegisterTest(
+        "orc_test", test_name.c_str(), nullptr, nullptr, __FILE__, __LINE__,
+        [_home = home]() mutable -> ::testing::Test* { return new orc_test_instance(std::move(_home)); });
+}
 //--------------------------------------------------------------------------------------------------
+/**
+ * Recursively traverses a directory tree to find and register tests.
+ *
+ * This function walks through the provided directory and all its subdirectories,
+ * looking for directories that contain a TOML configuration file (indicated by
+ * tomlname_k). When such a directory is found, it is registered as a test.
+ *
+ * @param directory The filesystem path to start traversal from
+ * @return The number of errors encountered during traversal
+ *
+ * @pre The path must exist and be a valid directory
+ * @post All valid tests in the directory tree are registered with the
+ * testing framework via create_test()
+ */
 std::size_t traverse_directory_tree(const std::filesystem::path& directory) {
     assert(is_directory(directory));
 
     std::size_t errors = 0;
 
     if (exists(directory / tomlname_k)) {
-        errors += run_battery_test(directory);
+        create_test(directory);
     }
 
     for (const auto& entry : std::filesystem::directory_iterator(directory)) {
@@ -635,6 +652,13 @@ std::size_t traverse_directory_tree(const std::filesystem::path& directory) {
 int main(int argc, char** argv) try {
     orc::profiler::initialize();
 
-    // Initialize and run Google Test
-    ::testing::InitGoogleTest(&argc, argv);
-    int gtest_result = RUN_ALL_TESTS();
-    if (gtest_result != 0) {
-        return gtest_result;
-    }
-
     if (argc < 2) {
         console_error() << "Usage: " << argv[0] << " /path/to/test/battery/ [--json_mode]\n";
         throw std::runtime_error("no path to test battery given");
@@ -655,8 +665,17 @@ int main(int argc, char** argv) try {
     test_settings()._json_mode = argc > 2 && std::string(argv[2]) == "--json_mode";
 
+    // Traverse the directory tree to find and register tests,
+    // adding them dynamically to the Google Test framework.
std::size_t errors = traverse_directory_tree(battery_path); + // Initialize and run Google Test + ::testing::InitGoogleTest(&argc, argv); + int gtest_result = RUN_ALL_TESTS(); + if (gtest_result != 0) { + return gtest_result; + } + if (test_settings()._json_mode) { cout_safe([&](auto& s) { s << toml::json_formatter{toml_out()} << '\n'; }); } From 334d1596bfb2812651292a0ee20a6a57f93c9b62 Mon Sep 17 00:00:00 2001 From: Foster Brereton Date: Thu, 8 May 2025 16:10:59 -0700 Subject: [PATCH 05/19] refactor: replace manual array implementation with fixed_vector in object_ancestry struct --- include/orc/dwarf_structs.hpp | 31 ++++++++++++++++--------------- src/orc.cpp | 4 ++-- 2 files changed, 18 insertions(+), 17 deletions(-) diff --git a/include/orc/dwarf_structs.hpp b/include/orc/dwarf_structs.hpp index 4e75520..9e53ce0 100644 --- a/include/orc/dwarf_structs.hpp +++ b/include/orc/dwarf_structs.hpp @@ -360,43 +360,44 @@ const char* to_string(arch arch); * question. This facilitates reporting when ODRVs are found, giving the user a breadcrumb as * to how the ODRV is being introduced. For efficiency purposes, we fix the max number of ancestors * at compile time, but this can be adjusted if necessary. - * - * TODO: Does it make sense to extract this "static vector" type into a template, so that it can - * be used in other contexts? (e.g., `attribute_sequence`?) */ struct object_ancestry { - std::array _ancestors; - std::size_t _count{0}; + orc::fixed_vector _ancestors; + auto size() const { return _ancestors.size(); } auto begin() const { return _ancestors.begin(); } - auto end() const { return begin() + _count; } + auto end() const { return _ancestors.end(); } auto& back() { - assert(_count); - return _ancestors[_count]; + assert(!_ancestors.empty()); + return _ancestors.back(); } const auto& back() const { - assert(_count); - return _ancestors[_count]; + assert(!_ancestors.empty()); + return _ancestors.back(); } void emplace_back(pool_string&& ancestor) { - assert((_count + 1) < _ancestors.size()); - _ancestors[_count++] = std::move(ancestor); + assert(_ancestors.size() < _ancestors.capacity()); + _ancestors.push_back(std::move(ancestor)); } bool operator<(const object_ancestry& rhs) const { - if (_count < rhs._count) + if (_ancestors.size() < rhs._ancestors.size()) return true; - if (_count > rhs._count) + + if (_ancestors.size() > rhs._ancestors.size()) return false; - for(size_t i=0; i<_count; ++i) { + + for (size_t i = 0; i < _ancestors.size(); ++i) { if (_ancestors[i].view() < rhs._ancestors[i].view()) return true; + if (_ancestors[i].view() > rhs._ancestors[i].view()) return false; } + return false; } }; diff --git a/src/orc.cpp b/src/orc.cpp index f6d474b..f957d76 100644 --- a/src/orc.cpp +++ b/src/orc.cpp @@ -589,9 +589,9 @@ void to_json(nlohmann::json& j, const odrv_report::conflict_details& c) { auto& location_json = instances[location_str]; for (const auto& ancestry : locations.at(location)) { auto* node = &location_json; - for (std::size_t i = 0; i < ancestry._count; ++i) { + for (std::size_t i = 0; i < ancestry.size(); ++i) { const std::string key = ancestry._ancestors[i].allocate_string(); - if (i == (ancestry._count - 1)) { + if (i == (ancestry.size() - 1)) { (*node)["object_files"].push_back(key); } else { node = &(*node)[key]; From 1a63edd5358b9bae7517cd1f89e2989c26208634 Mon Sep 17 00:00:00 2001 From: Foster Brereton Date: Thu, 8 May 2025 16:17:42 -0700 Subject: [PATCH 06/19] tweak --- src/dwarf.cpp | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git 
a/src/dwarf.cpp b/src/dwarf.cpp index 11f1798..4c679e8 100644 --- a/src/dwarf.cpp +++ b/src/dwarf.cpp @@ -571,9 +571,8 @@ void line_header::read(freader& s, bool needs_byteswap) { } //-------------------------------------------------------------------------------------------------- -// It is fixed to keep allocations from happening. -constexpr std::size_t max_names_k{32}; -using fixed_attribute_array = orc::fixed_vector; + +using fixed_attribute_array = orc::fixed_vector; /** * @brief Extracts fatal attributes from an attribute sequence From 76c6080e7834e36659e05668b9f989c87090719d Mon Sep 17 00:00:00 2001 From: Foster Brereton Date: Thu, 8 May 2025 16:31:57 -0700 Subject: [PATCH 07/19] readding a reserve call --- src/dwarf.cpp | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/dwarf.cpp b/src/dwarf.cpp index 7c41984..98f7c64 100644 --- a/src/dwarf.cpp +++ b/src/dwarf.cpp @@ -1879,6 +1879,9 @@ die_pair dwarf::implementation::abbreviation_to_die(std::size_t die_address, pro die._tag = a._tag; die._has_children = a._has_children; + // Can we get rid of this memory allocation? This happens a lot... + attributes.reserve(a._attributes.size()); + std::transform(a._attributes.begin(), a._attributes.end(), std::back_inserter(attributes), [&](const auto& x) { // If the attribute is nonfatal, we'll pass over it in `process_attribute`. From f3a4aa16260c9f417119d90b4abcab81aed6e638 Mon Sep 17 00:00:00 2001 From: Foster Brereton Date: Thu, 8 May 2025 16:33:04 -0700 Subject: [PATCH 08/19] readding a reserve call --- include/orc/dwarf_structs.hpp | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/include/orc/dwarf_structs.hpp b/include/orc/dwarf_structs.hpp index 9e53ce0..b8ba176 100644 --- a/include/orc/dwarf_structs.hpp +++ b/include/orc/dwarf_structs.hpp @@ -178,11 +178,15 @@ std::ostream& operator<<(std::ostream& s, const attribute& x); //-------------------------------------------------------------------------------------------------- // I'm not a fan of the name `attribute_sequence`. 
struct attribute_sequence { - using attributes_type = orc::fixed_vector; + using attributes_type = std::vector; using value_type = typename attributes_type::value_type; using iterator = typename attributes_type::iterator; using const_iterator = typename attributes_type::const_iterator; + void reserve(std::size_t n) { + _attributes.reserve(n); + } + bool has(dw::at name) const { auto [valid, iterator] = find(name); return valid; From 586f4e4f9cc125b31b0b46bdaf19047941925719 Mon Sep 17 00:00:00 2001 From: Foster Brereton Date: Thu, 8 May 2025 16:46:26 -0700 Subject: [PATCH 09/19] updating workflow --- .github/workflows/build-and-test.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 5fa8448..a14576f 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -71,7 +71,7 @@ jobs: id: run-orc_test continue-on-error: true run: | - ./build/Release/orc_test ./test/battery --json_mode > test_out.json + ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json" python ${GITHUB_WORKSPACE}/.github/orc_test_to_github_actions.py test_out.json - name: 🛠️ orc_test w/ ASan id: build-orc_test-asan @@ -82,7 +82,7 @@ jobs: id: run-orc_test-asan continue-on-error: true run: | - ./build/Release/orc_test ./test/battery --json_mode > test_out.json + ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json" python ${GITHUB_WORKSPACE}/.github/orc_test_to_github_actions.py test_out.json - name: 🛠️ orc_test w/ TSan id: build-orc_test-tsan @@ -93,7 +93,7 @@ jobs: id: run-orc_test-tsan continue-on-error: true run: | - ./build/Release/orc_test ./test/battery --json_mode > test_out.json + ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json" python ${GITHUB_WORKSPACE}/.github/orc_test_to_github_actions.py test_out.json - name: 🛠️ orc_test w/ UBSan id: build-orc_test-ubsan @@ -104,7 +104,7 @@ jobs: id: run-orc_test-ubsan continue-on-error: true run: | - ./build/Release/orc_test ./test/battery --json_mode > test_out.json + ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json" python ${GITHUB_WORKSPACE}/.github/orc_test_to_github_actions.py test_out.json - name: ✏️ github json uses: jsdaniell/create-json@1.1.2 From bfe893fede9e34657be9fe68f95fd26b4cabdb77 Mon Sep 17 00:00:00 2001 From: Foster Brereton Date: Fri, 9 May 2025 08:36:23 -0700 Subject: [PATCH 10/19] adding new gtest to markdown script and wiring it up to the workflow --- .github/gtest_to_markdown.py | 92 ++++++++++++++++++++++++++++ .github/workflows/build-and-test.yml | 24 ++------ 2 files changed, 96 insertions(+), 20 deletions(-) create mode 100644 .github/gtest_to_markdown.py diff --git a/.github/gtest_to_markdown.py b/.github/gtest_to_markdown.py new file mode 100644 index 0000000..c1e9b2a --- /dev/null +++ b/.github/gtest_to_markdown.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python3 + +import json +import sys +from datetime import datetime +from typing import Dict, List, Any + +def format_duration(milliseconds: float) -> str: + """Convert milliseconds to a human-readable duration.""" + if milliseconds < 1000: + return f"{milliseconds:.2f}ms" + seconds = milliseconds / 1000 + if seconds < 60: + return f"{seconds:.2f}s" + minutes = seconds / 60 + return f"{minutes:.2f}m" + +def format_failure_message(failure: Dict[str, Any]) -> str: + """Format a test failure message.""" + message = [] + if "message" in failure: + 
message.append(failure["message"]) + if "type" in failure: + message.append(f"Type: {failure['type']}") + return "\n".join(message) + +def convert_to_markdown(data: Dict[str, Any]) -> str: + """Convert gtest JSON data to GitHub-flavored markdown.""" + output = [] + + # Add header with timestamp + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + output.append(f"# GTest Results ({timestamp})\n") + + # Add summary + total_tests = len(data.get("testsuites", [])) + passed_tests = sum(1 for suite in data.get("testsuites", []) + if suite.get("status") == "RUN") + failed_tests = total_tests - passed_tests + + output.append("## Summary\n") + output.append(f"- Total Tests: {total_tests}") + output.append(f"- Passed: {passed_tests}") + output.append(f"- Failed: {failed_tests}\n") + + # Add detailed results table + output.append("## Test Results\n") + output.append("| Test Suite | Test Case | Status | Duration |") + output.append("|------------|-----------|--------|----------|") + + for suite in data.get("testsuites", []): + suite_name = suite.get("name", "Unknown Suite") + for test in suite.get("testsuite", []): + test_name = test.get("name", "Unknown Test") + status = "✅ PASS" if test.get("status") == "RUN" else "❌ FAIL" + duration = format_duration(float(test.get("time", 0))) + + # Add the test result row + output.append(f"| {suite_name} | {test_name} | {status} | {duration} |") + + # Add failure details if the test failed + if test.get("status") != "RUN" and "failures" in test: + output.append("\n### Failure Details\n") + for failure in test["failures"]: + output.append("```") + output.append(format_failure_message(failure)) + output.append("```\n") + + return "\n".join(output) + +def main(): + if len(sys.argv) != 2: + print("Usage: python gtest_to_markdown.py ") + sys.exit(1) + + try: + with open(sys.argv[1], 'r') as f: + data = json.load(f) + markdown = convert_to_markdown(data) + print(markdown) + except FileNotFoundError: + print(f"Error: File {sys.argv[1]} not found", file=sys.stderr) + sys.exit(1) + except json.JSONDecodeError: + print(f"Error: Invalid JSON in {sys.argv[1]}", file=sys.stderr) + sys.exit(1) + except Exception as e: + print(f"Error: {str(e)}", file=sys.stderr) + sys.exit(1) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index a14576f..5684fa6 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -72,7 +72,7 @@ jobs: continue-on-error: true run: | ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json" - python ${GITHUB_WORKSPACE}/.github/orc_test_to_github_actions.py test_out.json + python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py test_out.json >> $GITHUB_STEP_SUMMARY - name: 🛠️ orc_test w/ ASan id: build-orc_test-asan continue-on-error: true @@ -83,7 +83,7 @@ jobs: continue-on-error: true run: | ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json" - python ${GITHUB_WORKSPACE}/.github/orc_test_to_github_actions.py test_out.json + python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py test_out.json >> $GITHUB_STEP_SUMMARY - name: 🛠️ orc_test w/ TSan id: build-orc_test-tsan continue-on-error: true @@ -94,7 +94,7 @@ jobs: continue-on-error: true run: | ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json" - python ${GITHUB_WORKSPACE}/.github/orc_test_to_github_actions.py test_out.json + python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py 
test_out.json >> $GITHUB_STEP_SUMMARY - name: 🛠️ orc_test w/ UBSan id: build-orc_test-ubsan continue-on-error: true @@ -105,20 +105,4 @@ jobs: continue-on-error: true run: | ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json" - python ${GITHUB_WORKSPACE}/.github/orc_test_to_github_actions.py test_out.json - - name: ✏️ github json - uses: jsdaniell/create-json@1.1.2 - continue-on-error: true - with: - name: "github.json" - json: ${{ toJSON(github) }} - - name: ✏️ steps json - uses: jsdaniell/create-json@1.1.2 - continue-on-error: true - with: - name: "steps.json" - json: ${{ toJSON(steps) }} - - name: ✍️ job summary - continue-on-error: false - run: | - python ${GITHUB_WORKSPACE}/.github/generate_job_summary.py $GITHUB_STEP_SUMMARY github.json steps.json + python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py test_out.json >> $GITHUB_STEP_SUMMARY From 7b9042ab1d1ccf052f6717694f933a072e6c40a2 Mon Sep 17 00:00:00 2001 From: Foster Brereton Date: Fri, 9 May 2025 08:40:46 -0700 Subject: [PATCH 11/19] string parsing fix --- .github/gtest_to_markdown.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/.github/gtest_to_markdown.py b/.github/gtest_to_markdown.py index c1e9b2a..03884d1 100644 --- a/.github/gtest_to_markdown.py +++ b/.github/gtest_to_markdown.py @@ -53,7 +53,17 @@ def convert_to_markdown(data: Dict[str, Any]) -> str: for test in suite.get("testsuite", []): test_name = test.get("name", "Unknown Test") status = "✅ PASS" if test.get("status") == "RUN" else "❌ FAIL" - duration = format_duration(float(test.get("time", 0))) + time_value = test.get("time", 0) + try: + # Try to convert to float, but handle string values like "0s" + if isinstance(time_value, str) and time_value.endswith('s'): + duration_ms = float(time_value[:-1]) * 1000 # Convert seconds to milliseconds + else: + duration_ms = float(time_value) + duration = format_duration(duration_ms) + except ValueError: + # Fallback if conversion fails + duration = str(time_value) # Add the test result row output.append(f"| {suite_name} | {test_name} | {status} | {duration} |") From a3368d218e04f378eac20a2ee218e419159c19dc Mon Sep 17 00:00:00 2001 From: Foster Brereton Date: Fri, 9 May 2025 08:50:33 -0700 Subject: [PATCH 12/19] workflow script tweak --- .github/gtest_to_markdown.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/gtest_to_markdown.py b/.github/gtest_to_markdown.py index 03884d1..31ad155 100644 --- a/.github/gtest_to_markdown.py +++ b/.github/gtest_to_markdown.py @@ -30,7 +30,10 @@ def convert_to_markdown(data: Dict[str, Any]) -> str: # Add header with timestamp timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") - output.append(f"# GTest Results ({timestamp})\n") + job_name = os.environ.get('GITHUB_JOB', '') + step_name = os.environ.get('GITHUB_WORKFLOW', '') + test_context = f"{job_name}/{step_name}" if job_name and step_name else "" + output.append(f"#{test_context} ({timestamp}) Test Results\n") # Add summary total_tests = len(data.get("testsuites", [])) From 4600393da9951509200cce5b33145ee6df740d37 Mon Sep 17 00:00:00 2001 From: Foster Brereton Date: Fri, 9 May 2025 08:54:18 -0700 Subject: [PATCH 13/19] workflow script tweak --- .github/gtest_to_markdown.py | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/gtest_to_markdown.py b/.github/gtest_to_markdown.py index 31ad155..6e866c2 100644 --- a/.github/gtest_to_markdown.py +++ b/.github/gtest_to_markdown.py @@ -1,6 +1,7 @@ #!/usr/bin/env python3 import json +import os 
 import sys
 from datetime import datetime
 from typing import Dict, List, Any

From db87b2e2807be7f358f9ba27a1112d7fbb225553 Mon Sep 17 00:00:00 2001
From: Foster Brereton
Date: Fri, 9 May 2025 09:07:39 -0700
Subject: [PATCH 14/19] refactor: update gtest_to_markdown.py to require
 context parameter and update workflow

---
 .github/gtest_to_markdown.py         | 22 +++++++++++-----------
 .github/workflows/build-and-test.yml |  8 ++++----
 2 files changed, 15 insertions(+), 15 deletions(-)

diff --git a/.github/gtest_to_markdown.py b/.github/gtest_to_markdown.py
index 6e866c2..1581d6b 100644
--- a/.github/gtest_to_markdown.py
+++ b/.github/gtest_to_markdown.py
@@ -25,16 +25,13 @@ def format_failure_message(failure: Dict[str, Any]) -> str:
         message.append(f"Type: {failure['type']}")
     return "\n".join(message)

-def convert_to_markdown(data: Dict[str, Any]) -> str:
+def convert_to_markdown(data: Dict[str, Any], context: str) -> str:
     """Convert gtest JSON data to GitHub-flavored markdown."""
     output = []

     # Add header with timestamp
     timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
-    job_name = os.environ.get('GITHUB_JOB', '')
-    step_name = os.environ.get('GITHUB_WORKFLOW', '')
-    test_context = f"{job_name}/{step_name}" if job_name and step_name else ""
-    output.append(f"#{test_context} ({timestamp}) Test Results\n")
+    output.append(f"# {context} Results ({timestamp})\n")

     # Add summary
     total_tests = len(data.get("testsuites", []))
@@ -83,20 +80,23 @@ def convert_to_markdown(data: Dict[str, Any]) -> str:
     return "\n".join(output)

 def main():
-    if len(sys.argv) != 2:
-        print("Usage: python gtest_to_markdown.py <gtest_json_file>")
+    if len(sys.argv) != 3:
+        print("Usage: python gtest_to_markdown.py <context> <gtest_json_file>")
         sys.exit(1)

+    context = sys.argv[1]
+    json_file = sys.argv[2]
+
     try:
-        with open(sys.argv[1], 'r') as f:
+        with open(json_file, 'r') as f:
             data = json.load(f)
-        markdown = convert_to_markdown(data)
+        markdown = convert_to_markdown(data, context)
         print(markdown)
     except FileNotFoundError:
-        print(f"Error: File {sys.argv[1]} not found", file=sys.stderr)
+        print(f"Error: File {json_file} not found", file=sys.stderr)
         sys.exit(1)
     except json.JSONDecodeError:
-        print(f"Error: Invalid JSON in {sys.argv[1]}", file=sys.stderr)
+        print(f"Error: Invalid JSON in {json_file}", file=sys.stderr)
         sys.exit(1)
     except Exception as e:
         print(f"Error: {str(e)}", file=sys.stderr)
diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml
index 5684fa6..e3390e6 100644
--- a/.github/workflows/build-and-test.yml
+++ b/.github/workflows/build-and-test.yml
@@ -72,7 +72,7 @@ jobs:
         continue-on-error: true
         run: |
           ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json"
-          python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py test_out.json >> $GITHUB_STEP_SUMMARY
+          python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py "Release Tests" test_out.json >> $GITHUB_STEP_SUMMARY
       - name: 🛠️ orc_test w/ ASan
         id: build-orc_test-asan
         continue-on-error: true
@@ -83,7 +83,7 @@ jobs:
         continue-on-error: true
         run: |
           ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json"
-          python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py test_out.json >> $GITHUB_STEP_SUMMARY
+          python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py "Address Sanitizer Tests" test_out.json >> $GITHUB_STEP_SUMMARY
       - name: 🛠️ orc_test w/ TSan
         id: build-orc_test-tsan
         continue-on-error: true
@@ -94,7 +94,7 @@ jobs:
         continue-on-error: true
         run: |
           ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json"
-          python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py test_out.json >> $GITHUB_STEP_SUMMARY
+          python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py "Thread Sanitizer Tests" test_out.json >> $GITHUB_STEP_SUMMARY
       - name: 🛠️ orc_test w/ UBSan
         id: build-orc_test-ubsan
         continue-on-error: true
@@ -105,4 +105,4 @@ jobs:
         continue-on-error: true
         run: |
           ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json"
-          python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py test_out.json >> $GITHUB_STEP_SUMMARY
+          python ${GITHUB_WORKSPACE}/.github/gtest_to_markdown.py "Undefined Behavior Sanitizer Tests" test_out.json >> $GITHUB_STEP_SUMMARY

From d7010da7845ac3994cebad50877955304694b784 Mon Sep 17 00:00:00 2001
From: Foster Brereton
Date: Fri, 9 May 2025 09:13:49 -0700
Subject: [PATCH 15/19] removing unneeded scripts

---
 .github/generate_job_summary.py       | 72 ---------------------------
 .github/orc_test_to_github_actions.py |  6 ---
 2 files changed, 78 deletions(-)
 delete mode 100755 .github/generate_job_summary.py
 delete mode 100755 .github/orc_test_to_github_actions.py

diff --git a/.github/generate_job_summary.py b/.github/generate_job_summary.py
deleted file mode 100755
index 93e6c6c..0000000
--- a/.github/generate_job_summary.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import sys
-import json
-import re
-
-if __name__ == "__main__":
-    sys.stdout = open(sys.argv[1], "w")
-    github = json.load(open(sys.argv[2], "r"))
-    steps = json.load(open(sys.argv[3], "r"))
-
-    print(f"# {github['workflow']}: Job Summary")
-    print("")
-
-    print("## Details")
-    print(f"- started by: `{github['actor']}`")
-    if "event" in github:
-        event = github['event']
-        if "pull_request" in event:
-            print(f"- branch: `{event['pull_request']['head']['ref']}`")
-        if "action" in event:
-            print(f"- action: `{event['action']}`")
-
-    print("")
-
-    print("## Summary of Steps")
-    print("| Step | Test | Notes | Expected | Reported |")
-    print("|---|---|---|---|---|")
-
-    all_success = True
-    p = re.compile('(?")
-    print(json.dumps(github, indent=4, sort_keys=False))
-    print("")
-
-    print("## steps")
-    print("")
-    print(json.dumps(steps, indent=4, sort_keys=False))
-    print("")
-
-    if not all_success:
-        sys.exit("One or more tests failed")
diff --git a/.github/orc_test_to_github_actions.py b/.github/orc_test_to_github_actions.py
deleted file mode 100755
index 49394a8..0000000
--- a/.github/orc_test_to_github_actions.py
+++ /dev/null
@@ -1,6 +0,0 @@
-import sys
-import json
-
-if __name__ == "__main__":
-    test_results = json.load(open(sys.argv[1], "r"))
-    print(f"::set-output name=orc_test_out::{test_results}");

From ae5393e9f5b61758b9c1dd9f324feeb97fda9fd6 Mon Sep 17 00:00:00 2001
From: Foster Brereton
Date: Fri, 9 May 2025 09:23:03 -0700
Subject: [PATCH 16/19] adding docs

---
 .github/gtest_to_markdown.py | 103 ++++++++++++++++++++++++++++++++++-
 1 file changed, 100 insertions(+), 3 deletions(-)

diff --git a/.github/gtest_to_markdown.py b/.github/gtest_to_markdown.py
index 1581d6b..171aaca 100644
--- a/.github/gtest_to_markdown.py
+++ b/.github/gtest_to_markdown.py
@@ -1,5 +1,26 @@
 #!/usr/bin/env python3

+"""
+Google Test to Markdown Converter
+
+This script converts Google Test (gtest) JSON output into GitHub-flavored markdown format.
+It processes test results and generates a well-formatted markdown document that includes:
+- A timestamped header
+- Summary statistics (total, passed, and failed tests)
+- A detailed table of test results
+- Failure details for any failed tests
+
+The script is designed to be used as a command-line tool, taking two arguments:
+1. context: A string describing the test run context
+2. gtest_json_file: Path to the JSON file containing gtest results
+
+Example usage:
+    python gtest_to_markdown.py "Unit Tests" test_results.json
+
+The output is formatted markdown that can be directly used in GitHub issues, pull requests,
+or documentation.
+"""
+
 import json
 import os
 import sys
@@ -7,7 +28,24 @@ from typing import Dict, List, Any

 def format_duration(milliseconds: float) -> str:
-    """Convert milliseconds to a human-readable duration."""
+    """
+    Convert milliseconds to a human-readable duration string.
+
+    Args:
+        milliseconds (float): Duration in milliseconds
+
+    Returns:
+        str: Human-readable duration string in the format:
+            - "X.XXms" for durations < 1 second
+            - "X.XXs" for durations < 1 minute
+            - "X.XXm" for durations >= 1 minute
+
+    Example:
+        >>> format_duration(500)
+        '500.00ms'
+        >>> format_duration(1500)
+        '1.50s'
+    """
     if milliseconds < 1000:
         return f"{milliseconds:.2f}ms"
     seconds = milliseconds / 1000
@@ -17,7 +55,24 @@ def format_duration(milliseconds: float) -> str:
         return f"{minutes:.2f}m"

 def format_failure_message(failure: Dict[str, Any]) -> str:
-    """Format a test failure message."""
+    """
+    Format a test failure message from the gtest JSON output.
+
+    Args:
+        failure (Dict[str, Any]): A dictionary containing failure information
+            with optional keys:
+            - message: The failure message
+            - type: The type of failure
+
+    Returns:
+        str: Formatted failure message combining the message and type
+            if both are present, otherwise just the message.
+
+    Example:
+        >>> failure = {"message": "Expected 2 but got 3", "type": "AssertionError"}
+        >>> format_failure_message(failure)
+        'Expected 2 but got 3\nType: AssertionError'
+    """
     message = []
     if "message" in failure:
         message.append(failure["message"])
@@ -26,7 +81,30 @@ def format_failure_message(failure: Dict[str, Any]) -> str:
     return "\n".join(message)

 def convert_to_markdown(data: Dict[str, Any], context: str) -> str:
-    """Convert gtest JSON data to GitHub-flavored markdown."""
+    """
+    Convert gtest JSON data to GitHub-flavored markdown.
+
+    This function processes the gtest JSON output and generates a comprehensive
+    markdown document that includes test results, statistics, and failure details.
+
+    Args:
+        data (Dict[str, Any]): The parsed JSON data from gtest output
+        context (str): A string describing the context of the test run
+            (e.g., "Unit Tests", "Integration Tests")
+
+    Returns:
+        str: A complete markdown document containing:
+            - Header with timestamp
+            - Summary statistics
+            - Detailed test results table
+            - Failure details for failed tests
+
+    The output markdown includes:
+    - A table with columns: Test Suite, Test Case, Status, Duration
+    - Emoji indicators (✅ for pass, ❌ for fail)
+    - Formatted duration strings
+    - Code blocks for failure messages
+    """
     output = []

     # Add header with timestamp
@@ -80,6 +158,25 @@ def convert_to_markdown(data: Dict[str, Any], context: str) -> str:
     return "\n".join(output)

 def main():
+    """
+    Main entry point for the script.
+
+    Processes command line arguments and converts gtest JSON output to markdown.
+    The script expects two arguments:
+    1. context: A string describing the test run context
+    2. gtest_json_file: Path to the JSON file containing gtest results
+
+    The script will:
+    - Read and parse the JSON file
+    - Convert the data to markdown format
+    - Print the markdown to stdout
+
+    Exits with status code 1 if:
+    - Incorrect number of arguments
+    - File not found
+    - Invalid JSON
+    - Any other error occurs
+    """
     if len(sys.argv) != 3:
         print("Usage: python gtest_to_markdown.py <context> <gtest_json_file>")
         sys.exit(1)

From 67aaaae3b92c815b9075f3bd1f2e6f59b88affbe Mon Sep 17 00:00:00 2001
From: Foster Brereton
Date: Fri, 9 May 2025 10:18:39 -0700
Subject: [PATCH 17/19] putting a comment back in

---
 include/orc/dwarf_structs.hpp | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/include/orc/dwarf_structs.hpp b/include/orc/dwarf_structs.hpp
index b8ba176..2984872 100644
--- a/include/orc/dwarf_structs.hpp
+++ b/include/orc/dwarf_structs.hpp
@@ -177,14 +177,19 @@ std::ostream& operator<<(std::ostream& s, const attribute& x);

 //--------------------------------------------------------------------------------------------------
 // I'm not a fan of the name `attribute_sequence`.
+//
+// TODO: Consider using `std::array` instead of `std::vector` to avoid dynamic allocation. This
+// would require we cap the max number of attributes at compile time, which should be okay as long
+// as we pick a reasonable number. On the other hand, that would make DIEs with smaller sets of
+// attributes less memory efficient. It's the classic space/time tradeoff.
 struct attribute_sequence {
     using attributes_type = std::vector<attribute>;
     using value_type = typename attributes_type::value_type;
     using iterator = typename attributes_type::iterator;
     using const_iterator = typename attributes_type::const_iterator;

-    void reserve(std::size_t n) {
-        _attributes.reserve(n);
+    void reserve(std::size_t size) {
+        _attributes.reserve(size);
     }

     bool has(dw::at name) const {

From d50a95ddc0aba88fa6e7d3938674e6138e82d4a4 Mon Sep 17 00:00:00 2001
From: Foster Brereton
Date: Fri, 9 May 2025 10:22:30 -0700
Subject: [PATCH 18/19] tweaking the tagged release script

---
 .github/workflows/tagged-release.yml | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/.github/workflows/tagged-release.yml b/.github/workflows/tagged-release.yml
index 52f3a54..a48dda9 100644
--- a/.github/workflows/tagged-release.yml
+++ b/.github/workflows/tagged-release.yml
@@ -15,6 +15,10 @@ jobs:
           python-version: 3.8
       - name: ⬇️ Checkout sources
         uses: actions/checkout@v3
+        with:
+          lfs: true
+      - name: 🏗️ Checkout LFS objects
+        run: git lfs pull
       - name: 🏗️ Setup project files
         run: |
           mkdir build

From 58c09b1af229e8b74a6dd345348142a885578004 Mon Sep 17 00:00:00 2001
From: Foster Brereton
Date: Fri, 9 May 2025 14:55:51 -0700
Subject: [PATCH 19/19] Improve gtest to markdown conversion: Add duration
 parsing, enhance failure formatting, and improve output table

---
 .github/gtest_to_markdown.py | 97 +++++++++++++++++++++---------------
 1 file changed, 56 insertions(+), 41 deletions(-)

diff --git a/.github/gtest_to_markdown.py b/.github/gtest_to_markdown.py
index 171aaca..675787c 100644
--- a/.github/gtest_to_markdown.py
+++ b/.github/gtest_to_markdown.py
@@ -54,6 +54,32 @@ def format_duration(milliseconds: float) -> str:
     minutes = seconds / 60
     return f"{minutes:.2f}m"

+def parse_duration(time_value: Any) -> float:
+    """
+    Parse a duration value from gtest output into milliseconds.
+
+    Args:
+        time_value (Any): The duration value from gtest, which could be:
+            - A float (milliseconds)
+            - A string ending in 's' (seconds)
+            - Any other value that should be converted to float
+
+    Returns:
+        float: Duration in milliseconds
+
+    Example:
+        >>> parse_duration(500)
+        500.0
+        >>> parse_duration("1.5s")
+        1500.0
+    """
+    try:
+        if isinstance(time_value, str) and time_value.endswith('s'):
+            return float(time_value[:-1]) * 1000  # Convert seconds to milliseconds
+        return float(time_value)
+    except (ValueError, TypeError):
+        return 0.0  # Return 0 for invalid values
+
 def format_failure_message(failure: Dict[str, Any]) -> str:
     """
     Format a test failure message from the gtest JSON output.
@@ -61,24 +87,20 @@ def format_failure_message(failure: Dict[str, Any]) -> str:
     Args:
         failure (Dict[str, Any]): A dictionary containing failure information
             with optional keys:
-            - message: The failure message
+            - failure: The failure message
             - type: The type of failure

     Returns:
-        str: Formatted failure message combining the message and type
-            if both are present, otherwise just the message.
+        str: The message with all newlines replaced with "<br>"

     Example:
-        >>> failure = {"message": "Expected 2 but got 3", "type": "AssertionError"}
+        >>> failure = {"failure": "Expected 2\nbut got 3"}
         >>> format_failure_message(failure)
-        'Expected 2 but got 3\nType: AssertionError'
+        'Expected 2<br>but got 3'
     """
-    message = []
-    if "message" in failure:
-        message.append(failure["message"])
-    if "type" in failure:
-        message.append(f"Type: {failure['type']}")
-    return "\n".join(message)
+    if "failure" in failure:
+        return "<br>" + failure["failure"].replace("\n", "<br>") + "<br>"
+    return ""

 def convert_to_markdown(data: Dict[str, Any], context: str) -> str:
     """
@@ -112,48 +134,41 @@ def convert_to_markdown(data: Dict[str, Any], context: str) -> str:
     output.append(f"# {context} Results ({timestamp})\n")

     # Add summary
-    total_tests = len(data.get("testsuites", []))
-    passed_tests = sum(1 for suite in data.get("testsuites", [])
-                       if suite.get("status") == "RUN")
-    failed_tests = total_tests - passed_tests
+    total_tests = data.get("tests", 0)
+    failed_tests = data.get("failures", 0)
+    disabled_tests = data.get("disabled", 0)
+    error_tests = data.get("errors", 0)
+    tests_duration = format_duration(parse_duration(data.get("time", 0)))
+    passed_tests = total_tests - failed_tests

     output.append("## Summary\n")
-    output.append(f"- Total Tests: {total_tests}")
+    output.append(f"- Tests: {total_tests}")
     output.append(f"- Passed: {passed_tests}")
-    output.append(f"- Failed: {failed_tests}\n")
+    output.append(f"- Failed: {failed_tests}")
+    output.append(f"- Disabled: {disabled_tests}")
+    output.append(f"- Errors: {error_tests}")
+    output.append(f"- Duration: {tests_duration}\n")

     # Add detailed results table
-    output.append("## Test Results\n")
-    output.append("| Test Suite | Test Case | Status | Duration |")
-    output.append("|------------|-----------|--------|----------|")
+    output.append("## Details\n")
+    output.append("| Suite | Case | Status | Duration | Details |")
+    output.append("|-------|------|--------|----------|---------|")

     for suite in data.get("testsuites", []):
         suite_name = suite.get("name", "Unknown Suite")
         for test in suite.get("testsuite", []):
             test_name = test.get("name", "Unknown Test")
-            status = "✅ PASS" if test.get("status") == "RUN" else "❌ FAIL"
-            time_value = test.get("time", 0)
-            try:
-                # Try to convert to float, but handle string values like "0s"
-                if isinstance(time_value, str) and time_value.endswith('s'):
-                    duration_ms = float(time_value[:-1]) * 1000  # Convert seconds to milliseconds
-                else:
-                    duration_ms = float(time_value)
-                duration = format_duration(duration_ms)
-            except ValueError:
-                # Fallback if conversion fails
-                duration = str(time_value)
-
-            # Add the test result row
-            output.append(f"| {suite_name} | {test_name} | {status} | {duration} |")
-
+            status = "❌ FAIL" if "failures" in test else "✅ PASS"
+            duration = format_duration(parse_duration(test.get("time", 0)))
+            details = []
+
             # Add failure details if the test failed
-            if test.get("status") != "RUN" and "failures" in test:
-                output.append("\n### Failure Details\n")
+            if "failures" in test:
                 for failure in test["failures"]:
-                    output.append("```")
-                    output.append(format_failure_message(failure))
-                    output.append("```\n")
+                    details.append(format_failure_message(failure))
+
+            # Add the test result row
+            output.append(f"| {suite_name} | {test_name} | {status} | {duration} | {'<br>'.join(details)}")

     return "\n".join(output)
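
For local spot-checking of the final script, the two commands below mirror what the
build-and-test workflow runs; the "Local Tests" context string and the summary.md
redirect are illustrative only (in CI the output is appended to $GITHUB_STEP_SUMMARY):

    ./build/Release/orc_test ./test/battery --gtest_output="json:test_out.json"
    python .github/gtest_to_markdown.py "Local Tests" test_out.json > summary.md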