26 cl::desc(
"Also write profiles with all-zero counters. "
27 "Intended for testing/debugging."));
30 raw_ostream &Out, std::optional<unsigned> VersionOverride,
37 Writer.EnterBlockInfoBlock();
39 auto DescribeBlock = [&](unsigned ID, StringRef Name) {
40 Writer.EmitRecord(bitc::BLOCKINFO_CODE_SETBID,
41 SmallVector<unsigned, 1>{ID});
42 Writer.EmitRecord(bitc::BLOCKINFO_CODE_BLOCKNAME,
43 llvm::arrayRefFromStringRef(Name));
46 auto DescribeRecord = [&](
unsigned RecordID,
StringRef Name) {
48 Data.push_back(RecordID);
49 llvm::append_range(Data, Name);
50 Writer.EmitRecord(bitc::BLOCKINFO_CODE_SETRECORDNAME, Data);
53 DescribeRecord(PGOCtxProfileRecords::Version,
"Version");
58 "TotalRootEntryCount");
73 const auto Version = VersionOverride.value_or(CurrentVersion);
74 Writer.EmitRecord(PGOCtxProfileRecords::Version,
90void PGOCtxProfileWriter::writeCallsiteIndex(uint32_t
CallsiteIndex) {
105 const ContextNode &Node) {
108 if (!IncludeEmpty && (
Node.counters_size() > 0 &&
Node.entrycount() == 0))
111 writeGuid(
Node.guid());
113 writeCounters({
Node.counters(),
Node.counters_size()});
114 writeSubcontexts(Node);
118void PGOCtxProfileWriter::writeSubcontexts(
const ContextNode &Node) {
119 for (uint32_t
I = 0U;
I <
Node.callsites_size(); ++
I)
120 for (
const auto *Subcontext =
Node.subContexts()[
I]; Subcontext;
121 Subcontext = Subcontext->next())
122 writeNode(
I, *Subcontext);
144 writeGuid(RootNode.
guid());
149 for (
const auto *
P = Unhandled;
P;
P =
P->next())
150 writeFlat(
P->guid(),
P->counters(),
P->counters_size());
153 writeSubcontexts(RootNode);
161 writeCounters({Buffer,
Size});
169using SerializableFlatProfileRepresentation =
170 std::pair<ctx_profile::GUID, std::vector<uint64_t>>;
172struct SerializableCtxRepresentation {
175 std::vector<std::vector<SerializableCtxRepresentation>> Callsites;
178struct SerializableRootRepresentation :
public SerializableCtxRepresentation {
180 std::vector<SerializableFlatProfileRepresentation> Unhandled;
183struct SerializableProfileRepresentation {
184 std::vector<SerializableRootRepresentation> Contexts;
185 std::vector<SerializableFlatProfileRepresentation> FlatProfiles;
189createNode(std::vector<std::unique_ptr<
char[]>> &Nodes,
190 const std::vector<SerializableCtxRepresentation> &DCList);
195createNode(std::vector<std::unique_ptr<
char[]>> &Nodes,
196 const SerializableCtxRepresentation &DC,
199 DC.Callsites.size());
200 auto *Mem = Nodes.emplace_back(std::make_unique<
char[]>(AllocSize)).get();
201 std::memset(Mem, 0, AllocSize);
203 DC.Callsites.size(),
Next);
204 std::memcpy(
Ret->counters(), DC.Counters.data(),
205 sizeof(uint64_t) * DC.Counters.size());
214createNode(std::vector<std::unique_ptr<
char[]>> &Nodes,
215 const std::vector<SerializableCtxRepresentation> &DCList) {
217 for (
const auto &DC : DCList)
238 IO.
mapRequired(
"TotalRootEntryCount", R.TotalRootEntryCount);
252 SerializableFlatProfileRepresentation &SFPR) {
260 SerializableProfileRepresentation SPR;
264 std::vector<std::unique_ptr<char[]>> Nodes;
270 if (!SPR.Contexts.empty()) {
272 for (
const auto &DC : SPR.Contexts) {
273 auto *TopList = createNode(Nodes, DC);
276 "Unexpected error converting internal structure to ctx profile");
279 for (
const auto &U : DC.Unhandled) {
280 SerializableCtxRepresentation Unhandled;
281 Unhandled.Guid = U.first;
282 Unhandled.Counters = U.second;
283 FirstUnhandled = createNode(Nodes, Unhandled, FirstUnhandled);
285 Writer.
writeContextual(*TopList, FirstUnhandled, DC.TotalRootEntryCount);
289 if (!SPR.FlatProfiles.empty()) {
static cl::opt< bool > IncludeEmptyOpt("ctx-prof-include-empty", cl::init(false), cl::desc("Also write profiles with all-zero counters. " "Intended for testing/debugging."))
#define LLVM_YAML_IS_SEQUENCE_VECTOR(type)
Utility for declaring that a std::vector of a particular type should be considered a YAML sequence.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
Lightweight error class with error context and mandatory checking.
static ErrorSuccess success()
Create a success value.
Write one or more ContextNodes to the provided raw_ostream.
void startFlatSection() override
void writeContextual(const ctx_profile::ContextNode &RootNode, const ctx_profile::ContextNode *Unhandled, uint64_t TotalRootEntryCount) override
static constexpr unsigned VBREncodingBits
PGOCtxProfileWriter(raw_ostream &Out, std::optional< unsigned > VersionOverride=std::nullopt, bool IncludeEmpty=false)
void endFlatSection() override
static constexpr StringRef ContainerMagic
static constexpr unsigned CodeLen
void startContextSection() override
void endContextSection() override
void writeFlat(ctx_profile::GUID Guid, const uint64_t *Buffer, size_t BufferSize) override
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
StringRef - Represent a constant reference to a string, i.e.
uint64_t entrycount() const
static size_t getAllocSize(uint32_t NumCounters, uint32_t NumCallsites)
uint32_t counters_size() const
This class implements an extremely fast bulk output stream that can only output to a stream.
raw_ostream & write(unsigned char C)
void mapOptional(const char *Key, T &Val)
void mapRequired(const char *Key, T &Val)
@ C
The default llvm calling convention, compatible with C.
initializer< Ty > init(const Ty &Val)
AstPtr createNode(ASTNode::Type T, Accessor A, ASTNode *Parent, llvm::StringMap< AstPtr > &Partials, llvm::StringMap< Lambda > &Lambdas, llvm::StringMap< SectionLambda > &SectionLambdas, EscapeMap &Escapes)
NodeAddr< NodeBase * > Node
This is an optimization pass for GlobalISel generic memory operations.
auto enumerate(FirstRange &&First, RestRanges &&...Rest)
Given two or more input ranges, returns a new range whose values are tuples (A, B,...
LLVM_ABI Error createCtxProfFromYAML(StringRef Profile, raw_ostream &Out)
Error createStringError(std::error_code EC, char const *Fmt, const Ts &... Vals)
Create formatted StringError object.
FunctionAddr VTableAddr uintptr_t uintptr_t Version
FunctionAddr VTableAddr uintptr_t uintptr_t Data
FunctionAddr VTableAddr Next
@ FlatProfilesSectionBlockID
This class should be specialized by any type that needs to be converted to/from a YAML mapping.
static void mapping(yaml::IO &IO, SerializableCtxRepresentation &SCR)
static void mapping(yaml::IO &IO, SerializableFlatProfileRepresentation &SFPR)
static void mapping(yaml::IO &IO, SerializableProfileRepresentation &SPR)
static void mapping(yaml::IO &IO, SerializableRootRepresentation &R)