//===- llvm/IR/Metadata.h - Metadata definitions ----------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// @file
/// This file contains the declarations for metadata subclasses.
/// They represent the different flavors of metadata that live in LLVM.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_IR_METADATA_H
#define LLVM_IR_METADATA_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseMapInfo.h"
#include "llvm/ADT/PointerUnion.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/ilist_node.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/CBindingWrapping.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <array>
#include <cassert>
#include <climits>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <memory>
#include <string>
#include <type_traits>
#include <utility>

namespace llvm {

class Module;
class ModuleSlotTracker;
class raw_ostream;
class DPValue;
template <typename T> class StringMapEntry;
template <typename ValueTy> class StringMapEntryStorage;
class Type;

enum LLVMConstants : uint32_t {
  DEBUG_METADATA_VERSION = 3 // Current debug info version number.
};

/// Magic number in the value profile metadata showing a target has been
/// promoted for the instruction and shouldn't be promoted again.
const uint64_t NOMORE_ICP_MAGICNUM = -1;

/// Root of the metadata hierarchy.
///
/// This is a root class for typeless data in the IR.
class Metadata {
  friend class ReplaceableMetadataImpl;

  /// RTTI.
  const unsigned char SubclassID;

protected:
  /// Active type of storage.
  enum StorageType { Uniqued, Distinct, Temporary };

  /// Storage flag for non-uniqued, otherwise unowned, metadata.
  unsigned char Storage : 7;

  unsigned char SubclassData1 : 1;
  unsigned short SubclassData16 = 0;
  unsigned SubclassData32 = 0;

public:
  enum MetadataKind {
#define HANDLE_METADATA_LEAF(CLASS) CLASS##Kind,
#include "llvm/IR/Metadata.def"
  };

protected:
  Metadata(unsigned ID, StorageType Storage)
      : SubclassID(ID), Storage(Storage), SubclassData1(false) {
    static_assert(sizeof(*this) == 8, "Metadata fields poorly packed");
  }

  ~Metadata() = default;

  /// Default handling of a changed operand, which asserts.
  ///
  /// If subclasses pass themselves in as owners to a tracking node reference,
  /// they must provide an implementation of this method.
  void handleChangedOperand(void *, Metadata *) {
    llvm_unreachable("Unimplemented in Metadata subclass");
  }

public:
  unsigned getMetadataID() const { return SubclassID; }

  /// User-friendly dump.
  ///
  /// If \c M is provided, metadata nodes will be numbered canonically;
  /// otherwise, pointer addresses are substituted.
  ///
  /// Note: this uses an explicit overload instead of default arguments so that
  /// the nullptr version is easy to call from a debugger.
  ///
  /// @{
  void dump() const;
  void dump(const Module *M) const;
  /// @}

  /// Print.
  ///
  /// Prints definition of \c this.
  ///
  /// If \c M is provided, metadata nodes will be numbered canonically;
  /// otherwise, pointer addresses are substituted.
  /// @{
  void print(raw_ostream &OS, const Module *M = nullptr,
             bool IsForDebug = false) const;
  void print(raw_ostream &OS, ModuleSlotTracker &MST, const Module *M = nullptr,
             bool IsForDebug = false) const;
  /// @}

  /// Print as operand.
  ///
  /// Prints reference of \c this.
  ///
  /// If \c M is provided, metadata nodes will be numbered canonically;
  /// otherwise, pointer addresses are substituted.
  /// @{
  void printAsOperand(raw_ostream &OS, const Module *M = nullptr) const;
  void printAsOperand(raw_ostream &OS, ModuleSlotTracker &MST,
                      const Module *M = nullptr) const;
  /// @}
};

// Create wrappers for C Binding types (see CBindingWrapping.h).
DEFINE_ISA_CONVERSION_FUNCTIONS(Metadata, LLVMMetadataRef)

// Specialized opaque metadata conversions.
inline Metadata **unwrap(LLVMMetadataRef *MDs) {
  return reinterpret_cast<Metadata**>(MDs);
}

#define HANDLE_METADATA(CLASS) class CLASS;
#include "llvm/IR/Metadata.def"

// Provide specializations of isa so that we don't need definitions of
// subclasses to see if the metadata is a subclass.
#define HANDLE_METADATA_LEAF(CLASS)                                            \
  template <> struct isa_impl<CLASS, Metadata> {                               \
    static inline bool doit(const Metadata &MD) {                              \
      return MD.getMetadataID() == Metadata::CLASS##Kind;                      \
    }                                                                          \
  };
#include "llvm/IR/Metadata.def"

inline raw_ostream &operator<<(raw_ostream &OS, const Metadata &MD) {
  MD.print(OS);
  return OS;
}
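
// Example (illustrative only): the isa_impl specializations above let the
// usual casting machinery dispatch on Metadata::getMetadataID() without the
// subclass definitions being visible, and operator<< prints a node's
// definition.  Assuming an existing Metadata *MD and raw_ostream &OS:
//
// \code
//     if (auto *S = dyn_cast<MDString>(MD))
//       OS << "string: " << S->getString() << "\n";
//     else if (auto *N = dyn_cast<MDNode>(MD))
//       OS << "node with " << N->getNumOperands() << " operands\n";
// \endcode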

/// Metadata wrapper in the Value hierarchy.
///
/// A member of the \a Value hierarchy to represent a reference to metadata.
/// This allows, e.g., intrinsics to have metadata as operands.
///
/// Notably, this is the only thing in either hierarchy that is allowed to
/// reference \a LocalAsMetadata.
class MetadataAsValue : public Value {
  friend class ReplaceableMetadataImpl;
  friend class LLVMContextImpl;

  Metadata *MD;

  MetadataAsValue(Type *Ty, Metadata *MD);

  /// Drop use of metadata (during teardown).
  void dropUse() { MD = nullptr; }

public:
  ~MetadataAsValue();

  static MetadataAsValue *get(LLVMContext &Context, Metadata *MD);
  static MetadataAsValue *getIfExists(LLVMContext &Context, Metadata *MD);

  Metadata *getMetadata() const { return MD; }

  static bool classof(const Value *V) {
    return V->getValueID() == MetadataAsValueVal;
  }

private:
  void handleChangedMetadata(Metadata *MD);
  void track();
  void untrack();
};
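
// Example (illustrative sketch): metadata is passed to an intrinsic call by
// wrapping it as a Value.  Assuming an existing LLVMContext &Ctx and an
// MDNode *N:
//
// \code
//     MetadataAsValue *MAV = MetadataAsValue::get(Ctx, N);
//     // MAV can now be used as a call operand; getMetadata() recovers N.
//     Metadata *Wrapped = MAV->getMetadata();
// \endcode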

/// Base class for tracking ValueAsMetadata/DIArgLists with user lookups and
/// Owner callbacks outside of ValueAsMetadata.
///
/// Currently only inherited by DPValue; if other classes need to use it, then
/// a SubclassID will need to be added (either as a new field or by making
/// DebugValue into a PointerIntUnion) to discriminate between the subclasses in
/// lookup and callback handling.
class DebugValueUser {
protected:
  // Capacity to store 3 debug values.
  // TODO: Not all DebugValueUser instances need all 3 elements, if we
  // restructure the DPValue class then we can template parameterize this array
  // size.
  std::array<Metadata *, 3> DebugValues;

  ArrayRef<Metadata *> getDebugValues() const { return DebugValues; }

public:
  DPValue *getUser();
  const DPValue *getUser() const;
  /// To be called by ReplaceableMetadataImpl::replaceAllUsesWith, where `Old`
  /// is a pointer to one of the pointers in `DebugValues` (so should be type
  /// Metadata**), and `NewDebugValue` is the new Metadata* that is replacing
  /// *Old.
  /// For manually replacing elements of DebugValues,
  /// `resetDebugValue(Idx, NewDebugValue)` should be used instead.
  void handleChangedValue(void *Old, Metadata *NewDebugValue);
  DebugValueUser() = default;
  explicit DebugValueUser(std::array<Metadata *, 3> DebugValues)
      : DebugValues(DebugValues) {
    trackDebugValues();
  }
  DebugValueUser(DebugValueUser &&X) {
    DebugValues = X.DebugValues;
    retrackDebugValues(X);
  }
  DebugValueUser(const DebugValueUser &X) {
    DebugValues = X.DebugValues;
    trackDebugValues();
  }

  DebugValueUser &operator=(DebugValueUser &&X) {
    if (&X == this)
      return *this;

    untrackDebugValues();
    DebugValues = X.DebugValues;
    retrackDebugValues(X);
    return *this;
  }

  DebugValueUser &operator=(const DebugValueUser &X) {
    if (&X == this)
      return *this;

    untrackDebugValues();
    DebugValues = X.DebugValues;
    trackDebugValues();
    return *this;
  }

  ~DebugValueUser() { untrackDebugValues(); }

  void resetDebugValues() {
    untrackDebugValues();
    DebugValues.fill(nullptr);
  }

  void resetDebugValue(size_t Idx, Metadata *DebugValue) {
    assert(Idx < 3 && "Invalid debug value index.");
    untrackDebugValue(Idx);
    DebugValues[Idx] = DebugValue;
    trackDebugValue(Idx);
  }

  bool operator==(const DebugValueUser &X) const {
    return DebugValues == X.DebugValues;
  }
  bool operator!=(const DebugValueUser &X) const {
    return DebugValues != X.DebugValues;
  }

private:
  void trackDebugValue(size_t Idx);
  void trackDebugValues();

  void untrackDebugValue(size_t Idx);
  void untrackDebugValues();

  void retrackDebugValues(DebugValueUser &X);
};

/// API for tracking metadata references through RAUW and deletion.
///
/// Shared API for updating \a Metadata pointers in subclasses that support
/// RAUW.
///
/// This API is not meant to be used directly.  See \a TrackingMDRef for a
/// user-friendly tracking reference.
class MetadataTracking {
public:
  /// Track the reference to metadata.
  ///
  /// Register \c MD with \c *MD, if the subclass supports tracking.  If \c *MD
  /// gets RAUW'ed, \c MD will be updated to the new address.  If \c *MD gets
  /// deleted, \c MD will be set to \c nullptr.
  ///
  /// If tracking isn't supported, \c *MD will not change.
  ///
  /// \return true iff tracking is supported by \c MD.
  static bool track(Metadata *&MD) {
    return track(&MD, *MD, static_cast<Metadata *>(nullptr));
  }

  /// Track the reference to metadata for \a Metadata.
  ///
  /// As \a track(Metadata*&), but with support for calling back to \c Owner to
  /// tell it that its operand changed.  This could trigger \c Owner being
  /// re-uniqued.
  static bool track(void *Ref, Metadata &MD, Metadata &Owner) {
    return track(Ref, MD, &Owner);
  }

  /// Track the reference to metadata for \a MetadataAsValue.
  ///
  /// As \a track(Metadata*&), but with support for calling back to \c Owner to
  /// tell it that its operand changed.  This could trigger \c Owner being
  /// re-uniqued.
  static bool track(void *Ref, Metadata &MD, MetadataAsValue &Owner) {
    return track(Ref, MD, &Owner);
  }

  /// Track the reference to metadata for \a DebugValueUser.
  ///
  /// As \a track(Metadata*&), but with support for calling back to \c Owner to
  /// tell it that its operand changed.  This could trigger \c Owner being
  /// re-uniqued.
  static bool track(void *Ref, Metadata &MD, DebugValueUser &Owner) {
    return track(Ref, MD, &Owner);
  }

  /// Stop tracking a reference to metadata.
  ///
  /// Stops \c *MD from tracking \c MD.
  static void untrack(Metadata *&MD) { untrack(&MD, *MD); }
  static void untrack(void *Ref, Metadata &MD);

  /// Move tracking from one reference to another.
  ///
  /// Semantically equivalent to \c untrack(MD) followed by \c track(New),
  /// except that ownership callbacks are maintained.
  ///
  /// Note: it is an error if \c *MD does not equal \c New.
  ///
  /// \return true iff tracking is supported by \c MD.
  static bool retrack(Metadata *&MD, Metadata *&New) {
    return retrack(&MD, *MD, &New);
  }
  static bool retrack(void *Ref, Metadata &MD, void *New);

  /// Check whether metadata is replaceable.
  static bool isReplaceable(const Metadata &MD);

  using OwnerTy = PointerUnion<MetadataAsValue *, Metadata *, DebugValueUser *>;

private:
  /// Track a reference to metadata for an owner.
  ///
  /// Generalized version of tracking.
  static bool track(void *Ref, Metadata &MD, OwnerTy Owner);
};
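
// Example (illustrative sketch, not a recommended client pattern): tracking
// keeps a Metadata pointer up to date across RAUW of replaceable metadata.
// Assuming an existing LLVMContext &Ctx:
//
// \code
//     TempMDTuple Temp = MDTuple::getTemporary(Ctx, std::nullopt);
//     Metadata *Ref = Temp.get();
//     MetadataTracking::track(Ref);          // Ref now follows RAUW.
//     MDNode *Permanent = MDNode::get(Ctx, std::nullopt);
//     Temp->replaceAllUsesWith(Permanent);   // Ref == Permanent afterwards.
//     MetadataTracking::untrack(Ref);
// \endcode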

/// Shared implementation of use-lists for replaceable metadata.
///
/// Most metadata cannot be RAUW'ed.  This is a shared implementation of
/// use-lists and associated API for the three that support it (
/// \a ValueAsMetadata, \a TempMDNode, and \a DIArgList).
class ReplaceableMetadataImpl {
  friend class MetadataTracking;

public:
  using OwnerTy = MetadataTracking::OwnerTy;

private:
  LLVMContext &Context;
  uint64_t NextIndex = 0;
  SmallDenseMap<void *, std::pair<OwnerTy, uint64_t>, 4> UseMap;

public:
  ReplaceableMetadataImpl(LLVMContext &Context) : Context(Context) {}

  ~ReplaceableMetadataImpl() {
    assert(UseMap.empty() && "Cannot destroy in-use replaceable metadata");
  }

  LLVMContext &getContext() const { return Context; }

  /// Replace all uses of this with MD.
  ///
  /// Replace all uses of this with \c MD, which is allowed to be null.
  void replaceAllUsesWith(Metadata *MD);
  /// Replace all uses of the constant with Undef in debug info metadata.
  static void SalvageDebugInfo(const Constant &C);
  /// Returns the list of all DIArgList users of this.
  SmallVector<Metadata *> getAllArgListUsers();
  /// Returns the list of all DPValue users of this.
  SmallVector<DPValue *> getAllDPValueUsers();

  /// Resolve all uses of this.
  ///
  /// Resolve all uses of this, turning off RAUW permanently.  If \c
  /// ResolveUsers, call \a MDNode::resolve() on any users whose last operand
  /// is resolved.
  void resolveAllUses(bool ResolveUsers = true);

  unsigned getNumUses() const { return UseMap.size(); }

private:
  void addRef(void *Ref, OwnerTy Owner);
  void dropRef(void *Ref);
  void moveRef(void *Ref, void *New, const Metadata &MD);

  /// Lazily construct RAUW support on MD.
  ///
  /// If this is an unresolved MDNode, RAUW support will be created on-demand.
  /// ValueAsMetadata always has RAUW support.
  static ReplaceableMetadataImpl *getOrCreate(Metadata &MD);

  /// Get RAUW support on MD, if it exists.
  static ReplaceableMetadataImpl *getIfExists(Metadata &MD);

  /// Check whether this node will support RAUW.
  ///
  /// Returns \c true unless getOrCreate() would return null.
  static bool isReplaceable(const Metadata &MD);
};

/// Value wrapper in the Metadata hierarchy.
///
/// This is a custom value handle that allows other metadata to refer to
/// classes in the Value hierarchy.
///
/// Because of full uniquing support, each value is only wrapped by a single \a
/// ValueAsMetadata object, so the lookup maps are far more efficient than
/// those using ValueHandleBase.
class ValueAsMetadata : public Metadata, ReplaceableMetadataImpl {
  friend class ReplaceableMetadataImpl;
  friend class LLVMContextImpl;

  Value *V;

  /// Drop users without RAUW (during teardown).
  void dropUsers() {
    ReplaceableMetadataImpl::resolveAllUses(/* ResolveUsers */ false);
  }

protected:
  ValueAsMetadata(unsigned ID, Value *V)
      : Metadata(ID, Uniqued), ReplaceableMetadataImpl(V->getContext()), V(V) {
    assert(V && "Expected valid value");
  }

  ~ValueAsMetadata() = default;

public:
  static ValueAsMetadata *get(Value *V);

  static ConstantAsMetadata *getConstant(Value *C) {
    return cast<ConstantAsMetadata>(get(C));
  }

  static LocalAsMetadata *getLocal(Value *Local) {
    return cast<LocalAsMetadata>(get(Local));
  }

  static ValueAsMetadata *getIfExists(Value *V);

  static ConstantAsMetadata *getConstantIfExists(Value *C) {
    return cast_or_null<ConstantAsMetadata>(getIfExists(C));
  }

  static LocalAsMetadata *getLocalIfExists(Value *Local) {
    return cast_or_null<LocalAsMetadata>(getIfExists(Local));
  }

  Value *getValue() const { return V; }
  Type *getType() const { return V->getType(); }
  LLVMContext &getContext() const { return V->getContext(); }

  SmallVector<Metadata *> getAllArgListUsers() {
    return ReplaceableMetadataImpl::getAllArgListUsers();
  }
  SmallVector<DPValue *> getAllDPValueUsers() {
    return ReplaceableMetadataImpl::getAllDPValueUsers();
  }

  static void handleDeletion(Value *V);
  static void handleRAUW(Value *From, Value *To);

protected:
  /// Handle collisions after \a Value::replaceAllUsesWith().
  ///
  /// RAUW isn't supported directly for \a ValueAsMetadata, but if the wrapped
  /// \a Value gets RAUW'ed and the target already exists, this is used to
  /// merge the two metadata nodes.
  void replaceAllUsesWith(Metadata *MD) {
    ReplaceableMetadataImpl::replaceAllUsesWith(MD);
  }

public:
  static bool classof(const Metadata *MD) {
    return MD->getMetadataID() == LocalAsMetadataKind ||
           MD->getMetadataID() == ConstantAsMetadataKind;
  }
};

class ConstantAsMetadata : public ValueAsMetadata {
  friend class ValueAsMetadata;

  ConstantAsMetadata(Constant *C)
      : ValueAsMetadata(ConstantAsMetadataKind, C) {}

public:
  static ConstantAsMetadata *get(Constant *C) {
    return ValueAsMetadata::getConstant(C);
  }

  static ConstantAsMetadata *getIfExists(Constant *C) {
    return ValueAsMetadata::getConstantIfExists(C);
  }

  Constant *getValue() const {
    return cast<Constant>(ValueAsMetadata::getValue());
  }

  static bool classof(const Metadata *MD) {
    return MD->getMetadataID() == ConstantAsMetadataKind;
  }
};

class LocalAsMetadata : public ValueAsMetadata {
  friend class ValueAsMetadata;

  LocalAsMetadata(Value *Local)
      : ValueAsMetadata(LocalAsMetadataKind, Local) {
    assert(!isa<Constant>(Local) && "Expected local value");
  }

public:
  static LocalAsMetadata *get(Value *Local) {
    return ValueAsMetadata::getLocal(Local);
  }

  static LocalAsMetadata *getIfExists(Value *Local) {
    return ValueAsMetadata::getLocalIfExists(Local);
  }

  static bool classof(const Metadata *MD) {
    return MD->getMetadataID() == LocalAsMetadataKind;
  }
};
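
// Example (illustrative sketch): a Value is bridged into the metadata
// hierarchy through ValueAsMetadata.  Assuming an existing Constant *C and a
// non-constant Value *Local (e.g. an instruction or argument):
//
// \code
//     ConstantAsMetadata *CM = ConstantAsMetadata::get(C);
//     LocalAsMetadata *LM = LocalAsMetadata::get(Local);
//     // Both wrappers are uniqued per Value and follow the wrapped Value
//     // through RAUW and deletion via handleRAUW()/handleDeletion().
//     Value *Original = CM->getValue();
// \endcode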

/// Transitional API for extracting constants from Metadata.
///
/// This namespace contains transitional functions for metadata that points to
/// \a Constants.
///
/// In prehistory -- when metadata was a subclass of \a Value -- \a MDNode
/// operands could refer to any \a Value.  There was a lot of code like this:
///
/// \code
///     MDNode *N = ...;
///     auto *CI = dyn_cast<ConstantInt>(N->getOperand(2));
/// \endcode
///
/// Now that \a Value and \a Metadata are in separate hierarchies, maintaining
/// the semantics for \a isa(), \a cast(), \a dyn_cast() (etc.) requires three
/// steps: cast in the \a Metadata hierarchy, extraction of the \a Value, and
/// cast in the \a Value hierarchy.  Besides creating boilerplate, this
/// requires subtle control flow changes.
///
/// The end-goal is to create a new type of metadata, called (e.g.) \a MDInt,
/// so that metadata can refer to numbers without traversing a bridge to the \a
/// Value hierarchy.  In this final state, the code above would look like this:
///
/// \code
///     MDNode *N = ...;
///     auto *MI = dyn_cast<MDInt>(N->getOperand(2));
/// \endcode
///
/// The API in this namespace supports the transition.  \a MDInt doesn't exist
/// yet, and even once it does, changing each metadata schema to use it is its
/// own mini-project.  In the meantime this API prevents us from introducing
/// complex and bug-prone control flow that will disappear in the end.  In
/// particular, the above code looks like this:
///
/// \code
///     MDNode *N = ...;
///     auto *CI = mdconst::dyn_extract<ConstantInt>(N->getOperand(2));
/// \endcode
///
/// The full set of provided functions includes:
///
///   mdconst::hasa                <=> isa
///   mdconst::extract             <=> cast
///   mdconst::extract_or_null     <=> cast_or_null
///   mdconst::dyn_extract         <=> dyn_cast
///   mdconst::dyn_extract_or_null <=> dyn_cast_or_null
///
/// The target of the cast must be a subclass of \a Constant.
namespace mdconst {

namespace detail {

template <class T> T &make();
template <class T, class Result> struct HasDereference {
  using Yes = char[1];
  using No = char[2];
  template <size_t N> struct SFINAE {};

  template <class U, class V>
  static Yes &hasDereference(SFINAE<sizeof(static_cast<V>(*make<U>()))> * = 0);
  template <class U, class V> static No &hasDereference(...);

  static const bool value =
      sizeof(hasDereference<T, Result>(nullptr)) == sizeof(Yes);
};
template <class V, class M> struct IsValidPointer {
  static const bool value = std::is_base_of<Constant, V>::value &&
                            HasDereference<M, const Metadata &>::value;
};
template <class V, class M> struct IsValidReference {
  static const bool value = std::is_base_of<Constant, V>::value &&
                            std::is_convertible<M, const Metadata &>::value;
};

} // end namespace detail

/// Check whether Metadata has a Value.
///
/// As an analogue to \a isa(), check whether \c MD wraps a \a Value of type
/// \c X.
template <class X, class Y>
inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, bool>
hasa(Y &&MD) {
  assert(MD && "Null pointer sent into hasa");
  if (auto *V = dyn_cast<ConstantAsMetadata>(MD))
    return isa<X>(V->getValue());
  return false;
}
template <class X, class Y>
inline std::enable_if_t<detail::IsValidReference<X, Y &>::value, bool>
hasa(Y &MD) {
  return hasa(&MD);
}

/// Extract a Value from Metadata.
///
/// As an analogue to \a cast(), extract the \a Value subclass \c X from \c MD.
template <class X, class Y>
inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
extract(Y &&MD) {
  return cast<X>(cast<ConstantAsMetadata>(MD)->getValue());
}
template <class X, class Y>
inline std::enable_if_t<detail::IsValidReference<X, Y &>::value, X *>
extract(Y &MD) {
  return extract(&MD);
}

/// Extract a Value from Metadata, allowing null.
///
/// As an analogue to \a cast_or_null(), extract the \a Value subclass \c X
/// from \c MD, allowing \c MD to be null.
template <class X, class Y>
inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
extract_or_null(Y &&MD) {
  if (auto *V = cast_or_null<ConstantAsMetadata>(MD))
    return cast<X>(V->getValue());
  return nullptr;
}

/// Extract a Value from Metadata, if any.
///
/// As an analogue to \a dyn_cast(), extract the \a Value subclass \c X from
/// \c MD, returning null if \c MD doesn't contain a \a Value or if the \a
/// Value it does contain is of the wrong subclass.
template <class X, class Y>
inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
dyn_extract(Y &&MD) {
  if (auto *V = dyn_cast<ConstantAsMetadata>(MD))
    return dyn_cast<X>(V->getValue());
  return nullptr;
}

/// Extract a Value from Metadata, if any, allowing null.
///
/// As an analogue to \a dyn_cast_or_null(), extract the \a Value subclass \c X
/// from \c MD, returning null if \c MD doesn't contain a \a Value or if the \a
/// Value it does contain is of the wrong subclass, allowing \c MD to be null.
template <class X, class Y>
inline std::enable_if_t<detail::IsValidPointer<X, Y>::value, X *>
dyn_extract_or_null(Y &&MD) {
  if (auto *V = dyn_cast_or_null<ConstantAsMetadata>(MD))
    return dyn_cast<X>(V->getValue());
  return nullptr;
}

} // end namespace mdconst
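
// Example (illustrative only): extracting a constant integer stored behind a
// ConstantAsMetadata operand, assuming an existing MDNode *N whose operand 0
// wraps a ConstantInt:
//
// \code
//     if (auto *CI = mdconst::dyn_extract<ConstantInt>(N->getOperand(0)))
//       uint64_t Val = CI->getZExtValue();
//     // mdconst::extract<ConstantInt>(...) asserts instead of returning null.
// \endcode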

//===----------------------------------------------------------------------===//
/// A single uniqued string.
///
/// These are used to efficiently contain a byte sequence for metadata.
/// MDString is always unnamed.
class MDString : public Metadata {
  friend class StringMapEntryStorage<MDString>;

  StringMapEntry<MDString> *Entry = nullptr;

  MDString() : Metadata(MDStringKind, Uniqued) {}

public:
  MDString(const MDString &) = delete;
  MDString &operator=(MDString &&) = delete;
  MDString &operator=(const MDString &) = delete;

  static MDString *get(LLVMContext &Context, StringRef Str);
  static MDString *get(LLVMContext &Context, const char *Str) {
    return get(Context, Str ? StringRef(Str) : StringRef());
  }

  StringRef getString() const;

  unsigned getLength() const { return (unsigned)getString().size(); }

  using iterator = StringRef::iterator;

  /// Pointer to the first byte of the string.
  iterator begin() const { return getString().begin(); }

  /// Pointer to one byte past the end of the string.
  iterator end() const { return getString().end(); }

  const unsigned char *bytes_begin() const { return getString().bytes_begin(); }
  const unsigned char *bytes_end() const { return getString().bytes_end(); }

  /// Methods for support type inquiry through isa, cast, and dyn_cast.
  static bool classof(const Metadata *MD) {
    return MD->getMetadataID() == MDStringKind;
  }
};
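
// Example (illustrative sketch): strings are uniqued per context, so pointer
// equality implies string equality.  Assuming an existing LLVMContext &Ctx:
//
// \code
//     MDString *A = MDString::get(Ctx, "llvm.loop.unroll.disable");
//     MDString *B = MDString::get(Ctx, "llvm.loop.unroll.disable");
//     assert(A == B && A->getString() == "llvm.loop.unroll.disable");
// \endcode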

/// A collection of metadata nodes that might be associated with a
/// memory access used by the alias-analysis infrastructure.
struct AAMDNodes {
  explicit AAMDNodes() = default;
  explicit AAMDNodes(MDNode *T, MDNode *TS, MDNode *S, MDNode *N)
      : TBAA(T), TBAAStruct(TS), Scope(S), NoAlias(N) {}

  bool operator==(const AAMDNodes &A) const {
    return TBAA == A.TBAA && TBAAStruct == A.TBAAStruct && Scope == A.Scope &&
           NoAlias == A.NoAlias;
  }

  bool operator!=(const AAMDNodes &A) const { return !(*this == A); }

  explicit operator bool() const {
    return TBAA || TBAAStruct || Scope || NoAlias;
  }

  /// The tag for type-based alias analysis.
  MDNode *TBAA = nullptr;

  /// The tag for type-based alias analysis (tbaa struct).
  MDNode *TBAAStruct = nullptr;

  /// The tag for alias scope specification (used with noalias).
  MDNode *Scope = nullptr;

  /// The tag specifying the noalias scope.
  MDNode *NoAlias = nullptr;

  // Shift tbaa Metadata node to start off bytes later
  static MDNode *shiftTBAA(MDNode *M, size_t off);

  // Shift tbaa.struct Metadata node to start off bytes later
  static MDNode *shiftTBAAStruct(MDNode *M, size_t off);

  // Extend tbaa Metadata node to apply to a series of bytes of length len.
  // A size of -1 denotes an unknown size.
  static MDNode *extendToTBAA(MDNode *TBAA, ssize_t len);

  /// Given two sets of AAMDNodes that apply to the same pointer,
  /// give the best AAMDNodes that are compatible with both (i.e. a set of
  /// nodes whose allowable aliasing conclusions are a subset of those
  /// allowable by both of the inputs). However, for efficiency
  /// reasons, do not create any new MDNodes.
  AAMDNodes intersect(const AAMDNodes &Other) const {
    AAMDNodes Result;
    Result.TBAA = Other.TBAA == TBAA ? TBAA : nullptr;
    Result.TBAAStruct = Other.TBAAStruct == TBAAStruct ? TBAAStruct : nullptr;
    Result.Scope = Other.Scope == Scope ? Scope : nullptr;
    Result.NoAlias = Other.NoAlias == NoAlias ? NoAlias : nullptr;
    return Result;
  }

  /// Create a new AAMDNode that describes this AAMDNode after applying a
  /// constant offset to the start of the pointer.
  AAMDNodes shift(size_t Offset) const {
    AAMDNodes Result;
    Result.TBAA = TBAA ? shiftTBAA(TBAA, Offset) : nullptr;
    Result.TBAAStruct =
        TBAAStruct ? shiftTBAAStruct(TBAAStruct, Offset) : nullptr;
    Result.Scope = Scope;
    Result.NoAlias = NoAlias;
    return Result;
  }

  /// Create a new AAMDNode that describes this AAMDNode after extending it to
  /// apply to a series of bytes of length Len. A size of -1 denotes an unknown
  /// size.
  AAMDNodes extendTo(ssize_t Len) const {
    AAMDNodes Result;
    Result.TBAA = TBAA ? extendToTBAA(TBAA, Len) : nullptr;
    // tbaa.struct contains (offset, size, type) triples. Extending the length
    // of the tbaa.struct doesn't require changing this (though more information
    // could be provided by adding more triples at subsequent lengths).
    Result.TBAAStruct = TBAAStruct;
    Result.Scope = Scope;
    Result.NoAlias = NoAlias;
    return Result;
  }

  /// Given two sets of AAMDNodes applying to potentially different locations,
  /// determine the best AAMDNodes that apply to both.
  AAMDNodes merge(const AAMDNodes &Other) const;

  /// Determine the best AAMDNodes after concatenating two different locations
  /// together. Different from `merge`, where different locations should
  /// overlap each other, `concat` puts non-overlapping locations together.
  AAMDNodes concat(const AAMDNodes &Other) const;
};
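
// Example (illustrative sketch): combining alias-analysis metadata from two
// accesses.  Assuming AAMDNodes AA and BB describe accesses through the same
// pointer:
//
// \code
//     AAMDNodes Common = AA.intersect(BB); // keep only the tags both agree on
//     AAMDNodes Moved = AA.shift(4);       // access starts 4 bytes later
//     AAMDNodes Wider = AA.extendTo(-1);   // extend to an unknown length
// \endcode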

// Specialize DenseMapInfo for AAMDNodes.
template<>
struct DenseMapInfo<AAMDNodes> {
  static inline AAMDNodes getEmptyKey() {
    return AAMDNodes(DenseMapInfo<MDNode *>::getEmptyKey(),
                     nullptr, nullptr, nullptr);
  }

  static inline AAMDNodes getTombstoneKey() {
    return AAMDNodes(DenseMapInfo<MDNode *>::getTombstoneKey(),
                     nullptr, nullptr, nullptr);
  }

  static unsigned getHashValue(const AAMDNodes &Val) {
    return DenseMapInfo<MDNode *>::getHashValue(Val.TBAA) ^
           DenseMapInfo<MDNode *>::getHashValue(Val.TBAAStruct) ^
           DenseMapInfo<MDNode *>::getHashValue(Val.Scope) ^
           DenseMapInfo<MDNode *>::getHashValue(Val.NoAlias);
  }

  static bool isEqual(const AAMDNodes &LHS, const AAMDNodes &RHS) {
    return LHS == RHS;
  }
};

/// Tracking metadata reference owned by Metadata.
///
/// Similar to \a TrackingMDRef, but it's expected to be owned by an instance
/// of \a Metadata, which has the option of registering itself for callbacks to
/// re-unique itself.
///
/// In particular, this is used by \a MDNode.
class MDOperand {
  Metadata *MD = nullptr;

public:
  MDOperand() = default;
  MDOperand(const MDOperand &) = delete;
  MDOperand(MDOperand &&Op) {
    MD = Op.MD;
    if (MD)
      (void)MetadataTracking::retrack(Op.MD, MD);
    Op.MD = nullptr;
  }
  MDOperand &operator=(const MDOperand &) = delete;
  MDOperand &operator=(MDOperand &&Op) {
    MD = Op.MD;
    if (MD)
      (void)MetadataTracking::retrack(Op.MD, MD);
    Op.MD = nullptr;
    return *this;
  }

  // Check if MDOperand is of type MDString and equals `Str`.
  bool equalsStr(StringRef Str) const {
    return isa<MDString>(this->get()) &&
           cast<MDString>(this->get())->getString() == Str;
  }

  ~MDOperand() { untrack(); }

  Metadata *get() const { return MD; }
  operator Metadata *() const { return get(); }
  Metadata *operator->() const { return get(); }
  Metadata &operator*() const { return *get(); }

  void reset() {
    untrack();
    MD = nullptr;
  }
  void reset(Metadata *MD, Metadata *Owner) {
    untrack();
    this->MD = MD;
    track(Owner);
  }

private:
  void track(Metadata *Owner) {
    if (MD) {
      if (Owner)
        MetadataTracking::track(this, *MD, *Owner);
      else
        MetadataTracking::track(MD);
    }
  }

  void untrack() {
    assert(static_cast<void *>(this) == &MD && "Expected same address");
    if (MD)
      MetadataTracking::untrack(MD);
  }
};

template <> struct simplify_type<MDOperand> {
  using SimpleType = Metadata *;

  static SimpleType getSimplifiedValue(MDOperand &MD) { return MD.get(); }
};

template <> struct simplify_type<const MDOperand> {
  using SimpleType = Metadata *;

  static SimpleType getSimplifiedValue(const MDOperand &MD) { return MD.get(); }
};
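
// Note (illustrative only): because of the simplify_type specializations
// above, an MDOperand participates directly in the casting machinery, so
// operand loops need no explicit get().  Assuming an existing MDNode *N and
// raw_ostream &OS:
//
// \code
//     for (const MDOperand &Op : N->operands())
//       if (auto *S = dyn_cast_or_null<MDString>(Op))
//         OS << S->getString() << "\n";
// \endcode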

/// Pointer to the context, with optional RAUW support.
///
/// Either a raw (non-null) pointer to the \a LLVMContext, or an owned pointer
/// to \a ReplaceableMetadataImpl (which has a reference to \a LLVMContext).
class ContextAndReplaceableUses {
  PointerUnion<LLVMContext *, ReplaceableMetadataImpl *> Ptr;

public:
  ContextAndReplaceableUses(LLVMContext &Context) : Ptr(&Context) {}
  ContextAndReplaceableUses(
      std::unique_ptr<ReplaceableMetadataImpl> ReplaceableUses)
      : Ptr(ReplaceableUses.release()) {
    assert(getReplaceableUses() && "Expected non-null replaceable uses");
  }
  ContextAndReplaceableUses() = delete;
  ContextAndReplaceableUses(ContextAndReplaceableUses &&) = delete;
  ContextAndReplaceableUses(const ContextAndReplaceableUses &) = delete;
  ContextAndReplaceableUses &operator=(ContextAndReplaceableUses &&) = delete;
  ContextAndReplaceableUses &
  operator=(const ContextAndReplaceableUses &) = delete;
  ~ContextAndReplaceableUses() { delete getReplaceableUses(); }

  operator LLVMContext &() { return getContext(); }

  /// Whether this contains RAUW support.
  bool hasReplaceableUses() const {
    return isa<ReplaceableMetadataImpl *>(Ptr);
  }

  LLVMContext &getContext() const {
    if (hasReplaceableUses())
      return getReplaceableUses()->getContext();
    return *cast<LLVMContext *>(Ptr);
  }

  ReplaceableMetadataImpl *getReplaceableUses() const {
    if (hasReplaceableUses())
      return cast<ReplaceableMetadataImpl *>(Ptr);
    return nullptr;
  }

  /// Ensure that this has RAUW support, and then return it.
  ReplaceableMetadataImpl *getOrCreateReplaceableUses() {
    if (!hasReplaceableUses())
      makeReplaceable(std::make_unique<ReplaceableMetadataImpl>(getContext()));
    return getReplaceableUses();
  }

  /// Assign RAUW support to this.
  ///
  /// Make this replaceable, taking ownership of \c ReplaceableUses (which must
  /// not be null).
  void
  makeReplaceable(std::unique_ptr<ReplaceableMetadataImpl> ReplaceableUses) {
    assert(ReplaceableUses && "Expected non-null replaceable uses");
    assert(&ReplaceableUses->getContext() == &getContext() &&
           "Expected same context");
    delete getReplaceableUses();
    Ptr = ReplaceableUses.release();
  }

  /// Drop RAUW support.
  ///
  /// Cede ownership of RAUW support, returning it.
  std::unique_ptr<ReplaceableMetadataImpl> takeReplaceableUses() {
    assert(hasReplaceableUses() && "Expected to own replaceable uses");
    std::unique_ptr<ReplaceableMetadataImpl> ReplaceableUses(
        getReplaceableUses());
    Ptr = &ReplaceableUses->getContext();
    return ReplaceableUses;
  }
};

struct TempMDNodeDeleter {
  inline void operator()(MDNode *Node) const;
};

#define HANDLE_MDNODE_LEAF(CLASS)                                              \
  using Temp##CLASS = std::unique_ptr<CLASS, TempMDNodeDeleter>;
#define HANDLE_MDNODE_BRANCH(CLASS) HANDLE_MDNODE_LEAF(CLASS)
#include "llvm/IR/Metadata.def"

/// Metadata node.
///
/// Metadata nodes can be uniqued, like constants, or distinct.  Temporary
/// metadata nodes (with full support for RAUW) can be used to delay uniquing
/// until forward references are known.  The basic metadata node is an \a
/// MDTuple.
///
/// There is limited support for RAUW at construction time.  At construction
/// time, if any operand is a temporary node (or an unresolved uniqued node,
/// which indicates a transitive temporary operand), the node itself will be
/// unresolved.  As soon as all operands become resolved, it will drop RAUW
/// support permanently.
///
/// If an unresolved node is part of a cycle, \a resolveCycles() needs
/// to be called on some member of the cycle once all temporary nodes have been
/// replaced.
///
/// MDNodes can be large or small, as well as resizable or non-resizable.
/// Large MDNodes' operands are allocated in a separate storage vector,
/// whereas small MDNodes' operands are co-allocated. Distinct and temporary
/// MDNodes are resizable, but only MDTuples support this capability.
///
/// Clients can add operands to resizable MDNodes using push_back().
class MDNode : public Metadata {
  friend class ReplaceableMetadataImpl;
  friend class LLVMContextImpl;
  friend class DIAssignID;

  /// The header that is co-allocated with an MDNode along with its "small"
  /// operands. It is located immediately before the main body of the node.
  /// The operands are in turn located immediately before the header.
  /// For resizable MDNodes, the space for the storage vector is also allocated
  /// immediately before the header, overlapping with the operands.
  /// Explicitly set alignment because bitfields by default have an
  /// alignment of 1 on z/OS.
  struct alignas(alignof(size_t)) Header {
    bool IsResizable : 1;
    bool IsLarge : 1;
    size_t SmallSize : 4;
    size_t SmallNumOps : 4;
    size_t : sizeof(size_t) * CHAR_BIT - 10;

    unsigned NumUnresolved = 0;
    using LargeStorageVector = SmallVector<MDOperand, 0>;

    static constexpr size_t NumOpsFitInVector =
        sizeof(LargeStorageVector) / sizeof(MDOperand);
    static_assert(
        NumOpsFitInVector * sizeof(MDOperand) == sizeof(LargeStorageVector),
        "sizeof(LargeStorageVector) must be a multiple of sizeof(MDOperand)");

    static constexpr size_t MaxSmallSize = 15;

    static constexpr size_t getOpSize(unsigned NumOps) {
      return sizeof(MDOperand) * NumOps;
    }
    /// Returns the number of operands the node has space for based on its
    /// allocation characteristics.
    static size_t getSmallSize(size_t NumOps, bool IsResizable, bool IsLarge) {
      return IsLarge ? NumOpsFitInVector
                     : std::max(NumOps, NumOpsFitInVector * IsResizable);
    }
    /// Returns the number of bytes allocated for operands and header.
    static size_t getAllocSize(StorageType Storage, size_t NumOps) {
      return getOpSize(
                 getSmallSize(NumOps, isResizable(Storage), isLarge(NumOps))) +
             sizeof(Header);
    }

    /// Only temporary and distinct nodes are resizable.
    static bool isResizable(StorageType Storage) { return Storage != Uniqued; }
    static bool isLarge(size_t NumOps) { return NumOps > MaxSmallSize; }

    size_t getAllocSize() const {
      return getOpSize(SmallSize) + sizeof(Header);
    }
    void *getAllocation() {
      return reinterpret_cast<char *>(this + 1) -
             alignTo(getAllocSize(), alignof(uint64_t));
    }

    void *getLargePtr() const {
      static_assert(alignof(LargeStorageVector) <= alignof(Header),
                    "LargeStorageVector too strongly aligned");
      return reinterpret_cast<char *>(const_cast<Header *>(this)) -
             sizeof(LargeStorageVector);
    }

    void *getSmallPtr();

    LargeStorageVector &getLarge() {
      assert(IsLarge);
      return *reinterpret_cast<LargeStorageVector *>(getLargePtr());
    }

    const LargeStorageVector &getLarge() const {
      assert(IsLarge);
      return *reinterpret_cast<const LargeStorageVector *>(getLargePtr());
    }

    void resizeSmall(size_t NumOps);
    void resizeSmallToLarge(size_t NumOps);
    void resize(size_t NumOps);

    explicit Header(size_t NumOps, StorageType Storage);
    ~Header();

    MutableArrayRef<MDOperand> operands() {
      if (IsLarge)
        return getLarge();
      return MutableArrayRef(
          reinterpret_cast<MDOperand *>(this) - SmallSize, SmallNumOps);
    }

    ArrayRef<MDOperand> operands() const {
      if (IsLarge)
        return getLarge();
      return ArrayRef(reinterpret_cast<const MDOperand *>(this) - SmallSize,
                      SmallNumOps);
    }

    unsigned getNumOperands() const {
      if (!IsLarge)
        return SmallNumOps;
      return getLarge().size();
    }
  };

  Header &getHeader() { return *(reinterpret_cast<Header *>(this) - 1); }

  const Header &getHeader() const {
    return *(reinterpret_cast<const Header *>(this) - 1);
  }

  ContextAndReplaceableUses Context;

protected:
  MDNode(LLVMContext &Context, unsigned ID, StorageType Storage,
         ArrayRef<Metadata *> Ops1, ArrayRef<Metadata *> Ops2 = std::nullopt);
  ~MDNode() = default;

  void *operator new(size_t Size, size_t NumOps, StorageType Storage);
  void operator delete(void *Mem);

  /// Required by std, but never called.
  void operator delete(void *, unsigned) {
    llvm_unreachable("Constructor throws?");
  }

  /// Required by std, but never called.
  void operator delete(void *, unsigned, bool) {
    llvm_unreachable("Constructor throws?");
  }

  void dropAllReferences();

  MDOperand *mutable_begin() { return getHeader().operands().begin(); }
  MDOperand *mutable_end() { return getHeader().operands().end(); }

  using mutable_op_range = iterator_range<MDOperand *>;

  mutable_op_range mutable_operands() {
    return mutable_op_range(mutable_begin(), mutable_end());
  }

public:
  MDNode(const MDNode &) = delete;
  void operator=(const MDNode &) = delete;
  void *operator new(size_t) = delete;

  static inline MDTuple *get(LLVMContext &Context, ArrayRef<Metadata *> MDs);
  static inline MDTuple *getIfExists(LLVMContext &Context,
                                     ArrayRef<Metadata *> MDs);
  static inline MDTuple *getDistinct(LLVMContext &Context,
                                     ArrayRef<Metadata *> MDs);
  static inline TempMDTuple getTemporary(LLVMContext &Context,
                                         ArrayRef<Metadata *> MDs);

  /// Create a (temporary) clone of this.
  TempMDNode clone() const;

  /// Deallocate a node created by getTemporary.
  ///
  /// Calls \c replaceAllUsesWith(nullptr) before deleting, so any remaining
  /// references will be reset.
  static void deleteTemporary(MDNode *N);

  LLVMContext &getContext() const { return Context.getContext(); }

  /// Replace a specific operand.
  void replaceOperandWith(unsigned I, Metadata *New);

  /// Check if node is fully resolved.
  ///
  /// If \a isTemporary(), this always returns \c false; if \a isDistinct(),
  /// this always returns \c true.
  ///
  /// If \a isUniqued(), returns \c true if this has already dropped RAUW
  /// support (because all operands are resolved).
  ///
  /// As forward declarations are resolved, their containers should get
  /// resolved automatically.  However, if this (or one of its operands) is
  /// involved in a cycle, \a resolveCycles() needs to be called explicitly.
  bool isResolved() const { return !isTemporary() && !getNumUnresolved(); }

  bool isUniqued() const { return Storage == Uniqued; }
  bool isDistinct() const { return Storage == Distinct; }
  bool isTemporary() const { return Storage == Temporary; }

  bool isReplaceable() const { return isTemporary() || isAlwaysReplaceable(); }
  bool isAlwaysReplaceable() const { return getMetadataID() == DIAssignIDKind; }

  unsigned getNumTemporaryUses() const {
    assert(isTemporary() && "Only for temporaries");
    return Context.getReplaceableUses()->getNumUses();
  }

  /// RAUW a temporary.
  ///
  /// \pre \a isTemporary() must be \c true.
  void replaceAllUsesWith(Metadata *MD) {
    assert(isReplaceable() && "Expected temporary/replaceable node");
    if (Context.hasReplaceableUses())
      Context.getReplaceableUses()->replaceAllUsesWith(MD);
  }

  /// Resolve cycles.
  ///
  /// Once all forward declarations have been resolved, force cycles to be
  /// resolved.
  ///
  /// \pre No operands (or operands' operands, etc.) have \a isTemporary().
  void resolveCycles();

  /// Resolve a unique, unresolved node.
  void resolve();

  /// Replace a temporary node with a permanent one.
  ///
  /// Try to create a uniqued version of \c N -- in place, if possible -- and
  /// return it.  If \c N cannot be uniqued, return a distinct node instead.
  template <class T>
  static std::enable_if_t<std::is_base_of<MDNode, T>::value, T *>
  replaceWithPermanent(std::unique_ptr<T, TempMDNodeDeleter> N) {
    return cast<T>(N.release()->replaceWithPermanentImpl());
  }

  /// Replace a temporary node with a uniqued one.
  ///
  /// Create a uniqued version of \c N -- in place, if possible -- and return
  /// it.  Takes ownership of the temporary node.
  ///
  /// \pre N does not self-reference.
  template <class T>
  static std::enable_if_t<std::is_base_of<MDNode, T>::value, T *>
  replaceWithUniqued(std::unique_ptr<T, TempMDNodeDeleter> N) {
    return cast<T>(N.release()->replaceWithUniquedImpl());
  }

  /// Replace a temporary node with a distinct one.
  ///
  /// Create a distinct version of \c N -- in place, if possible -- and return
  /// it.  Takes ownership of the temporary node.
  template <class T>
  static std::enable_if_t<std::is_base_of<MDNode, T>::value, T *>
  replaceWithDistinct(std::unique_ptr<T, TempMDNodeDeleter> N) {
    return cast<T>(N.release()->replaceWithDistinctImpl());
  }

  /// Print in tree shape.
  ///
  /// Prints definition of \c this in tree shape.
  ///
  /// If \c M is provided, metadata nodes will be numbered canonically;
  /// otherwise, pointer addresses are substituted.
  /// @{
  void printTree(raw_ostream &OS, const Module *M = nullptr) const;
  void printTree(raw_ostream &OS, ModuleSlotTracker &MST,
                 const Module *M = nullptr) const;
  /// @}

  /// User-friendly dump in tree shape.
  ///
  /// If \c M is provided, metadata nodes will be numbered canonically;
  /// otherwise, pointer addresses are substituted.
  ///
  /// Note: this uses an explicit overload instead of default arguments so that
  /// the nullptr version is easy to call from a debugger.
  ///
  /// @{
  void dumpTree() const;
  void dumpTree(const Module *M) const;
  /// @}

private:
  MDNode *replaceWithPermanentImpl();
  MDNode *replaceWithUniquedImpl();
  MDNode *replaceWithDistinctImpl();

protected:
  /// Set an operand.
  ///
  /// Sets the operand directly, without worrying about uniquing.
  void setOperand(unsigned I, Metadata *New);

  unsigned getNumUnresolved() const { return getHeader().NumUnresolved; }

  void setNumUnresolved(unsigned N) { getHeader().NumUnresolved = N; }
  void storeDistinctInContext();
  template <class T, class StoreT>
  static T *storeImpl(T *N, StorageType Storage, StoreT &Store);
  template <class T> static T *storeImpl(T *N, StorageType Storage);

  /// Resize the node to hold \a NumOps operands.
  ///
  /// \pre \a isTemporary() or \a isDistinct()
  /// \pre MetadataID == MDTupleKind
  void resize(size_t NumOps) {
    assert(!isUniqued() && "Resizing is not supported for uniqued nodes");
    assert(getMetadataID() == MDTupleKind &&
           "Resizing is not supported for this node kind");
    getHeader().resize(NumOps);
  }

private:
  void handleChangedOperand(void *Ref, Metadata *New);

  /// Drop RAUW support, if any.
  void dropReplaceableUses();

  void resolveAfterOperandChange(Metadata *Old, Metadata *New);
  void decrementUnresolvedOperandCount();
  void countUnresolvedOperands();

  /// Mutate this to be "uniqued".
  ///
  /// Mutate this so that \a isUniqued().
  /// \pre \a isTemporary().
  /// \pre already added to uniquing set.
  void makeUniqued();

  /// Mutate this to be "distinct".
  ///
  /// Mutate this so that \a isDistinct().
  /// \pre \a isTemporary().
  void makeDistinct();

  void deleteAsSubclass();
  MDNode *uniquify();
  void eraseFromStore();

  template <class NodeTy> struct HasCachedHash;
  template <class NodeTy>
  static void dispatchRecalculateHash(NodeTy *N, std::true_type) {
    N->recalculateHash();
  }
  template <class NodeTy>
  static void dispatchRecalculateHash(NodeTy *, std::false_type) {}
  template <class NodeTy>
  static void dispatchResetHash(NodeTy *N, std::true_type) {
    N->setHash(0);
  }
  template <class NodeTy>
  static void dispatchResetHash(NodeTy *, std::false_type) {}

  /// Merge branch weights from two direct callsites.
  static MDNode *mergeDirectCallProfMetadata(MDNode *A, MDNode *B,
                                             const Instruction *AInstr,
                                             const Instruction *BInstr);

public:
  using op_iterator = const MDOperand *;
  using op_range = iterator_range<op_iterator>;

  op_iterator op_begin() const {
    return const_cast<MDNode *>(this)->mutable_begin();
  }

  op_iterator op_end() const {
    return const_cast<MDNode *>(this)->mutable_end();
  }

  ArrayRef<MDOperand> operands() const { return getHeader().operands(); }

  const MDOperand &getOperand(unsigned I) const {
    assert(I < getNumOperands() && "Out of range");
    return getHeader().operands()[I];
  }

  /// Return number of MDNode operands.
  unsigned getNumOperands() const { return getHeader().getNumOperands(); }

  /// Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Metadata *MD) {
    switch (MD->getMetadataID()) {
    default:
      return false;
#define HANDLE_MDNODE_LEAF(CLASS)                                              \
  case CLASS##Kind:                                                            \
    return true;
#include "llvm/IR/Metadata.def"
    }
  }

  /// Check whether MDNode is a vtable access.
  bool isTBAAVtableAccess() const;

  /// Methods for metadata merging.
  static MDNode *concatenate(MDNode *A, MDNode *B);
  static MDNode *intersect(MDNode *A, MDNode *B);
  static MDNode *getMostGenericTBAA(MDNode *A, MDNode *B);
  static MDNode *getMostGenericFPMath(MDNode *A, MDNode *B);
  static MDNode *getMostGenericRange(MDNode *A, MDNode *B);
  static MDNode *getMostGenericAliasScope(MDNode *A, MDNode *B);
  static MDNode *getMostGenericAlignmentOrDereferenceable(MDNode *A, MDNode *B);
  /// Merge !prof metadata from two instructions.
  /// Currently only implemented with direct callsites with branch weights.
  static MDNode *getMergedProfMetadata(MDNode *A, MDNode *B,
                                       const Instruction *AInstr,
                                       const Instruction *BInstr);
};

/// Tuple of metadata.
///
/// This is the simple \a MDNode arbitrary tuple.  Nodes are uniqued by
/// default based on their operands.
class MDTuple : public MDNode {
  friend class LLVMContextImpl;
  friend class MDNode;

  MDTuple(LLVMContext &C, StorageType Storage, unsigned Hash,
          ArrayRef<Metadata *> Vals)
      : MDNode(C, MDTupleKind, Storage, Vals) {
    setHash(Hash);
  }

  ~MDTuple() { dropAllReferences(); }

  void setHash(unsigned Hash) { SubclassData32 = Hash; }
  void recalculateHash();

  static MDTuple *getImpl(LLVMContext &Context, ArrayRef<Metadata *> MDs,
                          StorageType Storage, bool ShouldCreate = true);

  TempMDTuple cloneImpl() const {
    ArrayRef<MDOperand> Operands = operands();
    return getTemporary(getContext(), SmallVector<Metadata *, 4>(
                                          Operands.begin(), Operands.end()));
  }

public:
  /// Get the hash, if any.
  unsigned getHash() const { return SubclassData32; }

  static MDTuple *get(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
    return getImpl(Context, MDs, Uniqued);
  }

  static MDTuple *getIfExists(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
    return getImpl(Context, MDs, Uniqued, /* ShouldCreate */ false);
  }

  /// Return a distinct node.
  ///
  /// Return a distinct node -- i.e., a node that is not uniqued.
  static MDTuple *getDistinct(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
    return getImpl(Context, MDs, Distinct);
  }

  /// Return a temporary node.
  ///
  /// For use in constructing cyclic MDNode structures. A temporary MDNode is
  /// not uniqued, may be RAUW'd, and must be manually deleted with
  /// deleteTemporary.
  static TempMDTuple getTemporary(LLVMContext &Context,
                                  ArrayRef<Metadata *> MDs) {
    return TempMDTuple(getImpl(Context, MDs, Temporary));
  }

  /// Return a (temporary) clone of this.
  TempMDTuple clone() const { return cloneImpl(); }

  /// Append an element to the tuple. This will resize the node.
  void push_back(Metadata *MD) {
    size_t NumOps = getNumOperands();
    resize(NumOps + 1);
    setOperand(NumOps, MD);
  }

  /// Shrink the operands by 1.
  void pop_back() { resize(getNumOperands() - 1); }

  static bool classof(const Metadata *MD) {
    return MD->getMetadataID() == MDTupleKind;
  }
};
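
// Example (illustrative sketch): only distinct and temporary MDTuples may be
// resized, so growing a tuple in place requires a non-uniqued node.  Assuming
// an existing LLVMContext &Ctx:
//
// \code
//     MDTuple *List = MDTuple::getDistinct(Ctx, std::nullopt);
//     List->push_back(MDString::get(Ctx, "entry"));
//     List->push_back(MDString::get(Ctx, "exit"));
//     List->pop_back(); // drop "exit" again
// \endcode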

MDTuple *MDNode::get(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
  return MDTuple::get(Context, MDs);
}

MDTuple *MDNode::getIfExists(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
  return MDTuple::getIfExists(Context, MDs);
}

MDTuple *MDNode::getDistinct(LLVMContext &Context, ArrayRef<Metadata *> MDs) {
  return MDTuple::getDistinct(Context, MDs);
}

TempMDTuple MDNode::getTemporary(LLVMContext &Context,
                                 ArrayRef<Metadata *> MDs) {
  return MDTuple::getTemporary(Context, MDs);
}

void TempMDNodeDeleter::operator()(MDNode *Node) const {
  MDNode::deleteTemporary(Node);
}
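
// Example (illustrative sketch): a temporary node serves as a forward
// reference while the real operand is not yet available.  Assuming an
// existing LLVMContext &Ctx:
//
// \code
//     TempMDTuple Temp = MDTuple::getTemporary(Ctx, std::nullopt);
//     Metadata *Ops[] = {Temp.get()};
//     MDNode *User = MDNode::get(Ctx, Ops); // unresolved: operand is temporary
//     MDNode *Real = MDNode::get(Ctx, std::nullopt);
//     Temp->replaceAllUsesWith(Real);       // User now refers to Real and resolves
// \endcode
//
// If such replacements form a cycle of uniqued nodes, resolveCycles() must
// additionally be called on one member of the cycle (see the MDNode class
// comment above).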
1553
1554/// This is a simple wrapper around an MDNode which provides a higher-level
1555/// interface by hiding the details of how alias analysis information is encoded
1556/// in its operands.
class AliasScopeNode {
  const MDNode *Node = nullptr;

public:
  AliasScopeNode() = default;
  explicit AliasScopeNode(const MDNode *N) : Node(N) {}

  /// Get the MDNode for this AliasScopeNode.
  const MDNode *getNode() const { return Node; }

  /// Get the MDNode for this AliasScopeNode's domain.
  const MDNode *getDomain() const {
    if (Node->getNumOperands() < 2)
      return nullptr;
    return dyn_cast_or_null<MDNode>(Node->getOperand(1));
  }
  StringRef getName() const {
    if (Node->getNumOperands() > 2)
      if (MDString *N = dyn_cast_or_null<MDString>(Node->getOperand(2)))
        return N->getString();
    return StringRef();
  }
};

/// Typed iterator through MDNode operands.
///
/// An iterator that transforms an \a MDNode::iterator into an iterator over a
/// particular Metadata subclass.
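///
/// For illustration (assuming \c N is an \a MDNode whose operands are all
/// \a MDString or null):
/// \code
///   for (TypedMDOperandIterator<MDString> I(N->op_begin()), E(N->op_end());
///        I != E; ++I)
///     if (MDString *S = *I)
///       (void)S->getString(); // non-MDString operands come back as null
/// \endcode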
template <class T> class TypedMDOperandIterator {
  MDNode::op_iterator I = nullptr;

public:
  using iterator_category = std::input_iterator_tag;
  using value_type = T *;
  using difference_type = std::ptrdiff_t;
  using pointer = void;
  using reference = T *;

  TypedMDOperandIterator() = default;
  explicit TypedMDOperandIterator(MDNode::op_iterator I) : I(I) {}

  T *operator*() const { return cast_or_null<T>(*I); }

  TypedMDOperandIterator &operator++() {
    ++I;
    return *this;
  }

  TypedMDOperandIterator operator++(int) {
    TypedMDOperandIterator Temp(*this);
    ++I;
    return Temp;
  }

  bool operator==(const TypedMDOperandIterator &X) const { return I == X.I; }
  bool operator!=(const TypedMDOperandIterator &X) const { return I != X.I; }
};

/// Typed, array-like tuple of metadata.
///
/// This is a wrapper for \a MDTuple that makes it act like an array holding a
/// particular type of metadata.
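///
/// A minimal sketch (assuming \c Tuple is an \a MDTuple whose operands are
/// all \a MDString or null):
/// \code
///   MDTupleTypedArrayWrapper<MDString> Strings(Tuple);
///   for (MDString *S : Strings)
///     if (S)
///       (void)S->getString();
/// \endcode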
template <class T> class MDTupleTypedArrayWrapper {
  const MDTuple *N = nullptr;

public:
  MDTupleTypedArrayWrapper() = default;
  MDTupleTypedArrayWrapper(const MDTuple *N) : N(N) {}

  template <class U>
  MDTupleTypedArrayWrapper(
      const MDTupleTypedArrayWrapper<U> &Other,
      std::enable_if_t<std::is_convertible<U *, T *>::value> * = nullptr)
      : N(Other.get()) {}

  template <class U>
  explicit MDTupleTypedArrayWrapper(
      const MDTupleTypedArrayWrapper<U> &Other,
      std::enable_if_t<!std::is_convertible<U *, T *>::value> * = nullptr)
      : N(Other.get()) {}

  explicit operator bool() const { return get(); }
  explicit operator MDTuple *() const { return get(); }

  MDTuple *get() const { return const_cast<MDTuple *>(N); }
  MDTuple *operator->() const { return get(); }
  MDTuple &operator*() const { return *get(); }

  // FIXME: Fix callers and remove condition on N.
  unsigned size() const { return N ? N->getNumOperands() : 0u; }
  bool empty() const { return N ? N->getNumOperands() == 0 : true; }
  T *operator[](unsigned I) const { return cast_or_null<T>(N->getOperand(I)); }

  // FIXME: Fix callers and remove condition on N.
  using iterator = TypedMDOperandIterator<T>;

  iterator begin() const { return N ? iterator(N->op_begin()) : iterator(); }
  iterator end() const { return N ? iterator(N->op_end()) : iterator(); }
};

#define HANDLE_METADATA(CLASS)                                                 \
  using CLASS##Array = MDTupleTypedArrayWrapper<CLASS>;
#include "llvm/IR/Metadata.def"

/// Placeholder metadata for operands of distinct MDNodes.
///
/// This is a lightweight placeholder for an operand of a distinct node.  Its
/// purpose is to help track forward references when creating a distinct node.
/// This allows distinct nodes involved in a cycle to be constructed before
/// their operands without requiring a heavyweight temporary node with
/// full-blown RAUW support.
///
/// Each placeholder supports only a single MDNode user.  Clients should pass
/// an ID, retrieved via \a getID(), to indicate the "real" operand that this
/// should be replaced with.
///
/// While it would be possible to implement move operators, they would be
/// fairly expensive.  Leave them unimplemented to discourage their use
/// (clients can use std::deque, std::list, BumpPtrAllocator, etc.).
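///
/// A rough sketch of the intended flow (illustrative only; \c Ctx is an
/// assumed \a LLVMContext and \c RealOperand the metadata that the ID is
/// later resolved to):
/// \code
///   DistinctMDOperandPlaceholder PH(/*ID=*/0);
///   MDNode *N = MDNode::getDistinct(Ctx, {&PH});
///   // ... later, once the operand for ID 0 is known ...
///   PH.replaceUseWith(RealOperand);
/// \endcode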
class DistinctMDOperandPlaceholder : public Metadata {
  friend class MetadataTracking;

  Metadata **Use = nullptr;

public:
  explicit DistinctMDOperandPlaceholder(unsigned ID)
      : Metadata(DistinctMDOperandPlaceholderKind, Distinct) {
    SubclassData32 = ID;
  }

  DistinctMDOperandPlaceholder() = delete;
  DistinctMDOperandPlaceholder(DistinctMDOperandPlaceholder &&) = delete;
  DistinctMDOperandPlaceholder(const DistinctMDOperandPlaceholder &) = delete;

  ~DistinctMDOperandPlaceholder() {
    if (Use)
      *Use = nullptr;
  }

  unsigned getID() const { return SubclassData32; }

  /// Replace the use of this with MD.
  void replaceUseWith(Metadata *MD) {
    if (!Use)
      return;
    *Use = MD;

    if (*Use)
      MetadataTracking::track(*Use);

    Metadata *T = cast<Metadata>(this);
    MetadataTracking::untrack(T);
    assert(!Use && "Use is still being tracked despite being untracked!");
  }
};

//===----------------------------------------------------------------------===//
/// A tuple of MDNodes.
///
/// Despite its name, a NamedMDNode isn't itself an MDNode.
///
/// NamedMDNodes are named module-level entities that contain lists of MDNodes.
///
/// It is illegal for a NamedMDNode to appear as an operand of an MDNode.
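///
/// A minimal usage sketch (illustrative only; \c M is an assumed \a Module,
/// \c Ctx its \a LLVMContext, and the metadata name is arbitrary):
/// \code
///   NamedMDNode *NMD = M.getOrInsertNamedMetadata("example.md");
///   NMD->addOperand(MDNode::get(Ctx, {MDString::get(Ctx, "entry")}));
///   for (MDNode *Op : NMD->operands())
///     (void)Op->getNumOperands();
/// \endcode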
class NamedMDNode : public ilist_node<NamedMDNode> {
  friend class LLVMContextImpl;
  friend class Module;

  std::string Name;
  Module *Parent = nullptr;
  void *Operands; // SmallVector<TrackingMDRef, 4>

  void setParent(Module *M) { Parent = M; }

  explicit NamedMDNode(const Twine &N);

  template <class T1, class T2> class op_iterator_impl {
    friend class NamedMDNode;

    const NamedMDNode *Node = nullptr;
    unsigned Idx = 0;

    op_iterator_impl(const NamedMDNode *N, unsigned i) : Node(N), Idx(i) {}

  public:
    using iterator_category = std::bidirectional_iterator_tag;
    using value_type = T2;
    using difference_type = std::ptrdiff_t;
    using pointer = value_type *;
    using reference = value_type &;

    op_iterator_impl() = default;

    bool operator==(const op_iterator_impl &o) const { return Idx == o.Idx; }
    bool operator!=(const op_iterator_impl &o) const { return Idx != o.Idx; }

    op_iterator_impl &operator++() {
      ++Idx;
      return *this;
    }

    op_iterator_impl operator++(int) {
      op_iterator_impl tmp(*this);
      operator++();
      return tmp;
    }

    op_iterator_impl &operator--() {
      --Idx;
      return *this;
    }

    op_iterator_impl operator--(int) {
      op_iterator_impl tmp(*this);
      operator--();
      return tmp;
    }

    T1 operator*() const { return Node->getOperand(Idx); }
  };

public:
  NamedMDNode(const NamedMDNode &) = delete;
  ~NamedMDNode();

  /// Drop all references and remove the node from its parent module.
  void eraseFromParent();

  /// Remove all uses and clear the operand vector.
  void dropAllReferences() { clearOperands(); }
  /// Drop all references to this node's operands.
  void clearOperands();

  /// Get the module that holds this named metadata collection.
  inline Module *getParent() { return Parent; }
  inline const Module *getParent() const { return Parent; }

  MDNode *getOperand(unsigned i) const;
  unsigned getNumOperands() const;
  void addOperand(MDNode *M);
  void setOperand(unsigned I, MDNode *New);
  StringRef getName() const;
  void print(raw_ostream &ROS, bool IsForDebug = false) const;
  void print(raw_ostream &ROS, ModuleSlotTracker &MST,
             bool IsForDebug = false) const;
  void dump() const;

  // ---------------------------------------------------------------------------
  // Operand Iterator interface...
  //
  using op_iterator = op_iterator_impl<MDNode *, MDNode>;

  op_iterator op_begin() { return op_iterator(this, 0); }
  op_iterator op_end()   { return op_iterator(this, getNumOperands()); }

  using const_op_iterator = op_iterator_impl<const MDNode *, MDNode>;

  const_op_iterator op_begin() const { return const_op_iterator(this, 0); }
  const_op_iterator op_end() const {
    return const_op_iterator(this, getNumOperands());
  }

  inline iterator_range<op_iterator>  operands() {
    return make_range(op_begin(), op_end());
  }
  inline iterator_range<const_op_iterator> operands() const {
    return make_range(op_begin(), op_end());
  }
};

// Create wrappers for C Binding types (see CBindingWrapping.h).
DEFINE_ISA_CONVERSION_FUNCTIONS(NamedMDNode, LLVMNamedMDNodeRef)

} // end namespace llvm

#endif // LLVM_IR_METADATA_H