//===- CoroInternal.h - Internal Coroutine interfaces ---------*- C++ -*---===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// Common definitions/declarations used internally by coroutine lowering passes.
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TRANSFORMS_COROUTINES_COROINTERNAL_H
#define LLVM_LIB_TRANSFORMS_COROUTINES_COROINTERNAL_H

#include "CoroInstr.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/Transforms/Coroutines.h"

namespace llvm {

// Forward declarations so this header does not have to pull in the full
// CallGraph / pass-registry headers.
class CallGraph;
class CallGraphSCC;
class PassRegistry;

// Initializer hooks for the legacy-pass-manager versions of the four
// coroutine lowering passes (Early, Split, Elide, Cleanup).
void initializeCoroEarlyLegacyPass(PassRegistry &);
void initializeCoroSplitLegacyPass(PassRegistry &);
void initializeCoroElideLegacyPass(PassRegistry &);
void initializeCoroCleanupLegacyPass(PassRegistry &);

// CoroEarly pass marks every function that has coro.begin with a string
// attribute "coroutine.presplit"="0". CoroSplit pass processes the coroutine
// twice. First, it lets it go through complete IPO optimization pipeline as a
// single function. It forces restart of the pipeline by inserting an indirect
// call to an empty function "coro.devirt.trigger" which is devirtualized by
// CoroElide pass that triggers a restart of the pipeline by CGPassManager.
// When CoroSplit pass sees the same coroutine the second time, it splits it up,
// adds coroutine subfunctions to the SCC to be processed by IPO pipeline.

// Names of the string attribute/values used by the passes described above to
// track whether a coroutine has already been prepared for splitting.
#define CORO_PRESPLIT_ATTR "coroutine.presplit"
#define UNPREPARED_FOR_SPLIT "0"
#define PREPARED_FOR_SPLIT "1"

// Name of the empty function whose indirect call forces the CGSCC pipeline to
// restart once CoroElide devirtualizes it.
#define CORO_DEVIRT_TRIGGER_FN "coro.devirt.trigger"

namespace coro {

// Whether the module declares the named coroutine intrinsics.
// NOTE(review): whether this means "any of" or "all of" the listed names is
// not visible from this header — confirm against the definition.
bool declaresIntrinsics(const Module &M,
                        const std::initializer_list<StringRef>);
// Lowering helpers shared by the coroutine passes; they rewrite the
// coro.alloc / coro.free intrinsics hanging off the given coro.begin/coro.id.
void replaceAllCoroAllocs(CoroBeginInst *CB, bool Replacement);
void replaceAllCoroFrees(CoroBeginInst *CB, Value *Replacement);
void replaceCoroFree(CoroIdInst *CoroId, bool Elide);
// Registers the newly created coroutine subfunctions Funcs with the legacy
// call graph so the CGSCC pass manager visits them.
void updateCallGraph(Function &Caller, ArrayRef<Function *> Funcs,
                     CallGraph &CG, CallGraphSCC &SCC);

// Keeps data and helper functions for lowering coroutine intrinsics.
struct LowererBase {
  Module &TheModule;
  LLVMContext &Context;
  // Cached types/constants used when emitting the subfunction call
  // (see makeSubFnCall).
  PointerType *const Int8Ptr;
  FunctionType *const ResumeFnType;
  ConstantPointerNull *const NullPtr;

  LowererBase(Module &M);
  // Emits a call that selects the resume/destroy subfunction identified by
  // Index for the coroutine frame Arg, inserted before InsertPt.
  Value *makeSubFnCall(Value *Arg, int Index, Instruction *InsertPt);
};

// The coroutine lowering scheme in effect for a given coroutine; this selects
// which member of Shape's lowering-storage union (below) is meaningful.
enum class ABI {
  /// The "resume-switch" lowering, where there are separate resume and
  /// destroy functions that are shared between all suspend points. The
  /// coroutine frame implicitly stores the resume and destroy functions,
  /// the current index, and any promise value.
  Switch,

  /// The "returned-continuation" lowering, where each suspend point creates a
  /// single continuation function that is used for both resuming and
  /// destroying. Does not support promises.
  Retcon,

  /// The "unique returned-continuation" lowering, where each suspend point
  /// creates a single continuation function that is used for both resuming
  /// and destroying. Does not support promises. The function is known to
  /// suspend at most once during its execution, and the return value of
  /// the continuation is void.
  RetconOnce,
};

// Holds structural Coroutine Intrinsics for a particular function and other
// values used during CoroSplit pass.
struct LLVM_LIBRARY_VISIBILITY Shape {
  // Structural intrinsics discovered in the function.
  // NOTE(review): presumably populated by buildFrom — confirm in the .cpp.
  CoroBeginInst *CoroBegin;
  SmallVector<CoroEndInst *, 4> CoroEnds;
  SmallVector<CoroSizeInst *, 2> CoroSizes;
  SmallVector<AnyCoroSuspendInst *, 4> CoroSuspends;
  SmallVector<CallInst*, 2> SwiftErrorOps;

  // Field indexes for special fields in the switch lowering.
  struct SwitchFieldIndex {
    enum {
      Resume,
      Destroy

      // The promise field is always at a fixed offset from the start of
      // frame given its type, but the index isn't a constant for all
      // possible frames.

      // The switch-index field isn't at a fixed offset or index, either;
      // we just work it in where it fits best.
    };
  };

  // Active lowering scheme; determines which union member below is valid.
  coro::ABI ABI;

  // The coroutine frame type and layout information.
  StructType *FrameTy;
  Align FrameAlign;
  uint64_t FrameSize;
  Instruction *FramePtr;
  BasicBlock *AllocaSpillBlock;

  // State that only exists under the switch lowering.
  struct SwitchLoweringStorage {
    SwitchInst *ResumeSwitch;
    AllocaInst *PromiseAlloca;
    BasicBlock *ResumeEntryBlock;
    unsigned IndexField;   // frame field holding the suspend-point index
    unsigned PromiseField; // frame field holding the promise
    bool HasFinalSuspend;
  };

  // State that only exists under the retcon/retcon-once lowerings.
  struct RetconLoweringStorage {
    Function *ResumePrototype;
    Function *Alloc;
    Function *Dealloc;
    BasicBlock *ReturnBlock;
    bool IsFrameInlineInStorage;
  };

  // Which member is active is dictated by ABI; the accessors below assert
  // the matching ABI before touching either member.
  union {
    SwitchLoweringStorage SwitchLowering;
    RetconLoweringStorage RetconLowering;
  };

  // The coro.id intrinsic; valid only under the switch lowering.
  CoroIdInst *getSwitchCoroId() const {
    assert(ABI == coro::ABI::Switch);
    return cast<CoroIdInst>(CoroBegin->getId());
  }

  // The coro.id.retcon[.once] intrinsic; valid only under retcon lowerings.
  AnyCoroIdRetconInst *getRetconCoroId() const {
    assert(ABI == coro::ABI::Retcon ||
           ABI == coro::ABI::RetconOnce);
    return cast<AnyCoroIdRetconInst>(CoroBegin->getId());
  }

  // Index of the suspend-point-index field in FrameTy (switch lowering only;
  // requires the frame type to have been built).
  unsigned getSwitchIndexField() const {
    assert(ABI == coro::ABI::Switch);
    assert(FrameTy && "frame type not assigned");
    return SwitchLowering.IndexField;
  }
  // Integer type of the suspend-point-index frame field.
  IntegerType *getIndexType() const {
    assert(ABI == coro::ABI::Switch);
    assert(FrameTy && "frame type not assigned");
    return cast<IntegerType>(FrameTy->getElementType(getSwitchIndexField()));
  }
  // A constant of the index type with the given value.
  ConstantInt *getIndex(uint64_t Value) const {
    return ConstantInt::get(getIndexType(), Value);
  }

  // Pointer type of the resume-function frame field (switch lowering only).
  PointerType *getSwitchResumePointerType() const {
    assert(ABI == coro::ABI::Switch);
    assert(FrameTy && "frame type not assigned");
    return cast<PointerType>(FrameTy->getElementType(SwitchFieldIndex::Resume));
  }

  // Function type of the resume entry point, derived per the active lowering.
  FunctionType *getResumeFunctionType() const {
    switch (ABI) {
    case coro::ABI::Switch: {
      auto *FnPtrTy = getSwitchResumePointerType();
      return cast<FunctionType>(FnPtrTy->getPointerElementType());
    }
    case coro::ABI::Retcon:
    case coro::ABI::RetconOnce:
      return RetconLowering.ResumePrototype->getFunctionType();
    }
    llvm_unreachable("Unknown coro::ABI enum");
  }

  // Result types produced at a retcon suspend: the elements of the ramp
  // function's struct return, minus the leading element (presumably the
  // continuation pointer — see checkWFRetconPrototype). Empty for a
  // non-struct return.
  ArrayRef<Type*> getRetconResultTypes() const {
    assert(ABI == coro::ABI::Retcon ||
           ABI == coro::ABI::RetconOnce);
    auto FTy = CoroBegin->getFunction()->getFunctionType();

    // The safety of all this is checked by checkWFRetconPrototype.
    if (auto STy = dyn_cast<StructType>(FTy->getReturnType())) {
      return STy->elements().slice(1);
    } else {
      return ArrayRef<Type*>();
    }
  }

  // Parameter types a retcon continuation is resumed with: the resume
  // prototype's parameters minus the leading one (presumably the frame
  // pointer — see checkWFRetconPrototype).
  ArrayRef<Type*> getRetconResumeTypes() const {
    assert(ABI == coro::ABI::Retcon ||
           ABI == coro::ABI::RetconOnce);

    // The safety of all this is checked by checkWFRetconPrototype.
    auto FTy = RetconLowering.ResumePrototype->getFunctionType();
    return FTy->params().slice(1);
  }

  // Calling convention of the resume/destroy entry points: fastcc for the
  // switch lowering, otherwise whatever the resume prototype uses.
  CallingConv::ID getResumeFunctionCC() const {
    switch (ABI) {
    case coro::ABI::Switch:
      return CallingConv::Fast;

    case coro::ABI::Retcon:
    case coro::ABI::RetconOnce:
      return RetconLowering.ResumePrototype->getCallingConv();
    }
    llvm_unreachable("Unknown coro::ABI enum");
  }

  // The promise alloca, or null — only the switch lowering supports promises.
  AllocaInst *getPromiseAlloca() const {
    if (ABI == coro::ABI::Switch)
      return SwitchLowering.PromiseAlloca;
    return nullptr;
  }
  // Index of the promise field in FrameTy; requires a promise to exist.
  unsigned getPromiseField() const {
    assert(ABI == coro::ABI::Switch);
    assert(FrameTy && "frame type not assigned");
    assert(SwitchLowering.PromiseAlloca && "no promise alloca");
    return SwitchLowering.PromiseField;
  }

  /// Allocate memory according to the rules of the active lowering.
  ///
  /// \param CG - if non-null, will be updated for the new call
  Value *emitAlloc(IRBuilder<> &Builder, Value *Size, CallGraph *CG) const;

  /// Deallocate memory according to the rules of the active lowering.
  ///
  /// \param CG - if non-null, will be updated for the new call
  void emitDealloc(IRBuilder<> &Builder, Value *Ptr, CallGraph *CG) const;

  Shape() = default;
  explicit Shape(Function &F) { buildFrom(F); }
  // Scans F for coroutine intrinsics and fills in the fields above.
  void buildFrom(Function &F);
};

// Frame-building entry point used during CoroSplit.
// NOTE(review): presumably computes FrameTy/FramePtr and spills values across
// suspend points — confirm against the definition (CoroFrame.cpp).
void buildCoroutineFrame(Function &F, Shape &Shape);

} // End namespace coro.
} // End namespace llvm

#endif