|
Enzyme main
|
Classes | |
| struct | Node |
Typedefs | |
| using | Graph = std::map<Node, std::set<Node>> |
Functions | |
| bool | is_use_directly_needed_in_reverse (const GradientUtils *gutils, const llvm::Value *val, DerivativeMode mode, const llvm::Instruction *user, const llvm::SmallPtrSetImpl< llvm::BasicBlock * > &oldUnreachable, QueryType shadow, bool *recursiveUse=nullptr) |
| Determine if a value is needed directly to compute the adjoint of the given instruction user. | |
| template<QueryType VT, bool OneLevel = false> | |
| bool | is_value_needed_in_reverse (const GradientUtils *gutils, const llvm::Value *inst, DerivativeMode mode, std::map< UsageKey, bool > &seen, const llvm::SmallPtrSetImpl< llvm::BasicBlock * > &oldUnreachable) |
| template<QueryType VT> | |
| static bool | is_value_needed_in_reverse (const GradientUtils *gutils, const llvm::Value *inst, DerivativeMode mode, const llvm::SmallPtrSetImpl< llvm::BasicBlock * > &oldUnreachable) |
| void | dump (std::map< Node, std::set< Node > > &G) |
| void | bfs (const std::map< Node, std::set< Node > > &G, const llvm::SetVector< llvm::Value * > &Recompute, std::map< Node, Node > &parent) |
| int | cmpLoopNest (llvm::Loop *prev, llvm::Loop *next) |
| void | minCut (const llvm::DataLayout &DL, llvm::LoopInfo &OrigLI, const llvm::SetVector< llvm::Value * > &Recomputes, const llvm::SetVector< llvm::Value * > &Intermediates, llvm::SetVector< llvm::Value * > &Required, llvm::SetVector< llvm::Value * > &MinReq, const GradientUtils *gutils, llvm::TargetLibraryInfo &TLI) |
| __attribute__((always_inline)) static inline void | forEachDirectInsertUser(llvm::…) — NOTE: signature truncated; Doxygen misparsed the attribute, cutting the parameter list off after "llvm" |
| bool | callShouldNotUseDerivative (const GradientUtils *gutils, llvm::CallBase &orig, QueryType qtype, const llvm::Value *val) |
| Return whether or not this is a constant and should use the reverse pass. |
| using DifferentialUseAnalysis::Graph = std::map<Node, std::set<Node>> |
Definition at line 497 of file DifferentialUseAnalysis.h.
| DifferentialUseAnalysis::__attribute__ | ( | (always_inline) | ) — NOTE: Doxygen misparsed the always_inline attribute as the function name; this entry presumably documents forEachDirectInsertUser (see the function listing above) |
Definition at line 519 of file DifferentialUseAnalysis.h.
References is_value_needed_in_reverse(), GradientUtils::isConstantValue(), GradientUtils::mode, and GradientUtils::notForAnalysis.
| void DifferentialUseAnalysis::bfs | ( | const std::map< Node, std::set< Node > > & | G, |
| const llvm::SetVector< llvm::Value * > & | Recompute, | ||
| std::map< Node, Node > & | parent ) |
| bool DifferentialUseAnalysis::callShouldNotUseDerivative | ( | const GradientUtils * | gutils, |
| llvm::CallBase & | orig, | ||
| QueryType | qtype, | ||
| const llvm::Value * | val ) |
Return whether or not this is a constant and should use the reverse pass.
Referenced by AdjointGenerator::visitCallInst().
| int DifferentialUseAnalysis::cmpLoopNest | ( | llvm::Loop * | prev, |
| llvm::Loop * | next ) |
| bool DifferentialUseAnalysis::is_use_directly_needed_in_reverse | ( | const GradientUtils * | gutils, |
| const llvm::Value * | val, | ||
| DerivativeMode | mode, | ||
| const llvm::Instruction * | user, | ||
| const llvm::SmallPtrSetImpl< llvm::BasicBlock * > & | oldUnreachable, | ||
| QueryType | shadow, | ||
| bool * | recursiveUse = nullptr ) |
Determine if a value is needed directly to compute the adjoint of the given instruction user.
shadow denotes whether we are considering the shadow of the value (shadow=true) or the primal of the value (shadow=false). Recursive use is only usable in shadow mode.
Referenced by is_value_needed_in_reverse().
|
inline | static
Definition at line 471 of file DifferentialUseAnalysis.h.
References is_value_needed_in_reverse(), Primal, and Shadow.
|
inline |
Definition at line 95 of file DifferentialUseAnalysis.h.
References TypeResults::allFloat(), TypeResults::anyPointer(), EnzymePrintDiffUse, ForwardModeError, getFuncNameFromCall(), is_use_directly_needed_in_reverse(), is_value_needed_in_reverse(), GradientUtils::isConstantValue(), isIntelSubscriptIntrinsic(), GradientUtils::mode, GradientUtils::needsCacheWholeAllocation(), GradientUtils::oldFunc, Primal, GradientUtils::rematerializableAllocations, Shadow, ShadowByConstPrimal, to_string(), and GradientUtils::TR.
Referenced by __attribute__(), AdjointGenerator::forwardModeInvertedPointerFallback(), AdjointGenerator::handleKnownCallDerivatives(), is_value_needed_in_reverse(), is_value_needed_in_reverse(), AdjointGenerator::recursivelyHandleSubfunction(), AdjointGenerator::visitCallInst(), and AdjointGenerator::visitLoadLike().
| void DifferentialUseAnalysis::minCut | ( | const llvm::DataLayout & | DL, |
| llvm::LoopInfo & | OrigLI, | ||
| const llvm::SetVector< llvm::Value * > & | Recomputes, | ||
| const llvm::SetVector< llvm::Value * > & | Intermediates, | ||
| llvm::SetVector< llvm::Value * > & | Required, | ||
| llvm::SetVector< llvm::Value * > & | MinReq, | ||
| const GradientUtils * | gutils, | ||
| llvm::TargetLibraryInfo & | TLI ) |