From ba02e80eafbd765e4da20a8817fc1cf2c76ee71c Mon Sep 17 00:00:00 2001
From: Henry Zongaro
Date: Tue, 12 Nov 2024 10:34:04 -0800
Subject: [PATCH 1/4] Enable dumping of method trees in Store Sinking

The Store Sinking optimization contains calls to dumpMethodTrees that
were explicitly disabled.  This hampers debugging of the optimization.
This change dumps the method trees before and after the optimization runs
if tracing of the optimization has been enabled.

Signed-off-by: Henry Zongaro
---
 compiler/optimizer/SinkStores.cpp | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/compiler/optimizer/SinkStores.cpp b/compiler/optimizer/SinkStores.cpp
index dd3be6bd40d..a828664b750 100644
--- a/compiler/optimizer/SinkStores.cpp
+++ b/compiler/optimizer/SinkStores.cpp
@@ -516,7 +516,7 @@ int32_t TR_GeneralSinkStores::perform()
 int32_t TR_SinkStores::performStoreSinking()
    {
-   if (0 && trace())
+   if (trace())
       {
       comp()->dumpMethodTrees("Before Store Sinking");
       }
@@ -659,7 +659,7 @@ int32_t TR_SinkStores::performStoreSinking()
    // if we found any, transform the code now
    doSinking();
-   if (0 && trace())
+   if (trace())
      {
      comp()->dumpMethodTrees("After Store Sinking");
      }

From d9735f51c492e66ab9159c30628feb28b1245ab4 Mon Sep 17 00:00:00 2001
From: Henry Zongaro
Date: Thu, 14 Nov 2024 12:25:33 -0800
Subject: [PATCH 2/4] Move UnsafeSubexpressionRemoval to its own file

UnsafeSubexpressionRemoval is currently used only in
IsolatedStoreElimination, where it is defined.  The Store Sinking
optimization requires the utility provided by UnsafeSubexpressionRemoval,
so this change moves the declaration and implementation of that class
into their own files.  In the process, the class is renamed
UnsafeSubexpressionRemover, it is constructed from a TR::Optimization
rather than from TR_IsolatedStoreElimination, and the store elimination
logic it supports is factored into a new eliminateStore method.

Signed-off-by: Henry Zongaro
---
 compiler/optimizer/CMakeLists.txt | 1 +
 .../optimizer/IsolatedStoreElimination.cpp | 200 +----------------
 compiler/optimizer/SinkStores.cpp | 1 +
 compiler/optimizer/SinkStores.hpp | 1 +
 .../optimizer/UnsafeSubexpressionRemover.cpp | 196 +++++++++++++++++
 .../optimizer/UnsafeSubexpressionRemover.hpp | 168 +++++++++++++++
 6 files changed, 370 insertions(+), 197 deletions(-)
 create mode 100644 compiler/optimizer/UnsafeSubexpressionRemover.cpp
 create mode 100644 compiler/optimizer/UnsafeSubexpressionRemover.hpp

diff --git a/compiler/optimizer/CMakeLists.txt b/compiler/optimizer/CMakeLists.txt
index f52a65c0584..43647dca3c3 100644
--- a/compiler/optimizer/CMakeLists.txt
+++ b/compiler/optimizer/CMakeLists.txt
@@ -104,6 +104,7 @@ compiler_library(optimizer
    ${CMAKE_CURRENT_LIST_DIR}/TrivialDeadBlockRemover.cpp
    ${CMAKE_CURRENT_LIST_DIR}/FEInliner.cpp
    ${CMAKE_CURRENT_LIST_DIR}/BenefitInliner.cpp
+   ${CMAKE_CURRENT_LIST_DIR}/UnsafeSubexpressionRemover.cpp
    ${CMAKE_CURRENT_LIST_DIR}/abstractinterpreter/AbsValue.cpp
    ${CMAKE_CURRENT_LIST_DIR}/abstractinterpreter/AbsOpStack.cpp
    ${CMAKE_CURRENT_LIST_DIR}/abstractinterpreter/AbsOpArray.cpp
diff --git a/compiler/optimizer/IsolatedStoreElimination.cpp b/compiler/optimizer/IsolatedStoreElimination.cpp
index 8907dbd6320..abefcbc3250 100644
--- a/compiler/optimizer/IsolatedStoreElimination.cpp
+++ b/compiler/optimizer/IsolatedStoreElimination.cpp
@@ -58,6 +58,7 @@
 #include "optimizer/Optimizer.hpp"
 #include "optimizer/Structure.hpp"
 #include "optimizer/TransformUtil.hpp"
+#include "optimizer/UnsafeSubexpressionRemover.hpp"
 #include "optimizer/UseDefInfo.hpp"
 #include "optimizer/VPConstraint.hpp"
 #include "ras/Debug.hpp"
@@ -77,118 +78,6 @@ TR_IsolatedStoreElimination::TR_IsolatedStoreElimination(TR::OptimizationManager
    {
    }

-struct UnsafeSubexpressionRemoval // PR69613
-   {
-   TR_IsolatedStoreElimination 
*_ise; - TR_BitVector _visitedNodes; - TR_BitVector _unsafeNodes; // Nodes whose mere evaluation is not safe - - TR::Compilation *comp(){ return _ise->comp(); } - bool trace(){ return _ise->trace(); } - - bool isVisited(TR::Node *node) { return _visitedNodes.isSet(node->getGlobalIndex()); } - bool isUnsafe(TR::Node *node) - { - TR_ASSERT(isVisited(node), "Cannot call isUnsafe on n%dn before anchorSafeChildrenOfUnsafeNodes", node->getGlobalIndex()); - return _unsafeNodes.isSet(node->getGlobalIndex()); - } - - UnsafeSubexpressionRemoval(TR_IsolatedStoreElimination *ise): - _ise(ise), - _visitedNodes(ise->comp()->getNodeCount(), comp()->trMemory(), stackAlloc), - _unsafeNodes (ise->comp()->getNodeCount(), comp()->trMemory(), stackAlloc) - {} - - void recordDeadUse(TR::Node *node) - { - _visitedNodes.set(node->getGlobalIndex()); - _unsafeNodes .set(node->getGlobalIndex()); - } - - void anchorSafeChildrenOfUnsafeNodes(TR::Node *node, TR::TreeTop *anchorPoint) - { - if (isVisited(node)) - return; - else - _visitedNodes.set(node->getGlobalIndex()); - - // - // Design note: we don't decrement refcounts in here. Conceptually, - // anchoring and decrementing are independent of each other. More - // pragmatically, you end up at with a well-defined state when this - // function finishes: if the top-level node is unsafe, the caller must - // unhook it and call recursivelyDecReferenceCount, which will do the - // right thing. Otherwise, no decrementing is necessary anyway. - // - // A rule of thumb to remember is that this function will do nothing if - // the node is safe. If you call this, then find !isUnsafe(node), then - // the trees did not change. - // - - // Propagate unsafeness from children, unless we've already done it - // - for (int32_t i = 0; i < node->getNumChildren(); ++i) - { - TR::Node *child = node->getChild(i); - anchorSafeChildrenOfUnsafeNodes(child, anchorPoint); - if (isUnsafe(child)) - { - _unsafeNodes.set(node->getGlobalIndex()); - if (trace()) - { - TR::Node *child = node->getChild(i); - traceMsg(comp(), " (Marked %s n%dn unsafe due to dead child #%d %s n%dn)\n", - node->getOpCode().getName(), node->getGlobalIndex(), i, - child->getOpCode().getName(), child->getGlobalIndex()); - } - } - } - - // If node is safe, all its descendant nodes must be safe. Nothing to do. - // - // If node is unsafe, anchor all its safe children. Its unsafe children, - // at this point, must already have taken care of their own safe - // subexpressions recursively. - // - // Note that this could anchor some nodes multiple times, for complex - // subexpressions with commoning. We could make some feeble attempts to - // prevent that, but let's just leave it to deadTreesElimination. 
- // - if (isUnsafe(node)) - { - for (int32_t i = 0; i < node->getNumChildren(); ++i) - { - TR::Node *child = node->getChild(i); - bool didIt = anchorIfSafe(child, anchorPoint); - if (didIt && trace()) - { - traceMsg(comp(), " - Anchored child #%d %s n%d of %s n%d\n", - i, - child->getOpCode().getName(), child->getGlobalIndex(), - node->getOpCode().getName(), node->getGlobalIndex()); - } - } - } - } - - bool anchorIfSafe(TR::Node *node, TR::TreeTop *anchorPoint) - { - if (!isVisited(node)) - anchorSafeChildrenOfUnsafeNodes(node, anchorPoint); - - if (isUnsafe(node)) - { - return false; - } - else - { - anchorPoint->insertBefore(TR::TreeTop::create(comp(), TR::Node::create(TR::treetop, 1, node))); - return true; - } - } - - }; - int32_t TR_IsolatedStoreElimination::perform() { TR::StackMemoryRegion stackMemoryRegion(*trMemory()); @@ -233,7 +122,7 @@ int32_t TR_IsolatedStoreElimination::perform() // example, if the unsafe node dereferences a load of a dead pointer // whose store got deleted, then that dereference could crash at runtime. // - UnsafeSubexpressionRemoval usr(this); + OMR::UnsafeSubexpressionRemover usr(this); for (uint32_t groupIndex = 0; groupIndex < _groupsOfStoreNodes->size(); groupIndex++) { TR_BitVector *groupOfStores = _groupsOfStoreNodes->element(groupIndex); @@ -291,90 +180,7 @@ int32_t TR_IsolatedStoreElimination::perform() _trivialDefs->reset(index); // It's not a trivial def if we're handling it as a group. If we try to do both, BOOM! - // Time to delete the store. This is more complex than it sounds. - // - // Generally, we want to replace the store with a treetop (ie. a - // no-op). However, this is complicated by several factors, in - // order of increasing subtlety and complexity: - // - // 1. Stores can have multiple children -- as many as three for - // an iwrtbar. If the store is turning into a nop, we must - // anchor all the other children. - // 2. Stores can appear under other nodes like CHKs and - // compressedRefs. Particularly for the CHKs, we must - // perform the appropriate checks despite eliminating the - // store, and so we must leave the store node in the - // appropriate shape so its parents will do the right thing. - // 3. Some expressions are unsafe to evaluate after stores are - // eliminated, and those expressions themselves must NOT be - // anchored. - // - // In these transformations, we adopt the convention that we - // retain the original store whenever possible, modifying it - // in-place, and we retain its first child if possible. These - // conventions are sometimes required, and so we just adopt them - // universally for the sake of consistency, efficiency, and - // readability in the jit logs. - - // Anchor all children but the first - // - for (int32_t i = 1; i < node->getNumChildren(); i++) - { - TR::Node *child = node->getChild(i); - usr.anchorIfSafe(child, treeTop); - child->recursivelyDecReferenceCount(); - } - - node->setNumChildren(1); - // - // Note that the node at this point could be half-baked: an indirect - // store or write barrier with one child. This is an intermediate - // state, and further surgery is required for correctness. - - // Eliminate the store - // - if (treeTop->getNode()->getOpCode().isSpineCheck() && treeTop->getNode()->getFirstChild() == node) - { - // SpineCHKs are special. We don't want to require all the - // opts and codegens to do the right thing with a PassThrough - // child. 
Hence, we'll turn the tree into a const, which is - // a state the opts and codegens must cope with anyway, - // because if this tree had been a load instead of a store, - // the optimizer could have legitimately turned it into a const. - // - TR::Node *child = node->getChild(0); - usr.anchorIfSafe(child, treeTop); - child->recursivelyDecReferenceCount(); - TR::Node::recreate(node, comp()->il.opCodeForConst(node->getSymbolReference()->getSymbol()->getDataType())); - node->setFlags(0); - node->setNumChildren(0); - } - else - { - // Vandalize the first child if it's unsafe to evaluate - // - TR::Node *child = node->getFirstChild(); - usr.anchorSafeChildrenOfUnsafeNodes(child, treeTop); - if (usr.isUnsafe(child)) - { - // Must eradicate the dead expression. Can't leave it - // anchored because it's not safe to evaluate. - // - child->recursivelyDecReferenceCount(); - TR::Node *dummyChild = node->setAndIncChild(0, TR::Node::createConstDead(child, TR::Int32, 0xbad1 /* eyecatcher */)); - if (trace()) - traceMsg(comp(), " - replace unsafe child %s n%dn with dummy %s n%dn\n", - child->getOpCode().getName(), child->getGlobalIndex(), - dummyChild->getOpCode().getName(), dummyChild->getGlobalIndex()); - } - - // Set opcode to nop - // - if (node->getReferenceCount() >= 1) - TR::Node::recreate(node, TR::PassThrough); - else - TR::Node::recreate(node, TR::treetop); - } + usr.eliminateStore(treeTop); node->setFlags(0); eliminatedStore = true; diff --git a/compiler/optimizer/SinkStores.cpp b/compiler/optimizer/SinkStores.cpp index a828664b750..dadcf8db6a5 100644 --- a/compiler/optimizer/SinkStores.cpp +++ b/compiler/optimizer/SinkStores.cpp @@ -63,6 +63,7 @@ #include "optimizer/Optimizations.hpp" #include "optimizer/Optimizer.hpp" #include "optimizer/Structure.hpp" +#include "optimizer/UnsafeSubexpressionRemover.hpp" #define OPT_DETAILS "O^O SINK STORES: " diff --git a/compiler/optimizer/SinkStores.hpp b/compiler/optimizer/SinkStores.hpp index 19b58ea5096..ac14f24d2e9 100644 --- a/compiler/optimizer/SinkStores.hpp +++ b/compiler/optimizer/SinkStores.hpp @@ -39,6 +39,7 @@ class TR_LiveVariableInformation; class TR_Liveness; class TR_MovableStore; class TR_SinkStores; +namespace OMR { struct UnsafeSubexpressionRemover; } namespace TR { class Block; } namespace TR { class CFGEdge; } namespace TR { class CFGNode; } diff --git a/compiler/optimizer/UnsafeSubexpressionRemover.cpp b/compiler/optimizer/UnsafeSubexpressionRemover.cpp new file mode 100644 index 00000000000..71accc5437e --- /dev/null +++ b/compiler/optimizer/UnsafeSubexpressionRemover.cpp @@ -0,0 +1,196 @@ +/******************************************************************************* + * Copyright IBM Corp. and others 2014 + * + * This program and the accompanying materials are made available under + * the terms of the Eclipse Public License 2.0 which accompanies this + * distribution and is available at https://www.eclipse.org/legal/epl-2.0/ + * or the Apache License, Version 2.0 which accompanies this distribution + * and is available at https://www.apache.org/licenses/LICENSE-2.0. + * + * This Source Code may also be made available under the following Secondary + * Licenses when the conditions for such availability set forth in the + * Eclipse Public License, v. 2.0 are satisfied: GNU General Public License, + * version 2 with the GNU Classpath Exception [1] and GNU General Public + * License, version 2 with the OpenJDK Assembly Exception [2]. 
+ * + * [1] https://www.gnu.org/software/classpath/license.html + * [2] https://openjdk.org/legal/assembly-exception.html + * + * SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0 OR GPL-2.0-only WITH OpenJDK-assembly-exception-1.0 + *******************************************************************************/ + +#include "optimizer/UnsafeSubexpressionRemover.hpp" + +void +OMR::UnsafeSubexpressionRemover::anchorSafeChildrenOfUnsafeNodes(TR::Node *node, TR::TreeTop *anchorPoint) + { + if (isVisited(node)) + return; + else + _visitedNodes.set(node->getGlobalIndex()); + + // + // Design note: we don't decrement refcounts in here. Conceptually, + // anchoring and decrementing are independent of each other. More + // pragmatically, you end up at with a well-defined state when this + // function finishes: if the top-level node is unsafe, the caller must + // unhook it and call recursivelyDecReferenceCount, which will do the + // right thing. Otherwise, no decrementing is necessary anyway. + // + // A rule of thumb to remember is that this function will do nothing if + // the node is safe. If you call this, then find !isUnsafe(node), then + // the trees did not change. + // + + // Propagate unsafeness from children, unless we've already done it + // + for (int32_t i = 0; i < node->getNumChildren(); ++i) + { + TR::Node *child = node->getChild(i); + anchorSafeChildrenOfUnsafeNodes(child, anchorPoint); + if (isUnsafe(child)) + { + _unsafeNodes.set(node->getGlobalIndex()); + if (trace()) + { + TR::Node *child = node->getChild(i); + traceMsg(comp(), " (Marked %s n%dn unsafe due to dead child #%d %s n%dn)\n", + node->getOpCode().getName(), node->getGlobalIndex(), i, + child->getOpCode().getName(), child->getGlobalIndex()); + } + } + } + + // If node is safe, all its descendant nodes must be safe. Nothing to do. + // + // If node is unsafe, anchor all its safe children. Its unsafe children, + // at this point, must already have taken care of their own safe + // subexpressions recursively. + // + // Note that this could anchor some nodes multiple times, for complex + // subexpressions with commoning. We could make some feeble attempts to + // prevent that, but let's just leave it to deadTreesElimination. + // + if (isUnsafe(node)) + { + for (int32_t i = 0; i < node->getNumChildren(); ++i) + { + TR::Node *child = node->getChild(i); + bool didIt = anchorIfSafe(child, anchorPoint); + if (didIt && trace()) + { + traceMsg(comp(), " - Anchored child #%d %s n%d of %s n%d\n", + i, + child->getOpCode().getName(), child->getGlobalIndex(), + node->getOpCode().getName(), node->getGlobalIndex()); + } + } + } + } + +bool +OMR::UnsafeSubexpressionRemover::anchorIfSafe(TR::Node *node, TR::TreeTop *anchorPoint) + { + if (!isVisited(node)) + anchorSafeChildrenOfUnsafeNodes(node, anchorPoint); + + if (isUnsafe(node)) + { + return false; + } + else + { + anchorPoint->insertBefore(TR::TreeTop::create(comp(), TR::Node::create(TR::treetop, 1, node))); + return true; + } + } + +void +OMR::UnsafeSubexpressionRemover::eliminateStore(TR::TreeTop *treeTop) + { + // Time to delete the store. This is more complex than it sounds. + // + // Generally, we want to replace the store with a treetop (ie. a + // no-op). However, this is complicated by several factors, in + // order of increasing subtlety and complexity: + // + // 1. Stores can have multiple children -- as many as three for + // an iwrtbar. If the store is turning into a nop, we must + // anchor all the other children. + // 2. 
Stores can appear under other nodes like CHKs and + // compressedRefs. Particularly for the CHKs, we must + // perform the appropriate checks despite eliminating the + // store, and so we must leave the store node in the + // appropriate shape so its parents will do the right thing. + // 3. Some expressions are unsafe to evaluate after stores are + // eliminated, and those expressions themselves must NOT be + // anchored. + // + // In these transformations, we adopt the convention that we + // retain the original store whenever possible, modifying it + // in-place, and we retain its first child if possible. These + // conventions are sometimes required, and so we just adopt them + // universally for the sake of consistency, efficiency, and + // readability in the jit logs. + TR::Node *node = treeTop->getNode(); + + // Anchor all children but the first + // + for (int32_t i = 1; i < node->getNumChildren(); i++) + { + TR::Node *child = node->getChild(i); + anchorIfSafe(child, treeTop); + child->recursivelyDecReferenceCount(); + } + + node->setNumChildren(1); + // + // Note that the node at this point could be half-baked: an indirect + // store or write barrier with one child. This is an intermediate + // state, and further surgery is required for correctness. + + // Eliminate the store + // + if (treeTop->getNode()->getOpCode().isSpineCheck() && treeTop->getNode()->getFirstChild() == node) + { + // SpineCHKs are special. We don't want to require all the + // opts and codegens to do the right thing with a PassThrough + // child. Hence, we'll turn the tree into a const, which is + // a state the opts and codegens must cope with anyway, + // because if this tree had been a load instead of a store, + // the optimizer could have legitimately turned it into a const. + // + TR::Node *child = node->getChild(0); + anchorIfSafe(child, treeTop); + child->recursivelyDecReferenceCount(); + TR::Node::recreate(node, comp()->il.opCodeForConst(node->getSymbolReference()->getSymbol()->getDataType())); + node->setFlags(0); + node->setNumChildren(0); + } + else + { + // Vandalize the first child if it's unsafe to evaluate + // + TR::Node *child = node->getFirstChild(); + anchorSafeChildrenOfUnsafeNodes(child, treeTop); + if (isUnsafe(child)) + { + // Must eradicate the dead expression. Can't leave it + // anchored because it's not safe to evaluate. + // + child->recursivelyDecReferenceCount(); + TR::Node *dummyChild = node->setAndIncChild(0, TR::Node::createConstDead(child, TR::Int32, 0xbad1 /* eyecatcher */)); + if (trace()) + traceMsg(comp(), " - replace unsafe child %s n%dn with dummy %s n%dn\n", + child->getOpCode().getName(), child->getGlobalIndex(), + dummyChild->getOpCode().getName(), dummyChild->getGlobalIndex()); + } + + // Set opcode to nop + // + if (node->getReferenceCount() >= 1) + TR::Node::recreate(node, TR::PassThrough); + else + TR::Node::recreate(node, TR::treetop); + } + } diff --git a/compiler/optimizer/UnsafeSubexpressionRemover.hpp b/compiler/optimizer/UnsafeSubexpressionRemover.hpp new file mode 100644 index 00000000000..d621ea9ab6d --- /dev/null +++ b/compiler/optimizer/UnsafeSubexpressionRemover.hpp @@ -0,0 +1,168 @@ +/******************************************************************************* + * Copyright IBM Corp. 
and others 2014
+ *
+ * This program and the accompanying materials are made available under
+ * the terms of the Eclipse Public License 2.0 which accompanies this
+ * distribution and is available at https://www.eclipse.org/legal/epl-2.0/
+ * or the Apache License, Version 2.0 which accompanies this distribution
+ * and is available at https://www.apache.org/licenses/LICENSE-2.0.
+ *
+ * This Source Code may also be made available under the following Secondary
+ * Licenses when the conditions for such availability set forth in the
+ * Eclipse Public License, v. 2.0 are satisfied: GNU General Public License,
+ * version 2 with the GNU Classpath Exception [1] and GNU General Public
+ * License, version 2 with the OpenJDK Assembly Exception [2].
+ *
+ * [1] https://www.gnu.org/software/classpath/license.html
+ * [2] https://openjdk.org/legal/assembly-exception.html
+ *
+ * SPDX-License-Identifier: EPL-2.0 OR Apache-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0 OR GPL-2.0-only WITH OpenJDK-assembly-exception-1.0
+ *******************************************************************************/
+
+#ifndef UNSAFESUBEXPRESSIONREMOVER_INCL
+#define UNSAFESUBEXPRESSIONREMOVER_INCL
+
+#include "compile/Compilation.hpp"
+#include "infra/BitVector.hpp"
+#include "il/Node.hpp"
+#include "il/Node_inlines.hpp"
+#include "optimizer/Optimization.hpp"
+#include "optimizer/Optimization_inlines.hpp"
+
+namespace TR { class TreeTop; }
+
+namespace OMR
+   {
+   /**
+    * Utility class that will track any \ref TR::Node that must not be evaluated
+    * following some transformation.  It can then restructure trees that contain
+    * such nodes to anchor the parts of the tree that are still needed to preserve
+    * evaluation order.
+    *
+    * The parent optimization that has made the transformation that results in
+    * nodes that are unsafe to evaluate is responsible for marking such nodes as
+    * unsafe via a call to \ref UnsafeSubexpressionRemover::recordDeadUse.
+    * After all such nodes have been marked, the safe parts of the tree are
+    * anchored and unsafe parts vandalized via a call to
+    * \ref UnsafeSubexpressionRemover::eliminateStore.
+    *
+    * Note that this class is only used today in situations where the problematic
+    * transformation arose beneath a \ref TR::Node representing a store operation -
+    * hence the name of the \c eliminateStore method.  That method will need to
+    * be generalized if this utility is to apply in more situations in the future.
+    */
+   struct UnsafeSubexpressionRemover
+      {
+      /**
+       * Optimization that is in effect.
+       */
+      TR::Optimization *_opt;
+
+      /**
+       * Bit vector used to track nodes that have already been visited.
+       */
+      TR_BitVector _visitedNodes;
+
+      /**
+       * Bit vector used to track nodes that are considered unsafe to evaluate.
+       */
+      TR_BitVector _unsafeNodes; // Nodes whose mere evaluation is not safe
+
+      /**
+       * Construct an instance of \c UnsafeSubexpressionRemover
+       *
+       * \param[in] opt The \ref TR::Optimization that is currently running.
+       */
+      UnsafeSubexpressionRemover(TR::Optimization *opt):
+         _opt(opt),
+         _visitedNodes(opt->comp()->getNodeCount(), comp()->trMemory(), stackAlloc),
+         _unsafeNodes (opt->comp()->getNodeCount(), comp()->trMemory(), stackAlloc)
+         {}
+
+      /**
+       * Mark the specified \ref TR::Node as unsafe to evaluate.
+ * + * \param[in] node A \c TR::Node instance + */ + void recordDeadUse(TR::Node *node) + { + _visitedNodes.set(node->getGlobalIndex()); + _unsafeNodes .set(node->getGlobalIndex()); + } + + /** + * Eliminate a store operation anchored at the specified \ref TR::TreeTop, + * traversing its children looking for any safe trees that will need to be + * anchored, and eliminating any unsafe references that must not be + * evaluated. + * + * \param[in] treeTop The \c TR::TreeTop that anchors a store operation + * that is to be eliminated. This will be used as the + * anchor point before which any safe descendants of the + * store will themselves be anchored + */ + void eliminateStore(TR::TreeTop *treeTop); + + private: + + /** + * Retrieve the \ref TR::Compilation object that is currently active + * + * \returns The \c TR::Compilation object + */ + TR::Compilation *comp() { return _opt->comp(); } + + /** + * Indicates whether trace logging is enabled for the current optimization. + * + * \returns True if trace logging is enabled for the current optimization; + * false, otherwise. + */ + bool trace() { return _opt->trace(); } + + /** + * Test whether the specified \ref TR::Node has already been visited. + * + * \param[in] node A \c TR::Node instance + * + * \returns true if \c node has already been visited; false, otherwise + */ + bool isVisited(TR::Node *node) { return _visitedNodes.isSet(node->getGlobalIndex()); } + + /** + * Test whether the specified \ref TR::Node has been marked as unsafe to evaluate. + * + * \param[in] node A \c TR::Node instance + * + * \returns true if \c node has been marked as unsafe to evaluate; false, otherwise + */ + bool isUnsafe(TR::Node *node) + { + TR_ASSERT(isVisited(node), "Cannot call isUnsafe on n%dn before anchorSafeChildrenOfUnsafeNodes", node->getGlobalIndex()); + return _unsafeNodes.isSet(node->getGlobalIndex()); + } + + /** + * Traverse a tree beginning at the specified \ref TR::Node, marking the + * node as unsafe if any of its children were unsafe. Then iterate over the + * children of any node that was found to be unsafe during that reversal, + * and anchor any child that is safe before the specified \ref TR::TreeTop. + * + * \param[in] node An unsafe \c TR::Node whose safe children should be anchored + * \param[in] anchorPoint A \c TR::TreeTop before which any safe children will + * be anchored + */ + void anchorSafeChildrenOfUnsafeNodes(TR::Node *node, TR::TreeTop *anchorPoint); + + /** + * If the specified \ref TR::Node is safe to evaluate, anchor it before the + * anchor point specified via the \ref TR::TreeTop argument. + * + * \param[in] node A \c TR::Node that will be anchored if it is safe to evaluate + * \param[in] anchorPoint A \c TR::TreeTop before which to anchor the specified + * node, if necessary + */ + bool anchorIfSafe(TR::Node *node, TR::TreeTop *anchorPoint); + }; + } +#endif From ce9af00e77e29176a4eae2c7a839758c05e5e4ca Mon Sep 17 00:00:00 2001 From: Henry Zongaro Date: Thu, 21 Nov 2024 06:31:29 -0800 Subject: [PATCH 3/4] Remove unused nextStoreWasMoved parameter of sinkStorePlacement The nextStoreWasMoved parameter of sinkStorePlacement is never referenced. This change removes this unused parameter. 
Signed-off-by: Henry Zongaro --- compiler/optimizer/SinkStores.cpp | 27 +++++++++++---------------- compiler/optimizer/SinkStores.hpp | 4 ++-- 2 files changed, 13 insertions(+), 18 deletions(-) diff --git a/compiler/optimizer/SinkStores.cpp b/compiler/optimizer/SinkStores.cpp index dadcf8db6a5..1a6bc0295c3 100644 --- a/compiler/optimizer/SinkStores.cpp +++ b/compiler/optimizer/SinkStores.cpp @@ -405,7 +405,7 @@ void TR_SinkStores::recordPlacementForDefAlongEdge(TR_EdgeStorePlacement *edgePl (*edgeInfo->_symbolsUsedOrKilled) |= (*_usedSymbolsToMove); (*edgeInfo->_symbolsUsedOrKilled) |= (*_killedSymbolsToMove); _allEdgePlacements.add(edgePlacement); - optimizer()->setRequestOptimization(OMR::basicBlockOrdering, true); + optimizer()->setRequestOptimization(OMR::basicBlockOrdering, true); if (_placementsForEdgesToBlock[toBlockNumber] == NULL) _placementsForEdgesToBlock[toBlockNumber] = new (trStackMemory()) TR_EdgeStorePlacementList(trMemory()); @@ -665,7 +665,7 @@ int32_t TR_SinkStores::performStoreSinking() comp()->dumpMethodTrees("After Store Sinking"); } - } // scope of the stack memory region + } // scope of the stack memory region optimizer()->enableAllLocalOpts(); @@ -915,7 +915,7 @@ void TR_SinkStores::lookForSinkableStores() traceMsg(comp(), " marking store [" POINTER_PRINTF_FORMAT "] as movable with temp because it has commoned reference used sym %d\n", store->_useOrKillInfo->_tt->getNode(), killedSymIdx); } } - storeElement = storeElement->getNextElement(); + storeElement = storeElement->getNextElement(); } } @@ -1143,7 +1143,6 @@ void TR_SinkStores::lookForSinkableStores() // now go through the list of potentially movable stores in this extended basic // block (a subset of the useOrKillInfoList) and try to sink the ones that are still movable - bool movedPreviousStore = false; while (!useOrKillInfoList.isEmpty()) { TR_UseOrKillInfo *useOrKill = useOrKillInfoList.popHead(); @@ -1203,17 +1202,15 @@ void TR_SinkStores::lookForSinkableStores() { int32_t symIdx = store->_symIdx; TR_ASSERT(_killedSymbolsToMove->isSet(symIdx), "_killedSymbolsToMove is inconsistent with potentiallyMovableStores entry for this store (symIdx=%d)!", symIdx); - if (sinkStorePlacement(store, - movedPreviousStore)) + if (sinkStorePlacement(store)) { if (trace()) traceMsg(comp(), " Successfully sunk past a non-live path\n"); - movedStore = movedPreviousStore = true; + movedStore = true; _numRemovedStores++; } else { - movedPreviousStore = false; if (trace()) traceMsg(comp(), " Didn't sink this store\n"); } @@ -1225,7 +1222,6 @@ void TR_SinkStores::lookForSinkableStores() } else { - movedPreviousStore = false; if (trace()) traceMsg(comp(), " is not movable (isStore = %s)\n", isStore ? 
"true" : "false"); // if a once movable store was later marked as unmovable must remove it from the list now @@ -1616,7 +1612,7 @@ void TR_SinkStores::placeStoresAlongEdges(List & stores, Li from->getLastRealTreeTop()->getNode()->getOpCode().isIf()) { TR::TreeTop *prevExit = placementBlock->getEntry()->getPrevTreeTop(); - TR::TreeTop *nextEntry = placementBlock->getExit()->getNextTreeTop(); + TR::TreeTop *nextEntry = placementBlock->getExit()->getNextTreeTop(); TR::TreeTop *newExit = from->getExit(); TR::TreeTop *newEntry = newExit->getNextTreeTop(); prevExit->join(nextEntry); @@ -1874,11 +1870,11 @@ bool TR_SinkStores::treeIsSinkableStore(TR::Node *node, bool sinkIndirectLoads, node->getOpCode().isLoadVarDirect() && node->getSymbolReference()->getSymbol()->isStatic() && (underCommonedNode || node->getReferenceCount() > 1)) - { - if (trace()) + { + if (trace()) traceMsg(comp(), " commoned static load store failure: %p\n", node); - return false; - } + return false; + } int32_t currentDepth = ++depth; bool previouslyCommoned = underCommonedNode; @@ -2287,8 +2283,7 @@ bool TR_SinkStores::storeCanMoveThroughBlock(TR_BitVector *blockKilledSet, TR_Bi } -bool TR_GeneralSinkStores::sinkStorePlacement(TR_MovableStore *movableStore, - bool nextStoreWasMoved) +bool TR_GeneralSinkStores::sinkStorePlacement(TR_MovableStore *movableStore) { TR::Block *sourceBlock = movableStore->_useOrKillInfo->_block; TR::TreeTop *tt = movableStore->_useOrKillInfo->_tt; diff --git a/compiler/optimizer/SinkStores.hpp b/compiler/optimizer/SinkStores.hpp index ac14f24d2e9..fd92ac35acb 100644 --- a/compiler/optimizer/SinkStores.hpp +++ b/compiler/optimizer/SinkStores.hpp @@ -257,7 +257,7 @@ class TR_SinkStores : public TR::Optimization bool &isLoadStatic, vcount_t &treeVisitCount, vcount_t &highVisitCount) = 0; - virtual bool sinkStorePlacement(TR_MovableStore *store, bool nextStoreWasMoved) = 0; + virtual bool sinkStorePlacement(TR_MovableStore *store) = 0; void coalesceSimilarEdgePlacements(); void placeStoresAlongEdges(List & stores, List & edges); void placeStoresInBlock(List & stores, TR::Block *placementBlock); @@ -375,7 +375,7 @@ class TR_GeneralSinkStores : public TR_SinkStores bool &isLoadStatic, vcount_t &treeVisitCount, vcount_t &highVisitCount); - virtual bool sinkStorePlacement(TR_MovableStore *store, bool nextStoreWasMoved); + virtual bool sinkStorePlacement(TR_MovableStore *store); }; // current call store with commoned load with be moved with temp because current From 4e68ec72c997524051762be81dae8789bd39213e Mon Sep 17 00:00:00 2001 From: Henry Zongaro Date: Mon, 25 Nov 2024 13:08:33 -0800 Subject: [PATCH 4/4] Handle unsafe loads in trees left behind by Store Sinking Store Sinking might leave behind trees that contain commoned loads if necessary to preserve the order of evaluation of operations. It is possible for such trees to contain a load of a symbol that had a value assigned to it in a tree before the current tree, but that was also moved by Store Sinking. Typically such a load would end up being removed by Dead Trees Elimination, but if that optimization fails to run or fails to run to completion, such a load of an uninitialized symbol might be left behind. That can be especially problematic if it is used in an indirect reference. This change checks for such cases and makes use of UnsafeSubexpressionRemover to rework such a tree. It will keep only the parts that are safe to evaluate and vandalize any loads that are unsafe to evaluate. 
Signed-off-by: Henry Zongaro
---
 compiler/optimizer/SinkStores.cpp | 169 +++++++++++++++++++++++++-----
 compiler/optimizer/SinkStores.hpp | 21 +++-
 2 files changed, 162 insertions(+), 28 deletions(-)

diff --git a/compiler/optimizer/SinkStores.cpp b/compiler/optimizer/SinkStores.cpp
index 1a6bc0295c3..de11c0571c3 100644
--- a/compiler/optimizer/SinkStores.cpp
+++ b/compiler/optimizer/SinkStores.cpp
@@ -280,6 +280,7 @@ TR_MovableStore::TR_MovableStore(TR_SinkStores *s, TR_UseOrKillInfo *useOrKillIn
    _commonedLoadsAfter(commonedLoadsAfter),
    _depth(depth),
    _needTempForCommonedLoads(needTempForCommonedLoads),
+   _unsafeLoads(NULL),
    _movable(true),
    _isLoadStatic(false)
    {
@@ -870,10 +871,10 @@ void TR_SinkStores::lookForSinkableStores()
            }
         // check to see if we can find a store to a local that is used later in the EBB via a commoned reference
-        TR_BitVectorIterator bvi(*killedLiveCommonedLoads);
-        while (bvi.hasMoreElements())
+        TR_BitVectorIterator bviKilledLiveCommoned(*killedLiveCommonedLoads);
+        while (bviKilledLiveCommoned.hasMoreElements())
            {
-           int32_t killedSymIdx = bvi.getNextElement();
+           int32_t killedSymIdx = bviKilledLiveCommoned.getNextElement();
            if (trace())
               traceMsg(comp(), " symbol stores to a live commoned reference, searching stores below that load this symbol via commoned reference ...\n");
            _killMarkWalks++;
@@ -915,10 +916,49 @@ void TR_SinkStores::lookForSinkableStores()
                        traceMsg(comp(), " marking store [" POINTER_PRINTF_FORMAT "] as movable with temp because it has commoned reference used sym %d\n", store->_useOrKillInfo->_tt->getNode(), killedSymIdx);
                     }
                  }
+
               storeElement = storeElement->getNextElement();
               }
            }
+     static const bool ignoreUnsafeLoads = (feGetEnv("TR_SinkStoresIgnoreUnsafeLoads") != NULL);
+
+     if (!ignoreUnsafeLoads)
+        {
+        TR_BitVectorIterator bviKilledSymbols(*killedSymbols);
+
+        while (bviKilledSymbols.hasMoreElements())
+           {
+           int32_t killedSymIdx = bviKilledSymbols.getNextElement();
+
+           // find any stores that use this symbol and record that they contain a potentially unsafe load of it
+           ListElement *storeElement=potentiallyMovableStores.getListHead();
+
+           while (storeElement != NULL)
+              {
+              TR_MovableStore *store = storeElement->getData();
+
+              if (store->_movable &&
+                  store->_useOrKillInfo->_usedSymbols &&
+                  store->_useOrKillInfo->_usedSymbols->get(killedSymIdx))
+                 {
+                 if (store->_unsafeLoads == NULL)
+                    {
+                    store->_unsafeLoads = new (trStackMemory()) TR_BitVector(numLocals, trMemory());
+                    }
+
+                 store->_unsafeLoads->set(killedSymIdx);
+                 if (trace())
+                    {
+                    traceMsg(comp(), " marking store [" POINTER_PRINTF_FORMAT "] as having potentially unsafe load of sym %d\n", store->_useOrKillInfo->_tt->getNode(), killedSymIdx);
+                    }
+                 }
+
+              storeElement = storeElement->getNextElement();
+              }
+           }
+        }
+
     // When the commoned symbols are tracked precisely it is enough to run the above code that intersects used
     // with savedLiveCommonedLoads to find the first uses of symbols
     // With this information an anchor or temp store can be created when/if it is actually needed in during
@@ -1649,7 +1689,8 @@ void TR_SinkStores::doSinking()
    coalesceSimilarEdgePlacements();
    // collect the original stores here
-   List storesToRemove(trMemory());
+   List storesToRemove(trMemory());
+   List unsafeStores(trMemory());
    List movedStores(trMemory());
    // do all the edge placements
@@ -1665,14 +1706,21 @@ void TR_SinkStores::doSinking()
      while (!placement->_stores.isEmpty())
         {
         TR_StoreInformation *storeInfo=placement->_stores.popHead();
-        TR::TreeTop *store=storeInfo->_store;
         if (storeInfo->_copy)
            {
-           if (!storesToRemove.find(store))
-              storesToRemove.add(store);
+           if (!storesToRemove.find(storeInfo))
+              {
+              storesToRemove.add(storeInfo);
+
+              if (storeInfo->_unsafeLoads != NULL)
+                 {
+                 unsafeStores.add(storeInfo);
+                 }
+              }
            }
         else
            {
+           TR::TreeTop *store=storeInfo->_store;
            // if (trace())
            //    traceMsg(comp(), " adding store [" POINTER_PRINTF_FORMAT "] to movedStores (edge placement)\n", store);
            TR_ASSERT(!movedStores.find(store), "shouldn't find a moved store more than once");
@@ -1692,14 +1740,21 @@ void TR_SinkStores::doSinking()
      while (!placement->_stores.isEmpty())
         {
         TR_StoreInformation *storeInfo=placement->_stores.popHead();
-        TR::TreeTop *store=storeInfo->_store;
         if (storeInfo->_copy)
            {
-           if (!storesToRemove.find(store))
-              storesToRemove.add(store);
+           if (!storesToRemove.find(storeInfo))
+              {
+              storesToRemove.add(storeInfo);
+
+              if (storeInfo->_unsafeLoads != NULL)
+                 {
+                 unsafeStores.add(storeInfo);
+                 }
+              }
            }
         else
            {
+           TR::TreeTop *store=storeInfo->_store;
            if (trace())
               traceMsg(comp(), " adding store [" POINTER_PRINTF_FORMAT "] to movedStores (block placement)\n", store);
            TR_ASSERT(!movedStores.find(store), "shouldn't find a moved store more than once");
@@ -1709,10 +1764,45 @@ void TR_SinkStores::doSinking()
         }
      }

+   OMR::UnsafeSubexpressionRemover usr(this);
+
+   // Any store that remains will be left in place, and its store operation
+   // will be replaced with a TR::treetop below.  If it contains any load
+   // of an unsafe symbol (i.e., a symbol whose store has been sunk past
+   // this store), use an UnsafeSubexpressionRemover to walk through the tree
+   // marking any unsafe loads, as such a load must not be left in place.
+   //
+   while (!unsafeStores.isEmpty())
+      {
+      TR_StoreInformation *originalStoreInfo = unsafeStores.popHead();
+      TR::TreeTop *originalStore = originalStoreInfo->_store;
+
+      if (trace())
+         {
+         traceMsg(comp(), "Looking for unsafe loads of ");
+         originalStoreInfo->_unsafeLoads->print(comp());
+         traceMsg(comp(), " in original tree for store [" POINTER_PRINTF_FORMAT "]\n", originalStore->getNode());
+         }
+
+      // Not sure whether a store that contains unsafe loads
+      // could already have been moved, but just in case. . . .
+ if (movedStores.find(originalStore)) + { + if (trace()) + traceMsg(comp(), " this store has been moved already, so no need to remove it\n"); + } + else + { + findUnsafeLoads(usr, originalStoreInfo->_unsafeLoads, originalStore->getNode()); + } + } + // now remove the original stores while (!storesToRemove.isEmpty()) { - TR::TreeTop *originalStore = storesToRemove.popHead(); + TR_StoreInformation *originalStoreInfo = storesToRemove.popHead(); + TR::TreeTop *originalStore = originalStoreInfo->_store; + if (trace()) traceMsg(comp(), "Removing original store [" POINTER_PRINTF_FORMAT "]\n", originalStore->getNode()); @@ -1723,20 +1813,48 @@ void TR_SinkStores::doSinking() } else { - // unlink tt since we've now sunk it to other places - //TR::TreeTop *originalPrev = originalStore->getPrevTreeTop(); - //TR::TreeTop *originalNext = originalStore->getNextTreeTop(); - - //originalStore->getPrevTreeTop()->setNextTreeTop(originalNext); - //originalStore->getNextTreeTop()->setPrevTreeTop(originalPrev); - //originalStore->getNode()->recursivelyDecReferenceCount(); - TR::Node::recreate(originalStore->getNode(), TR::treetop); - //requestOpt(deadTreesElimination, true, originalStore->getEnclosingBlock()->startOfExtendedBlock()); + if (originalStoreInfo->_unsafeLoads != NULL) + { + usr.eliminateStore(originalStore); + } + else + { + TR::Node::recreate(originalStore->getNode(), TR::treetop); + } } } } +void +TR_SinkStores::findUnsafeLoads(OMR::UnsafeSubexpressionRemover &usr, TR_BitVector *unsafeLoads, TR::Node *node) + { + if (node->getOpCode().isLoadVarDirect()) + { + TR::RegisterMappedSymbol *local = getSinkableSymbol(node); + if (local != NULL) + { + int32_t symIdx = local->getLiveLocalIndex(); + + if (symIdx != INVALID_LIVENESS_INDEX && unsafeLoads->get(symIdx)) + { + usr.recordDeadUse(node); + if (trace()) + { + traceMsg(comp(), "Found unsafe load of local %d in node [" POINTER_PRINTF_FORMAT "] n%dn\n", symIdx, node, node->getGlobalIndex()); + } + } + } + } + else + { + for (int i = 0; i < node->getNumChildren(); i++) + { + findUnsafeLoads(usr, unsafeLoads, node->getChild(i)); + } + } + } + TR::RegisterMappedSymbol * TR_SinkStores::getSinkableSymbol(TR::Node *node) { @@ -2292,6 +2410,7 @@ bool TR_GeneralSinkStores::sinkStorePlacement(TR_MovableStore *movableStore) uint32_t indirectLoadCount = movableStore->_useOrKillInfo->_indirectLoadCount; TR_BitVector *commonedSymbols = movableStore->_commonedLoadsUnderTree; TR_BitVector *needTempForCommonedLoads = movableStore->_needTempForCommonedLoads; + TR_BitVector *unsafeLoads = movableStore->_unsafeLoads; int32_t sourceBlockNumber=sourceBlock->getNumber(); int32_t sourceBlockFrequency = sourceBlock->getFrequency(); @@ -2417,7 +2536,7 @@ bool TR_GeneralSinkStores::sinkStorePlacement(TR_MovableStore *movableStore) { bool copyStore=!isSuccBlockInEBB; //recordPlacementForDefAlongEdge(tt, succEdge, true); //copyStore); - TR_StoreInformation *storeInfo = new (trStackMemory()) TR_StoreInformation(tt, copyStore); + TR_StoreInformation *storeInfo = new (trStackMemory()) TR_StoreInformation(tt, unsafeLoads, copyStore); // init _storeTemp field if (!needTempForCommonedLoads) storeInfo->_storeTemp = tt; @@ -2500,7 +2619,7 @@ bool TR_GeneralSinkStores::sinkStorePlacement(TR_MovableStore *movableStore) bool copyStore= !areAllBlocksInEBB; //recordPlacementForDefAlongEdge(tt, predEdgeLONAP, true); // block can't be an extension of source block so copy - TR_StoreInformation *storeInfo = new (trStackMemory()) TR_StoreInformation(tt, copyStore); + TR_StoreInformation 
*storeInfo = new (trStackMemory()) TR_StoreInformation(tt, unsafeLoads, copyStore);
            // init _storeTemp field
            if (!needTempForCommonedLoads)
               storeInfo->_storeTemp = tt;
@@ -2539,7 +2658,7 @@ bool TR_GeneralSinkStores::sinkStorePlacement(TR_MovableStore *movableStore)
            traceMsg(comp(), " intersection of used with symbolsKilled[%d] is %s\n", blockNumber, _symbolsKilledInBlock[blockNumber]->intersects(*_usedSymbolsToMove) ? "true" : "false");
            traceMsg(comp(), " symbol %d in symbolsKilled is %s\n", symIdx, _symbolsKilledInBlock[blockNumber]->get(symIdx) ? "true" : "false");
            }
-        traceMsg(comp(), " symbolUseds == NULL? %s\n", (_symbolsUsedInBlock[blockNumber] == NULL) ? "true" : "false");
+        traceMsg(comp(), " symbolsUsed == NULL? %s\n", (_symbolsUsedInBlock[blockNumber] == NULL) ? "true" : "false");
         if (_symbolsUsedInBlock[blockNumber])
            {
            // remove 0 for more detail tracing
@@ -2657,7 +2776,7 @@ bool TR_GeneralSinkStores::sinkStorePlacement(TR_MovableStore *movableStore)
            {
            bool copyStore=!isSuccBlockInEBB;
            //recordPlacementForDefAlongEdge(tt, succEdge, true); //copyStore);
-           TR_StoreInformation *storeInfo = new (trStackMemory()) TR_StoreInformation(tt, copyStore);
+           TR_StoreInformation *storeInfo = new (trStackMemory()) TR_StoreInformation(tt, unsafeLoads, copyStore);
            // init _storeTemp field
            if (!needTempForCommonedLoads)
               storeInfo->_storeTemp = tt;
@@ -2716,7 +2835,7 @@ bool TR_GeneralSinkStores::sinkStorePlacement(TR_MovableStore *movableStore)
         // tree
         bool copyStore=!isCurrentBlockInEBB;
         //recordPlacementForDefInBlock(tt, block, copyStore);
-        TR_StoreInformation *storeInfo = new (trStackMemory()) TR_StoreInformation(tt, copyStore);
+        TR_StoreInformation *storeInfo = new (trStackMemory()) TR_StoreInformation(tt, unsafeLoads, copyStore);
         // init _storeTemp field
         if (!needTempForCommonedLoads)
            storeInfo->_storeTemp = tt;
diff --git a/compiler/optimizer/SinkStores.hpp b/compiler/optimizer/SinkStores.hpp
index fd92ac35acb..d840a3c55bf 100644
--- a/compiler/optimizer/SinkStores.hpp
+++ b/compiler/optimizer/SinkStores.hpp
@@ -136,9 +136,10 @@ class TR_MovableStore
    TR::Compilation * _comp;
    TR_SinkStores * _s;
+   TR_BitVector *_needTempForCommonedLoads; // move stores with commoned load
+   TR_BitVector *_unsafeLoads; // symbols whose loads in this store's tree may be unsafe to evaluate
    int32_t _depth; // a measure of store tree complexity
    bool _movable;
-   TR_BitVector *_needTempForCommonedLoads; // move stores with commoned load
    bool _isLoadStatic; // is this a store of a static load?
};
@@ -147,11 +148,13 @@ class TR_StoreInformation
    {
    public:
    TR_ALLOC(TR_Memory::DataFlowAnalysis)
-   TR_StoreInformation(TR::TreeTop *store, bool copy, bool needsDuplication = true) :
-      _store(store), _copy(copy), _needsDuplication(needsDuplication), _storeTemp(NULL) { }
+   TR_StoreInformation(TR::TreeTop *store, TR_BitVector *unsafeLoads, bool copy, bool needsDuplication = true) :
+      _store(store), _unsafeLoads(unsafeLoads), _copy(copy),
+      _needsDuplication(needsDuplication), _storeTemp(NULL) { }
    TR::TreeTop *_store; // original store to be sunk
    TR::TreeTop *_storeTemp;// dup store with commoned loads replaced by temp
+   TR_BitVector *_unsafeLoads; // symbols whose loads in the original store's tree may be unsafe to evaluate
    bool _copy; // whether original store should be copied or moved
    // it should be copied if the store is placed in a block that does NOT extend the original source block
    // it should be moved if it is to be placed in a block that extends the original source block
@@ -247,6 +250,18 @@ class TR_SinkStores : public TR::Optimization
    virtual void doSinking();
    TR_EdgeInformation *findEdgeInformation(TR::CFGEdge *edge, List & edgeList);
+   /**
+    * Walk through a tree looking for loads of local symbols that are considered to
+    * be unsafe.
+    *
+    * \param[in] usr An \ref OMR::UnsafeSubexpressionRemover that is used to keep track of nodes that
+    *                are considered to be unsafe.
+    * \param[in] unsafeLoads A \ref TR_BitVector containing the set of local sym indices that are
+    *                        considered to be unsafe to load.
+    * \param[in] node The tree that is to be searched for loads of unsafe local syms.
+    */
+   void findUnsafeLoads(OMR::UnsafeSubexpressionRemover &usr, TR_BitVector *unsafeLoads, TR::Node *node);
+
 protected:
    virtual bool storeIsSinkingCandidate(TR::Block *block, TR::Node *node,