GCC Middle and Back End API Reference
tree-ssa-loop-im.c File Reference

Data Structures

struct  lim_aux_data
struct  mem_ref_loc
struct  mem_ref
struct  mem_ref_hasher
class  invariantness_dom_walker
class  move_computations_dom_walker
struct  fmt_data
struct  rewrite_mem_ref_loc
struct  first_mem_ref_loc_1
struct  prev_flag_edges
struct  sm_set_flag_if_changed
struct  ref_always_accessed

Typedefs

typedef struct mem_ref_loc * mem_ref_loc_p
typedef struct mem_ref * mem_ref_p

Enumerations

enum  move_pos { MOVE_IMPOSSIBLE, MOVE_PRESERVE_EXECUTION, MOVE_POSSIBLE }

Functions

static bool ref_indep_loop_p (struct loop *, mem_ref_p)
static struct lim_aux_data * init_lim_data ()
static struct lim_aux_data * get_lim_data ()
static void free_lim_aux_data ()
static void clear_lim_data ()
enum move_pos movement_possibility ()
static struct loop * outermost_invariant_loop ()
static bool add_dependency (tree def, struct lim_aux_data *data, struct loop *loop, bool add_cost)
static unsigned stmt_cost ()
static struct loop * outermost_indep_loop ()
static tree * simple_mem_ref_in_stmt ()
static mem_ref_p mem_ref_in_stmt ()
static bool extract_true_false_args_from_phi (basic_block dom, gimple phi, tree *true_arg_p, tree *false_arg_p)
static bool determine_max_movement ()
static void set_level ()
static void set_profitable_level ()
static bool nonpure_call_p ()
static gimple rewrite_reciprocal ()
static gimple rewrite_bittest ()
static bool arith_code_with_undefined_signed_overflow ()
static gimple_seq rewrite_to_defined_overflow ()
static unsigned int move_computations ()
static bool may_move_till ()
static void force_move_till_op ()
static bool force_move_till ()
static void memref_free ()
static mem_ref_p mem_ref_alloc ()
static void record_mem_ref_loc ()
static void mark_ref_stored ()
static void gather_mem_refs_stmt ()
static int sort_bbs_in_loop_postorder_cmp ()
static void analyze_memory_references ()
static bool mem_refs_may_alias_p (mem_ref_p mem1, mem_ref_p mem2, struct pointer_map_t **ttae_cache)
template<typename FN >
static bool for_all_locs_in_loop ()
static void rewrite_mem_refs ()
static mem_ref_loc_p first_mem_ref_loc ()
static void execute_sm_if_changed ()
static tree execute_sm_if_changed_flag_set ()
static void execute_sm ()
static void hoist_memory_references (struct loop *loop, bitmap mem_refs, vec< edge > exits)
static bool ref_always_accessed_p ()
static bool refs_independent_p ()
static void record_dep_loop ()
static bool ref_indep_loop_p_1 ()
static bool ref_indep_loop_p_2 ()
static bool ref_indep_loop_p ()
static bool can_sm_ref_p ()
static void find_refs_for_sm ()
static bool loop_suitable_for_sm (struct loop *loop, vec< edge > exits)
static void store_motion_loop ()
static void store_motion ()
static void fill_always_executed_in_1 ()
static void fill_always_executed_in ()
static void tree_ssa_lim_initialize ()
static void tree_ssa_lim_finalize ()
unsigned int tree_ssa_lim ()
static unsigned int tree_ssa_loop_im ()
static bool gate_tree_ssa_loop_im ()
gimple_opt_pass * make_pass_lim ()

Variables

static struct pointer_map_t * lim_aux_data_map
struct {
   hash_table< mem_ref_hasher >   refs
   vec< mem_ref_p >   refs_list
   vec< bitmap_head >   refs_in_loop
   vec< bitmap_head >   refs_stored_in_loop
   vec< bitmap_head >   all_refs_stored_in_loop
   struct pointer_map_t *   ttae_cache
} memory_accesses
static bitmap_obstack lim_bitmap_obstack
static unsigned * bb_loop_postorder

Typedef Documentation

typedef struct mem_ref_loc * mem_ref_loc_p
   Description of a memory reference location.  
typedef struct mem_ref * mem_ref_p
   Description of a memory reference.  

Enumeration Type Documentation

enum move_pos
   The possibilities of statement movement.  
Enumerator:
MOVE_IMPOSSIBLE 
MOVE_PRESERVE_EXECUTION 
MOVE_POSSIBLE 

Function Documentation

static bool add_dependency ( tree  def,
struct lim_aux_data *  data,
struct loop *  loop,
bool  add_cost 
)
static
   DATA is a structure containing information associated with a statement
   inside LOOP.  DEF is one of the operands of this statement.

   Find the outermost loop enclosing LOOP in that value of DEF is invariant
   and record this in DATA->max_loop field.  If DEF itself is defined inside
   this loop as well (i.e. we need to hoist it out of the loop if we want
   to hoist the statement represented by DATA), record the statement in that
   DEF is defined to the DATA->depends list.  Additionally if ADD_COST is true,
   add the cost of the computation of DEF to the DATA->cost.

   If DEF is not invariant in LOOP, return false.  Otherwise return TRUE.  
         Only add the cost if the statement defining DEF is inside LOOP,
         i.e. if it is likely that by moving the invariants dependent
         on it, we will be able to avoid creating a new register for
         it (since it will be only used in these dependent invariants).  

References BUILT_IN_NORMAL, gimple_assign_rhs_code(), gimple_call_fndecl(), gimple_references_memory_p(), and is_gimple_call().

Referenced by determine_max_movement().

static void analyze_memory_references ( )
static
   Gathers memory references in loops.  
     Initialize bb_loop_postorder with a mapping from loop->num to
     its postorder index.  
     Collect all basic-blocks in loops and sort them after their
     loops postorder.  
     Visit blocks in loop postorder and assign mem-ref IDs in that order.
     That results in better locality for all the bitmaps.  
     Propagate the information about accessed memory references up
     the loop hierarchy.  
         Finalize the overall touched references (including subloops).  
         Propagate the information about accessed memory references up
         the loop hierarchy.  

References mem_ref::accesses_in_loop, for_all_locs_in_loop(), loop::inner, loop::next, and loop::num.

static bool arith_code_with_undefined_signed_overflow ( )
static
   Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  

References gsi_next().

static bool can_sm_ref_p ( )
static
   Returns true if we can perform store motion of REF from LOOP.  
     Can't hoist unanalyzable refs.  
     It should be movable.  
     If it can throw fail, we do not properly update EH info.  
     If it can trap, it must be always executed in LOOP.
     Readonly memory locations may trap when storing to them, but
     tree_could_trap_p is a predicate for rvalues, so check that
     explicitly.  
     And it must be independent on all other memory references
     in LOOP.  

References CDI_DOMINATORS, dominated_by_p(), and basic_block_def::loop_father.

Referenced by ref_indep_loop_p_1().

static void clear_lim_data ( )
static

References MOVE_POSSIBLE.

static bool determine_max_movement ( )
static
   Determine the outermost loop to that it is possible to hoist a statement
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in that the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose such a loop that
   we preserve the fact whether STMT is executed.  It also fills other related
   information to LIM_DATA (STMT).

   The function returns false if STMT cannot be hoisted outside of the loop it
   is defined in, and true otherwise.  
         We will end up promoting dependencies to be unconditionally
         evaluated.  For this reason the PHI cost (and thus the
         cost we remove from the loop by doing the invariant motion)
         is that of the cheapest PHI argument dependency chain.  
             Verify that this is an extended form of a diamond and
             the PHI arguments are completely controlled by the
             predicate in DOM.  
             Fold in dependencies and cost of the condition.  
             We want to avoid unconditionally executing very expensive
             operations.  As costs for our dependencies cannot be
             negative just claim we are not invariant for this case.
             We also are not sure whether the control-flow inside the
             loop will vanish.  
             Assume that the control-flow in the loop will vanish.
             ???  We should verify this and not artificially increase
             the cost if that is not the case.  

References add_dependency(), lim_aux_data::cost, and get_lim_data().

static void execute_sm ( )
static
   Executes store motion of memory reference REF from LOOP.
   Exits from the LOOP are stored in EXITS.  The initialization of the
   temporary variable is put to the preheader of the loop, and assignments
   to the reference from the temporary variable are emitted to exits.  
     Emit the load code on a random exit edge or into the latch if
     the loop does not exit, so that we are sure it will be processed
     by move_computations after all dependencies.  
     FIXME/TODO: For the multi-threaded variant, we could avoid this
     load altogether, since the store is predicated by a flag.  We
     could do the load only if it was originally in the loop.  
     Sink the store to every exit from the loop.  
static void execute_sm_if_changed ( )
static
@verbatim 

Helper function for execute_sm. Emit code to store TMP_VAR into MEM along edge EX.

The store is only done if MEM has changed. We do this so no changes to MEM occur on code paths that did not originally store into it.

The common case for execute_sm will transform:

for (...) { if (foo) stuff; else MEM = TMP_VAR; }

into:

lsm = MEM; for (...) { if (foo) stuff; else lsm = TMP_VAR; } MEM = lsm;

This function will generate:

lsm = MEM;

lsm_flag = false; ... for (...) { if (foo) stuff; else { lsm = TMP_VAR; lsm_flag = true; } } if (lsm_flag) <-- MEM = lsm; <--

     ?? Insert store after previous store if applicable.  See note
     below.  
     Insert actual store.  
     ?? Because stores may alias, they must happen in the exact
     sequence they originally happened.  Save the position right after
     the (_lsm) store we just created so we can continue appending after
     it and maintain the original order.  
     Remove the original fall through edge.  This was the
     single_succ_edge (new_bb).  
static tree execute_sm_if_changed_flag_set ( )
static
   Helper function for execute_sm.  On every location where REF is
   set, set an appropriate flag indicating the store.  

References ref_always_accessed::base, get_base_address(), gimple_get_lhs(), and mem_ref_loc::stmt.

static bool extract_true_false_args_from_phi ( basic_block  dom,
gimple  phi,
tree *  true_arg_p,
tree *  false_arg_p 
)
static
   From a controlling predicate in DOM determine the arguments from
   the PHI node PHI that are chosen if the predicate evaluates to
   true and false and store them to *TRUE_ARG_P and *FALSE_ARG_P if
   they are non-NULL.  Returns true if the arguments can be determined,
   else return false.  
     We have to verify that one edge into the PHI node is dominated
     by the true edge of the predicate block and the other edge
     dominated by the false edge.  This ensures that the PHI argument
     we are going to take is completely determined by the path we
     take from the predicate block.
     We can only use BB dominance checks below if the destination of
     the true/false edges are dominated by their edge, thus only
     have a single predecessor.  
static void fill_always_executed_in ( )
static
   Fills ALWAYS_EXECUTED_IN information for basic blocks, i.e.
   for each such basic block bb records the outermost loop for that execution
   of its header implies execution of bb.  

References GIMPLE_PASS.

static void fill_always_executed_in_1 ( )
static
   Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
   for each such basic block bb records the outermost loop for that execution
   of its header implies execution of bb.  CONTAINS_CALL is the bitmap of
   blocks that contain a nonpure call.  
             A loop might be infinite (TODO use simple loop analysis
             to disprove this if possible).  
                 In a loop that is always entered we may proceed anyway.
                 But record that we entered it and stop once we leave it.  
static void find_refs_for_sm ( )
static
   Marks the references in LOOP for that store motion should be performed
   in REFS_TO_SM.  SM_EXECUTED is the set of references for that store
   motion was performed in one of the outer loops.  

References bitmap_clear(), bitmap_set_bit(), gsi_end_p(), gsi_next(), gsi_start_bb(), gsi_stmt(), basic_block_def::index, nonpure_call_p(), and sbitmap_alloc().

Referenced by ref_indep_loop_p_2().

static mem_ref_loc_p first_mem_ref_loc ( )
static
   Returns the first reference location to REF in LOOP.  

References edge_def::aux.

template<typename FN >
static bool for_all_locs_in_loop ( )
static
   Iterates over all locations of REF in LOOP and its subloops calling
   fn.operator() with the location as argument.  When that operator
   returns true the iteration is stopped and true is returned.
   Otherwise false is returned.  

Referenced by analyze_memory_references().

static bool force_move_till ( )
static
static void force_move_till_op ( )
static
   If OP is SSA NAME, force the statement that defines it to be
   moved out of the LOOP.  ORIG_LOOP is the loop in that EXPR is used.  
static void free_lim_aux_data ( )
static
   Releases the memory occupied by DATA.  

Referenced by init_lim_data().

static bool gate_tree_ssa_loop_im ( )
static
static void gather_mem_refs_stmt ( )
static
   Gathers memory references in statement STMT in LOOP, storing the
   information about them in the memory_accesses structure.  Marks
   the vops accessed through unrecognized statements there as
   well.  
         We use the shared mem_ref for all unanalyzable refs.  
static struct lim_aux_data* get_lim_data ( )
staticread
static void hoist_memory_references ( struct loop *  loop,
bitmap  mem_refs,
vec< edge >  exits 
)
static
   Hoists memory references MEM_REFS out of LOOP.  EXITS is the list of exit
   edges of the LOOP.  

References memory_accesses, and refs_independent_p().

Referenced by ref_indep_loop_p_2().

static struct lim_aux_data* init_lim_data ( )
staticread
static bool loop_suitable_for_sm ( struct loop *  loop,
vec< edge >  exits 
)
static
   Checks whether LOOP (with exits stored in EXITS array) is suitable
   for a store motion optimization (i.e. whether we can insert statement
   on its exits).  

Referenced by ref_indep_loop_p_2().

gimple_opt_pass* make_pass_lim ( )
static void mark_ref_stored ( )
static
   Marks reference REF as stored in LOOP.  

References cfun, LI_FROM_INNERMOST, loop::num, and number_of_loops().

static bool may_move_till ( )
static
   Checks whether the statement defining variable *INDEX can be hoisted
   out of the loop passed in DATA.  Callback for for_each_index.  
     If REF is an array reference, check also that the step and the lower
     bound is invariant in LOOP.  

References mem_ref::accesses_in_loop, ao_ref_init(), mem_ref::dep_loop, mem_ref::hash, mem_ref::id, mem_ref::indep_loop, mem_ref::mem, and mem_ref::stored.

Referenced by refs_independent_p().

static mem_ref_p mem_ref_alloc ( )
static
   Allocates and returns a memory reference description for MEM whose hash
   value is HASH and id is ID.  
static mem_ref_p mem_ref_in_stmt ( )
static
   Returns the memory reference contained in STMT.  
static bool mem_refs_may_alias_p ( mem_ref_p  mem1,
mem_ref_p  mem2,
struct pointer_map_t **  ttae_cache 
)
static
   Returns true if MEM1 and MEM2 may alias.  TTAE_CACHE is used as a cache in
   tree_to_aff_combination_expand.  
     Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
     object and their offset differ in such a way that the locations cannot
     overlap, then they cannot alias.  
     Perform basic offset and type-based disambiguation.  
     The expansion of addresses may be a bit expensive, thus we only do
     the check at -O2 and higher optimization levels.  
static void memref_free ( )
static
   A function to free the mem_ref object OBJ.  
static unsigned int move_computations ( )
static
   Hoist the statements out of the loops prescribed by data stored in
   LIM_DATA structures associated with each statement.
enum move_pos movement_possibility ( )
   If it is possible to hoist the statement STMT unconditionally,
   returns MOVE_POSSIBLE.
   If it is possible to hoist the statement STMT, but we must avoid making
   it executed if it would not be executed in the original program (e.g.
   because it may trap), return MOVE_PRESERVE_EXECUTION.
   Otherwise return MOVE_IMPOSSIBLE.  
         If we perform unswitching, force the operands of the invariant
         condition to be moved out of the loop.  
         While pure or const call is guaranteed to have no side effects, we
         cannot move it arbitrarily.  Consider code like

         char *s = something ();

         while (1)
           {
             if (s)
               t = strlen (s);
             else
               t = 0;
           }

         Here the strlen call cannot be moved out of the loop, even though
         s is invariant.  In addition to possibly creating a call with
         invalid arguments, moving out a function call that is not executed
         may cause performance regressions in case the call is costly and
         not executed at all.  
     Non local loads in a transaction cannot be hoisted out.  Well,
     unless the load happens on every path out of the loop, but we
     don't take this into account yet.  

References gimple_call_lhs(), and MOVE_PRESERVE_EXECUTION.

static bool nonpure_call_p ( )
static
   Returns true if STMT is a call that has side effects.  

Referenced by find_refs_for_sm().

static struct loop* outermost_indep_loop ( )
staticread
   Finds the outermost loop between OUTER and LOOP in that the memory reference
   REF is independent.  If REF is not independent in LOOP, NULL is returned
   instead.  
static struct loop* outermost_invariant_loop ( )
staticread
   Suppose that operand DEF is used inside the LOOP.  Returns the outermost
   loop to that we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in that the value
   of DEF is invariant.  

References lim_aux_data::cost, lim_aux_data::depends, flow_loop_nested_p(), get_lim_data(), gimple_bb(), basic_block_def::loop_father, and lim_aux_data::max_loop.

static void record_dep_loop ( )
static
   Mark REF dependent on stores or loads (according to STORED_P) in LOOP
   and its super-loops.  
     We can propagate dependent-in-loop bits up the loop
     hierarchy to all outer loops.  
static void record_mem_ref_loc ( )
static
   Records memory reference location *LOC in LOOP to the memory reference
   description REF.  The reference occurs in statement STMT.  

References basic_block_def::loop_father, and loop::num.

static bool ref_always_accessed_p ( )
static
   Returns true if REF is always accessed in LOOP.  If STORED_P is true
   make sure REF is always stored to in LOOP.  

References ref_indep_loop_p_2().

Referenced by refs_independent_p().

static bool ref_indep_loop_p ( struct loop ,
mem_ref_p   
)
static
static bool ref_indep_loop_p ( )
static
   Returns true if REF is independent on all other memory references in
   LOOP.  

References CDI_DOMINATORS, dominated_by_p(), get_loop_body_in_dom_order(), loop::latch, and loop::num_nodes.

static bool ref_indep_loop_p_1 ( )
static
   Returns true if REF is independent on all other memory references in
   LOOP.  

References bitmap_set_bit(), can_sm_ref_p(), memory_accesses, loop::num, and refs.

static bool ref_indep_loop_p_2 ( )
static
   Returns true if REF is independent on all other memory references in
   LOOP.  Wrapper over ref_indep_loop_p_1, caching its results.  
     Record the computed result in the cache.  
             If it's independent against all refs then it's independent
             against stores, too.  
             If it's dependent against stores it's dependent against
             all refs, too.  

References bitmap_and_compl_into(), bitmap_ior_into(), find_refs_for_sm(), get_loop_exit_edges(), hoist_memory_references(), loop::inner, loop_suitable_for_sm(), loop::next, and store_motion_loop().

Referenced by ref_always_accessed::operator()(), and ref_always_accessed_p().

static bool refs_independent_p ( )
static
static gimple rewrite_bittest ( )
static
   Check if the pattern at *BSI is a bittest of the form
   (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0.  
     Verify that the single use of lhs is a comparison against zero.  
     Get at the operands of the shift.  The rhs is TMP1 & 1.  
     There is a conversion in between possibly inserted by fold.  
     Verify that B is loop invariant but A is not.  Verify that with
     all the stmt walking we are still in the same loop.  
         1 << B 
         A & (1 << B) 
         Replace the SSA_NAME we compare against zero.  Adjust
         the type of zero accordingly.  
         Don't use gsi_replace here, none of the new assignments sets
         the variable originally set in stmt.  Move bsi to stmt1, and
         then remove the original stmt, so that we get a chance to
         retain debug info for it.  
static void rewrite_mem_refs ( )
static
   Rewrites all references to REF in LOOP by variable TMP_VAR.  
static gimple rewrite_reciprocal ( )
static
   Rewrite a/b to a*(1/b).  Return the invariant stmt to process.  
     Replace division stmt with reciprocal and multiply stmts.
     The multiply stmt is not invariant, so update iterator
     and avoid rescanning.  
     Continue processing with invariant reciprocal statement.  

References gimple_assign_rhs1(), and has_single_use().

static gimple_seq rewrite_to_defined_overflow ( )
static
   Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  

References gimple_build_assign_with_ops(), and gimple_phi_result().

static void set_level ( )
static
   Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
   and that one of the operands of this statement is computed by STMT.
   Ensure that STMT (together with all the statements that define its
   operands) is hoisted at least out of the loop LEVEL.  
static void set_profitable_level ( )
static
   Determines an outermost loop from that we want to hoist the statement STMT.
   For now we chose the outermost possible loop.  TODO -- use profiling
   information to set it more sanely.  
static tree* simple_mem_ref_in_stmt ( )
static
   If there is a simple load or store to a memory reference in STMT, returns
   the location of the memory reference, and sets IS_STORE according to whether
   it is a store or load.  Otherwise, returns NULL.  
     Recognize SSA_NAME = MEM and MEM = (SSA_NAME | invariant) patterns.  

References CDI_DOMINATORS, edge_def::dest, edge_def::dest_idx, dominated_by_p(), extract_true_false_edges_from_block(), gimple_bb(), single_pred_p(), and edge_def::src.

static int sort_bbs_in_loop_postorder_cmp ( )
static
   qsort sort function to sort blocks after their loop fathers postorder.  
static unsigned stmt_cost ( )
static
   Returns an estimate for a cost of statement STMT.  The values here
   are just ad-hoc constants, similar to costs for inlining.  
     Always try to create possibilities for unswitching.  
     We should be hoisting calls if possible.  
         Unless the call is a builtin_constant_p; this always folds to a
         constant, so moving it is useless.  
     Hoisting memory references out should almost surely be a win.  
         Division and multiplication are usually expensive.  
         Shifts and rotates are usually expensive.  
         Make vector construction cost proportional to the number
         of elements.  
         Whether or not something is wrapped inside a PAREN_EXPR
         should not change move cost.  Nor should an intermediate
         unpropagated SSA name copy.  
static void store_motion ( )
static
   Try to perform store motion for all memory references modified inside
   loops.  
static void store_motion_loop ( )
static
   Try to perform store motion for all memory references modified inside
   LOOP.  SM_EXECUTED is the bitmap of the memory references for that
   store motion was executed in one of the outer loops.  

Referenced by ref_indep_loop_p_2().

unsigned int tree_ssa_lim ( )
   Moves invariants from loops.  Only "expensive" invariants are moved out --
   i.e. those that are likely to be win regardless of the register pressure.  
     Gathers information about memory accesses in the loops.  
     Fills ALWAYS_EXECUTED_IN information for basic blocks.  
     For each statement determine the outermost loop in that it is
     invariant and cost for computing the invariant.  
     Execute store motion.  Force the necessary invariants to be moved
     out of the loops as well.  
     Move the expressions that are expensive enough.  
static void tree_ssa_lim_finalize ( )
static
   Cleans up after the invariant motion pass.  
static void tree_ssa_lim_initialize ( )
static
   Compute the global information needed by the loop invariant motion pass.  
     Allocate a special, unanalyzable mem-ref with ID zero.  
static unsigned int tree_ssa_loop_im ( )
static
   Loop invariant motion pass.  

Variable Documentation

vec<bitmap_head> all_refs_stored_in_loop
     The set of memory references stored in each loop, including subloops .  
unsigned* bb_loop_postorder
static
struct pointer_map_t* lim_aux_data_map
static
   Maps statements to their lim_aux_data.  
bitmap_obstack lim_bitmap_obstack
static
   Obstack for the bitmaps in the above data structures.  
struct { ... } memory_accesses
   Description of memory accesses in loops.  

Referenced by hoist_memory_references(), and ref_indep_loop_p_1().

     The hash table of memory references accessed in loops.  

Referenced by contains_placeholder_p(), df_get_eh_block_artificial_uses(), mem_ref_hasher::hash(), and ref_indep_loop_p_1().

vec<bitmap_head> refs_in_loop
     The set of memory references accessed in each loop.  
vec<mem_ref_p> refs_list
     The list of memory references.  
vec<bitmap_head> refs_stored_in_loop
     The set of memory references stored in each loop.  
struct pointer_map_t* ttae_cache
     Cache for expanding memory addresses.