diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index fb77db6d53f015aa72db30a044a90267210ae7ef..dac6ea85fbaf8d4ff9f531be6e1b37b60cb406ca 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,15 @@
+2010-08-23  Changpeng Fang  <changpeng.fang@amd.com>
+
+	* tree-flow.h (may_be_nonaddressable_p): New declaration, making
+	the existing static function global.
+
+	* tree-ssa-loop-ivopts.c (may_be_nonaddressable_p): Make this
+	function global.
+
+	* tree-ssa-loop-prefetch.c (gather_memory_references_ref): Call
+	may_be_nonaddressable_p on base, and don't collect this reference
+	if the address of the base cannot be taken.
+
 2010-08-23  Michael Meissner  <meissner@linux.vnet.ibm.com>
 
 	* config/rs6000/rs6000.opt (-mveclibabi=mass): New option to
diff --git a/gcc/tree-flow.h b/gcc/tree-flow.h
index 04ba5325f11f0eca6a679bfd6635b9f37b81de09..67313088ffdf75357e0c6bd6f4262daa24a4ddc9 100644
--- a/gcc/tree-flow.h
+++ b/gcc/tree-flow.h
@@ -817,6 +817,7 @@ bool stmt_invariant_in_loop_p (struct loop *, gimple);
 bool multiplier_allowed_in_address_p (HOST_WIDE_INT, enum machine_mode,
 				      addr_space_t);
 unsigned multiply_by_cost (HOST_WIDE_INT, enum machine_mode, bool);
+bool may_be_nonaddressable_p (tree expr);
 
 /* In tree-ssa-threadupdate.c.  */
 extern bool thread_through_all_blocks (bool);
diff --git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c
index a347c86025ad246d1ea0cccf6b7a2475f2310a7e..0029c762687e73a803f42dca8cfadd4ca41959ad 100644
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -1640,7 +1640,7 @@ may_be_unaligned_p (tree ref, tree step)
 
 /* Return true if EXPR may be non-addressable.   */
 
-static bool
+bool
 may_be_nonaddressable_p (tree expr)
 {
   switch (TREE_CODE (expr))
diff --git a/gcc/tree-ssa-loop-prefetch.c b/gcc/tree-ssa-loop-prefetch.c
index 9ad6cd2a59e550801cb1b556a05b862ee5b4a9c8..4e7068f18593eb24e18ac99733ae8b0e0852f772 100644
--- a/gcc/tree-ssa-loop-prefetch.c
+++ b/gcc/tree-ssa-loop-prefetch.c
@@ -539,6 +539,10 @@ gather_memory_references_ref (struct loop *loop, struct mem_ref_group **refs,
   if (step == NULL_TREE)
     return false;
 
+  /* Stop if the address of BASE could not be taken.  */
+  if (may_be_nonaddressable_p (base))
+    return false;
+
   /* Limit non-constant step prefetching only to the innermost loops.  */
   if (!cst_and_fits_in_hwi (step) && loop->inner != NULL)
     return false;