diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 9333fb6804c70c2b2496c96004f19d1b2b09be6b..7161754b2b3dfd3c35a04b368c8c207bccfd48a5 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,9 @@
+2014-02-20  Richard Henderson  <rth@redhat.com>
+
+	PR c++/60272
+	* builtins.c (expand_builtin_atomic_compare_exchange): Conditionalize
+	on failure the store back into EXPECT.
+
 2014-02-20  Chung-Lin Tang  <cltang@codesourcery.com>
 	    Sandra Loosemore  <sandra@codesourcery.com>
 
diff --git a/gcc/builtins.c b/gcc/builtins.c
index f5f55bf0e2e60ffb009a4368573ec4fbb165ecd8..09fefe50a8d91d0b444877b511e4b8e990badf43 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -5292,7 +5292,7 @@ static rtx
 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp, 
 					rtx target)
 {
-  rtx expect, desired, mem, oldval;
+  rtx expect, desired, mem, oldval, label;
   enum memmodel success, failure;
   tree weak;
   bool is_weak;
@@ -5330,14 +5330,23 @@ expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
   if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
     is_weak = true;
 
+  if (target == const0_rtx)
+    target = NULL;
   oldval = expect;
-  if (!expand_atomic_compare_and_swap ((target == const0_rtx ? NULL : &target),
-				       &oldval, mem, oldval, desired,
+
+  if (!expand_atomic_compare_and_swap (&target, &oldval, mem, oldval, desired,
 				       is_weak, success, failure))
     return NULL_RTX;
 
-  if (oldval != expect)
-    emit_move_insn (expect, oldval);
+  /* Conditionally store back to EXPECT, lest we create a race condition
+     with an improper store to memory.  */
+  /* ??? With a rearrangement of atomics at the gimple level, we can handle
+     the normal case where EXPECT is totally private, i.e. a register.  At
+     which point the store can be unconditional.  */
+  label = gen_label_rtx ();
+  emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
+  emit_move_insn (expect, oldval);
+  emit_label (label);
 
   return target;
 }
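
The comment added above states the rationale for the builtins.c change: under the __atomic_compare_exchange contract, the old value is written back into EXPECT only when the operation fails, so a successful exchange must not emit a store that another thread could race with.  A minimal illustration of that contract (hypothetical test program, not part of the patch):

#include <stdbool.h>
#include <stdio.h>

int
main (void)
{
  int shared = 1;
  int expected = 1;

  /* Succeeds: shared becomes 2 and expected is left untouched, so no
     spurious store is performed on the success path.  */
  if (__atomic_compare_exchange_n (&shared, &expected, 2, false,
                                   __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
    printf ("success, expected = %d\n", expected);

  /* Fails: shared (2) != expected (1); only now is the current value
     of shared written back into expected.  */
  if (!__atomic_compare_exchange_n (&shared, &expected, 3, false,
                                    __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
    printf ("failure, expected = %d\n", expected);

  return 0;
}
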
diff --git a/libatomic/ChangeLog b/libatomic/ChangeLog
index 2d1c9bd18ae73e2e44e5d8c597752fb63fed35d4..1bd3c2178aa5ca94f2dd0d0d6eb93e75dae9f004 100644
--- a/libatomic/ChangeLog
+++ b/libatomic/ChangeLog
@@ -1,3 +1,9 @@
+2014-02-20  Richard Henderson  <rth@redhat.com>
+
+	PR c++/60272
+	* cas_n.c (libat_compare_exchange): Conditionalize on failure
+	the store back to EPTR.
+
 2014-01-02  Richard Sandiford  <rdsandiford@googlemail.com>
 
 	Update copyright years
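
The cas_n.c change below applies the same rule in libatomic's fallback implementations, and it also makes the failure memory model meaningful: the failure path now writes back to EPTR and issues its trailing barrier with fmodel rather than smodel.  A short sketch of how a caller's two memory orders reach the smodel/fmodel parameters of SIZE(libat_compare_exchange) (illustrative names; on targets lacking a native narrow CAS, GCC may lower this built-in to a libatomic call):

#include <stdbool.h>

/* Hypothetical helper: claim a one-byte flag.  The success and failure
   orders become the smodel and fmodel arguments seen by libatomic.  */
bool
try_claim (unsigned char *flag, unsigned char *expected)
{
  return __atomic_compare_exchange_n (flag, expected, 1, false,
                                      __ATOMIC_ACQ_REL,   /* smodel */
                                      __ATOMIC_ACQUIRE);  /* fmodel */
}
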
diff --git a/libatomic/cas_n.c b/libatomic/cas_n.c
index 39c78332423c578c6f3a72b289c1748c2f2b2f24..801262d551c4bbf5079a8b3a58977f230a03d653 100644
--- a/libatomic/cas_n.c
+++ b/libatomic/cas_n.c
@@ -51,10 +51,9 @@ SIZE(libat_compare_exchange) (UTYPE *mptr, UTYPE *eptr, UTYPE newval,
 #if !DONE && N <= WORDSIZE && defined(atomic_compare_exchange_w)
 bool
 SIZE(libat_compare_exchange) (UTYPE *mptr, UTYPE *eptr, UTYPE newval,
-			      int smodel, int fmodel UNUSED)
+			      int smodel, int fmodel)
 {
   UWORD mask, shift, weval, woldval, wnewval, t, *wptr;
-  bool ret = false;
 
   pre_barrier (smodel);
 
@@ -82,12 +81,13 @@ SIZE(libat_compare_exchange) (UTYPE *mptr, UTYPE *eptr, UTYPE newval,
     }
   while (!atomic_compare_exchange_w (wptr, &woldval, t, true,
 				     __ATOMIC_RELAXED, __ATOMIC_RELAXED));
-  ret = true;
+  post_barrier (smodel);
+  return true;
+
  failure:
   *eptr = woldval >> shift;
-
-  post_barrier (smodel);
-  return ret;
+  post_barrier (fmodel);
+  return false;
 }
 
 #define DONE 1
@@ -102,18 +102,17 @@ SIZE(libat_compare_exchange) (UTYPE *mptr, UTYPE *eptr, UTYPE newval,
 {
   UTYPE oldval;
   UWORD magic;
-  bool ret = false;
+  bool ret;
 
   pre_seq_barrier (smodel);
   magic = protect_start (mptr);
 
   oldval = *mptr;
-  if (oldval == *eptr)
-    {
-      *mptr = newval;
-      ret = true;
-    }
-  *eptr = oldval;
+  ret = (oldval == *eptr);
+  if (ret)
+    *mptr = newval;
+  else
+    *eptr = oldval;
 
   protect_end (mptr, magic);
   post_seq_barrier (smodel);