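Make the Zone allocator's exhaustion checks well defined when position_ has been
bumped past limit_. After the alignment correction in Zone::New, the pointer
difference limit_ - position_ can be negative, so both the fast-path check in
Zone::New and the assertion in Zone::NewExpand are rewritten in terms of
uintptr_t, with an explicit limit < position guard for the wrapped case.
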
diff -up v8-3.14.5.10/src/zone.cc.3a9bfec v8-3.14.5.10/src/zone.cc
--- v8-3.14.5.10/src/zone.cc.3a9bfec	2016-07-06 11:21:45.362891427 -0400
+++ v8-3.14.5.10/src/zone.cc	2016-07-06 11:22:08.538764825 -0400
@@ -168,7 +168,10 @@ Address Zone::NewExpand(int size) {
   // Make sure the requested size is already properly aligned and that
   // there isn't enough room in the Zone to satisfy the request.
   ASSERT(size == RoundDown(size, kAlignment));
-  ASSERT(size > limit_ - position_);
+  ASSERT(limit_ < position_ ||
+         reinterpret_cast<uintptr_t>(limit_) -
+                 reinterpret_cast<uintptr_t>(position_) <
+             size);
 
   // Compute the new segment size. We use a 'high water mark'
   // strategy, where we increase the segment size every time we expand
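
The rewritten assertion only holds together because of the leading
limit_ < position_ disjunct: once both operands are unsigned, a position_ that
has been bumped past limit_ makes limit - position wrap around to a huge value,
so the size comparison alone would be false and the assertion would fire
spuriously. A minimal standalone sketch of that failure mode (not part of the
patch; the concrete addresses and the main() harness are invented for
illustration):

    // Why "limit - position < size" alone is not enough once the
    // operands are unsigned. All values here are hypothetical.
    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    int main() {
      uintptr_t limit = 0x1000;
      uintptr_t position = 0x1008;  // past the limit, as after alignment
      int size = 16;

      // Unsigned subtraction wraps: the difference is a huge value, so
      // "limit - position < size" is false even though the zone clearly
      // cannot satisfy the request.
      uintptr_t wrapped = limit - position;
      printf("wrapped difference = %#zx\n", (size_t) wrapped);

      // The patched assertion short-circuits on the wrapped case first.
      assert(limit < position ||
             limit - position < (uintptr_t) size);
      return 0;
    }
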
diff -up v8-3.14.5.10/src/zone-inl.h.3a9bfec v8-3.14.5.10/src/zone-inl.h
--- v8-3.14.5.10/src/zone-inl.h.3a9bfec	2016-07-06 11:21:00.075136898 -0400
+++ v8-3.14.5.10/src/zone-inl.h	2016-07-06 11:21:31.546966899 -0400
@@ -55,7 +55,10 @@ inline void* Zone::New(int size) {
   // Check if the requested size is available without expanding.
   Address result = position_;
 
-  if (size > limit_ - position_) {
+  const uintptr_t limit = reinterpret_cast<uintptr_t>(limit_);
+  const uintptr_t position = reinterpret_cast<uintptr_t>(position_);
+  // position_ > limit_ can be true after the alignment correction above.
+  if (limit < position || (size_t) size > limit - position) {
      result = NewExpand(size);
   } else {
      position_ += size;
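
In the fast path the same wraparound is what the new limit < position guard
prevents: without it, a position_ past limit_ would make limit - position huge,
(size_t) size > limit - position would be false, and the allocator would bump
position_ even further out of the segment instead of calling NewExpand. A
standalone sketch of the patched condition, assuming a simple bump-pointer
layout (ZoneSketch and NeedsExpansion are invented names, not V8 API):

    // Sketch of the patched fast-path test; types and names are
    // hypothetical stand-ins for the Zone fields in the diff above.
    #include <stddef.h>
    #include <stdint.h>

    struct ZoneSketch {
      char* position_;
      char* limit_;

      // Mirrors the patched condition: expansion is needed when either
      // position_ has been aligned past limit_ (the unsigned difference
      // would wrap) or the remaining room is smaller than the request.
      bool NeedsExpansion(int size) const {
        const uintptr_t limit = reinterpret_cast<uintptr_t>(limit_);
        const uintptr_t position = reinterpret_cast<uintptr_t>(position_);
        return limit < position ||
               (size_t) size > limit - position;
      }
    };

Doing the comparison on uintptr_t values rather than on the pointers themselves
also keeps the arithmetic in plain modular integer math, instead of relying on
a pointer difference that is only well defined while position_ still points
into the segment.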