Re: [gofrontend-dev] libgo patch committed: Fix 32-bit memory allocation

2014-01-09 Thread Michael Hudson-Doyle

Ian Lance Taylor <i...@google.com> writes:

> This patch to libgo fixes memory allocation on 32-bit systems when a lot
> of memory has been allocated.  The problem is described in this patch to
> the master repository: https://codereview.appspot.com/49460043 .
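The change is the same in every hunk below: drop the 64-bit-only guard so the span-map index is computed relative to arena_start on 32-bit targets as well. A minimal sketch of that lookup, using placeholder names (Heap, Span, span_lookup, PageShift value) rather than the actual libgo declarations:

#include <stdint.h>

enum { PageShift = 12 };               /* 4 KiB pages, as in the runtime */

typedef struct Span Span;
typedef struct Heap {
	uintptr_t arena_start;         /* base address of the heap arena */
	Span **map;                    /* one Span* entry per arena page */
} Heap;

/* After the patch: index the map by the page offset from arena_start on
   every architecture, so the map only has to cover the arena itself. */
static Span *
span_lookup(Heap *h, void *v)
{
	uintptr_t p = ((uintptr_t)v - h->arena_start) >> PageShift;
	return h->map[p];
}

/* Before the patch, 32-bit targets skipped the subtraction and indexed
   the map with the absolute page number (uintptr)v >> PageShift, which
   reportedly broke once a lot of memory had been allocated. */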

Here's a patch for the 4.8 branch if you are interested.  I haven't
tested it yet -- well, it's in progress but I'm not going to hang around
long enough for it to finish today.

Cheers,
mwh

diff --git a/libgo/runtime/malloc.goc b/libgo/runtime/malloc.goc
index 8ccaa6b..f0871dd 100644
--- a/libgo/runtime/malloc.goc
+++ b/libgo/runtime/malloc.goc
@@ -541,8 +541,7 @@ runtime_settype_flush(M *mp, bool sysalloc)
 
 		// (Manually inlined copy of runtime_MHeap_Lookup)
 		p = (uintptr)v>>PageShift;
-		if(sizeof(void*) == 8)
-			p -= (uintptr)runtime_mheap->arena_start >> PageShift;
+		p -= (uintptr)runtime_mheap->arena_start >> PageShift;
 		s = runtime_mheap->map[p];
 
 		if(s->sizeclass == 0) {
diff --git a/libgo/runtime/mgc0.c b/libgo/runtime/mgc0.c
index c3b3211..9f17bdc 100644
--- a/libgo/runtime/mgc0.c
+++ b/libgo/runtime/mgc0.c
@@ -239,8 +239,7 @@ markonly(void *obj)
 	// (Manually inlined copy of MHeap_LookupMaybe.)
 	k = (uintptr)obj>>PageShift;
 	x = k;
-	if(sizeof(void*) == 8)
-		x -= (uintptr)runtime_mheap->arena_start>>PageShift;
+	x -= (uintptr)runtime_mheap->arena_start>>PageShift;
 	s = runtime_mheap->map[x];
 	if(s == nil || k < s->start || k - s->start >= s->npages || s->state != MSpanInUse)
 		return false;
@@ -418,8 +417,7 @@ flushptrbuf(PtrTarget *ptrbuf, PtrTarget **ptrbufpos, Obj **_wp, Workbuf **_wbuf
 			// (Manually inlined copy of MHeap_LookupMaybe.)
 			k = (uintptr)obj>>PageShift;
 			x = k;
-			if(sizeof(void*) == 8)
-				x -= (uintptr)arena_start>>PageShift;
+			x -= (uintptr)arena_start>>PageShift;
 			s = runtime_mheap->map[x];
 			if(s == nil || k < s->start || k - s->start >= s->npages || s->state != MSpanInUse)
 				continue;
@@ -466,8 +464,7 @@ flushptrbuf(PtrTarget *ptrbuf, PtrTarget **ptrbufpos, Obj **_wp, Workbuf **_wbuf
 			// Ask span about size class.
 			// (Manually inlined copy of MHeap_Lookup.)
 			x = (uintptr)obj >> PageShift;
-			if(sizeof(void*) == 8)
-				x -= (uintptr)arena_start>>PageShift;
+			x -= (uintptr)arena_start>>PageShift;
 			s = runtime_mheap->map[x];
 
 			PREFETCH(obj);
@@ -585,8 +582,7 @@ checkptr(void *obj, uintptr objti)
 	if(t == nil)
 		return;
 	x = (uintptr)obj >> PageShift;
-	if(sizeof(void*) == 8)
-		x -= (uintptr)(runtime_mheap->arena_start)>>PageShift;
+	x -= (uintptr)(runtime_mheap->arena_start)>>PageShift;
 	s = runtime_mheap->map[x];
 	objstart = (byte*)((uintptr)s->start<<PageShift);
 	if(s->sizeclass != 0) {
diff --git a/libgo/runtime/mheap.c b/libgo/runtime/mheap.c
index b4d94b6..af46bfb 100644
--- a/libgo/runtime/mheap.c
+++ b/libgo/runtime/mheap.c
@@ -150,8 +150,7 @@ HaveSpan:
 		runtime_MSpan_Init(t, s->start + npage, s->npages - npage);
 		s->npages = npage;
 		p = t->start;
-		if(sizeof(void*) == 8)
-			p -= ((uintptr)h->arena_start>>PageShift);
+		p -= ((uintptr)h->arena_start>>PageShift);
 		if(p > 0)
 			h->map[p-1] = s;
 		h->map[p] = t;
@@ -169,8 +168,7 @@ HaveSpan:
 	s->elemsize = (sizeclass==0 ? s->npages<<PageShift : (uintptr)runtime_class_to_size[sizeclass]);
 	s->types.compression = MTypes_Empty;
 	p = s->start;
-	if(sizeof(void*) == 8)
-		p -= ((uintptr)h->arena_start>>PageShift);
+	p -= ((uintptr)h->arena_start>>PageShift);
 	for(n=0; n<npage; n++)
 		h->map[p+n] = s;
 	return s;
@@ -241,8 +239,7 @@ MHeap_Grow(MHeap *h, uintptr npage)
 	mstats.mspan_sys = h->spanalloc.sys;
 	runtime_MSpan_Init(s, (uintptr)v>>PageShift, ask>>PageShift);
 	p = s->start;
-	if(sizeof(void*) == 8)
-		p -= ((uintptr)h->arena_start>>PageShift);
+	p -= ((uintptr)h->arena_start>>PageShift);
 	h->map[p] = s;
 	h->map[p + s->npages - 1] = s;
 	s->state = MSpanInUse;
@@ -259,8 +256,7 @@ runtime_MHeap_Lookup(MHeap *h, void *v)
 	uintptr p;
 	
 	p = (uintptr)v;
-	if(sizeof(void*) == 8)
-		p -= (uintptr)h->arena_start;
+	p -= (uintptr)h->arena_start;
 	return h->map[p >> PageShift];
 }
 
@@ -281,8 +277,7 @@ runtime_MHeap_LookupMaybe(MHeap *h, void *v)
 		return nil;
 	p = (uintptr)v>>PageShift;
 	q = p;
-	if(sizeof(void*) == 8)
-		q -= (uintptr)h->arena_start >> PageShift;
+	q -= (uintptr)h->arena_start >> PageShift;
 	s = h->map[q];
 	if(s == nil || p < s->start || p - s->start >= s->npages)
 		return nil;
@@ -332,8 +327,7 @@ MHeap_FreeLocked(MHeap *h, MSpan *s)
 
 	// Coalesce with earlier, later spans.
 	p = s->start;
-	if(sizeof(void*) == 8)
-		p -= (uintptr)h->arena_start >> PageShift;
+	p -= (uintptr)h->arena_start >> PageShift;
 	if(p > 0 && (t = h->map[p-1]) != nil && t->state != MSpanInUse) {
 		tp = (uintptr*)(t->start<<PageShift);
 		*tp |= *sp;	// propagate needs zeroing mark
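
To see why the subtraction matters on 32-bit, here is a throwaway worked example with made-up numbers (the arena base, the object address, and the 4 KiB page size are assumptions for illustration, not values taken from the runtime):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uintptr_t arena_start = 0x60000000u;   /* hypothetical arena base */
	uintptr_t v = 0xa0000000u;             /* hypothetical object address */
	unsigned shift = 12;                   /* 4 KiB pages */

	/* Index a 32-bit target used before the patch: absolute page number. */
	printf("absolute index:       %#lx\n", (unsigned long)(v >> shift));
	/* Index used everywhere after the patch: arena-relative page number. */
	printf("arena-relative index: %#lx\n",
	       (unsigned long)((v - arena_start) >> shift));
	return 0;
}

The two indexes differ by arena_start >> PageShift; the patch makes 32-bit use the arena-relative one, matching what 64-bit already did.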


Re: [gofrontend-dev] libgo patch committed: Fix 32-bit memory allocation

2014-01-09 Thread Ian Lance Taylor
On Thu, Jan 9, 2014 at 6:34 PM, Michael Hudson-Doyle
<michael.hud...@linaro.org> wrote:
>
> Ian Lance Taylor <i...@google.com> writes:
>
>> This patch to libgo fixes memory allocation on 32-bit systems when a lot
>> of memory has been allocated.  The problem is described in this patch to
>> the master repository: https://codereview.appspot.com/49460043 .
>
> Here's a patch for the 4.8 branch if you are interested.  I haven't
> tested it yet -- well, it's in progress but I'm not going to hang around
> long enough for it to finish today.

Thanks.  Committed to 4.8 branch after testing.

Ian