[POWERPC] Change rheap functions to use ulongs instead of pointers

The rheap allocation functions return a pointer, but the actual value depends
on how the heap was initialized, and so it can be anything, e.g. an offset
into a buffer.  An unsigned long is a better representation of the value
returned by the allocation functions.

This patch changes all of the relevant rheap functions to use unsigned long
integers instead of pointers.  In case of an error, the value returned is
a negative error code that has been cast to an unsigned long.  The caller can
use the IS_ERR_VALUE() macro to check for this.
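
For example, a caller of the new interface might check for failure like this
(an illustrative sketch only; the rh_info_t pointer, size, and owner string
are placeholders, not code from this patch):

	unsigned long offset;

	offset = rh_alloc(info, size, "example");	/* returns an offset, not a pointer */
	if (IS_ERR_VALUE(offset))
		return (int)offset;	/* propagate the negative error code */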

All code which calls the rheap functions is updated accordingly.  The
IS_MURAM_ERR() and IS_DPERR() macros have been deleted in favor of
IS_ERR_VALUE().

Also added error checking to rh_attach_region().
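
A caller of rh_attach_region() might now check the result like this (a sketch
assuming the surrounding code simply propagates the error; the arguments are
placeholders):

	int ret;

	ret = rh_attach_region(info, start, size);
	if (ret < 0)
		return ret;	/* e.g. -ERANGE if the rounded region is invalid */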

Signed-off-by: Timur Tabi <timur@freescale.com>
Signed-off-by: Kumar Gala <galak@kernel.crashing.org>
diff --git a/arch/powerpc/lib/rheap.c b/arch/powerpc/lib/rheap.c
index 6c5c5dd..b2f6dcc 100644
--- a/arch/powerpc/lib/rheap.c
+++ b/arch/powerpc/lib/rheap.c
@@ -133,7 +133,7 @@
 	info->empty_slots--;
 
 	/* Initialize */
-	blk->start = NULL;
+	blk->start = 0;
 	blk->size = 0;
 	blk->owner = NULL;
 
@@ -158,7 +158,7 @@
 
 	/* We assume that they are aligned properly */
 	size = blkn->size;
-	s = (unsigned long)blkn->start;
+	s = blkn->start;
 	e = s + size;
 
 	/* Find the blocks immediately before and after the given one
@@ -170,7 +170,7 @@
 	list_for_each(l, &info->free_list) {
 		blk = list_entry(l, rh_block_t, list);
 
-		bs = (unsigned long)blk->start;
+		bs = blk->start;
 		be = bs + blk->size;
 
 		if (next == NULL && s >= bs)
@@ -188,10 +188,10 @@
 	}
 
 	/* Now check if they are really adjacent */
-	if (before != NULL && s != (unsigned long)before->start + before->size)
+	if (before && s != (before->start + before->size))
 		before = NULL;
 
-	if (after != NULL && e != (unsigned long)after->start)
+	if (after && e != after->start)
 		after = NULL;
 
 	/* No coalescing; list insert and return */
@@ -216,7 +216,7 @@
 
 	/* Grow the after block backwards */
 	if (before == NULL && after != NULL) {
-		after->start = (int8_t *)after->start - size;
+		after->start -= size;
 		after->size += size;
 		return;
 	}
@@ -321,14 +321,14 @@
 }
 
 /* Attach a free memory region, coalesces regions if adjuscent */
-int rh_attach_region(rh_info_t * info, void *start, int size)
+int rh_attach_region(rh_info_t * info, unsigned long start, int size)
 {
 	rh_block_t *blk;
 	unsigned long s, e, m;
 	int r;
 
 	/* The region must be aligned */
-	s = (unsigned long)start;
+	s = start;
 	e = s + size;
 	m = info->alignment - 1;
 
@@ -338,9 +338,12 @@
 	/* Round end down */
 	e = e & ~m;
 
+	if (IS_ERR_VALUE(e) || (e < s))
+		return -ERANGE;
+
 	/* Take final values */
-	start = (void *)s;
-	size = (int)(e - s);
+	start = s;
+	size = e - s;
 
 	/* Grow the blocks, if needed */
 	r = assure_empty(info, 1);
@@ -358,7 +361,7 @@
 }
 
 /* Detatch given address range, splits free block if needed. */
-void *rh_detach_region(rh_info_t * info, void *start, int size)
+unsigned long rh_detach_region(rh_info_t * info, unsigned long start, int size)
 {
 	struct list_head *l;
 	rh_block_t *blk, *newblk;
@@ -366,10 +369,10 @@
 
 	/* Validate size */
 	if (size <= 0)
-		return ERR_PTR(-EINVAL);
+		return (unsigned long) -EINVAL;
 
 	/* The region must be aligned */
-	s = (unsigned long)start;
+	s = start;
 	e = s + size;
 	m = info->alignment - 1;
 
@@ -380,34 +383,34 @@
 	e = e & ~m;
 
 	if (assure_empty(info, 1) < 0)
-		return ERR_PTR(-ENOMEM);
+		return (unsigned long) -ENOMEM;
 
 	blk = NULL;
 	list_for_each(l, &info->free_list) {
 		blk = list_entry(l, rh_block_t, list);
 		/* The range must lie entirely inside one free block */
-		bs = (unsigned long)blk->start;
-		be = (unsigned long)blk->start + blk->size;
+		bs = blk->start;
+		be = blk->start + blk->size;
 		if (s >= bs && e <= be)
 			break;
 		blk = NULL;
 	}
 
 	if (blk == NULL)
-		return ERR_PTR(-ENOMEM);
+		return (unsigned long) -ENOMEM;
 
 	/* Perfect fit */
 	if (bs == s && be == e) {
 		/* Delete from free list, release slot */
 		list_del(&blk->list);
 		release_slot(info, blk);
-		return (void *)s;
+		return s;
 	}
 
 	/* blk still in free list, with updated start and/or size */
 	if (bs == s || be == e) {
 		if (bs == s)
-			blk->start = (int8_t *)blk->start + size;
+			blk->start += size;
 		blk->size -= size;
 
 	} else {
@@ -416,25 +419,29 @@
 
 		/* the back free fragment */
 		newblk = get_slot(info);
-		newblk->start = (void *)e;
+		newblk->start = e;
 		newblk->size = be - e;
 
 		list_add(&newblk->list, &blk->list);
 	}
 
-	return (void *)s;
+	return s;
 }
 
-void *rh_alloc_align(rh_info_t * info, int size, int alignment, const char *owner)
+/* Allocate a block of memory at the specified alignment.  The value returned
+ * is an offset into the buffer initialized by rh_init(), or a negative number
+ * if there is an error.
+ */
+unsigned long rh_alloc_align(rh_info_t * info, int size, int alignment, const char *owner)
 {
 	struct list_head *l;
 	rh_block_t *blk;
 	rh_block_t *newblk;
-	void *start;
+	unsigned long start;
 
-	/* Validate size, (must be power of two) */
+	/* Validate size, and alignment must be power of two */
 	if (size <= 0 || (alignment & (alignment - 1)) != 0)
-		return ERR_PTR(-EINVAL);
+		return (unsigned long) -EINVAL;
 
 	/* given alignment larger that default rheap alignment */
 	if (alignment > info->alignment)
@@ -444,7 +451,7 @@
 	size = (size + (info->alignment - 1)) & ~(info->alignment - 1);
 
 	if (assure_empty(info, 1) < 0)
-		return ERR_PTR(-ENOMEM);
+		return (unsigned long) -ENOMEM;
 
 	blk = NULL;
 	list_for_each(l, &info->free_list) {
@@ -455,7 +462,7 @@
 	}
 
 	if (blk == NULL)
-		return ERR_PTR(-ENOMEM);
+		return (unsigned long) -ENOMEM;
 
 	/* Just fits */
 	if (blk->size == size) {
@@ -475,7 +482,7 @@
 	newblk->owner = owner;
 
 	/* blk still in free list, with updated start, size */
-	blk->start = (int8_t *)blk->start + size;
+	blk->start += size;
 	blk->size -= size;
 
 	start = newblk->start;
@@ -486,19 +493,25 @@
 	/* this is no problem with the deallocator since */
 	/* we scan for pointers that lie in the blocks   */
 	if (alignment > info->alignment)
-		start = (void *)(((unsigned long)start + alignment - 1) &
-				~(alignment - 1));
+		start = (start + alignment - 1) & ~(alignment - 1);
 
 	return start;
 }
 
-void *rh_alloc(rh_info_t * info, int size, const char *owner)
+/* Allocate a block of memory at the default alignment.  The value returned is
+ * an offset into the buffer initialized by rh_init(), or a negative number if
+ * there is an error.
+ */
+unsigned long rh_alloc(rh_info_t * info, int size, const char *owner)
 {
 	return rh_alloc_align(info, size, info->alignment, owner);
 }
 
-/* allocate at precisely the given address */
-void *rh_alloc_fixed(rh_info_t * info, void *start, int size, const char *owner)
+/* Allocate a block of memory at the given offset, rounded up to the default
+ * alignment.  The value returned is an offset into the buffer initialized by
+ * rh_init(), or a negative number if there is an error.
+ */
+unsigned long rh_alloc_fixed(rh_info_t * info, unsigned long start, int size, const char *owner)
 {
 	struct list_head *l;
 	rh_block_t *blk, *newblk1, *newblk2;
@@ -506,10 +519,10 @@
 
 	/* Validate size */
 	if (size <= 0)
-		return ERR_PTR(-EINVAL);
+		return (unsigned long) -EINVAL;
 
 	/* The region must be aligned */
-	s = (unsigned long)start;
+	s = start;
 	e = s + size;
 	m = info->alignment - 1;
 
@@ -520,20 +533,20 @@
 	e = e & ~m;
 
 	if (assure_empty(info, 2) < 0)
-		return ERR_PTR(-ENOMEM);
+		return (unsigned long) -ENOMEM;
 
 	blk = NULL;
 	list_for_each(l, &info->free_list) {
 		blk = list_entry(l, rh_block_t, list);
 		/* The range must lie entirely inside one free block */
-		bs = (unsigned long)blk->start;
-		be = (unsigned long)blk->start + blk->size;
+		bs = blk->start;
+		be = blk->start + blk->size;
 		if (s >= bs && e <= be)
 			break;
 	}
 
 	if (blk == NULL)
-		return ERR_PTR(-ENOMEM);
+		return (unsigned long) -ENOMEM;
 
 	/* Perfect fit */
 	if (bs == s && be == e) {
@@ -551,7 +564,7 @@
 	/* blk still in free list, with updated start and/or size */
 	if (bs == s || be == e) {
 		if (bs == s)
-			blk->start = (int8_t *)blk->start + size;
+			blk->start += size;
 		blk->size -= size;
 
 	} else {
@@ -560,14 +573,14 @@
 
 		/* The back free fragment */
 		newblk2 = get_slot(info);
-		newblk2->start = (void *)e;
+		newblk2->start = e;
 		newblk2->size = be - e;
 
 		list_add(&newblk2->list, &blk->list);
 	}
 
 	newblk1 = get_slot(info);
-	newblk1->start = (void *)s;
+	newblk1->start = s;
 	newblk1->size = e - s;
 	newblk1->owner = owner;
 
@@ -577,7 +590,11 @@
 	return start;
 }
 
-int rh_free(rh_info_t * info, void *start)
+/* Deallocate the memory previously allocated by one of the rh_alloc functions.
+ * The return value is the size of the deallocated block, or a negative number
+ * if there is an error.
+ */
+int rh_free(rh_info_t * info, unsigned long start)
 {
 	rh_block_t *blk, *blk2;
 	struct list_head *l;
@@ -642,7 +659,7 @@
 	return nr;
 }
 
-int rh_set_owner(rh_info_t * info, void *start, const char *owner)
+int rh_set_owner(rh_info_t * info, unsigned long start, const char *owner)
 {
 	rh_block_t *blk, *blk2;
 	struct list_head *l;
@@ -684,8 +701,8 @@
 		nr = maxnr;
 	for (i = 0; i < nr; i++)
 		printk(KERN_INFO
-		       "    0x%p-0x%p (%u)\n",
-		       st[i].start, (int8_t *) st[i].start + st[i].size,
+		       "    0x%lx-0x%lx (%u)\n",
+		       st[i].start, st[i].start + st[i].size,
 		       st[i].size);
 	printk(KERN_INFO "\n");
 
@@ -695,8 +712,8 @@
 		nr = maxnr;
 	for (i = 0; i < nr; i++)
 		printk(KERN_INFO
-		       "    0x%p-0x%p (%u) %s\n",
-		       st[i].start, (int8_t *) st[i].start + st[i].size,
+		       "    0x%lx-0x%lx (%u) %s\n",
+		       st[i].start, st[i].start + st[i].size,
 		       st[i].size, st[i].owner != NULL ? st[i].owner : "");
 	printk(KERN_INFO "\n");
 }
@@ -704,6 +721,6 @@
 void rh_dump_blk(rh_info_t * info, rh_block_t * blk)
 {
 	printk(KERN_INFO
-	       "blk @0x%p: 0x%p-0x%p (%u)\n",
-	       blk, blk->start, (int8_t *) blk->start + blk->size, blk->size);
+	       "blk @0x%p: 0x%lx-0x%lx (%u)\n",
+	       blk, blk->start, blk->start + blk->size, blk->size);
 }