dma_alloc_coherent: clean it up
author      Andrew Morton <akpm@linux-foundation.org>
            Tue, 6 Jan 2009 22:43:09 +0000 (14:43 -0800)
committer   Linus Torvalds <torvalds@linux-foundation.org>
            Tue, 6 Jan 2009 23:59:31 +0000 (15:59 -0800)
dma_alloc_from_coherent() was rather clumsily coded.  Rework it all
prior to making further changes.

Also, rename local variable `page': kernel readers expect something called
`page' to have type `struct page *'.

Cc: Guennadi Liakhovetski <lg@denx.de>
Cc: Johannes Weiner <hannes@cmpxchg.org>
Cc: Pekka Enberg <penberg@cs.helsinki.fi>
Cc: Dmitry Baryshkov <dbaryshkov@gmail.com>
Cc: Jesse Barnes <jbarnes@virtuousgeek.org>
Cc: Tetsuo Handa <penguin-kernel@i-love.sakura.ne.jp>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
kernel/dma-coherent.c

index 4bdcea8..8056d08 100644
--- a/kernel/dma-coherent.c
+++ b/kernel/dma-coherent.c
@@ -109,34 +109,38 @@ EXPORT_SYMBOL(dma_mark_declared_memory_occupied);
 int dma_alloc_from_coherent(struct device *dev, ssize_t size,
                                       dma_addr_t *dma_handle, void **ret)
 {
-       struct dma_coherent_mem *mem = dev ? dev->dma_mem : NULL;
+       struct dma_coherent_mem *mem;
        int order = get_order(size);
+       int pageno;
 
-       if (mem) {
-               int page = bitmap_find_free_region(mem->bitmap, mem->size,
-                                                    order);
-               if (page >= 0) {
-                       /*
-                        * Memory was found in the per-device arena.
-                        */
-                       *dma_handle = mem->device_base + (page << PAGE_SHIFT);
-                       *ret = mem->virt_base + (page << PAGE_SHIFT);
-                       memset(*ret, 0, size);
-               } else if (mem->flags & DMA_MEMORY_EXCLUSIVE) {
-                       /*
-                        * The per-device arena is exhausted and we are not
-                        * permitted to fall back to generic memory.
-                        */
-                       *ret = NULL;
-               } else {
-                       /*
-                        * The per-device arena is exhausted and we are
-                        * permitted to fall back to generic memory.
-                        */
-                        return 0;
-               }
+       if (!dev)
+               return 0;
+       mem = dev->dma_mem;
+       if (!mem)
+               return 0;
+
+       pageno = bitmap_find_free_region(mem->bitmap, mem->size, order);
+       if (pageno >= 0) {
+               /*
+                * Memory was found in the per-device arena.
+                */
+               *dma_handle = mem->device_base + (pageno << PAGE_SHIFT);
+               *ret = mem->virt_base + (pageno << PAGE_SHIFT);
+               memset(*ret, 0, size);
+       } else if (mem->flags & DMA_MEMORY_EXCLUSIVE) {
+               /*
+                * The per-device arena is exhausted and we are not
+                * permitted to fall back to generic memory.
+                */
+               *ret = NULL;
+       } else {
+               /*
+                * The per-device arena is exhausted and we are
+                * permitted to fall back to generic memory.
+                */
+                return 0;
        }
-       return (mem != NULL);
+       return 1;
 }
 EXPORT_SYMBOL(dma_alloc_from_coherent);
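
For context, a minimal, hypothetical caller sketch of the contract the rework
preserves: a non-zero return means the per-device coherent arena handled the
request (*ret may still be NULL when DMA_MEMORY_EXCLUSIVE forbids fallback),
while 0 means the caller should allocate generic memory itself.  Only
dma_alloc_from_coherent() and its return convention come from the patch above;
example_dma_alloc() and its simplistic page-allocator fallback are assumptions
for illustration, not part of this change.

#include <linux/dma-mapping.h>
#include <linux/gfp.h>
#include <linux/string.h>
#include <linux/io.h>

/*
 * Illustrative only -- not part of this patch.  Real arch code also
 * honours DMA masks and cache attributes in the fallback path.
 */
static void *example_dma_alloc(struct device *dev, size_t size,
			       dma_addr_t *dma_handle, gfp_t gfp)
{
	void *ret;

	/* Try the per-device coherent arena first. */
	if (dma_alloc_from_coherent(dev, size, dma_handle, &ret))
		return ret;	/* arena handled it; ret may be NULL */

	/* Fall back to ordinary pages for the generic case. */
	ret = (void *)__get_free_pages(gfp, get_order(size));
	if (ret) {
		memset(ret, 0, size);
		*dma_handle = virt_to_phys(ret);
	}
	return ret;
}

The rework keeps exactly this behaviour: the old "return (mem != NULL)" at the
function tail becomes an early "return 0" when there is no device or no
per-device arena, and an unconditional "return 1" once the arena has been
consulted.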