Main Page | Class Hierarchy | Compound List | File List | Compound Members | File Members

malloc.c

Go to the documentation of this file.
00001 /*--------------------------------------------------------------------
00002  *
00003  * (C) Copyright Koninklijke Philips Electronics NV 2006. 
00004  * All rights reserved. This software is licensed under the terms of
00005  * version 2.1 of the GNU Lesser General Public License as published 
00006  * by the Free Software Foundation. For licensing and warranty
00007  * information, see the file COPYING in the main directory.
00008  *
00009  *------------------------------------------------------------------*/
00010 
00011 /*
00011  * malloc() and friends -- boundary-tag heap allocator.
00011  *  If the size of the space requested is zero, return NULL.
00012  *
00013  * Uses the boundary-tag allocation method (first fit) with roving pointer.
00014  * Calls sbrk() as necessary to grow the memory arena by grabbing from the heap,
00015  * but does not assume that successive sbrk() calls return contiguous memory.
00016  *
00017  * Each block (p) in the arena is marked with a size word (p->size)
00018  * with its low bit (MARKBIT) set if the block is in use.
00019  * The size|mark word is replicated in the losize word of the successor block
00020  * to facilitate joining adjacent freed blocks and thus avoid fragmentation.
00021  * The block can hold a malloc'ed block of up to p->size - OVERHEAD bytes.
00022  * The free list is maintained as a doubly linked list:
00023  * each free block contains pointers to previous free and next free block,
00024  * to facilitate simple addition and deletion of free blocks.
00025  * The bottom losize word and top size word of each allocated arena contain
00026  * a 0 | MARKBIT word to indicate a used block of size 0.
00027  *
00028  * Reference:
00029  *  Harry R. Lewis and Larry Denenberg, "Data Structures and
00030  *  Their Algorithms," Harper Collins, 1991, Chapter 10, pp. 361ff.
00031  */
00032 
00033 #include "trt.h"
00034 
00035 #if TRT_MIPSTM
00036 
00037 #define EXCL_START()    ien = acquire_lock(trt_heap.lock, 1)
00038 #define EXCL_END()    release_lock(trt_heap.lock, ien)
00039 #define EXCL_END_NO_IEN() release_lock_no_ien(trt_heap.lock)
00040 
00041 typedef unsigned int uint;
00042 
00043 #define offsetof(s, m)  (size_t)(&(((s *)0)->m))
00044 
/* Memory block structure.
 * Every arena block carries its own size word and a copy of its
 * predecessor's size word (losize); the low bit of each is MARKBIT
 * (set while the block is in use).  prev/next are meaningful only
 * while the block sits on the free list: an allocated block's user
 * data starts at &prev (OVERHEAD bytes in), so a block of size s can
 * hold s - OVERHEAD user bytes. */
typedef struct  block {
  uint    losize;   /* size | MARKBIT of predecessor */
  uint    size;   /* size | MARKBIT   */
  struct  block *prev;    /* previous free block (if free) */
  struct  block *next;    /* next free block (if free)  */
} BLOCK;
00052 
00053 /* Manifest constants. */
00054 #define MARKBIT   1   /* used mark for size field */
00055 #define MINBLOCK  (sizeof(BLOCK)) /* minimum block size   */
00056 #define OVERHEAD  (offsetof(BLOCK, prev)) /* block overhead (if used) */
00057 
00058 /*
00059  * Tuneable manifest constants.
00060  * N.B. grow_free() forces each allocated BLOCK *p to be ALIGNMENT-aligned,
00061  * but then the code below currently implicitly assumes the resulting
00062  * malloc pointer memory(p) to be ALIGNMENT-aligned too.
00063  * Changing ALIGNMENT here to a larger value here will ->not<- guarantee
00064  * ALIGNMENT-aligned malloc values unless the code is modified.
00065  */
00066 #define ALIGNMENT (sizeof(uint))    /* result alignment (2^n) */
00067 #define SBRK_ROUNDUP  2048      /* sbrk() arg roundup (2^n) */
00068 
00069 /*
00070  * Macros:
00071  *  add(p)    add p to doubly linked freelist
00072  *  blockp(vp)  convert void * malloc pointer to BLOCK *
00073  *  bump(p, n)  bump pointer p by n bytes; result is BLOCK *
00074  *  clearmarks(p) clear the used marks of p
00075  *  delete(p) delete p from doubly linked list
00076  *  is_free(p)  iff p is free
00077  *  is_predfree(p)  iff predecessor of p is free
00078  *  memory(p) convert BLOCK * to void * malloc pointer
00079  *  needed(n) min allocation block size for n byte malloc
00080  *  pred(p)   predecessor of p
00081  *  pred_size(p)  size of predecessor of p
00082  *  roundup(n, m) round n up to size m; assumes m is power of 2
00083  *  setmarks(p) set used marks of p
00084  *  setsizes(p, n)  set size of p and losize of successor of p to n
00085  *  succ(p)   successor of p
00086  */
/* Multi-statement macros are wrapped in do { ... } while (0) so each
 * expands to exactly one statement and stays safe inside unbraced
 * if/else bodies (CERT PRE10-C).  All current uses are standalone
 * statements, so the expansion change is behavior-preserving. */
#define add(p)    do {            \
      (p)->prev = trt_heap.freelist;    \
      (p)->next = trt_heap.freelist->next;  \
      trt_heap.freelist->next = trt_heap.freelist->next->prev = (p); \
    } while (0)
#define blockp(vp)  bump(vp, -OVERHEAD)
#define bump(p, n)  ((BLOCK *)(((char *)(p)) + (n)))
#define clearmarks(p) do { (p)->size &= ~MARKBIT; succ(p)->losize &= ~MARKBIT; } while (0)
#define delete(p) do {            \
      (p)->prev->next = (p)->next;  \
      (p)->next->prev = (p)->prev;  \
    } while (0)
#define is_free(p)  (((p)->size & MARKBIT) == 0)
#define is_predfree(p)  (((p)->losize & MARKBIT) == 0)
#define memory(p) ((void *)bump(p, OVERHEAD))
#define needed(n) (((n) + OVERHEAD < MINBLOCK) ? MINBLOCK : (n)+OVERHEAD)
#define pred(p)   bump((p), -pred_size(p))
#define pred_size(p)  ((p)->losize & ~MARKBIT)
#define roundup(n, m) (((n) + (m) - 1) & ~((m) - 1))
#define setmarks(p) (succ(p)->losize = (p)->size = (p)->size | MARKBIT)
#define setsizes(p, n)  do { (p)->size = (n); succ(p)->losize = (p)->size; } while (0)
#define succ(p)   bump((p), ((p)->size & ~MARKBIT))
00105 
/* Static locals. */
/* Sentinel node of the circular doubly linked free list: zero-sized,
 * points at itself when the list is empty. */
static  BLOCK freelist0 = { 0, 0, &freelist0, &freelist0 };

/* Shared allocator state, guarded by lock. */
struct heap
{
  volatile int init;  /* init-once guard, bumped atomically by mem_init() */
  volatile int lock;  /* lock taken by EXCL_START/released by EXCL_END */
  BLOCK *freelist;  /* sentinel head of the circular free list */
  BLOCK *rover;   /* roving pointer: where the first-fit search starts */
  void *base;   /* lowest address usable as heap (see mem_update_heap_base) */
}; 

extern struct heap trt_heap;
/* note: initially locked. mem_init() unlocks it. */
00120 
00121 /* Global functions. */
00122 
00123 /*
00124  * Add a non-malloc'ed piece of memory to the free list.
00125  * Does not check for overlap with the existing arena, caveat utilitor!
00126  * If we kept a list of the blocks added by this routine,
00127  * we could modify _alloc_check() to check everything correctly.
00128  * Instead, _alloc_check() currently only checks the arena grown by
00129  * grow_free(), and its checks are relaxed once _add_free() is called.
00130  */
00131 void add_free(void *ptr, size_t size)
00132 {
00133   BLOCK *p;
00134 
00135   /* Force the memory to be ALIGNMENT-aligned. */
00136   while (size > 0 && (((uint)ptr) & (ALIGNMENT - 1)) != 0) {
00137     ptr = (void *)((uint)ptr + 1);
00138     --size;
00139   }
00140 
00141   /* Make sure the block is large enough to matter. */
00142   if (size < MINBLOCK + OVERHEAD)
00143     return;
00144 
00145   /* Mark the block appropriately and add it to the free list. */
00146   p = (BLOCK *)ptr;
00147   size -= OVERHEAD;   /* for empty marker at end */
00148   p->losize = 0 | MARKBIT;  /* predecessor size 0 */
00149   setsizes(p, size);    /* size of added block */
00150   succ(p)->size = 0 | MARKBIT;  /* successor size 0 */
00151   add(p);       /* add to free list */
00152 }
00153 
/* Allocate and clear memory for an array of nmemb elements of the
 * given size.  Returns NULL if nmemb * size overflows size_t
 * (CERT MEM07-C), or if the underlying malloc() fails (including the
 * zero-size case, where malloc(0) returns NULL). */
void *calloc(size_t nmemb, size_t size)
{
  void *malloc(size_t size);
  void *s;
  size_t total;

  /* Reject a product that wraps around size_t; the old unchecked
   * multiply could silently allocate a tiny block. */
  if (nmemb != 0 && size > (size_t)-1 / nmemb)
    return NULL;
  total = nmemb * size;
  if ((s = malloc(total)) == NULL)
    return NULL;
  memset(s, 0, total);
  return s;
}
00166 
/*
 * Free allocated memory.
 * Avoid fragmentation of the memory arena by joining all adjacent freed blocks.
 * free(NULL) is a no-op (ANSI); freeing a block that is already free
 * is silently ignored.
 */
void free(void *ptr)
{
  BLOCK *p, *q;
  size_t  n;
  int ien;      /* saved interrupt state for EXCL_START/EXCL_END */

  if (ptr == NULL)
    return;     /* ANSI says so */

  /* Make sure the block is currently allocated. */
  p = blockp(ptr);    /* step back OVERHEAD bytes to the header */
  if (is_free(p)) {   /* block is not allocated */
    return;     /* ignore (double free or stray pointer) */
  }
  EXCL_START();
  clearmarks(p);      /* mark this block as free */

  /* Coalesce with predecessor block if free. */
  if (is_predfree(p)) {
    /* Coalesce this block with the free predecessor block. */
    n = p->size;    /* MARKBIT already cleared above */
    p = pred(p);    /* losize tells us how far back it starts */
    setsizes(p, p->size + n);
    delete(p);    /* delete p from the free list */
          /* (it gets put back again below...) */
  }

  /* Coalesce with successor block if free. */
  q = succ(p);
  if (is_free(q)) {
    /* Coalesce p with successor free block q. */
    setsizes(p, p->size + q->size);
    if (trt_heap.rover == q)  /* q disappears: keep the rover valid */
      trt_heap.rover = p;
    delete(q);    /* delete q from the free list */
  }
  add(p);       /* add p to freelist */
  EXCL_END();
}
00210 
/* Allocate memory.
 * Returns NULL for size 0 (implementation-defined) and when no free
 * block is large enough.  First-fit search over the circular free
 * list, starting at the roving pointer so successive allocations
 * spread across the arena rather than churning the list head. */
void *malloc(size_t size)
{
  BLOCK *p, *q;
  size_t  m, n;
  int ien;      /* saved interrupt state for EXCL_START/EXCL_END */

  if (size == (size_t)0)
    return NULL;    /* implementation-defined behavior */

  /* Search the freelist for a big enough block. */
  EXCL_START();
  n = roundup(needed(size), ALIGNMENT); /* needed memory block size */
  p = trt_heap.rover;
  do {
    if (p->size >= n)
      break;    /* block p is large enough */
    p = p->next;
  } while (p != trt_heap.rover);
  /* Free blocks have MARKBIT clear, so p->size is the true size here;
   * the freelist0 sentinel has size 0 and is never selected. */

  if (p->size < n) {
    EXCL_END();
    return NULL;    /* allocation failed: no block big enough */
  }

  /* Found a suitable free block p. */
  trt_heap.rover = p->next; /* next search resumes past this block */
  m = p->size;
  if (m - n >= MINBLOCK) {
    /*
     * Split block p into free and used parts.
     * Rather than leaving the bottom block free
     * and just leaving it on the free list,
     * this leaves the top part free.  This requires an
     * extra add() and delete(), but leaving the top part free
     * helps avoid fragmentation when grow_arena() adds memory,
     * because the top of the previous arena is often free.
     */
    setsizes(p, n);
    q = succ(p);
    setsizes(q, m - n);
    add(q);
  }
  delete(p);    /* delete p from free list */
  setmarks(p);    /* mark p as used */
  EXCL_END();

  return (void *)memory(p);
}
00260 
/* Reallocate memory.
 * realloc(NULL, n) behaves as malloc(n); realloc(p, 0) frees p and
 * returns NULL (ANSI).  Shrinking happens in place, splitting off the
 * tail when the remainder is big enough to be a block; growing always
 * allocates a fresh block, copies the old user data, and frees the
 * original.  On allocation failure the old block is left intact. */
void *realloc(void *ptr, size_t size)
{
  BLOCK *p;
  size_t  old, new;
  void  *vp;
  int ien;      /* saved interrupt state for EXCL_START/EXCL_END */

  if (ptr == NULL)
    return malloc(size);  /* ANSI says so */
  if (size == (size_t)0) {
    free(ptr);    /* ANSI says so */
    return NULL;    /* implementation-defined */
  }

  /* Make sure the block is currently allocated. */
  p = blockp(ptr);
  if (is_free(p)) {   /* block is not allocated */
    return NULL;    /* ignore */
  }

  EXCL_START();
  old = (p->size & ~MARKBIT); /* current block size, incl. OVERHEAD */
  new = roundup(needed(size), ALIGNMENT);
  if (new <= old) {
    /* Shrink the block: either return unchanged or split it. */
    if (old - new < MINBLOCK) {
      EXCL_END();
      return ptr; /* remainder too small to form a block */
    }

    /* Shrink the block by splitting it; the tail becomes free.
     * NOTE(review): the freed tail is not coalesced with a following
     * free block here, which can leave two adjacent free blocks
     * (fragmentation only; later frees still work). */
    setsizes(p, new);
    setmarks(p);
    p = succ(p);
    setsizes(p, old - new);
    add(p);
    EXCL_END();
    return ptr;
  } else {
    /* Grow the block. */
    /* N.B. Could check if enough space in start of next block... */
    EXCL_END();   /* malloc()/free() below take the lock themselves */
    if ((vp = malloc(size)) == NULL)
      return NULL;  /* old block untouched, per ANSI */
    memcpy(vp, ptr, old - OVERHEAD); /* copy the old user data */
    free(ptr);
    return vp;
  }
}
00311 
00312 
00313 void *dsm_ddr_start();
00314 unsigned int dsm_ddr_size();
00315 
00316 void mem_init()
00317 {
00318 #ifdef HEAP_IN_STATIC_ARRAY
00319   static int heap[2 * 1024 * 1024];
00320 
00321   if(atomic_add(trt_heap.init, 1) == 0)
00322         {
00323                 trt_heap.freelist = &freelist0;
00324                 trt_heap.rover = &freelist0;
00325 
00326     add_free(heap, sizeof(heap));
00327 
00328                 serial_out("mem_init: heap = %dMB\n", sizeof(heap) >> 20);
00329 
00330                 EXCL_END_NO_IEN();
00331         }
00332 #else
00333   /* heap in memory between end of executable and end of memory */
00334 
00335   unsigned int heap_size;
00336   char *ddr_end, *heap_base;
00337 
00338   if(atomic_add(trt_heap.init, 1) == 0)
00339   {
00340     trt_heap.freelist = &freelist0;
00341     trt_heap.rover = &freelist0;
00342 
00343     ddr_end = (char *) dsm_ddr_start() + dsm_ddr_size();
00344 
00345     heap_base = trt_heap.base;
00346     heap_size = ddr_end - heap_base;
00347 
00348     add_free(heap_base, heap_size);
00349 
00350     serial_out("mem_init: heap = %dMB\n", heap_size >> 20);
00351 
00352     EXCL_END_NO_IEN();
00353   }
00354 #endif
00355 }
00356 
00357 void mem_update_heap_base(void *new_base)
00358 {
00359   new_base = (char *) new_base + TRT_BLOCK_SIZE;
00360 
00361   if(trt_heap.base < new_base)
00362     trt_heap.base = new_base;
00363 }
00364 
00365 #endif

Generated on Wed Feb 15 14:52:39 2006 for yapi by doxygen 1.3.2