/*
 * libc/stdlib/malloc/heap.h -- heap allocator used for malloc
 *
 *  Copyright (C) 2002,03  NEC Electronics Corporation
 *  Copyright (C) 2002,03  Miles Bader <miles@gnu.org>
 *
 * This file is subject to the terms and conditions of the GNU Lesser
 * General Public License.  See the file COPYING.LIB in the main
 * directory of this archive for more details.
 *
 * Written by Miles Bader <miles@gnu.org>
 */

#include <features.h>


/* On multi-threaded systems, the heap includes a lock.  */
#ifdef __UCLIBC_HAS_THREADS__
# include <bits/uClibc_mutex.h>
# define HEAP_USE_LOCKING
# define __heap_lock(heap_lock) __UCLIBC_MUTEX_LOCK_CANCEL_UNSAFE(*(heap_lock))
# define __heap_unlock(heap_lock) __UCLIBC_MUTEX_UNLOCK_CANCEL_UNSAFE(*(heap_lock))
#else
# define __heap_lock(heap_lock)
# define __heap_unlock(heap_lock)
#endif
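
/* Illustrative only (added; not part of the original header): callers in
   the allocator sources are expected to bracket heap operations with the
   macros above, roughly as in

       __heap_lock (&heap_lock);
       mem = __heap_alloc (&heap, &size);
       __heap_unlock (&heap_lock);

   where `heap' and `heap_lock' are whatever heap/lock pair the caller
   manages; the names here are placeholders, not definitions from this
   file.  */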


/* The heap allocates in multiples of, and aligned to, HEAP_GRANULARITY.
   HEAP_GRANULARITY must be a power of 2.  Malloc depends on this being the
   same as MALLOC_ALIGNMENT.  */
#define HEAP_GRANULARITY_TYPE	double __attribute_aligned__ (sizeof (size_t))
#define HEAP_GRANULARITY	(__alignof__ (HEAP_GRANULARITY_TYPE))


/* The HEAP_INIT macro can be used as a static initializer for a heap
   variable.  The HEAP_INIT_WITH_FA variant is used to initialize a heap
   with an initial static free-area; its argument FA should be declared
   using HEAP_DECLARE_STATIC_FREE_AREA.  */
# define HEAP_INIT 		0
# define HEAP_INIT_WITH_FA(fa)	&fa._fa

/* A free-list area `header'.  These are actually stored at the _ends_ of
   free areas (to make allocating from the beginning of the area simpler),
   so one might call it a `footer'.  */
struct heap_free_area
{
	size_t size;
	struct heap_free_area *next, *prev;
};

/* Return the address of the end of the free area FA.  */
#define HEAP_FREE_AREA_END(fa) ((void *)(fa + 1))
/* Return the address of the beginning of the free area FA.  FA is
   evaluated multiple times.  */
#define HEAP_FREE_AREA_START(fa) ((void *)((char *)(fa + 1) - (fa)->size))
/* Return the size of the free area FA.  */
#define HEAP_FREE_AREA_SIZE(fa) ((fa)->size)
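
/* Illustrative layout sketch (added; not in the original header).  For a
   free area of fa->size bytes, the struct heap_free_area header occupies
   the last sizeof (struct heap_free_area) bytes, so FA itself points at
   the header:

       HEAP_FREE_AREA_START (fa)              HEAP_FREE_AREA_END (fa)
       |                                      |
       v                                      v
       +----------------------------+---------+
       |     usable free space      |   *fa   |
       +----------------------------+---------+
       <------------ fa->size bytes ---------->

   Allocations are carved from the front of the usable space, which is
   why the header lives at the end.  */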

/* This rather clumsy macro allows one to declare a static free-area for
   passing to the HEAP_INIT_WITH_FA initializer macro.  That is the only
   use for which NAME is intended.  */
#define HEAP_DECLARE_STATIC_FREE_AREA(name, size)			      \
  static struct								      \
  {									      \
    HEAP_GRANULARITY_TYPE aligned_space;				      \
    char space[HEAP_ADJUST_SIZE(size)					      \
	       - sizeof (struct heap_free_area)				      \
	       - HEAP_GRANULARITY];					      \
    struct heap_free_area _fa;						      \
  } name = { (HEAP_GRANULARITY_TYPE)0, "", { HEAP_ADJUST_SIZE(size), 0, 0 } }
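
/* Example use (illustrative only; `initial_fa' and `my_heap' are
   hypothetical names, not declared anywhere in this header):

       HEAP_DECLARE_STATIC_FREE_AREA (initial_fa, 256);
       static struct heap_free_area *my_heap = HEAP_INIT_WITH_FA (initial_fa);

   This gives a heap whose free list initially contains a single
   HEAP_ADJUST_SIZE (256)-byte free area carved out of static storage.  */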


/* Rounds SZ up to be a multiple of HEAP_GRANULARITY.  */
#define HEAP_ADJUST_SIZE(sz)  \
   (((sz) + HEAP_GRANULARITY - 1) & ~(HEAP_GRANULARITY - 1))
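
/* For example (illustration only): with HEAP_GRANULARITY equal to 8,
   HEAP_ADJUST_SIZE (1) == 8, HEAP_ADJUST_SIZE (8) == 8, and
   HEAP_ADJUST_SIZE (13) == 16.  The bit-masking trick relies on
   HEAP_GRANULARITY being a power of 2.  */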


/* The minimum allocatable size.  */
#define HEAP_MIN_SIZE	HEAP_ADJUST_SIZE (sizeof (struct heap_free_area))

/* The minimum size of a free area; if allocating memory from a free-area
   would make the free-area smaller than this, the allocation is simply
   given the whole free-area instead.  It must include at least enough room
   to hold a struct heap_free_area, plus some extra to avoid excessive heap
   fragmentation (thus increasing speed).  This is only a heuristic -- it's
   possible for smaller free-areas than this to exist (say, by realloc
   returning the tail-end of a previous allocation), but __heap_alloc will
   try to get rid of them when possible.  */
#define HEAP_MIN_FREE_AREA_SIZE  \
  HEAP_ADJUST_SIZE (sizeof (struct heap_free_area) + 32)


/* branch-prediction macros; they may already be defined by libc.  */
#ifndef likely
#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 96)
#define likely(cond)	__builtin_expect(!!(int)(cond), 1)
#define unlikely(cond)	__builtin_expect((int)(cond), 0)
#else
#define likely(cond)	(cond)
#define unlikely(cond)	(cond)
#endif
#endif /* !likely */


/* Define HEAP_DEBUGGING to cause the heap routines to emit debugging info
   to stderr when the variable __heap_debug is set to true.  */
#ifdef HEAP_DEBUGGING
extern int __heap_debug;
#define HEAP_DEBUG(heap, str) (__heap_debug ? __heap_dump (heap, str) : 0)
#else
#define HEAP_DEBUG(heap, str) (void)0
#endif

/* Output a text representation of HEAP to stderr, labelling it with STR.  */
extern void __heap_dump (struct heap_free_area *heap, const char *str);

/* Do some consistency checks on HEAP.  If they fail, output an error
   message to stderr, and exit.  STR is printed with the failure message.  */
extern void __heap_check (struct heap_free_area *heap, const char *str);


/* Delete the free-area FA from HEAP.  */
static __inline__ void
__heap_delete (struct heap_free_area **heap, struct heap_free_area *fa)
{
  if (fa->next)
    fa->next->prev = fa->prev;
  if (fa->prev)
    fa->prev->next = fa->next;
  else
    *heap = fa->next;
}


/* Link the free-area FA between the existing free-areas PREV and NEXT in
   HEAP.  PREV and NEXT may be 0; if PREV is 0, FA is installed as the
   first free-area.  */
static __inline__ void
__heap_link_free_area (struct heap_free_area **heap, struct heap_free_area *fa,
		       struct heap_free_area *prev,
		       struct heap_free_area *next)
{
  fa->next = next;
  fa->prev = prev;

  if (prev)
    prev->next = fa;
  else
    *heap = fa;
  if (next)
    next->prev = fa;
}

/* Update the mutual links between the free-areas PREV and FA in HEAP.
   PREV may be 0, in which case FA is installed as the first free-area (but
   FA may not be 0).  */
static __inline__ void
__heap_link_free_area_after (struct heap_free_area **heap,
			     struct heap_free_area *fa,
			     struct heap_free_area *prev)
{
  if (prev)
    prev->next = fa;
  else
    *heap = fa;
  fa->prev = prev;
}

/* Add a new free-area MEM, of length SIZE, in between the existing
   free-areas PREV and NEXT in HEAP, and return a pointer to its header.
   PREV and NEXT may be 0; if PREV is 0, MEM is installed as the first
   free-area.  */
static __inline__ struct heap_free_area *
__heap_add_free_area (struct heap_free_area **heap, void *mem, size_t size,
		      struct heap_free_area *prev,
		      struct heap_free_area *next)
{
  struct heap_free_area *fa = (struct heap_free_area *)
    ((char *)mem + size - sizeof (struct heap_free_area));

  fa->size = size;

  __heap_link_free_area (heap, fa, prev, next);

  return fa;
}


/* Allocate SIZE bytes from the front of the free-area FA in HEAP, and
   return the amount actually allocated (which may be more than SIZE).  */
static __inline__ size_t
__heap_free_area_alloc (struct heap_free_area **heap,
			struct heap_free_area *fa, size_t size)
{
  size_t fa_size = fa->size;

  if (fa_size < size + HEAP_MIN_FREE_AREA_SIZE)
    /* There's not enough room left over in FA after allocating the block, so
       just use the whole thing, removing it from the list of free areas.  */
    {
      __heap_delete (heap, fa);
      /* Remember that we've allocated the whole area.  */
      size = fa_size;
    }
  else
    /* Reduce size of FA to account for this allocation.  */
    fa->size = fa_size - size;

  return size;
}
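
/* Worked example (added for illustration; assumes HEAP_MIN_FREE_AREA_SIZE
   works out to 48 on the target): allocating 32 bytes from a 128-byte
   free area would leave 96 >= 48 bytes, so fa->size becomes 96 and 32 is
   returned; allocating 100 bytes from the same area would leave only 28
   bytes, so the whole area is unlinked and the full 128 bytes are
   returned instead.  */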


/* Allocate and return a block at least *SIZE bytes long from HEAP.
   *SIZE is adjusted to reflect the actual amount allocated (which may be
   greater than requested).  */
extern void *__heap_alloc (struct heap_free_area **heap, size_t *size);

/* Allocate SIZE bytes at address MEM in HEAP.  Return the actual size
   allocated, or 0 if we failed.  */
extern size_t __heap_alloc_at (struct heap_free_area **heap, void *mem, size_t size);

/* Return the memory area MEM of size SIZE to HEAP.
   Returns the heap free area into which the memory was placed.  */
extern struct heap_free_area *__heap_free (struct heap_free_area **heap,
					   void *mem, size_t size);
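
/* Minimal sketch of how a malloc-style wrapper might drive the functions
   above (illustrative only: the real glue lives in the malloc sources,
   and the names, the sbrk-based growth, and the fixed 4096-byte step are
   assumptions, not anything defined by this header).  On failure it grows
   the heap with sbrk, donates the new block via __heap_free, and retries;
   a real allocator must also record the allocated size somewhere so that
   free can later hand it back to __heap_free.

       void *simple_malloc (size_t size)
       {
         void *mem;

         size = HEAP_ADJUST_SIZE (size);

         __heap_lock (&heap_lock);
         mem = __heap_alloc (&heap, &size);
         if (! mem)
           {
             void *block = sbrk (4096);
             if (block != (void *) -1)
               {
                 __heap_free (&heap, block, 4096);
                 mem = __heap_alloc (&heap, &size);
               }
           }
         __heap_unlock (&heap_lock);

         return mem;
       }
*/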

/* Return true if HEAP contains absolutely no memory.  */
#define __heap_is_empty(heap) (! (heap))