File: tjheap.cpp

package info (click to toggle)
odin 2.0.5-8
  • links: PTS, VCS
  • area: main
  • in suites: forky, sid
  • size: 9,196 kB
  • sloc: cpp: 62,638; sh: 4,541; makefile: 779
file content (279 lines) | stat: -rw-r--r-- 7,265 bytes parent folder | download | duplicates (8)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
#include "tjheap.h"
#include "tjstring.h" // for ftos


#ifdef CUSTOM_HEAP


// Total size of the custom heap in bytes (CUSTOM_HEAP_SIZE is given in MB).
// Parenthesized so the macro expands safely inside larger expressions,
// e.g. la_heap+MYHEAP_SIZE-2*(...) in la_init().
#define MYHEAP_SIZE (CUSTOM_HEAP_SIZE*0x100000)


// Report a fatal heap failure and terminate the process.
// The message goes to the registered trace callback when one is
// installed, otherwise to stderr; either way we abort() -- a custom
// heap in an inconsistent or exhausted state cannot be recovered.
void custom_heap_error(const char* txt) {
  if(Heap::tracefunc==0) {
    fprintf(stderr,"%s\n",txt);
  } else {
    Heap::tracefunc(txt);
  }
  abort();
}


//#define USE_TRIVIAL_ALGORITM


#ifdef USE_TRIVIAL_ALGORITM
//////////////////////////////////// START of Trivial Algorithm /////////////////////////////

// Arena for the trivial algorithm. Oversized (100x the configured heap)
// because memory handed out here is never reclaimed.
// Parenthesized so the macro stays correct inside larger expressions.
#define TRIVIAL_HEAP_SIZE (100*MYHEAP_SIZE)

static char myheap[TRIVIAL_HEAP_SIZE];  // Bump-allocator arena
static unsigned int heapcount=0;        // Offset of the first unused byte in myheap


// Bump allocator: hands out consecutive chunks of the static arena and
// never reclaims them (the matching operator delete below is a no-op).
// Aborts via custom_heap_error() on exhaustion, so it never returns null.
void* trivial_alloc(size_t size) {
  // Round the running offset up to 8 bytes so every returned pointer is
  // suitably aligned for objects created via operator new.
  // NOTE(review): assumes the compiler places myheap itself at an
  // 8-byte-aligned address -- confirm, or add alignment to its definition.
  heapcount=(heapcount+7u)&~7u;
  // Overflow-safe form of (heapcount+size)>=TRIVIAL_HEAP_SIZE: 'size'
  // is caller-controlled and heapcount+size could wrap on 32-bit size_t.
  if(size>=TRIVIAL_HEAP_SIZE-heapcount) {
    custom_heap_error("Out of memory");
  }
  void* result=myheap+heapcount;
  heapcount+=size;
  return result;
}


// Global operator new/delete replacements routing all allocations
// through the trivial bump allocator. new never returns null because
// trivial_alloc aborts on exhaustion, so the absent std::bad_alloc
// throw is never reached.
//#ifndef STL_REPLACEMENT
//void* operator new(size_t size) throw (std::bad_alloc) {return trivial_alloc(size);}
//#else
void* operator new(size_t size) {return trivial_alloc(size);}
//#endif

//void* operator new(size_t size) {return qf_malloc(size);}
void* operator new[](size_t size) {return trivial_alloc(size);}

// Deliberately empty: the trivial algorithm never reclaims memory.
void operator delete(void* mptr) {}
void operator delete[](void* mptr) {}


//////////////////////////////////// END of Trivial Algorithm /////////////////////////////

#else // USE_TRIVIAL_ALGORITM

/////////////////////////////////// START of List Allocator Algorithm /////////////////////

#define ALIGNMENT 8 // Cell/payload alignment; must be even so pointer bit 0 stays free for the used-tag
static char la_heap[MYHEAP_SIZE]; // Arena managed by the list allocator
// NOTE(review): a char array carries no alignment guarantee of its own;
// la_init() re-aligns the first cell with la_upalign() -- confirm that
// suffices on all target platforms.

static bool la_init_done=false; // Set by la_init(), which list_alloc() runs lazily on first use

// Round 'ptr' down to the nearest multiple of ALIGNMENT.
inline size_t la_downalign(size_t ptr) {
  return ptr - ptr%ALIGNMENT;
}

// Round 'ptr' up to the nearest multiple of ALIGNMENT;
// already-aligned values are returned unchanged.
inline size_t la_upalign(size_t ptr) {
  size_t rem=ptr%ALIGNMENT;
  if(rem==0) return ptr;
  return ptr + (ALIGNMENT - rem);
}


// Header preceding every chunk managed by the list allocator. All cells
// form one cyclic, address-ordered, doubly linked list spanning la_heap.
// The "used" flag is stored in the least significant bit of 'next':
// because cells are ALIGNMENT-aligned that bit is otherwise always zero,
// and aligned_next() masks it off before the pointer is followed.
struct la_cell {

  inline void set_used()   {next=(la_cell*)((size_t)next | 1);} // Set least significant bit
  inline void set_unused() {next=(la_cell*)((size_t)next & ~1);} // Clear least significant bit

  inline bool is_used() const {
    return (size_t)next & 1; // Read least significant bit
  }

  // Payload bytes between this header and the next one. Exact only for
  // unused cells: on a used cell the tag bit still sits in 'next' and
  // inflates the result by one byte (callers only use it on free cells
  // or as an estimate).
  inline size_t size() const {
    if(next<this) return 0; // last cell: the cyclic list wraps back to a lower address
    return (size_t)next-(size_t)this-sizeof(la_cell);
  }

  // Start of the caller-visible memory, directly after this header.
  inline void* memptr() const {
    return (void*)((size_t)this+sizeof(la_cell));
  }

  inline la_cell* aligned_prev() {return prev;} // Already aligned; 'prev' never carries a tag
  inline la_cell* aligned_next() {return (la_cell*)la_downalign((size_t)next);} // Strips the used-tag

  la_cell* prev;
  la_cell* next;
};

la_cell* la_rovptr=0; // Roving pointer: where the next allocation search starts (next-fit)
la_cell* la_begin=0;  // Sentinel cell at the start of the arena; initially owns all free memory
la_cell* la_end=0;    // Sentinel cell at the end of the arena; permanently marked used


/*
void check_integrity(const char* caller, la_cell* cell) {
  la_cell* next=cell->aligned_next();
  la_cell* prev=cell->aligned_prev();

  // Exclude from check
  if(next==la_begin || next==la_end) return;
  if(prev==la_begin || prev==la_end) return;
  if(cell==la_begin || cell==la_end) return;

  if(prev>=cell) {fprintf(stderr,"%s: prev>=cell\n",caller); custom_heap_error("check_integrity failed");}
  if(cell>=next) {fprintf(stderr,"%s: cell>=next\n",caller); custom_heap_error("check_integrity failed");}
}

void la_dump() {
  la_cell* iter=la_begin;
  while(1) {
    fprintf(stdout,"size(%p)=%i\t\t prev=%p\t\t next=%p\t\t used=%i\n",iter,iter->size(),iter->aligned_prev(),iter->aligned_next(),iter->is_used());
    iter=iter->aligned_next();
    if(iter==la_begin) {
      break;
    }
  }
}

*/


// One-time setup of the cyclic cell list: a free sentinel at the start
// of the arena (initially holding all memory) and a permanently "used"
// sentinel at the end that every allocation search skips over.
inline void la_init() {

  // some checks for implicit assumptions
  if(la_upalign(sizeof(la_cell))!=sizeof(la_cell)) custom_heap_error("la_cell does not align");
  if(sizeof(la_cell)!=(2*sizeof(la_cell*))) custom_heap_error("sizeof(la_cell) exceeds");

  // Place begin cell (the arena is a plain char array, so align its start explicitly)
  la_begin=(la_cell*)la_upalign((size_t)la_heap);

  // Place end cell
  char* offset=la_heap+MYHEAP_SIZE-2*(ALIGNMENT+sizeof(la_cell)); // leave enough space for un-aligned memory and both cells
  la_end=(la_cell*)la_downalign((size_t)offset);

  // Connect both cells cyclically
  la_begin->prev=la_begin->next=la_end;
  la_end->prev=la_end->next=la_begin;

  la_begin->set_unused(); // This will hold all of the free memory upon startup
  la_end->set_used(); // So last cell will never be modified

  la_rovptr=la_begin; // set to begin of list

  la_init_done=true;
}


// Next-fit allocation: starting from the roving pointer, take the first
// unused cell that can hold 'size' bytes, splitting off the surplus into
// a new free cell when it is big enough to be worth tracking.
// Aborts via custom_heap_error() when no cell fits; never returns null.
inline void* list_alloc(size_t size) {
  if(!la_init_done) la_init();

  size=la_upalign(size); // Keep all payloads (and therefore all headers) aligned

  // Iterate to next free cell
  // (short-circuit matters: size() is only evaluated on unused cells,
  // where it is exact)
  la_cell* iter=la_rovptr;
  while(iter->is_used() || iter->size()<size) {
    iter=iter->aligned_next();
    if(iter==la_rovptr) { // Walked the full cycle without finding a fit
//      la_dump();
      custom_heap_error("Out of memory");
    }
  }

  // To split a cell is only useful if it can accommodate
  // some data (i.e. ALIGNMENT) plus one extra cell header
  bool split_cell = ( iter->size() >= (size+ALIGNMENT+sizeof(la_cell)) );

  if(split_cell) {

    la_cell* next=iter->aligned_next();

    // Create new header to hold remainder of cell
    la_cell* remainder= (la_cell*)( (size_t)iter + sizeof(la_cell) + size );

    // Connect cells (iter -> remainder -> next)
    remainder->next=next;
    remainder->prev=iter;
    next->prev=remainder;
    iter->next=remainder;

    // Set tags last: set_used() tags the freshly written iter->next
    remainder->set_unused();
    iter->set_used();

    la_rovptr=remainder; // Set to the (free) remainder

    return iter->memptr();

  } else { // Use entire cell

    iter->set_used();
    la_rovptr=iter->aligned_next(); // Set to the cell after the exact match
    return iter->memptr();
  }

}



// Unlink 'cell' from the cyclic list; its extent is thereby absorbed by
// its predecessor. Side effect worth noting: prev->next=next stores an
// untagged (aligned) pointer and so also clears prev's used-tag bit --
// harmless in the current callers, where prev is either already free or
// is about to be freed by list_free() anyway.
inline void delete_cell(la_cell* cell) {
//  check_integrity("delete_cell:cell",cell);

  la_cell* next=cell->aligned_next();
  la_cell* prev=cell->aligned_prev();

  // Connect ends
  prev->next=next;
  next->prev=prev;

  // Alternative: Just jump to freed space if sufficiently large
  // However, this seems to have a somewhat arbitrary maximum in
  // performance depending on the threshold, so we will not use it
//  if(prev->size()>16*ALIGNMENT) la_rovptr=prev;
//  else la_rovptr=next;

  la_rovptr=prev; // Set roving pointer to free space. (Found out empirically that this gives a great performance boost)
}


// Return 'ptr' (previously obtained from list_alloc) to the free list,
// coalescing it with its neighbours when they are free.
inline void list_free(void *ptr) {

  // Guard against null: this function backs the global operator delete,
  // which the C++ standard requires to accept a null pointer. Without
  // this check the header computation below would produce a wild
  // pointer that is then dereferenced.
  if(!ptr) return;

  // Convert pointer to header
  la_cell* freecell = (la_cell*) ( (size_t)ptr - sizeof(la_cell) );

  // Try to join with next: unlinking the free successor extends this
  // cell's extent up to the successor's old neighbour
  la_cell* nextcell=freecell->aligned_next();
  if(!nextcell->is_used()) delete_cell(nextcell);

  // Try to join with previous: unlinking this cell merges it into the
  // free predecessor
  if(!freecell->aligned_prev()->is_used()) delete_cell(freecell);
  else freecell->set_unused(); // Otherwise, just reset used bit
}



// Global operator new/delete replacements routing all allocations
// through the list allocator. new never returns null because
// list_alloc aborts via custom_heap_error() on exhaustion.
//#ifndef STL_REPLACEMENT
//void* operator new(size_t size) throw (std::bad_alloc) {return list_alloc(size);}
//#else
void* operator new(size_t size) {return list_alloc(size);}
//#endif

//void* operator new(size_t size) {return qf_malloc(size);}
void* operator new[](size_t size) {return list_alloc(size);}

// NOTE(review): deleting a null pointer is legal C++ -- verify that
// list_free tolerates null before it computes the header address.
void operator delete(void* mptr) {list_free(mptr);}
void operator delete[](void* mptr) {list_free(mptr);}


/////////////////////////////////// END of List Allocator Algorithm /////////////////////
#endif // USE_TRIVIAL_ALGORITM

#endif // CUSTOM_HEAP



// Global trace callback used by custom_heap_error() and malloc_stats();
// stays null until a client installs one.
heaptracefunction Heap::tracefunc=0;


// Report an estimate of custom-heap usage through the trace callback.
// Compiles to a no-op when CUSTOM_HEAP is disabled or the trivial
// algorithm is active; silent when no callback is installed.
void Heap::malloc_stats() {
#ifdef CUSTOM_HEAP

#ifndef USE_TRIVIAL_ALGORITM
  if(la_end) {
    // NOTE(review): treats the size of the cell just before la_end as
    // "all free memory", which only holds while the top of the heap is
    // not fragmented -- the printed value is an estimate.
    float used_mb=float(MYHEAP_SIZE-la_end->aligned_prev()->size())/float(0x100000); // Estimate by size of last cell
    if(Heap::tracefunc) Heap::tracefunc(("Memory used: "+ftos(used_mb)+"/"+ftos(CUSTOM_HEAP_SIZE)+" MB").c_str());
  }
#endif


#endif // CUSTOM_HEAP
}