	m_area->chunk_size = size;
	m_area->alloc_chunks = 0;
	m_area->dirty_chunks = 0;
	m_area->next_chunk = 0;
	m_area->num_chunks = num_chunks;
	m_area->state_changed = 0;
	m_area->last_access = -1;
	m_area->use_bitmap = NULL;
	m_area->dirty_bitmap = NULL;
	m_area->self_pointer = NULL;

	SET_AREA_LOCK_BIT(m_area);
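	/*
	 * The area starts out locked. Judging from the UNTAGGED_CHUNK_SIZE
	 * macro used later on, the lock and log-mode flags live as tag bits
	 * inside chunk_size, which is why the raw field cannot be used
	 * directly as a size.
	 */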
	state->num_areas = NUM_AREAS;
	state->max_num_areas = MAX_NUM_AREAS;
	state->timestamp = -1;
	if (unlikely(state->areas == NULL)) {
		rootsim_error(true, "Unable to allocate memory.\n");
	}
	chunk_size = MIN_CHUNK_SIZE;
	num_chunks = MIN_NUM_CHUNKS;
	for (i = 0; i < NUM_AREAS; i++) {
		malloc_area_init(&state->areas[i], chunk_size, num_chunks);
		state->areas[i].idx = i;
		chunk_size = chunk_size << 1;
	}
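	/*
	 * Each of the NUM_AREAS areas serves one power-of-two size class:
	 * area i hands out chunks of MIN_CHUNK_SIZE << i bytes. Assuming,
	 * for illustration only, MIN_CHUNK_SIZE == 128: area 0 serves
	 * 128-byte chunks, area 1 serves 256-byte chunks, and so on.
	 */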
	for (i = 0; i < NUM_AREAS; i++) {
		free_buddy_memory(mm->buddy, mm->segment->base, mm->m_state->areas[i].self_pointer);
	}
	size += sizeof(long long);

	size_new = POWEROF2(size);
	if (unlikely(size_new < MIN_CHUNK_SIZE))
		size_new = MIN_CHUNK_SIZE;
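	/*
	 * Worked example: a 100-byte request grows to 108 bytes once the
	 * sizeof(long long) tag is accounted for, and POWEROF2() rounds that
	 * up to 128 (assuming MIN_CHUNK_SIZE <= 128), so the request is
	 * served from the 128-byte size class.
	 */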
	m_area->next_chunk++;

	while (m_area->next_chunk < m_area->num_chunks) {
		if (!bitmap_check(m_area->use_bitmap, m_area->next_chunk))
			break;

		m_area->next_chunk++;
	}
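	/*
	 * Linear first-fit scan: next_chunk ends up either at the first free
	 * chunk in use_bitmap, or at num_chunks when the area is full.
	 */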
	size_t area_size, bitmap_size;
	if (unlikely(size == 0)) {
		rootsim_error(false, "Requested a 0 sized malloc\n");
		return NULL;
	}
	if (unlikely(size > MAX_CHUNK_SIZE)) {
		rootsim_error(false, "Requested a memory allocation of %zu but the limit is %d. Reconfigure MAX_CHUNK_SIZE. Returning NULL.\n",
			      size, MAX_CHUNK_SIZE);
		return NULL;
	}
	m_state = lp->mm->m_state;
	m_area = &m_state->areas[B_CTZ(size) - B_CTZ(MIN_CHUNK_SIZE)];
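	/*
	 * size is a power of two >= MIN_CHUNK_SIZE, so the difference of
	 * trailing-zero counts selects the size class directly: with an
	 * assumed MIN_CHUNK_SIZE of 128, a 512-byte request gives
	 * B_CTZ(512) - B_CTZ(128) = 9 - 7 = 2, i.e. the third area.
	 */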
	while (m_area != NULL && m_area->alloc_chunks == m_area->num_chunks) {
		prev_area = m_area;
		if (m_area->next == -1) {
			m_area = NULL;
		} else {
			m_area = &(m_state->areas[m_area->next]);
		}
	}
	if (prev_area != NULL) {
	if (m_area == NULL) {

		if (m_state->num_areas == m_state->max_num_areas) {
			malloc_area *tmp;

			if ((m_state->max_num_areas << 1) > MAX_LIMIT_NUM_AREAS) {
				return NULL;
			}

			m_state->max_num_areas <<= 1;

			rootsim_error(true, "To reimplement\n");

			/* presumably the descriptor table is regrown here via rsrealloc() */
			tmp = rsrealloc(m_state->areas, m_state->max_num_areas * sizeof(malloc_area));
			if (unlikely(tmp == NULL)) {
				rootsim_error(false, "DyMeLoR: cannot reallocate the block of malloc_area.\n");
				m_state->max_num_areas >>= 1;
				return NULL;
			}
			m_state->areas = tmp;
		}

		m_area = &m_state->areas[m_state->num_areas];

		m_area->idx = m_state->num_areas;
		m_state->num_areas++;

		prev_area->next = m_area->idx;
		m_area->prev = prev_area->idx;
	}
	if (m_area->area == NULL) {

		bitmap_size = bitmap_required_size(m_area->num_chunks);

		area_size = sizeof(malloc_area *) + bitmap_size * 2 + m_area->num_chunks * size;

		m_area->self_pointer = (malloc_area *)allocate_buddy_memory(lp->mm->buddy, lp->mm->segment->base, area_size);

		/* check before touching the block: memset of a NULL pointer would crash */
		if (unlikely(m_area->self_pointer == NULL)) {
			rootsim_error(true, "Error while allocating memory.\n");
		}
		memset(m_area->self_pointer, 0, area_size);
		m_area->dirty_chunks = 0;

		*(unsigned long long *)(m_area->self_pointer) = (unsigned long long)m_area;

		m_area->use_bitmap =
		    ((unsigned char *)m_area->self_pointer + sizeof(malloc_area *));

		m_area->dirty_bitmap =
		    ((unsigned char *)m_area->use_bitmap + bitmap_size);

		m_area->area =
		    (void *)((char *)m_area->dirty_bitmap + bitmap_size);
	}
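	/*
	 * The block just carved out of the buddy system therefore has the
	 * layout implied by the area_size computation above:
	 *
	 *   [ tag: malloc_area * ][ use_bitmap ][ dirty_bitmap ][ chunks... ]
	 *    sizeof(malloc_area *)  bitmap_size   bitmap_size    num_chunks * size
	 */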
	if (unlikely(m_area->area == NULL)) {
		rootsim_error(true, "Error while allocating memory.\n");
	}
	if (bitmap_check(m_area->use_bitmap, m_area->next_chunk)) {
		rootsim_error(true, "Error: reallocating an already allocated chunk\n");
	}
	ptr = (void *)((char *)m_area->area + (m_area->next_chunk * size));

	bitmap_set(m_area->use_bitmap, m_area->next_chunk);
	if (m_area->alloc_chunks == 0) {
		m_state->total_log_size += bitmap_size + sizeof(malloc_area);
	}

	if (m_area->state_changed == 0) {
		m_state->total_inc_size += bitmap_size + sizeof(malloc_area);
	}

	m_area->state_changed = 1;
	if (!CHECK_LOG_MODE_BIT(m_area)) {
		if ((double)m_area->alloc_chunks / (double)m_area->num_chunks > MAX_LOG_THRESHOLD) {
			SET_LOG_MODE_BIT(m_area);
			m_state->total_log_size += (m_area->num_chunks - m_area->alloc_chunks) * size;
		} else {
			m_state->total_log_size += size;
		}
	}

	m_area->alloc_chunks++;
	memset(ptr, 0xe8, size);
	*(unsigned long long *)ptr = (unsigned long long)m_area->self_pointer;

	return (void *)((char *)ptr + sizeof(long long));
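/*
 * The caller receives a pointer sizeof(long long) past the tag word, so the
 * owning area can be recovered by stepping back over it. A minimal sketch of
 * that reverse mapping, assuming only the tag layout written above (the real
 * get_area() is defined elsewhere and may differ):
 *
 *	static malloc_area *get_area(void *ptr)
 *	{
 *		char *chunk = (char *)ptr - sizeof(long long);
 *		malloc_area *self = *(malloc_area **)chunk;        // area's self_pointer
 *		return (malloc_area *)*(unsigned long long *)self; // area descriptor
 *	}
 */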
void do_free(struct lp_struct *lp, void *ptr)
{
	int idx;
	size_t chunk_size, bitmap_size;
	if (unlikely(ptr == NULL)) {
		rootsim_error(false, "Requested a free on the NULL pointer\n");
		return;
	}
	m_state = lp->mm->m_state;

	m_area = get_area(ptr);
	if (unlikely(m_area == NULL)) {
		rootsim_error(false, "Invalid pointer during free: malloc_area NULL\n");
		return;
	}
	chunk_size = UNTAGGED_CHUNK_SIZE(m_area);

	idx = (int)((char *)ptr - (char *)m_area->area) / chunk_size;
	if (unlikely(!bitmap_check(m_area->use_bitmap, idx))) {
		rootsim_error(false, "double free() corruption or address not malloc'd\n");
		return;
	}
	bitmap_reset(m_area->use_bitmap, idx);
	m_area->alloc_chunks--;

	if (m_area->alloc_chunks == 0) {
		m_state->total_log_size -= bitmap_size + sizeof(malloc_area);
	}

	if (m_area->state_changed == 0) {
		m_state->total_inc_size += bitmap_size + sizeof(malloc_area);
	}
	if (bitmap_check(m_area->dirty_bitmap, idx)) {
		bitmap_reset(m_area->dirty_bitmap, idx);
		m_area->dirty_chunks--;

		if (m_area->state_changed == 1 && m_area->dirty_chunks == 0)
			m_state->total_inc_size -= bitmap_size;

		m_state->total_inc_size -= chunk_size;

		if (unlikely(m_area->dirty_chunks < 0)) {
			rootsim_error(true, "negative number of chunks\n");
		}
	}
	m_area->state_changed = 1;
	if (CHECK_LOG_MODE_BIT(m_area)) {
		if ((double)m_area->alloc_chunks / (double)m_area->num_chunks < MIN_LOG_THRESHOLD) {
			RESET_LOG_MODE_BIT(m_area);
			m_state->total_log_size -= (m_area->num_chunks - m_area->alloc_chunks) * chunk_size;
		}
	} else {
		m_state->total_log_size -= chunk_size;
	}
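	/*
	 * Together with the MAX_LOG_THRESHOLD test in do_malloc(), this is a
	 * hysteresis on the logging mode: densely populated areas get logged
	 * wholesale, sparse ones per chunk, and the two distinct thresholds
	 * keep the mode from flapping on every malloc/free pair.
	 */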
	if (idx < m_area->next_chunk)
		m_area->next_chunk = idx;
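	/*
	 * Lowering next_chunk on free keeps it pointing at the lowest
	 * possibly-free index, so the first-fit scan in find_next_free()
	 * never skips a reusable chunk.
	 */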
	int first_chunk, last_chunk, i, chk_size;
	if (m_area != NULL) {

		chk_size = UNTAGGED_CHUNK_SIZE(m_area);

		first_chunk =
		    (int)(((char *)base - (char *)m_area->area) / chk_size);

		/* a size of -1 marks the whole area as dirty */
		if (size == -1)
			last_chunk = m_area->num_chunks - 1;
		else
			last_chunk =
			    (int)(((char *)base + size - 1 - (char *)m_area->area) / chk_size);
		if (m_area->state_changed == 1) {
			if (m_area->dirty_chunks == 0)
				lp->mm->m_state->dirty_bitmap_size += bitmap_size;
		} else {
			lp->mm->m_state->dirty_areas++;
			lp->mm->m_state->dirty_bitmap_size += bitmap_size * 2;
			m_area->state_changed = 1;
		}
		for (i = first_chunk; i <= last_chunk; i++) {
			if (!bitmap_check(m_area->dirty_bitmap, i)) {
				bitmap_set(m_area->dirty_bitmap, i);
				lp->mm->m_state->total_inc_size += chk_size;
				m_area->dirty_chunks++;
			}
		}
	}
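	/*
	 * Only chunks dirtied for the first time since the last state change
	 * contribute to total_inc_size: an incremental checkpoint thus pays
	 * for each written chunk once, however many times it is overwritten.
	 */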
	if (is_incremental(logged_state)) {
		return logged_state->total_inc_size;
	}

	return logged_state->total_log_size;
	switch_to_platform_mode();

	switch_to_application_mode();

	switch_to_platform_mode();

	switch_to_application_mode();
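/*
 * These calls appear to bracket the __wrap_malloc()/__wrap_free() bodies:
 * the allocator bookkeeping runs in platform mode, and application mode is
 * restored before control goes back to the simulation model.
 */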
		return rsrealloc(ptr, size);
	m_area = get_area(ptr);

	old_size = UNTAGGED_CHUNK_SIZE(m_area);
	copy_size = min(size, old_size);
	memcpy(new_buffer, ptr, copy_size);
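	/*
	 * Per the realloc() contract, min(new size, old size) bytes are
	 * preserved. old_size is the full (power-of-two) chunk size and may
	 * exceed the caller's original request, but the extra bytes belong to
	 * the same chunk, so copying them is harmless.
	 */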
		return rscalloc(nmemb, size);
	if (unlikely(nmemb == 0 || size == 0))
		return NULL;

	bzero(buffer, nmemb * size);
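	/*
	 * Note: nmemb * size can overflow size_t. A fully defensive calloc
	 * would reject that case before allocating; a sketch (not part of the
	 * original code) of such a guard:
	 *
	 *	if (nmemb > SIZE_MAX / size)
	 *		return NULL;
	 */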
	for (i = NUM_AREAS; i < state->num_areas; i++) {
		m_area = &state->areas[i];

		if (m_area->alloc_chunks == 0
		    && m_area->last_access < time_barrier
		    && !CHECK_AREA_LOCK_BIT(m_area)) {

			if (m_area->self_pointer != NULL) {

				rsfree(m_area->self_pointer);

				m_area->use_bitmap = NULL;
				m_area->dirty_bitmap = NULL;
				m_area->self_pointer = NULL;
				m_area->area = NULL;

				m_area->state_changed = 0;

				if (m_area->prev != -1)
					state->areas[m_area->prev].next = m_area->next;
				if (m_area->next != -1)
					state->areas[m_area->next].prev = m_area->prev;

				if (i < state->num_areas - 1) {
					memcpy(m_area, &state->areas[state->num_areas - 1], sizeof(malloc_area));
					/* the moved descriptor now lives at slot i */
					m_area->idx = i;

					if (m_area->prev != -1)
						state->areas[m_area->prev].next = m_area->idx;
					if (m_area->next != -1)
						state->areas[m_area->next].prev = m_area->idx;

					*(long long *)m_area->self_pointer = (long long)m_area;
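					/*
					 * The tag word at the head of the buddy block is
					 * rewritten here because, after the compacting memcpy,
					 * the descriptor lives at a new address; otherwise
					 * chunk-to-area lookups would resolve to the stale copy.
					 */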