51 m_area->chunk_size = size;
52 m_area->alloc_chunks = 0;
53 m_area->dirty_chunks = 0;
54 m_area->next_chunk = 0;
55 m_area->num_chunks = num_chunks;
56 m_area->state_changed = 0;
57 m_area->last_access = -1;
58 m_area->use_bitmap = NULL;
59 m_area->dirty_bitmap = NULL;
60 m_area->self_pointer = NULL;
70 SET_AREA_LOCK_BIT(m_area);
85 state->total_log_size = 0;
86 state->total_inc_size = 0;
87 state->busy_areas = 0;
88 state->dirty_areas = 0;
89 state->num_areas = NUM_AREAS;
90 state->max_num_areas = MAX_NUM_AREAS;
91 state->bitmap_size = 0;
92 state->dirty_bitmap_size = 0;
93 state->timestamp = -1;
97 if (
unlikely(state->areas == NULL)) {
98 rootsim_error(
true,
"Unable to allocate memory.\n");
101 chunk_size = MIN_CHUNK_SIZE;
102 num_chunks = MIN_NUM_CHUNKS;
104 for (i = 0; i < NUM_AREAS; i++) {
107 state->areas[i].idx = i;
108 chunk_size = chunk_size << 1;
119 for (i = 0; i < NUM_AREAS; i++) {
120 rsfree(state->areas[i].self_pointer);
139 size +=
sizeof(
long long);
142 size_new = POWEROF2(size);
143 if (
unlikely(size_new < MIN_CHUNK_SIZE))
144 size_new = MIN_CHUNK_SIZE;
161 m_area->next_chunk++;
163 while (m_area->next_chunk < m_area->num_chunks) {
164 if (!
bitmap_check(m_area->use_bitmap, m_area->next_chunk))
167 m_area->next_chunk++;
176 size_t area_size, bitmap_size;
180 if (
unlikely(size > MAX_CHUNK_SIZE)) {
181 rootsim_error(
false,
"Requested a memory allocation of %d but the limit is %d. Reconfigure MAX_CHUNK_SIZE. Returning NULL.\n",
182 size, MAX_CHUNK_SIZE);
186 m_area = &lp->
mm->m_state->areas[(int)log2(size) -
187 (int)log2(MIN_CHUNK_SIZE)];
194 while (m_area != NULL && m_area->alloc_chunks == m_area->num_chunks) {
196 if (m_area->next == -1) {
199 m_area = &(lp->
mm->m_state->areas[m_area->next]);
208 if (prev_area != NULL) {
213 if (m_area == NULL) {
215 printf(
"Initializing an additional area\n");
218 if (lp->
mm->m_state->num_areas == lp->
mm->m_state->max_num_areas) {
222 if ((lp->
mm->m_state->max_num_areas << 1) > MAX_LIMIT_NUM_AREAS) {
229 lp->
mm->m_state->max_num_areas <<= 1;
231 rootsim_error(
true,
"To reimplement\n");
239 "DyMeLoR: cannot reallocate the block of malloc_area.\n");
240 lp->
mm->m_state->max_num_areas >>= 1;
248 lp->
mm->m_state->areas = tmp;
251 m_area = &lp->
mm->m_state->areas[lp->
mm->m_state->num_areas];
261 m_area->idx = lp->
mm->m_state->num_areas;
262 lp->
mm->m_state->num_areas++;
263 prev_area->next = m_area->idx;
264 m_area->prev = prev_area->idx;
268 if (m_area->area == NULL) {
272 area_size =
sizeof(
malloc_area *) + bitmap_size * 2 + m_area->num_chunks * size;
275 m_area->self_pointer = rsalloc(area_size);
276 bzero(m_area->self_pointer, area_size);
278 if (
unlikely(m_area->self_pointer == NULL)) {
279 rootsim_error(
true,
"Error while allocating memory.\n");
282 m_area->dirty_chunks = 0;
283 *(
unsigned long long *)(m_area->self_pointer) =
284 (
unsigned long long)m_area;
287 ((
unsigned char *)m_area->self_pointer +
290 m_area->dirty_bitmap =
291 ((
unsigned char *)m_area->use_bitmap + bitmap_size);
294 (
void *)((
char *)m_area->dirty_bitmap + bitmap_size);
297 if (
unlikely(m_area->area == NULL)) {
298 rootsim_error(
true,
"Error while allocating memory.\n");
301 if (
bitmap_check(m_area->use_bitmap, m_area->next_chunk)) {
302 rootsim_error(
true,
"Error: reallocating an already allocated chunk at %s:%d\n");
306 ptr = (
void *)((
char *)m_area->area + (m_area->next_chunk * size));
308 bitmap_set(m_area->use_bitmap, m_area->next_chunk);
312 if (m_area->alloc_chunks == 0) {
313 lp->
mm->m_state->bitmap_size += bitmap_size;
314 lp->
mm->m_state->busy_areas++;
317 if (m_area->state_changed == 0) {
318 lp->
mm->m_state->dirty_bitmap_size += bitmap_size;
319 lp->
mm->m_state->dirty_areas++;
322 m_area->state_changed = 1;
325 if (!CHECK_LOG_MODE_BIT(m_area)) {
326 if ((
double)m_area->alloc_chunks / (
double)m_area->num_chunks > MAX_LOG_THRESHOLD) {
327 SET_LOG_MODE_BIT(m_area);
328 lp->
mm->m_state->total_log_size += (m_area->num_chunks - (m_area->alloc_chunks - 1)) * size;
330 lp->
mm->m_state->total_log_size += size;
336 m_area->alloc_chunks++;
340 memset(ptr, 0xe8, size);
343 *(
unsigned long long *)ptr = (
unsigned long long)m_area->self_pointer;
349 return (
void *)((
char *)ptr +
sizeof(
long long));
352 void do_free(
struct lp_struct *lp,
void *ptr)
358 size_t chunk_size, bitmap_size;
366 rootsim_error(
false,
"Invalid pointer during free\n");
370 m_area = get_area(ptr);
373 "Invalid pointer during free: malloc_area NULL\n");
381 chunk_size = UNTAGGED_CHUNK_SIZE(m_area);
383 idx = (int)((
char *)ptr - (
char *)m_area->area) / chunk_size;
386 rootsim_error(
false,
"double free() corruption or address not malloc'd\n");
393 m_area->alloc_chunks--;
395 if (m_area->alloc_chunks == 0) {
396 lp->
mm->m_state->bitmap_size -= bitmap_size;
397 lp->
mm->m_state->busy_areas--;
400 if (m_area->state_changed == 0) {
401 lp->
mm->m_state->dirty_bitmap_size += bitmap_size;
402 lp->
mm->m_state->dirty_areas++;
407 m_area->dirty_chunks--;
409 if (m_area->state_changed == 1 && m_area->dirty_chunks == 0)
410 lp->
mm->m_state->dirty_bitmap_size -= bitmap_size;
412 lp->
mm->m_state->total_inc_size -= chunk_size;
414 if (
unlikely(m_area->dirty_chunks < 0)) {
415 rootsim_error(
true,
"negative number of chunks\n");
419 m_area->state_changed = 1;
423 if (CHECK_LOG_MODE_BIT(m_area)) {
424 if ((
double)m_area->alloc_chunks / (
double)m_area->num_chunks < MIN_LOG_THRESHOLD) {
425 RESET_LOG_MODE_BIT(m_area);
426 lp->
mm->m_state->total_log_size -= (m_area->num_chunks - m_area->alloc_chunks) * chunk_size;
429 lp->
mm->m_state->total_log_size -= chunk_size;
432 if (idx < m_area->next_chunk)
433 m_area->next_chunk = idx;
476 int first_chunk, last_chunk, i, chk_size;
482 if (m_area != NULL) {
484 chk_size = UNTAGGED_CHUNK_SIZE(m_area->chunk_size);
488 (int)(((
char *)base - (
char *)m_area->area) / chk_size);
495 last_chunk = m_area->num_chunks - 1;
497 last_chunk = (int)(((
char *)base + size - 1 - (
char *)m_area->area) / chk_size);
501 if (m_area->state_changed == 1) {
502 if (m_area->dirty_chunks == 0)
503 lp->
mm->m_state->dirty_bitmap_size += bitmap_size;
505 lp->
mm->m_state->dirty_areas++;
506 lp->
mm->m_state->dirty_bitmap_size += bitmap_size * 2;
507 m_area->state_changed = 1;
510 for (i = first_chunk; i <= last_chunk; i++) {
515 lp->
mm->m_state->total_inc_size += chk_size;
517 m_area->dirty_chunks++;
539 if (is_incremental(logged_state)) {
542 logged_state->dirty_bitmap_size +
543 logged_state->total_inc_size;
547 logged_state->bitmap_size + logged_state->total_log_size;
570 switch_to_platform_mode();
580 switch_to_application_mode();
604 switch_to_platform_mode();
614 switch_to_application_mode();
636 return rsrealloc(ptr, size);
648 m_area = get_area(ptr);
652 old_size = UNTAGGED_CHUNK_SIZE(m_area);
659 copy_size =
min(size, old_size);
660 memcpy(new_buffer, ptr, copy_size);
682 return rscalloc(nmemb, size);
685 if (
unlikely(nmemb == 0 || size == 0))
692 bzero(buffer, nmemb * size);
704 for (i = NUM_AREAS; i < state->num_areas; i++) {
705 m_area = &state->areas[i];
707 if (m_area->alloc_chunks == 0
708 && m_area->last_access < time_barrier
709 && !CHECK_AREA_LOCK_BIT(m_area)) {
711 if (m_area->self_pointer != NULL) {
714 rsfree(m_area->self_pointer);
716 m_area->use_bitmap = NULL;
717 m_area->dirty_bitmap = NULL;
718 m_area->self_pointer = NULL;
720 m_area->state_changed = 0;
723 if (m_area->prev != -1)
724 state->areas[m_area->prev].next = m_area->next;
725 if (m_area->next != -1)
726 state->areas[m_area->next].prev = m_area->prev;
729 if (i < state->num_areas - 1) {
730 memcpy(m_area, &state->areas[state->num_areas - 1],
sizeof(
malloc_area));
732 if (m_area->prev != -1)
733 state->areas[m_area->prev].next = m_area->idx;
734 if (m_area->next != -1)
735 state->areas[m_area->next].prev = m_area->idx;
738 *(
long long *)m_area->self_pointer = (
long long)m_area;
#define bitmap_set(bitmap, bit_index)
This sets the bit at index bit_index in the given bitmap.
#define min(a, b)
Macro to find the minimum of two values.
static size_t compute_size(size_t size)
void * do_malloc(struct lp_struct *lp, size_t size)
#define atomic_read(v)
Read operation on an atomic counter.
#define bitmap_required_size(requested_bits)
Computes the required size of a bitmap with requested_bits entries.
void dirty_mem(void *base, int size)
void atomic_dec(atomic_t *)
void * __wrap_calloc(size_t nmemb, size_t size)
The ROOT-Sim scheduler main module header.
#define bitmap_reset(bitmap, bit_index)
This resets (clears) the bit at index bit_index in the given bitmap.
struct memory_map * mm
Memory map of the LP.
__thread struct lp_struct * current
This is a per-thread variable pointing to the block state of the LP currently scheduled.
void __wrap_free(void *ptr)
bool is_incremental
Tells whether the log is incremental or full (when used for logging).
#define atomic_set(v, i)
Set operation on an atomic counter.
double simtime_t
This defines the type with which timestamps are represented.
simulation_configuration rootsim_config
This global variable holds the configuration for the current simulation.
static void malloc_area_init(malloc_area *m_area, size_t size, int num_chunks)
Memory Manager main header.
static void find_next_free(malloc_area *m_area)
#define bitmap_check(bitmap, bit_index)
This checks whether the bit at index bit_index in the given bitmap is set or unset.
void * __wrap_realloc(void *ptr, size_t size)
Definition of the memory map.
size_t get_log_size(malloc_state *logged_state)
malloc_state * malloc_state_init(void)
void * __wrap_malloc(size_t size)
bool serial
If the simulation must be run serially.
void atomic_inc(atomic_t *)
#define unlikely(exp)
Optimize the branch as likely not taken.
This structure lets DyMeLoR handle one malloc area (for serving given-size memory requests).