
use static logger

This commit is contained in:
2022-07-22 23:32:46 +02:00
parent ec09b0e6d2
commit bd95c02a08
24 changed files with 139 additions and 151 deletions
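The diff below replaces the level-manipulator style of logging (log << INFO << ...) with per-level methods on a NamedLogger member (log.info() << ...), backed by a single shared logger as the commit title suggests. As a rough illustration of the call-site change only, here is a minimal sketch of what such an interface could look like; the Logger/NamedLogger definitions, method names, and the use of std::ostream below are assumptions made for this illustration, not the repository's actual code.

// Hypothetical sketch only -- the real Logger/NamedLogger in this repository may differ.
// Idea: every NamedLogger forwards level-tagged messages to one shared (static) backend,
// so call sites read log.info() << "..." instead of log << INFO << "...".
#include <iostream>
#include <string>
#include <utility>

class Logger {                                  // assumed shared backend
public:
    std::ostream& log(const std::string& name, const char* level) {
        return std::cout << "[" << level << "][" << name << "] ";
    }
};

class NamedLogger {
public:
    explicit NamedLogger(std::string n) : name(std::move(n)) {}

    std::ostream& info()  { return backend.log(name, "INFO");  }
    std::ostream& debug() { return backend.log(name, "DEBUG"); }
    std::ostream& trace() { return backend.log(name, "TRACE"); }
    std::ostream& error() { return backend.log(name, "ERROR"); }

private:
    static Logger backend;                      // the "static logger" shared by all instances
    std::string name;
};

Logger NamedLogger::backend;                    // single shared instance

// Example call site, mirroring the pattern used throughout this commit:
//     NamedLogger log("BumpAllocator");
//     log.info() << "Initialized Bump Allocator" << std::endl;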

View File

@@ -25,7 +25,7 @@ void BumpAllocator::init() {
 this->allocations = 0;
 this->next = (unsigned char*)heap_start;
-log << INFO << "Initialized Bump Allocator" << endl;
+log.info() << "Initialized Bump Allocator" << endl;
 }
 /*****************************************************************************
@@ -51,10 +51,10 @@ void* BumpAllocator::alloc(unsigned int req_size) {
 /* Code must be inserted here */
-log << DEBUG << "Requested " << hex << req_size << " Bytes" << endl;
+log.debug() << "Requested " << hex << req_size << " Bytes" << endl;
 if (req_size + (unsigned int)this->next > this->heap_end) {
-log << ERROR << " - More memory requested than available :(" << endl;
+log.error() << " - More memory requested than available :(" << endl;
 return NULL;
 }
@@ -62,7 +62,7 @@ void* BumpAllocator::alloc(unsigned int req_size) {
 this->next = (unsigned char*)((unsigned int)this->next + req_size);
 this->allocations = this->allocations + 1;
-log << TRACE << " - Allocated " << hex << req_size << " Bytes." << endl;
+log.trace() << " - Allocated " << hex << req_size << " Bytes." << endl;
 return allocated;
 }
@@ -73,5 +73,5 @@ void* BumpAllocator::alloc(unsigned int req_size) {
 * Description: Not implemented. *
 *****************************************************************************/
 void BumpAllocator::free(void* ptr) {
-log << ERROR << " mm_free: ptr= " << hex << (unsigned int)ptr << ", not supported" << endl;
+log.error() << " mm_free: ptr= " << hex << (unsigned int)ptr << ", not supported" << endl;
 }

View File

@@ -21,7 +21,7 @@ private:
 unsigned char* next;
 unsigned int allocations;
-Logger log;
+NamedLogger log;
 BumpAllocator(Allocator& copy) = delete; // Prevent copying

View File

@@ -38,7 +38,7 @@ void LinkedListAllocator::init() {
 this->free_start->size = this->heap_size - sizeof(free_block_t);
 this->free_start->next = this->free_start; // Only one block, points to itself
-log << INFO << "Initialized LinkedList Allocator" << endl;
+log.info() << "Initialized LinkedList Allocator" << endl;
 }
 /*****************************************************************************
@@ -77,10 +77,10 @@ void* LinkedListAllocator::alloc(unsigned int req_size) {
 /* Code must be inserted here */
 // NOTE: the next pointer points to the start of the header; the returned pointer points to the start of the usable free memory
-log << DEBUG << "Requested " << hex << req_size << " Bytes" << endl;
+log.debug() << "Requested " << hex << req_size << " Bytes" << endl;
 if (this->free_start == NULL) {
-log << ERROR << " - No free memory remaining :(" << endl;
+log.error() << " - No free memory remaining :(" << endl;
 this->lock.release();
 return NULL;
 }
@@ -89,7 +89,7 @@ void* LinkedListAllocator::alloc(unsigned int req_size) {
 unsigned int req_size_diff = (BASIC_ALIGN - req_size % BASIC_ALIGN) % BASIC_ALIGN;
 unsigned int rreq_size = req_size + req_size_diff;
 if (req_size_diff > 0) {
-log << TRACE << " - Rounded to word border (+" << dec << req_size_diff << " bytes)" << endl;
+log.trace() << " - Rounded to word border (+" << dec << req_size_diff << " bytes)" << endl;
 }
 free_block_t* current = this->free_start;
@@ -121,7 +121,7 @@ void* LinkedListAllocator::alloc(unsigned int req_size) {
 // Next-fit
 this->free_start = new_next;
-log << TRACE << " - Allocated " << hex << rreq_size << " Bytes with cutting" << endl;
+log.trace() << " - Allocated " << hex << rreq_size << " Bytes with cutting" << endl;
 } else {
 // Block too small to be cut, allocate whole block
@@ -129,11 +129,11 @@ void* LinkedListAllocator::alloc(unsigned int req_size) {
 this->free_start = current->next; // Pointer keeps pointing to current if last block
 if (this->free_start == current) {
 // No free block remaining
-log << TRACE << " - Disabled freelist" << endl;
+log.trace() << " - Disabled freelist" << endl;
 this->free_start = NULL;
 }
-log << TRACE << " - Allocated " << hex << current->size << " Bytes without cutting" << endl;
+log.trace() << " - Allocated " << hex << current->size << " Bytes without cutting" << endl;
 }
 // Unlink block
@@ -149,14 +149,14 @@ void* LinkedListAllocator::alloc(unsigned int req_size) {
 // HACK: Checking list integrity
 // free_block_t* c = current;
-// log << DEBUG << "Checking list Integrity" << endl;
+// log.debug() << "Checking list Integrity" << endl;
 // while (c->allocated) {
-// log << DEBUG << hex << (unsigned int)c << endl;
+// log.debug() << hex << (unsigned int)c << endl;
 // c = c->next;
 // }
-// log << DEBUG << "Finished check" << endl;
+// log.debug() << "Finished check" << endl;
-log << DEBUG << "Returning memory address " << hex << ((unsigned int)current + sizeof(free_block_t)) << endl;
+log.debug() << "Returning memory address " << hex << ((unsigned int)current + sizeof(free_block_t)) << endl;
 this->lock.release();
 return (void*)((unsigned int)current + sizeof(free_block_t)); // start of the memory, not the header
 }
@@ -164,7 +164,7 @@ void* LinkedListAllocator::alloc(unsigned int req_size) {
 current = current->next;
 } while (current != this->free_start); // Stop when arriving at the first block again
-log << ERROR << " - More memory requested than available :(" << endl;
+log.error() << " - More memory requested than available :(" << endl;
 this->lock.release();
 return NULL;
 }
@@ -182,10 +182,10 @@ void LinkedListAllocator::free(void* ptr) {
 // Account for header
 free_block_t* block_start = (free_block_t*)((unsigned int)ptr - sizeof(free_block_t));
-log << DEBUG << "Freeing " << hex << (unsigned int)ptr << ", Size: " << block_start->size << endl;
+log.debug() << "Freeing " << hex << (unsigned int)ptr << ", Size: " << block_start->size << endl;
 if (!block_start->allocated) {
-log << ERROR << "Block already free" << endl;
+log.error() << "Block already free" << endl;
 this->lock.release();
 return;
 }
@@ -197,7 +197,7 @@ void LinkedListAllocator::free(void* ptr) {
 block_start->allocated = false;
 block_start->next = block_start;
-log << TRACE << " - Enabling freelist with one block" << endl;
+log.trace() << " - Enabling freelist with one block" << endl;
 this->lock.release();
 return;
 }
@@ -226,16 +226,16 @@ void LinkedListAllocator::free(void* ptr) {
 // - If previous_free_next and block_start are the same block we can merge backward
 // Should result in: [block_start]
-// log << TRACE << "Before doing any merging:" << endl;
-// log << TRACE << "previous_free:" << hex << (unsigned int)previous_free << "Size:" << previous_free->size << "Next:" << (unsigned int)previous_free->next << endl;
-// log << TRACE << "previous_free_next:" << hex << (unsigned int)previous_free_next << "Size:" << previous_free_next->size << "Next:" << (unsigned int)previous_free_next->next << endl;
-// log << TRACE << "block_start:" << hex << (unsigned int)block_start << "Size:" << block_start->size << "Next:" << (unsigned int)block_start->next << endl;
-// log << TRACE << "next_block:" << hex << (unsigned int)next_block << "Size:" << next_block->size << "Next:" << (unsigned int)next_block->next << endl;
-// log << TRACE << "next_free:" << hex << (unsigned int)next_free << "Size:" << next_free->size << "Next:" << (unsigned int)next_free->next << endl;
+// log.trace() << "Before doing any merging:" << endl;
+// log.trace() << "previous_free:" << hex << (unsigned int)previous_free << "Size:" << previous_free->size << "Next:" << (unsigned int)previous_free->next << endl;
+// log.trace() << "previous_free_next:" << hex << (unsigned int)previous_free_next << "Size:" << previous_free_next->size << "Next:" << (unsigned int)previous_free_next->next << endl;
+// log.trace() << "block_start:" << hex << (unsigned int)block_start << "Size:" << block_start->size << "Next:" << (unsigned int)block_start->next << endl;
+// log.trace() << "next_block:" << hex << (unsigned int)next_block << "Size:" << next_block->size << "Next:" << (unsigned int)next_block->next << endl;
+// log.trace() << "next_free:" << hex << (unsigned int)next_free << "Size:" << next_free->size << "Next:" << (unsigned int)next_free->next << endl;
 // Try to merge forward ========================================================================
 if (next_block == next_free) {
-log << TRACE << " - Merging block forward" << endl;
+log.trace() << " - Merging block forward" << endl;
 // Current and next adjacent block can be merged
 // [previous_free | previous_free_next | <> | block_start | next_free]
@@ -257,7 +257,7 @@ void LinkedListAllocator::free(void* ptr) {
 if (this->free_start == next_free) {
 // next_free is now invalid after merge
-log << TRACE << " - Moving freelist start to " << hex << (unsigned int)block_start << endl;
+log.trace() << " - Moving freelist start to " << hex << (unsigned int)block_start << endl;
 this->free_start = block_start;
 }
 } else {
@@ -273,7 +273,7 @@ void LinkedListAllocator::free(void* ptr) {
 // Try to merge backward =====================================================================
 if (previous_free_next == block_start) {
-log << TRACE << " - Merging block backward" << endl;
+log.trace() << " - Merging block backward" << endl;
 // Current and previous adjacent block can be merged
 // [previous_free | block_start]
@@ -286,7 +286,7 @@ void LinkedListAllocator::free(void* ptr) {
 if (this->free_start == block_start) {
 // block_start is now invalid after merge
-log << TRACE << " - Moving freelist start to " << hex << (unsigned int)previous_free << endl;
+log.trace() << " - Moving freelist start to " << hex << (unsigned int)previous_free << endl;
 this->free_start = previous_free;
 }
 }

View File

@@ -41,7 +41,7 @@ private:
 // aren't reachable from the freelist.
 static struct free_block* find_previous_block(struct free_block*);
-Logger log;
+NamedLogger log;
 SpinLock lock;
 public:

View File

@@ -14,7 +14,7 @@ void TreeAllocator::init() {
 this->free_start->next = (list_block_t*)this->free_start;
 this->free_start->previous = (list_block_t*)this->free_start;
-log << INFO << "Initialized Tree Allocator" << endl;
+log.info() << "Initialized Tree Allocator" << endl;
 }
 void TreeAllocator::dump_free_memory() {
@@ -29,7 +29,7 @@ void TreeAllocator::dump_free_memory() {
 }
 void* TreeAllocator::alloc(unsigned int req_size) {
-log << DEBUG << "Requested " << dec << req_size << " Bytes" << endl;
+log.debug() << "Requested " << dec << req_size << " Bytes" << endl;
 // Round to word borders + tree_block size
 unsigned int rreq_size = req_size;
@@ -37,28 +37,28 @@ void* TreeAllocator::alloc(unsigned int req_size) {
 // the list_block_t is part of every block, but when freeing
 // memory we need enough space to store the rbt metadata
 rreq_size = sizeof(tree_block_t) - sizeof(list_block_t);
-log << TRACE << " - Increased block size for rbt metadata" << endl;
+log.trace() << " - Increased block size for rbt metadata" << endl;
 }
 unsigned int req_size_diff = (BASIC_ALIGN - rreq_size % BASIC_ALIGN) % BASIC_ALIGN;
 rreq_size = rreq_size + req_size_diff;
 if (req_size_diff > 0) {
-log << TRACE << " - Rounded to word border (+" << dec << req_size_diff << " bytes)" << endl;
+log.trace() << " - Rounded to word border (+" << dec << req_size_diff << " bytes)" << endl;
 }
 // Finds smallest block that is large enough
 tree_block_t* best_fit = this->rbt_search_bestfit(rreq_size);
 if (best_fit == NULL) {
-log << ERROR << " - No block found" << endl;
+log.error() << " - No block found" << endl;
 return NULL;
 }
 if (best_fit->allocated) {
 // Something went really wrong
-log << ERROR << " - Block already allocated :(" << endl;
+log.error() << " - Block already allocated :(" << endl;
 return NULL;
 }
 best_fit->allocated = true;
 unsigned int size = this->get_size(best_fit);
-log << TRACE << " - Found best-fit: " << hex << (unsigned int)best_fit << endl;
+log.trace() << " - Found best-fit: " << hex << (unsigned int)best_fit << endl;
 // HACK: I didn't want to handle situations with only one block (where the tree root would
 // get removed), so I make sure there are always at least 2 blocks by inserting a dummy
@@ -72,7 +72,7 @@ void* TreeAllocator::alloc(unsigned int req_size) {
 this->rbt_remove(best_fit); // BUG: Can trigger bluescreen
 if (size > HEAP_MIN_FREE_BLOCK_SIZE + rreq_size + sizeof(list_block_t)) {
 // Block can be cut
-log << TRACE << " - Allocating " << dec << rreq_size << " Bytes with cutting" << endl;
+log.trace() << " - Allocating " << dec << rreq_size << " Bytes with cutting" << endl;
 // [best_fit_start | sizeof(list_block_t) | rreq_size | new_block_start]
 tree_block_t* new_block = (tree_block_t*)((char*)best_fit + sizeof(list_block_t) + rreq_size);
@@ -83,18 +83,18 @@ void* TreeAllocator::alloc(unsigned int req_size) {
 // Don't cut block
 // The block is already correctly positioned in the linked list so we only
 // need to remove it from the freelist, which is done for both cases
-log << TRACE << " - Allocating " << dec << rreq_size << " Bytes without cutting" << endl;
+log.trace() << " - Allocating " << dec << rreq_size << " Bytes without cutting" << endl;
 }
 // HACK: Remove the dummy element
 this->rbt_remove(&dummy);
-log << TRACE << " - Returned address " << hex << (unsigned int)((char*)best_fit + sizeof(list_block_t)) << endl;
+log.trace() << " - Returned address " << hex << (unsigned int)((char*)best_fit + sizeof(list_block_t)) << endl;
 return (void*)((char*)best_fit + sizeof(list_block_t));
 }
 void TreeAllocator::free(void* ptr) {
-log << INFO << "Freeing " << hex << (unsigned int)ptr << endl;
+log.info() << "Freeing " << hex << (unsigned int)ptr << endl;
 list_block_t* block = (list_block_t*)((char*)ptr - sizeof(list_block_t));
 if (!block->allocated) {
@@ -119,7 +119,7 @@ void TreeAllocator::free(void* ptr) {
 if (!next->allocated) {
 // Merge forward
-log << TRACE << " - Merging forward" << endl;
+log.trace() << " - Merging forward" << endl;
 // Remove the next block from all lists as it is now part of our freed block
 this->dll_remove(next);
@@ -132,7 +132,7 @@ void TreeAllocator::free(void* ptr) {
 if (!previous->allocated) {
 // Merge backward
-log << TRACE << " - Merging backward" << endl;
+log.trace() << " - Merging backward" << endl;
 // Remove the current block from all lists as it is now part of the previous block
 // It doesn't have to be removed from rbt as it wasn't in there as it was allocated before

View File

@@ -37,7 +37,7 @@ private:
 // Root of the rbt
 tree_block_t* free_start;
-Logger log;
+NamedLogger log;
 TreeAllocator(Allocator& copy) = delete; // Prevent copying