Implement malloc_stats for nano.
* libc/stdlib/nano-mallocr.c (RONECALL): New parameter macro. (sbrk_start): New variable. (current_mallinfo, free_list): Forward declaration of data. (chunk): Define size as long type. (mallinfo): Synchronize with latest malloc.h. (nano_*): Forward declaration of functions. (get_chunk_from_ptr): Defined to static inline. (nano_mallinfo): Implement this function. (nano_malloc_stats): Implement this function.
This commit is contained in:
parent
4afcc6ced9
commit
566fd12b68
|
@ -1,4 +1,17 @@
|
|||
2013-10-30 Sebastian Huber <sebastian.huber@embedded-brains.de>
|
||||
2013-10-30 Joey Ye <joey.ye@arm.com>
|
||||
|
||||
Implement malloc_stats for nano.
|
||||
* libc/stdlib/nano-mallocr.c (RONECALL): New parameter macro.
|
||||
(sbrk_start): New variable.
|
||||
(current_mallinfo, free_list): Forward declaration of data.
|
||||
(chunk): Define size as long type.
|
||||
(mallinfo): Synchronize with latest malloc.h.
|
||||
(nano_*): Forward declaration of functions.
|
||||
(get_chunk_from_ptr): Define as static inline.
|
||||
(nano_mallinfo): Implement this function.
|
||||
(nano_malloc_stats): Implement this function.
|
||||
|
||||
2013-10-30 Sebastian Huber <sebastian.huber@embedded-brains.de>
|
||||
|
||||
* libc/include/machine/_default_types.h: Revert change from 2013-10-10.
|
||||
|
||||
|
|
|
@ -54,6 +54,7 @@
|
|||
#define RARG struct _reent *reent_ptr,
|
||||
#define RONEARG struct _reent *reent_ptr
|
||||
#define RCALL reent_ptr,
|
||||
#define RONECALL reent_ptr
|
||||
|
||||
/* Disable MALLOC_LOCK so far. So it won't be thread safe */
|
||||
#define MALLOC_LOCK /*__malloc_lock(reent_ptr) */
|
||||
|
@ -79,6 +80,7 @@
|
|||
#define RARG
|
||||
#define RONEARG
|
||||
#define RCALL
|
||||
#define RONECALL
|
||||
#define MALLOC_LOCK
|
||||
#define MALLOC_UNLOCK
|
||||
#define RERRNO errno
|
||||
|
@ -97,8 +99,10 @@
|
|||
#define nano_mallopt mallopt
|
||||
#endif /* ! INTERNAL_NEWLIB */
|
||||
|
||||
/* Define free_list as internal name to avoid conflict with user names */
|
||||
/* Redefine names to avoid conflict with user names */
|
||||
#define free_list __malloc_free_list
|
||||
#define sbrk_start __malloc_sbrk_start
|
||||
#define current_mallinfo __malloc_current_mallinfo
|
||||
|
||||
#define ALIGN_TO(size, align) \
|
||||
(((size) + (align) -1) & ~((align) -1))
|
||||
|
@ -134,19 +138,53 @@ typedef struct malloc_chunk
|
|||
*/
|
||||
/* size of the allocated payload area, including size before
|
||||
CHUNK_OFFSET */
|
||||
int size;
|
||||
long size;
|
||||
|
||||
/* since here, the memory is either the next free block, or data load */
|
||||
struct malloc_chunk * next;
|
||||
}chunk;
|
||||
|
||||
/* Mirror of the mallinfo structure from malloc.h; field names and
 * types must stay in sync with that header.  Only arena, uordblks
 * and fordblks are maintained by nano malloc; the rest are kept for
 * layout compatibility and always read as zero. */
struct mallinfo
{
  size_t arena;    /* total space allocated from system */
  size_t ordblks;  /* number of non-inuse chunks */
  size_t smblks;   /* unused -- always zero */
  size_t hblks;    /* number of mmapped regions */
  size_t hblkhd;   /* total space in mmapped regions */
  size_t usmblks;  /* unused -- always zero */
  size_t fsmblks;  /* unused -- always zero */
  size_t uordblks; /* total allocated space */
  size_t fordblks; /* total non-inuse space */
  size_t keepcost; /* top-most, releasable (via malloc_trim) space */
};
|
||||
|
||||
#define CHUNK_OFFSET ((malloc_size_t)(&(((struct malloc_chunk *)0)->next)))
|
||||
|
||||
/* size of smallest possible chunk. A memory piece smaller than this size
|
||||
* won't be able to create a chunk */
|
||||
#define MALLOC_MINCHUNK (CHUNK_OFFSET + MALLOC_PADDING + MALLOC_MINSIZE)
|
||||
|
||||
static chunk * get_chunk_from_ptr(void * ptr)
|
||||
/* Forward data declarations */
|
||||
extern chunk * free_list;
|
||||
extern char * sbrk_start;
|
||||
extern struct mallinfo current_mallinfo;
|
||||
|
||||
/* Forward function declarations */
|
||||
extern void * nano_malloc(RARG malloc_size_t);
|
||||
extern void nano_free (RARG void * free_p);
|
||||
extern void nano_cfree(RARG void * ptr);
|
||||
extern void * nano_calloc(RARG malloc_size_t n, malloc_size_t elem);
|
||||
extern struct mallinfo nano_mallinfo(RONEARG);
|
||||
extern void nano_malloc_stats(RONEARG);
|
||||
extern malloc_size_t nano_malloc_usable_size(RARG void * ptr);
|
||||
extern void * nano_realloc(RARG void * ptr, malloc_size_t size);
|
||||
extern void * nano_memalign(RARG size_t align, size_t s);
|
||||
extern int nano_mallopt(RARG int parameter_number, int parameter_value);
|
||||
extern void * nano_valloc(RARG size_t s);
|
||||
extern void * nano_pvalloc(RARG size_t s);
|
||||
|
||||
static inline chunk * get_chunk_from_ptr(void * ptr)
|
||||
{
|
||||
chunk * c = (chunk *)((char *)ptr - CHUNK_OFFSET);
|
||||
/* Skip the padding area */
|
||||
|
@ -155,8 +193,12 @@ static chunk * get_chunk_from_ptr(void * ptr)
|
|||
}
|
||||
|
||||
#ifdef DEFINE_MALLOC
|
||||
/* List header of free blocks */
|
||||
chunk * free_list = NULL;
|
||||
|
||||
/* Starting point of memory allocated from system */
|
||||
char * sbrk_start = NULL;
|
||||
|
||||
/** Function sbrk_aligned
|
||||
* Algorithm:
|
||||
* Use sbrk() to obtain more memory and ensure it is CHUNK_ALIGN aligned
|
||||
|
@ -167,6 +209,8 @@ static void* sbrk_aligned(RARG malloc_size_t s)
|
|||
{
|
||||
char *p, *align_p;
|
||||
|
||||
if (sbrk_start == NULL) sbrk_start = _sbrk_r(RCALL 0);
|
||||
|
||||
p = _sbrk_r(RCALL s);
|
||||
|
||||
/* sbrk returns -1 if fail to allocate */
|
||||
|
@ -280,7 +324,6 @@ void * nano_malloc(RARG malloc_size_t s)
|
|||
#ifdef DEFINE_FREE
|
||||
#define MALLOC_CHECK_DOUBLE_FREE
|
||||
|
||||
extern chunk * free_list;
|
||||
/** Function nano_free
|
||||
* Implementation of libc free.
|
||||
* Algorithm:
|
||||
|
@ -381,8 +424,6 @@ void nano_free (RARG void * free_p)
|
|||
#endif /* DEFINE_FREE */
|
||||
|
||||
#ifdef DEFINE_CFREE
|
||||
void nano_free (RARG void * free_p);
|
||||
|
||||
void nano_cfree(RARG void * ptr)
|
||||
{
|
||||
nano_free(RCALL ptr);
|
||||
|
@ -390,8 +431,6 @@ void nano_cfree(RARG void * ptr)
|
|||
#endif /* DEFINE_CFREE */
|
||||
|
||||
#ifdef DEFINE_CALLOC
|
||||
void * nano_malloc(RARG malloc_size_t s);
|
||||
|
||||
/* Function nano_calloc
|
||||
* Implement calloc simply by calling malloc and set zero */
|
||||
void * nano_calloc(RARG malloc_size_t n, malloc_size_t elem)
|
||||
|
@ -403,10 +442,6 @@ void * nano_calloc(RARG malloc_size_t n, malloc_size_t elem)
|
|||
#endif /* DEFINE_CALLOC */
|
||||
|
||||
#ifdef DEFINE_REALLOC
|
||||
void * nano_malloc(RARG malloc_size_t s);
|
||||
void nano_free (RARG void * free_p);
|
||||
malloc_size_t nano_malloc_usable_size(RARG void * ptr);
|
||||
|
||||
/* Function nano_realloc
|
||||
* Implement realloc by malloc + memcpy */
|
||||
void * nano_realloc(RARG void * ptr, malloc_size_t size)
|
||||
|
@ -438,32 +473,49 @@ void * nano_realloc(RARG void * ptr, malloc_size_t size)
|
|||
#endif /* DEFINE_REALLOC */
|
||||
|
||||
#ifdef DEFINE_MALLINFO
|
||||
struct mallinfo
|
||||
{
|
||||
int arena; /* total space allocated from system */
|
||||
int ordblks; /* number of non-inuse chunks */
|
||||
int smblks; /* unused -- always zero */
|
||||
int hblks; /* number of mmapped regions */
|
||||
int hblkhd; /* total space in mmapped regions */
|
||||
int usmblks; /* unused -- always zero */
|
||||
int fsmblks; /* unused -- always zero */
|
||||
int uordblks; /* total allocated space */
|
||||
int fordblks; /* total non-inuse space */
|
||||
int keepcost; /* top-most, releasable (via malloc_trim) space */
|
||||
};
|
||||
|
||||
static struct mallinfo current_mallinfo={0,0,0,0,0,0,0,0,0,0};
|
||||
struct mallinfo current_mallinfo={0,0,0,0,0,0,0,0,0,0};
|
||||
|
||||
struct mallinfo nano_mallinfo(RONEARG)
|
||||
{
|
||||
char * sbrk_now;
|
||||
chunk * pf;
|
||||
size_t free_size = 0;
|
||||
size_t total_size;
|
||||
|
||||
MALLOC_LOCK;
|
||||
|
||||
if (sbrk_start == NULL) total_size = 0;
|
||||
else {
|
||||
sbrk_now = _sbrk_r(RCALL 0);
|
||||
|
||||
if (sbrk_now == (void *)-1)
|
||||
total_size = (size_t)-1;
|
||||
else
|
||||
total_size = (size_t) (sbrk_now - sbrk_start);
|
||||
}
|
||||
|
||||
for (pf = free_list; pf; pf = pf->next)
|
||||
free_size += pf->size;
|
||||
|
||||
current_mallinfo.arena = total_size;
|
||||
current_mallinfo.fordblks = free_size;
|
||||
current_mallinfo.uordblks = total_size - free_size;
|
||||
|
||||
MALLOC_UNLOCK;
|
||||
return current_mallinfo;
|
||||
}
|
||||
|
||||
#endif /* DEFINE_MALLINFO */
|
||||
|
||||
#ifdef DEFINE_MALLOC_STATS
|
||||
void nano_malloc_stats(RONEARG)
|
||||
{
|
||||
nano_mallinfo(RONECALL);
|
||||
fiprintf(stderr, "max system bytes = %10u\n",
|
||||
current_mallinfo.arena);
|
||||
fiprintf(stderr, "system bytes = %10u\n",
|
||||
current_mallinfo.arena);
|
||||
fiprintf(stderr, "in use bytes = %10u\n",
|
||||
current_mallinfo.uordblks);
|
||||
}
|
||||
#endif /* DEFINE_MALLOC_STATS */
|
||||
|
||||
|
@ -484,8 +536,6 @@ malloc_size_t nano_malloc_usable_size(RARG void * ptr)
|
|||
#endif /* DEFINE_MALLOC_USABLE_SIZE */
|
||||
|
||||
#ifdef DEFINE_MEMALIGN
|
||||
void * nano_malloc(RARG malloc_size_t s);
|
||||
|
||||
/* Function nano_memalign
|
||||
* Allocate memory block aligned at specific boundary.
|
||||
* align: required alignment. Must be power of 2. Return NULL
|
||||
|
@ -563,8 +613,6 @@ int nano_mallopt(RARG int parameter_number, int parameter_value)
|
|||
#endif /* DEFINE_MALLOPT */
|
||||
|
||||
#ifdef DEFINE_VALLOC
|
||||
void * nano_memalign(RARG size_t align, size_t s);
|
||||
|
||||
void * nano_valloc(RARG size_t s)
|
||||
{
|
||||
return nano_memalign(RCALL MALLOC_PAGE_ALIGN, s);
|
||||
|
@ -572,8 +620,6 @@ void * nano_valloc(RARG size_t s)
|
|||
#endif /* DEFINE_VALLOC */
|
||||
|
||||
#ifdef DEFINE_PVALLOC
|
||||
void * nano_valloc(RARG size_t s);
|
||||
|
||||
void * nano_pvalloc(RARG size_t s)
|
||||
{
|
||||
return nano_valloc(RCALL ALIGN_TO(s, MALLOC_PAGE_ALIGN));
|
||||
|
|
Loading…
Reference in New Issue