acid-drop

- Hacking the planet from a LilyGo T-Deck using custom firmware
git clone git://git.acid.vegas/acid-drop.git

lv_mem.c (13575B)

      1 /**
      2  * @file lv_mem.c
      3  * General and portable implementation of malloc and free.
      4  * Dynamic memory monitoring is also supported.
      5  */
      6 
      7 /*********************
      8  *      INCLUDES
      9  *********************/
     10 #include "lv_mem.h"
     11 #include "lv_tlsf.h"
     12 #include "lv_gc.h"
     13 #include "lv_assert.h"
     14 #include "lv_log.h"
     15 
     16 #if LV_MEM_CUSTOM != 0
     17     #include LV_MEM_CUSTOM_INCLUDE
     18 #endif
     19 
     20 #ifdef LV_MEM_POOL_INCLUDE
     21     #include LV_MEM_POOL_INCLUDE
     22 #endif
     23 
     24 /*********************
     25  *      DEFINES
     26  *********************/
     27 /*memset allocated memory to 0xaa and freed memory to 0xbb (for testing purposes only)*/
     28 #ifndef LV_MEM_ADD_JUNK
     29     #define LV_MEM_ADD_JUNK  0
     30 #endif
     31 
     32 #ifdef LV_ARCH_64
     33     #define MEM_UNIT         uint64_t
     34     #define ALIGN_MASK       0x7
     35 #else
     36     #define MEM_UNIT         uint32_t
     37     #define ALIGN_MASK       0x3
     38 #endif
     39 
     40 #define ZERO_MEM_SENTINEL  0xa1b2c3d4
     41 
     42 /**********************
     43  *      TYPEDEFS
     44  **********************/
     45 
     46 /**********************
     47  *  STATIC PROTOTYPES
     48  **********************/
     49 #if LV_MEM_CUSTOM == 0
     50     static void lv_mem_walker(void * ptr, size_t size, int used, void * user);
     51 #endif
     52 
     53 /**********************
     54  *  STATIC VARIABLES
     55  **********************/
     56 #if LV_MEM_CUSTOM == 0
     57     static lv_tlsf_t tlsf;
     58 #endif
     59 
     60 static uint32_t zero_mem = ZERO_MEM_SENTINEL; /*The address of this variable is returned when a 0-byte allocation is requested*/
     61 
     62 /**********************
     63  *      MACROS
     64  **********************/
     65 #if LV_LOG_TRACE_MEM
     66     #define MEM_TRACE(...) LV_LOG_TRACE(__VA_ARGS__)
     67 #else
     68     #define MEM_TRACE(...)
     69 #endif
     70 
     71 #define COPY32 *d32 = *s32; d32++; s32++;
     72 #define COPY8 *d8 = *s8; d8++; s8++;
     73 #define SET32(x) *d32 = x; d32++;
     74 #define SET8(x) *d8 = x; d8++;
     75 #define REPEAT8(expr) expr expr expr expr expr expr expr expr
     76 
     77 /**********************
     78  *   GLOBAL FUNCTIONS
     79  **********************/
     80 
     81 /**
     82  * Initialize the dyn_mem module (work memory and other variables)
     83  */
     84 void lv_mem_init(void)
     85 {
     86 #if LV_MEM_CUSTOM == 0
     87 
     88 #if LV_MEM_ADR == 0
     89 #ifdef LV_MEM_POOL_ALLOC
     90     tlsf = lv_tlsf_create_with_pool((void *)LV_MEM_POOL_ALLOC(LV_MEM_SIZE), LV_MEM_SIZE);
     91 #else
     92     /*Allocate a large array to store the dynamically allocated data*/
     93     static LV_ATTRIBUTE_LARGE_RAM_ARRAY MEM_UNIT work_mem_int[LV_MEM_SIZE / sizeof(MEM_UNIT)];
     94     tlsf = lv_tlsf_create_with_pool((void *)work_mem_int, LV_MEM_SIZE);
     95 #endif
     96 #else
     97     tlsf = lv_tlsf_create_with_pool((void *)LV_MEM_ADR, LV_MEM_SIZE);
     98 #endif
     99 #endif
    100 
    101 #if LV_MEM_ADD_JUNK
    102     LV_LOG_WARN("LV_MEM_ADD_JUNK is enabled which makes LVGL much slower");
    103 #endif
    104 }
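        /* Configuration note (editorial sketch, not part of the upstream file):
         * which branch above runs is decided by macros from lv_conf.h. Assuming the
         * stock LVGL v8 option names used in this file, a typical built-in-pool
         * setup looks like:
         *
         *     #define LV_MEM_CUSTOM   0                 // use the internal TLSF allocator
         *     #define LV_MEM_SIZE     (48U * 1024U)     // size of the static pool
         *     #define LV_MEM_ADR      0                 // 0: let LVGL place the pool itself
         *
         * LV_MEM_POOL_INCLUDE/LV_MEM_POOL_ALLOC can hand the pool to a custom
         * allocator instead, and LV_MEM_CUSTOM == 1 bypasses the pool entirely in
         * favor of LV_MEM_CUSTOM_ALLOC/REALLOC/FREE. */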
    105 
    106 /**
    107  * Clean up the memory pool, freeing all allocated memory.
    108  * @note It works only if `LV_MEM_CUSTOM == 0`
    109  */
    110 void lv_mem_deinit(void)
    111 {
    112 #if LV_MEM_CUSTOM == 0
    113     lv_tlsf_destroy(tlsf);
    114     lv_mem_init();
    115 #endif
    116 }
    117 
    118 /**
    119  * Allocate memory dynamically
    120  * @param size size of the memory to allocate in bytes
    121  * @return pointer to the allocated memory
    122  */
    123 void * lv_mem_alloc(size_t size)
    124 {
    125     MEM_TRACE("allocating %lu bytes", (unsigned long)size);
    126     if(size == 0) {
    127         MEM_TRACE("using zero_mem");
    128         return &zero_mem;
    129     }
    130 
    131 #if LV_MEM_CUSTOM == 0
    132     void * alloc = lv_tlsf_malloc(tlsf, size);
    133 #else
    134     void * alloc = LV_MEM_CUSTOM_ALLOC(size);
    135 #endif
    136 
    137     if(alloc == NULL) {
    138         LV_LOG_ERROR("couldn't allocate memory (%lu bytes)", (unsigned long)size);
    139         lv_mem_monitor_t mon;
    140         lv_mem_monitor(&mon);
    141         LV_LOG_ERROR("used: %6d (%3d %%), frag: %3d %%, biggest free: %6d",
    142                      (int)(mon.total_size - mon.free_size), mon.used_pct, mon.frag_pct,
    143                      (int)mon.free_biggest_size);
    144     }
    145 #if LV_MEM_ADD_JUNK
    146     else {
    147         lv_memset(alloc, 0xaa, size);
    148     }
    149 #endif
    150 
    151     MEM_TRACE("allocated at %p", alloc);
    152     return alloc;
    153 }
    154 
    155 /**
    156  * Free allocated memory
    157  * @param data pointer to the allocated memory
    158  */
    159 void lv_mem_free(void * data)
    160 {
    161     MEM_TRACE("freeing %p", data);
    162     if(data == &zero_mem) return;
    163     if(data == NULL) return;
    164 
    165 #if LV_MEM_CUSTOM == 0
    166 #  if LV_MEM_ADD_JUNK
    167     lv_memset(data, 0xbb, lv_tlsf_block_size(data));
    168 #  endif
    169     lv_tlsf_free(tlsf, data);
    170 #else
    171     LV_MEM_CUSTOM_FREE(data);
    172 #endif
    173 }
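        /* Usage sketch (editorial, not part of the upstream file): callers pair
         * lv_mem_alloc() with lv_mem_free() and must handle NULL, since the pool can
         * run out. A zero-byte request returns &zero_mem, which lv_mem_free() simply
         * ignores, as does a NULL pointer.
         *
         *     uint8_t * buf = lv_mem_alloc(len);        // `len` is a caller-chosen size
         *     if(buf == NULL) return;                   // allocation failed
         *     lv_memset_00(buf, len);                   // contents start uninitialized
         *     ...
         *     lv_mem_free(buf);
         */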
    174 
    175 /**
    176  * Reallocate a memory block with a new size. The old content is preserved.
    177  * @param data_p pointer to an allocated memory block;
    178  * its content will be copied to the new memory block and the old block freed
    179  * @param new_size the desired new size in bytes
    180  * @return pointer to the new memory
    181  */
    182 void * lv_mem_realloc(void * data_p, size_t new_size)
    183 {
    184     MEM_TRACE("reallocating %p with %lu size", data_p, (unsigned long)new_size);
    185     if(new_size == 0) {
    186         MEM_TRACE("using zero_mem");
    187         lv_mem_free(data_p);
    188         return &zero_mem;
    189     }
    190 
    191     if(data_p == &zero_mem) return lv_mem_alloc(new_size);
    192 
    193 #if LV_MEM_CUSTOM == 0
    194     void * new_p = lv_tlsf_realloc(tlsf, data_p, new_size);
    195 #else
    196     void * new_p = LV_MEM_CUSTOM_REALLOC(data_p, new_size);
    197 #endif
    198     if(new_p == NULL) {
    199         LV_LOG_ERROR("couldn't allocate memory");
    200         return NULL;
    201     }
    202 
    203     MEM_TRACE("allocated at %p", new_p);
    204     return new_p;
    205 }
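        /* Usage sketch (editorial, not part of the upstream file): as with the C
         * library realloc(), keep the old pointer until success is confirmed, since
         * NULL is returned on failure:
         *
         *     void * bigger = lv_mem_realloc(buf, new_len);
         *     if(bigger != NULL) buf = bigger;   // on failure `buf` is expected to stay valid
         *
         * A new_size of 0 frees the block and returns &zero_mem instead. */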
    206 
    207 lv_res_t lv_mem_test(void)
    208 {
    209     if(zero_mem != ZERO_MEM_SENTINEL) {
    210         LV_LOG_WARN("zero_mem is written");
    211         return LV_RES_INV;
    212     }
    213 
    214 #if LV_MEM_CUSTOM == 0
    215     if(lv_tlsf_check(tlsf)) {
    216         LV_LOG_WARN("failed");
    217         return LV_RES_INV;
    218     }
    219 
    220     if(lv_tlsf_check_pool(lv_tlsf_get_pool(tlsf))) {
    221         LV_LOG_WARN("pool failed");
    222         return LV_RES_INV;
    223     }
    224 #endif
    225     MEM_TRACE("passed");
    226     return LV_RES_OK;
    227 }
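        /* Sketch (editorial, not part of the upstream file): lv_mem_test() is a cheap
         * consistency check of the zero_mem sentinel and the TLSF structures, so it
         * can be sprinkled into debug builds:
         *
         *     if(lv_mem_test() != LV_RES_OK) LV_LOG_ERROR("heap corruption detected");
         */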
    228 
    229 /**
    230  * Give information about the work memory used for dynamic allocation
    231  * @param mon_p pointer to a lv_mem_monitor_t variable,
    232  *              the result of the analysis will be stored here
    233  */
    234 void lv_mem_monitor(lv_mem_monitor_t * mon_p)
    235 {
    236     /*Init the data*/
    237     lv_memset(mon_p, 0, sizeof(lv_mem_monitor_t));
    238 #if LV_MEM_CUSTOM == 0
    239     MEM_TRACE("begin");
    240 
    241     lv_tlsf_walk_pool(lv_tlsf_get_pool(tlsf), lv_mem_walker, mon_p);
    242 
    243     mon_p->total_size = LV_MEM_SIZE;
    244     mon_p->used_pct = 100 - (100U * mon_p->free_size) / mon_p->total_size;
    245     if(mon_p->free_size > 0) {
    246         mon_p->frag_pct = mon_p->free_biggest_size * 100U / mon_p->free_size;
    247         mon_p->frag_pct = 100 - mon_p->frag_pct;
    248     }
    249     else {
    250         mon_p->frag_pct = 0; /*no fragmentation if all the RAM is used*/
    251     }
    252 
    253     MEM_TRACE("finished");
    254 #endif
    255 }
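        /* Usage sketch (editorial, not part of the upstream file): reading the
         * monitor results, e.g. to log memory pressure from application code:
         *
         *     lv_mem_monitor_t mon;
         *     lv_mem_monitor(&mon);
         *     LV_LOG_USER("used: %d (%d %%), frag: %d %%, biggest free: %d",
         *                 (int)(mon.total_size - mon.free_size), mon.used_pct,
         *                 mon.frag_pct, (int)mon.free_biggest_size);
         *
         * With LV_MEM_CUSTOM == 1 the structure is simply zeroed. */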
    256 
    257 
    258 /**
    259  * Get a temporary buffer with the given size.
    260  * @param size the required size
    261  */
    262 void * lv_mem_buf_get(uint32_t size)
    263 {
    264     if(size == 0) return NULL;
    265 
    266     MEM_TRACE("begin, getting %d bytes", size);
    267 
    268     /*Try to find a free buffer with suitable size*/
    269     int8_t i_guess = -1;
    270     for(uint8_t i = 0; i < LV_MEM_BUF_MAX_NUM; i++) {
    271         if(LV_GC_ROOT(lv_mem_buf[i]).used == 0 && LV_GC_ROOT(lv_mem_buf[i]).size >= size) {
    272             if(LV_GC_ROOT(lv_mem_buf[i]).size == size) {
    273                 LV_GC_ROOT(lv_mem_buf[i]).used = 1;
    274                 return LV_GC_ROOT(lv_mem_buf[i]).p;
    275             }
    276             else if(i_guess < 0) {
    277                 i_guess = i;
    278             }
    279             /*If the size of buffer `i` is closer to `size`, prefer it*/
    280             else if(LV_GC_ROOT(lv_mem_buf[i]).size < LV_GC_ROOT(lv_mem_buf[i_guess]).size) {
    281                 i_guess = i;
    282             }
    283         }
    284     }
    285 
    286     if(i_guess >= 0) {
    287         LV_GC_ROOT(lv_mem_buf[i_guess]).used = 1;
    288         MEM_TRACE("returning already allocated buffer (buffer id: %d, address: %p)", i_guess,
    289                   LV_GC_ROOT(lv_mem_buf[i_guess]).p);
    290         return LV_GC_ROOT(lv_mem_buf[i_guess]).p;
    291     }
    292 
    293     /*Reallocate a free buffer*/
    294     for(uint8_t i = 0; i < LV_MEM_BUF_MAX_NUM; i++) {
    295         if(LV_GC_ROOT(lv_mem_buf[i]).used == 0) {
    296             /*if this fails you probably need to increase your LV_MEM_SIZE/heap size*/
    297             void * buf = lv_mem_realloc(LV_GC_ROOT(lv_mem_buf[i]).p, size);
    298             LV_ASSERT_MSG(buf != NULL, "Out of memory, can't allocate a new buffer (increase your LV_MEM_SIZE/heap size)");
    299             if(buf == NULL) return NULL;
    300 
    301             LV_GC_ROOT(lv_mem_buf[i]).used = 1;
    302             LV_GC_ROOT(lv_mem_buf[i]).size = size;
    303             LV_GC_ROOT(lv_mem_buf[i]).p    = buf;
    304             MEM_TRACE("allocated (buffer id: %d, address: %p)", i, LV_GC_ROOT(lv_mem_buf[i]).p);
    305             return LV_GC_ROOT(lv_mem_buf[i]).p;
    306         }
    307     }
    308 
    309     LV_LOG_ERROR("no more buffers. (increase LV_MEM_BUF_MAX_NUM)");
    310     LV_ASSERT_MSG(false, "No more buffers. Increase LV_MEM_BUF_MAX_NUM.");
    311     return NULL;
    312 }
    313 
    314 /**
    315  * Release a memory buffer
    316  * @param p buffer to release
    317  */
    318 void lv_mem_buf_release(void * p)
    319 {
    320     MEM_TRACE("begin (address: %p)", p);
    321 
    322     for(uint8_t i = 0; i < LV_MEM_BUF_MAX_NUM; i++) {
    323         if(LV_GC_ROOT(lv_mem_buf[i]).p == p) {
    324             LV_GC_ROOT(lv_mem_buf[i]).used = 0;
    325             return;
    326         }
    327     }
    328 
    329     LV_LOG_ERROR("p is not a known buffer");
    330 }
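        /* Usage sketch (editorial, not part of the upstream file): the buffer API is
         * a small cache of reusable scratch buffers. Every lv_mem_buf_get() must be
         * matched by lv_mem_buf_release(), otherwise the LV_MEM_BUF_MAX_NUM slots
         * run out and the assert above fires:
         *
         *     uint8_t * tmp = lv_mem_buf_get(len);      // `len` is a caller-chosen size
         *     if(tmp == NULL) return;
         *     ... use tmp as temporary work memory ...
         *     lv_mem_buf_release(tmp);                  // marks the slot free; memory stays cached
         */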
    331 
    332 /**
    333  * Free all memory buffers
    334  */
    335 void lv_mem_buf_free_all(void)
    336 {
    337     for(uint8_t i = 0; i < LV_MEM_BUF_MAX_NUM; i++) {
    338         if(LV_GC_ROOT(lv_mem_buf[i]).p) {
    339             lv_mem_free(LV_GC_ROOT(lv_mem_buf[i]).p);
    340             LV_GC_ROOT(lv_mem_buf[i]).p = NULL;
    341             LV_GC_ROOT(lv_mem_buf[i]).used = 0;
    342             LV_GC_ROOT(lv_mem_buf[i]).size = 0;
    343         }
    344     }
    345 }
    346 
    347 #if LV_MEMCPY_MEMSET_STD == 0
    348 /**
    349  * Same as `memcpy` but optimized for 4-byte operations.
    350  * @param dst pointer to the destination buffer
    351  * @param src pointer to the source buffer
    352  * @param len number of bytes to copy
    353  */
    354 LV_ATTRIBUTE_FAST_MEM void * lv_memcpy(void * dst, const void * src, size_t len)
    355 {
    356     uint8_t * d8 = dst;
    357     const uint8_t * s8 = src;
    358 
    359     lv_uintptr_t d_align = (lv_uintptr_t)d8 & ALIGN_MASK;
    360     lv_uintptr_t s_align = (lv_uintptr_t)s8 & ALIGN_MASK;
    361 
    362     /*Byte-by-byte copy when source and destination alignment differ*/
    363     if(s_align != d_align) {
    364         while(len > 32) {
    365             REPEAT8(COPY8);
    366             REPEAT8(COPY8);
    367             REPEAT8(COPY8);
    368             REPEAT8(COPY8);
    369             len -= 32;
    370         }
    371         while(len) {
    372             COPY8
    373             len--;
    374         }
    375         return dst;
    376     }
    377 
    378     /*Align the pointers to a word boundary*/
    379     if(d_align) {
    380         d_align = ALIGN_MASK + 1 - d_align;
    381         while(d_align && len) {
    382             COPY8;
    383             d_align--;
    384             len--;
    385         }
    386     }
    387 
    388     uint32_t * d32 = (uint32_t *)d8;
    389     const uint32_t * s32 = (const uint32_t *)s8;
    390     while(len > 32) {
    391         REPEAT8(COPY32)
    392         len -= 32;
    393     }
    394 
    395     while(len > 4) {
    396         COPY32;
    397         len -= 4;
    398     }
    399 
    400     d8 = (uint8_t *)d32;
    401     s8 = (const uint8_t *)s32;
    402     while(len) {
    403         COPY8
    404         len--;
    405     }
    406 
    407     return dst;
    408 }
    409 
    410 /**
    411  * Same as `memset` but optimized for 4-byte operations.
    412  * @param dst pointer to the destination buffer
    413  * @param v value to set [0..255]
    414  * @param len number of bytes to set
    415  */
    416 LV_ATTRIBUTE_FAST_MEM void lv_memset(void * dst, uint8_t v, size_t len)
    417 {
    418 
    419     uint8_t * d8 = (uint8_t *)dst;
    420 
    421     lv_uintptr_t d_align = (lv_uintptr_t)d8 & ALIGN_MASK;
    422 
    423     /*Make the address aligned*/
    424     if(d_align) {
    425         d_align = ALIGN_MASK + 1 - d_align;
    426         while(d_align && len) {
    427             SET8(v);
    428             len--;
    429             d_align--;
    430         }
    431     }
    432 
    433     uint32_t v32 = (uint32_t)v + ((uint32_t)v << 8) + ((uint32_t)v << 16) + ((uint32_t)v << 24);
    434 
    435     uint32_t * d32 = (uint32_t *)d8;
    436 
    437     while(len > 32) {
    438         REPEAT8(SET32(v32));
    439         len -= 32;
    440     }
    441 
    442     while(len > 4) {
    443         SET32(v32);
    444         len -= 4;
    445     }
    446 
    447     d8 = (uint8_t *)d32;
    448     while(len) {
    449         SET8(v);
    450         len--;
    451     }
    452 }
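        /* Note (editorial, not part of the upstream file): v32 above is the byte value
         * broadcast to all four byte lanes, e.g. v = 0xAB gives
         * 0xAB + 0xAB00 + 0xAB0000 + 0xAB000000 = 0xABABABAB, so every 32-bit store
         * writes four identical bytes at once. */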
    453 
    454 /**
    455  * Same as `memset(dst, 0x00, len)` but optimized for 4-byte operations.
    456  * @param dst pointer to the destination buffer
    457  * @param len number of bytes to set
    458  */
    459 LV_ATTRIBUTE_FAST_MEM void lv_memset_00(void * dst, size_t len)
    460 {
    461     uint8_t * d8 = (uint8_t *)dst;
    462     lv_uintptr_t d_align = (lv_uintptr_t)d8 & ALIGN_MASK;
    463 
    464     /*Make the address aligned*/
    465     if(d_align) {
    466         d_align = ALIGN_MASK + 1 - d_align;
    467         while(d_align && len) {
    468             SET8(0);
    469             len--;
    470             d_align--;
    471         }
    472     }
    473 
    474     uint32_t * d32 = (uint32_t *)d8;
    475     while(len > 32) {
    476         REPEAT8(SET32(0));
    477         len -= 32;
    478     }
    479 
    480     while(len > 4) {
    481         SET32(0);
    482         len -= 4;
    483     }
    484 
    485     d8 = (uint8_t *)d32;
    486     while(len) {
    487         SET8(0);
    488         len--;
    489     }
    490 }
    491 
    492 /**
    493  * Same as `memset(dst, 0xFF, len)` but optimized for 4-byte operations.
    494  * @param dst pointer to the destination buffer
    495  * @param len number of bytes to set
    496  */
    497 LV_ATTRIBUTE_FAST_MEM void lv_memset_ff(void * dst, size_t len)
    498 {
    499     uint8_t * d8 = (uint8_t *)dst;
    500     lv_uintptr_t d_align = (lv_uintptr_t)d8 & ALIGN_MASK;
    501 
    502     /*Make the address aligned*/
    503     if(d_align) {
    504         d_align = ALIGN_MASK + 1 - d_align;
    505         while(d_align && len) {
    506             SET8(0xFF);
    507             len--;
    508             d_align--;
    509         }
    510     }
    511 
    512     uint32_t * d32 = (uint32_t *)d8;
    513     while(len > 32) {
    514         REPEAT8(SET32(0xFFFFFFFF));
    515         len -= 32;
    516     }
    517 
    518     while(len > 4) {
    519         SET32(0xFFFFFFFF);
    520         len -= 4;
    521     }
    522 
    523     d8 = (uint8_t *)d32;
    524     while(len) {
    525         SET8(0xFF);
    526         len--;
    527     }
    528 }
    529 
    530 #endif /*LV_MEMCPY_MEMSET_STD*/
    531 
    532 /**********************
    533  *   STATIC FUNCTIONS
    534  **********************/
    535 
    536 #if LV_MEM_CUSTOM == 0
    537 static void lv_mem_walker(void * ptr, size_t size, int used, void * user)
    538 {
    539     LV_UNUSED(ptr);
    540 
    541     lv_mem_monitor_t * mon_p = user;
    542     if(used) {
    543         mon_p->used_cnt++;
    544     }
    545     else {
    546         mon_p->free_cnt++;
    547         mon_p->free_size += size;
    548         if(size > mon_p->free_biggest_size)
    549             mon_p->free_biggest_size = size;
    550     }
    551 }
    552 #endif