1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
82
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
113
114
115
124
125
128
129
130
131
134
135
136
137
138
147
148
150
151
152
153
154
155
156
159
160
163
164
168
169
174
175
176
177
178
179
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
222
223
227
228
229
230
231
232
233
235
236
237
238
239
240
241
242
243
250
251
252
253
254
255
256
257
258
277
282
283
284
285
286
287
288
289
292
293
294
296
297
298
299
300
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
335
336
337
338
339
340
341
342
343
344
349
350
351
352
353
354
355
356
357
359
360
361
362
365
366
367
368
369
370
371
372
373
377
378
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
427
428
429
430
431
446
449
450
451
455
456
457
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
497
498
499
500
510
511
512
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
545
546
547
550
551
552
553
554
555
556
557
558
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
597
598
599
600
601
602
603
604
605
606
607
608
609
610
616
617
618
619
620
621
622
626
633
634
635
636
637
638
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
709
710
714
715
716
717
720
721
722
723
724
730
731
732
733
734
735
736
737
738
739
740
741
745
749
750
751
752
753
754
755
756
757
758
759
760
761
762
765
766
767
768
769
770
771
772
773
776
779
780
781
782
789
790
791
792
795
796
797
798
799
802
803
804
805
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
843
844
846
847
851
852
853
854
857
858
859
860
861
862
863
864
865
866
867
870
871
872
873
874
885
886
887
888
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
916
917
928
929
930
931
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1008
1009
1010
1011
1015
1016
1017
/* ... */
/* ... */
#include "lwip/opt.h"
#include "lwip/mem.h"
#include "lwip/def.h"
#include "lwip/sys.h"
#include "lwip/stats.h"
#include "lwip/err.h"
#include <string.h>
/* 7 includes */
#if MEM_LIBC_MALLOC
#include <stdlib.h>
#endif
#ifndef LWIP_MEM_ILLEGAL_FREE
#define LWIP_MEM_ILLEGAL_FREE(msg) LWIP_ASSERT(msg, 0)
#endif
#define MEM_STATS_INC_LOCKED(x) SYS_ARCH_LOCKED(MEM_STATS_INC(x))
#define MEM_STATS_INC_USED_LOCKED(x, y) SYS_ARCH_LOCKED(MEM_STATS_INC_USED(x, y))
#define MEM_STATS_DEC_USED_LOCKED(x, y) SYS_ARCH_LOCKED(MEM_STATS_DEC_USED(x, y))
#if MEM_OVERFLOW_CHECK
#define MEM_SANITY_OFFSET MEM_SANITY_REGION_BEFORE_ALIGNED
#define MEM_SANITY_OVERHEAD (MEM_SANITY_REGION_BEFORE_ALIGNED + MEM_SANITY_REGION_AFTER_ALIGNED)
/* ... */#else
#define MEM_SANITY_OFFSET 0
#define MEM_SANITY_OVERHEAD 0
/* ... */#endif
#if MEM_OVERFLOW_CHECK || MEMP_OVERFLOW_CHECK
/* ... */
/**
 * Check that the sanity regions around an allocation are still intact,
 * i.e. still filled with the 0xcd pattern written by mem_overflow_init_raw().
 * Asserts (via LWIP_ASSERT) if the pattern was overwritten.
 *
 * @param p pointer to the user payload (first byte after the front sanity region)
 * @param size size of the user payload in bytes
 * @param descr1 first part of the description printed on failure
 * @param descr2 second part of the description printed on failure
 */
void
mem_overflow_check_raw(void *p, size_t size, const char *descr1, const char *descr2)
{
#if MEM_SANITY_REGION_AFTER_ALIGNED || MEM_SANITY_REGION_BEFORE_ALIGNED
  u16_t k;
  u8_t *m;

#if MEM_SANITY_REGION_AFTER_ALIGNED > 0
  /* check the region after the user payload */
  m = (u8_t *)p + size;
  for (k = 0; k < MEM_SANITY_REGION_AFTER_ALIGNED; k++) {
    if (m[k] != 0xcd) {
      char errstr[128];
      snprintf(errstr, sizeof(errstr), "detected mem overflow in %s%s", descr1, descr2);
      LWIP_ASSERT(errstr, 0);
    }
  }
#endif
#if MEM_SANITY_REGION_BEFORE_ALIGNED > 0
  /* check the region before the user payload */
  m = (u8_t *)p - MEM_SANITY_REGION_BEFORE_ALIGNED;
  for (k = 0; k < MEM_SANITY_REGION_BEFORE_ALIGNED; k++) {
    if (m[k] != 0xcd) {
      char errstr[128];
      snprintf(errstr, sizeof(errstr), "detected mem underflow in %s%s", descr1, descr2);
      LWIP_ASSERT(errstr, 0);
    }
  }
#endif
#else
  /* Fix: the original referenced undeclared names 'desc'/'descr' here, which
     broke compilation when both sanity regions are configured to 0. */
  LWIP_UNUSED_ARG(p);
  LWIP_UNUSED_ARG(size);
  LWIP_UNUSED_ARG(descr1);
  LWIP_UNUSED_ARG(descr2);
#endif
}
/* ... */
/**
 * Initialize the sanity regions around an allocation by filling them with
 * the 0xcd pattern that mem_overflow_check_raw() later verifies.
 *
 * @param p pointer to the user payload (first byte after the front sanity region)
 * @param size size of the user payload in bytes
 */
void
mem_overflow_init_raw(void *p, size_t size)
{
#if MEM_SANITY_REGION_BEFORE_ALIGNED > 0 || MEM_SANITY_REGION_AFTER_ALIGNED > 0
  u8_t *m;
#if MEM_SANITY_REGION_BEFORE_ALIGNED > 0
  /* pattern-fill the region before the user payload */
  m = (u8_t *)p - MEM_SANITY_REGION_BEFORE_ALIGNED;
  memset(m, 0xcd, MEM_SANITY_REGION_BEFORE_ALIGNED);
#endif
#if MEM_SANITY_REGION_AFTER_ALIGNED > 0
  /* pattern-fill the region after the user payload */
  m = (u8_t *)p + size;
  memset(m, 0xcd, MEM_SANITY_REGION_AFTER_ALIGNED);
#endif
#else
  /* Fix: the original referenced the undeclared name 'desc' here; the unused
     parameter is 'size', which broke compilation when both regions are 0. */
  LWIP_UNUSED_ARG(p);
  LWIP_UNUSED_ARG(size);
#endif
}
/* ... */#endif
#if MEM_LIBC_MALLOC || MEM_USE_POOLS
/* ... */
/*
 * When the heap is provided by the C library (MEM_LIBC_MALLOC) or by
 * memory pools (MEM_USE_POOLS), no heap state of our own exists, so
 * initialization is intentionally a no-op.
 */
void
mem_init(void)
{
  /* nothing to initialize */
}
/* ... */
/*
 * Trimming an allocation is not supported for libc-malloc/pool-backed
 * heaps: the original pointer is returned unchanged and the requested
 * size is ignored.
 */
void *
mem_trim(void *mem, mem_size_t size)
{
  LWIP_UNUSED_ARG(size);
  return mem;
}
/* ... */#endif
#if MEM_LIBC_MALLOC
/* ... */
#ifndef mem_clib_free
#define mem_clib_free free
#endif
#ifndef mem_clib_malloc
#define mem_clib_malloc malloc
#endif
#ifndef mem_clib_calloc
#define mem_clib_calloc calloc
#endif
#if LWIP_STATS && MEM_STATS
#define MEM_LIBC_STATSHELPER_SIZE LWIP_MEM_ALIGN_SIZE(sizeof(mem_size_t))
#else
#define MEM_LIBC_STATSHELPER_SIZE 0
#endif
/* ... */
/**
 * Allocate memory via the C library's malloc(), optionally prefixing the
 * block with a small header that records the requested size for the
 * MEM_STATS accounting.
 *
 * @param size number of bytes requested by the caller
 * @return pointer to the user payload, or NULL on allocation failure
 */
void *
mem_malloc(mem_size_t size)
{
  void *ret = mem_clib_malloc(size + MEM_LIBC_STATSHELPER_SIZE);
  if (ret == NULL) {
    /* allocation failed: count the error and bail out */
    MEM_STATS_INC_LOCKED(err);
    return ret;
  }
  LWIP_ASSERT("malloc() must return aligned memory", LWIP_MEM_ALIGN(ret) == ret);
#if LWIP_STATS && MEM_STATS
  /* remember the requested size in a hidden header, then hand out the
     memory just past it */
  *(mem_size_t *)ret = size;
  ret = (u8_t *)ret + MEM_LIBC_STATSHELPER_SIZE;
  MEM_STATS_INC_USED_LOCKED(used, size);
#endif
  return ret;
}
/* ... */
/**
 * Release memory previously obtained from mem_malloc() back to the C
 * library. With MEM_STATS enabled, first steps back over the hidden size
 * header to update the usage statistics and to free the true start of
 * the allocation.
 *
 * @param rmem pointer returned by mem_malloc(); must not be NULL
 */
void
mem_free(void *rmem)
{
  LWIP_ASSERT("rmem != NULL", (rmem != NULL));
  LWIP_ASSERT("rmem == MEM_ALIGN(rmem)", (rmem == LWIP_MEM_ALIGN(rmem)));
#if LWIP_STATS && MEM_STATS
  /* step back to the stats header that mem_malloc() prepended */
  rmem = (u8_t *)rmem - MEM_LIBC_STATSHELPER_SIZE;
  MEM_STATS_DEC_USED_LOCKED(used, *(mem_size_t *)rmem);
#endif
  mem_clib_free(rmem);
}
/* ... */
#elif MEM_USE_POOLS
/* ... */
/**
 * Allocate memory from one of the memory pools (MEM_USE_POOLS build):
 * finds the smallest pool whose element size fits the request plus the
 * memp_malloc_helper header, and returns the payload area after that header.
 *
 * @param size number of bytes requested by the caller
 * @return pointer to the user payload, or NULL if no pool element was available
 */
void *
mem_malloc(mem_size_t size)
{
void *ret;
struct memp_malloc_helper *element = NULL;
memp_t poolnr;
/* the pool element must also hold the helper header in front of the payload */
mem_size_t required_size = size + LWIP_MEM_ALIGN_SIZE(sizeof(struct memp_malloc_helper));
/* walk the pools from smallest to largest element size */
for (poolnr = MEMP_POOL_FIRST; poolnr <= MEMP_POOL_LAST; poolnr = (memp_t)(poolnr + 1)) {
/* ... */
if (required_size <= memp_pools[poolnr]->size) {
element = (struct memp_malloc_helper *)memp_malloc(poolnr);
if (element == NULL) {
#if MEM_USE_POOLS_TRY_BIGGER_POOL
/* this pool is exhausted: optionally retry with the next bigger pool */
if (poolnr < MEMP_POOL_LAST) {
continue;
}if (poolnr < MEMP_POOL_LAST) { ... }
/* ... */#endif
MEM_STATS_INC_LOCKED(err);
return NULL;
}if (element == NULL) { ... }
/* got an element: stop searching */
break;
}if (required_size <= memp_pools[poolnr]->size) { ... }
}for (poolnr = MEMP_POOL_FIRST; poolnr <= MEMP_POOL_LAST; poolnr = (memp_t)(poolnr + 1)) { ... }
/* loop ran past the last pool: no pool element is big enough */
if (poolnr > MEMP_POOL_LAST) {
LWIP_ASSERT("mem_malloc(): no pool is that big!", 0);
MEM_STATS_INC_LOCKED(err);
return NULL;
}if (poolnr > MEMP_POOL_LAST) { ... }
/* remember which pool the element came from, for mem_free() */
element->poolnr = poolnr;
/* the user payload starts right after the aligned helper header */
ret = (u8_t *)element + LWIP_MEM_ALIGN_SIZE(sizeof(struct memp_malloc_helper));
#if MEMP_OVERFLOW_CHECK || (LWIP_STATS && MEM_STATS)
/* record the requested size for stats / overflow checking */
element->size = (u16_t)size;
MEM_STATS_INC_USED_LOCKED(used, element->size);/* ... */
#endif
#if MEMP_OVERFLOW_CHECK
/* pattern-fill the unused tail of the pool element so mem_free() can
   detect writes past the requested size */
memset((u8_t *)ret + size, 0xcd, memp_pools[poolnr]->size - size);/* ... */
#endif
return ret;
}mem_malloc (mem_size_t size) { ... }
/* ... */
/**
 * Return a pool-backed allocation (from the MEM_USE_POOLS mem_malloc above)
 * to its originating pool. Steps back over the memp_malloc_helper header to
 * recover the pool number, and with MEMP_OVERFLOW_CHECK verifies the 0xcd
 * fill pattern in the unused tail of the element.
 *
 * @param rmem payload pointer previously returned by mem_malloc(); must not be NULL
 */
void
mem_free(void *rmem)
{
struct mem_malloc_helper *hmem;
LWIP_ASSERT("rmem != NULL", (rmem != NULL));
LWIP_ASSERT("rmem == MEM_ALIGN(rmem)", (rmem == LWIP_MEM_ALIGN(rmem)));
/* step back to the helper header that precedes the payload */
hmem = (struct memp_malloc_helper *)(void *)((u8_t *)rmem - LWIP_MEM_ALIGN_SIZE(sizeof(struct memp_malloc_helper)));
LWIP_ASSERT("hmem != NULL", (hmem != NULL));
LWIP_ASSERT("hmem == MEM_ALIGN(hmem)", (hmem == LWIP_MEM_ALIGN(hmem)));
LWIP_ASSERT("hmem->poolnr < MEMP_MAX", (hmem->poolnr < MEMP_MAX));
MEM_STATS_DEC_USED_LOCKED(used, hmem->size);
#if MEMP_OVERFLOW_CHECK
{
u16_t i;
LWIP_ASSERT("MEM_USE_POOLS: invalid chunk size",
hmem->size <= memp_pools[hmem->poolnr]->size);
/* the bytes between the requested size and the pool element size were
   filled with 0xcd at allocation time; any other value means overflow */
for (i = hmem->size; i < memp_pools[hmem->poolnr]->size; i++) {
u8_t data = *((u8_t *)rmem + i);
LWIP_ASSERT("MEM_USE_POOLS: mem overflow detected", data == 0xcd);
}for (i = hmem->size; i < memp_pools[hmem->poolnr]->size; i++) { ... }
...}/* ... */
#endif
/* hand the element back to the pool it was taken from */
memp_free(hmem->poolnr, hmem);
}mem_free (void *rmem) { ... }
/* ... */
#else
/* ... */
/** The heap is made up of a linked list of structs of this type.
 * Links are stored as indices into the 'ram' array rather than pointers. */
struct mem {
  /** index (-> ram[next]) of the next struct */
  mem_size_t next;
  /** index (-> ram[prev]) of the previous struct */
  mem_size_t prev;
  /** 1: this area is used; 0: this area is unused */
  u8_t used;
#if MEM_OVERFLOW_CHECK
  /** the caller-requested size, kept for overflow checking */
  mem_size_t user_size;
#endif
};
/* ... */
#ifndef MIN_SIZE
#define MIN_SIZE 12
#endif
#define MIN_SIZE_ALIGNED LWIP_MEM_ALIGN_SIZE(MIN_SIZE)
#define SIZEOF_STRUCT_MEM LWIP_MEM_ALIGN_SIZE(sizeof(struct mem))
#define MEM_SIZE_ALIGNED LWIP_MEM_ALIGN_SIZE(MEM_SIZE)
/* ... */
#ifndef LWIP_RAM_HEAP_POINTER
LWIP_DECLARE_MEMORY_ALIGNED(ram_heap, MEM_SIZE_ALIGNED + (2U * SIZEOF_STRUCT_MEM));
#define LWIP_RAM_HEAP_POINTER ram_heap
/* ... */#endif
static u8_t *ram;
static struct mem *ram_end;
#if !NO_SYS
static sys_mutex_t mem_mutex;
#endif
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
static volatile u8_t mem_free_count;
#define LWIP_MEM_FREE_DECL_PROTECT() SYS_ARCH_DECL_PROTECT(lev_free)
#define LWIP_MEM_FREE_PROTECT() SYS_ARCH_PROTECT(lev_free)
#define LWIP_MEM_FREE_UNPROTECT() SYS_ARCH_UNPROTECT(lev_free)
#define LWIP_MEM_ALLOC_DECL_PROTECT() SYS_ARCH_DECL_PROTECT(lev_alloc)
#define LWIP_MEM_ALLOC_PROTECT() SYS_ARCH_PROTECT(lev_alloc)
#define LWIP_MEM_ALLOC_UNPROTECT() SYS_ARCH_UNPROTECT(lev_alloc)
#define LWIP_MEM_LFREE_VOLATILE volatile
/* 7 defines */
/* ... */#else
#define LWIP_MEM_FREE_DECL_PROTECT()
#define LWIP_MEM_FREE_PROTECT() sys_mutex_lock(&mem_mutex)
#define LWIP_MEM_FREE_UNPROTECT() sys_mutex_unlock(&mem_mutex)
#define LWIP_MEM_ALLOC_DECL_PROTECT()
#define LWIP_MEM_ALLOC_PROTECT()
#define LWIP_MEM_ALLOC_UNPROTECT()
#define LWIP_MEM_LFREE_VOLATILE
/* 7 defines */
/* ... */#endif
static struct mem * LWIP_MEM_LFREE_VOLATILE lfree;
#if MEM_SANITY_CHECK
static void mem_sanity(void);
#define MEM_SANITY() mem_sanity()
/* ... */#else
#define MEM_SANITY()
#endif
#if MEM_OVERFLOW_CHECK
/** Record the user-requested size in a heap element and pattern-fill the
 * sanity regions around its payload. */
static void
mem_overflow_init_element(struct mem *mem, mem_size_t user_size)
{
  u8_t *payload = (u8_t *)mem + SIZEOF_STRUCT_MEM + MEM_SANITY_OFFSET;
  mem->user_size = user_size;
  mem_overflow_init_raw(payload, user_size);
}
static void
mem_overflow_check_element(struct mem *mem)
{
void *p = (u8_t *)mem + SIZEOF_STRUCT_MEM + MEM_SANITY_OFFSET;
mem_overflow_check_raw(p, mem->user_size, "heap", "");
}mem_overflow_check_element (struct mem *mem) { ... }
/* ... */#else
#define mem_overflow_init_element(mem, size)
#define mem_overflow_check_element(mem)
/* ... */#endif
/** Convert a heap index (offset into 'ram') to a struct mem pointer. */
static struct mem *
ptr_to_mem(mem_size_t ptr)
{
  return (struct mem *)(void *)(ram + ptr);
}
static mem_size_t
mem_to_ptr(void *mem)
{
return (mem_size_t)((u8_t *)mem - ram);
}{ ... }
/* ... */
/**
 * "Plug holes" in the heap: merge the just-freed element 'mem' with its
 * neighbors if they are also unused, so adjacent free blocks coalesce into
 * one. Also keeps the 'lfree' (lowest free element) pointer consistent when
 * a merged-away element was the current lfree.
 *
 * @param mem a freed heap element; must lie inside the heap and be unused
 */
static void
plug_holes(struct mem *mem)
{
struct mem *nmem;
struct mem *pmem;
LWIP_ASSERT("plug_holes: mem >= ram", (u8_t *)mem >= ram);
LWIP_ASSERT("plug_holes: mem < ram_end", (u8_t *)mem < (u8_t *)ram_end);
LWIP_ASSERT("plug_holes: mem->used == 0", mem->used == 0);
LWIP_ASSERT("plug_holes: mem->next <= MEM_SIZE_ALIGNED", mem->next <= MEM_SIZE_ALIGNED);
/* try to merge with the following element (if unused and not the sentinel) */
nmem = ptr_to_mem(mem->next);
if (mem != nmem && nmem->used == 0 && (u8_t *)nmem != (u8_t *)ram_end) {
/* nmem is absorbed into mem; if it was the lowest free element, mem now is */
if (lfree == nmem) {
lfree = mem;
}if (lfree == nmem) { ... }
mem->next = nmem->next;
/* fix the back-link of the element after the absorbed one */
if (nmem->next != MEM_SIZE_ALIGNED) {
ptr_to_mem(nmem->next)->prev = mem_to_ptr(mem);
}if (nmem->next != MEM_SIZE_ALIGNED) { ... }
}if (mem != nmem && nmem->used == 0 && (u8_t *)nmem != (u8_t *)ram_end) { ... }
/* try to merge with the preceding element (if unused) */
pmem = ptr_to_mem(mem->prev);
if (pmem != mem && pmem->used == 0) {
/* mem is absorbed into pmem; if it was the lowest free element, pmem now is */
if (lfree == mem) {
lfree = pmem;
}if (lfree == mem) { ... }
pmem->next = mem->next;
/* fix the back-link of the element after the absorbed one */
if (mem->next != MEM_SIZE_ALIGNED) {
ptr_to_mem(mem->next)->prev = mem_to_ptr(pmem);
}if (mem->next != MEM_SIZE_ALIGNED) { ... }
}if (pmem != mem && pmem->used == 0) { ... }
}{ ... }
/* ... */
/**
 * Initialize the custom heap: set up one large free block covering the whole
 * heap, a used sentinel element at the end, the 'lfree' pointer, and the
 * mutex protecting the heap.
 */
void
mem_init(void)
{
  struct mem *first;

  LWIP_ASSERT("Sanity check alignment",
              (SIZEOF_STRUCT_MEM & (MEM_ALIGNMENT - 1)) == 0);

  /* align the heap base */
  ram = (u8_t *)LWIP_MEM_ALIGN(LWIP_RAM_HEAP_POINTER);
  /* the heap starts as a single unused block spanning everything */
  first = (struct mem *)(void *)ram;
  first->next = MEM_SIZE_ALIGNED;
  first->prev = 0;
  first->used = 0;
  /* the sentinel at the end is permanently marked used and self-linked */
  ram_end = ptr_to_mem(MEM_SIZE_ALIGNED);
  ram_end->used = 1;
  ram_end->next = MEM_SIZE_ALIGNED;
  ram_end->prev = MEM_SIZE_ALIGNED;
  MEM_SANITY();

  /* the single free block is also the lowest free element */
  lfree = (struct mem *)(void *)ram;

  MEM_STATS_AVAIL(avail, MEM_SIZE_ALIGNED);

  if (sys_mutex_new(&mem_mutex) != ERR_OK) {
    LWIP_ASSERT("failed to create mem_mutex", 0);
  }
}
/* ... */
/**
 * Check whether a heap element's next/prev links are consistent: both indices
 * in range, the previous element points forward to it (unless it is its own
 * predecessor, i.e. the first element), and the next element points back to it
 * (unless next is the end sentinel).
 *
 * @param mem heap element to validate
 * @return 1 if the links look valid, 0 otherwise
 */
static int
mem_link_valid(struct mem *mem)
{
  mem_size_t self_idx = mem_to_ptr(mem);
  struct mem *nmem;
  struct mem *pmem;

  if ((mem->next > MEM_SIZE_ALIGNED) || (mem->prev > MEM_SIZE_ALIGNED)) {
    return 0;
  }
  nmem = ptr_to_mem(mem->next);
  pmem = ptr_to_mem(mem->prev);
  if ((mem->prev != self_idx) && (pmem->next != self_idx)) {
    return 0;
  }
  if ((nmem != ram_end) && (nmem->prev != self_idx)) {
    return 0;
  }
  return 1;
}
#if MEM_SANITY_CHECK
/**
 * Walk the whole heap and assert structural invariants: element alignment,
 * link indices in range, link consistency, no two adjacent unused elements
 * (plug_holes should have merged them), and a well-formed end sentinel.
 *
 * Fix: the alignment asserts previously read
 *   LWIP_MEM_ALIGN(ptr == ptr)
 * (comparison INSIDE the macro argument), which aligns a boolean and is
 * always trivially true; the parenthesis now closes before the '=='.
 */
static void
mem_sanity(void)
{
  struct mem *mem;
  u8_t last_used;

  /* begin with the first element */
  mem = (struct mem *)ram;
  LWIP_ASSERT("heap element used valid", (mem->used == 0) || (mem->used == 1));
  last_used = mem->used;
  LWIP_ASSERT("heap element prev ptr valid", mem->prev == 0);
  LWIP_ASSERT("heap element next ptr valid", mem->next <= MEM_SIZE_ALIGNED);
  LWIP_ASSERT("heap element next ptr aligned", LWIP_MEM_ALIGN(ptr_to_mem(mem->next)) == ptr_to_mem(mem->next));

  /* check all elements before the end of the heap */
  for (mem = ptr_to_mem(mem->next);
       ((u8_t *)mem > ram) && (mem < ram_end);
       mem = ptr_to_mem(mem->next)) {
    LWIP_ASSERT("heap element aligned", LWIP_MEM_ALIGN(mem) == mem);
    LWIP_ASSERT("heap element prev ptr valid", mem->prev <= MEM_SIZE_ALIGNED);
    LWIP_ASSERT("heap element next ptr valid", mem->next <= MEM_SIZE_ALIGNED);
    LWIP_ASSERT("heap element prev ptr aligned", LWIP_MEM_ALIGN(ptr_to_mem(mem->prev)) == ptr_to_mem(mem->prev));
    LWIP_ASSERT("heap element next ptr aligned", LWIP_MEM_ALIGN(ptr_to_mem(mem->next)) == ptr_to_mem(mem->next));
    if (last_used == 0) {
      /* 2 unused elements in a row? plug_holes should have merged them */
      LWIP_ASSERT("heap element unused?", mem->used == 1);
    } else {
      LWIP_ASSERT("heap element unused member", (mem->used == 0) || (mem->used == 1));
    }
    LWIP_ASSERT("heap element link valid", mem_link_valid(mem));
    last_used = mem->used;
  }
  /* the walk must terminate exactly at the end sentinel */
  LWIP_ASSERT("heap end ptr sanity", mem == ptr_to_mem(MEM_SIZE_ALIGNED));
  LWIP_ASSERT("heap element used valid", mem->used == 1);
  LWIP_ASSERT("heap element prev ptr valid", mem->prev == MEM_SIZE_ALIGNED);
  LWIP_ASSERT("heap element next ptr valid", mem->next == MEM_SIZE_ALIGNED);
}
/* ... */#endif
/* ... */
/**
 * Put a heap element back on the custom heap. Validates the pointer
 * (alignment, heap bounds, not already free, links intact) before marking it
 * unused, then merges it with adjacent free blocks via plug_holes().
 * Illegal frees are counted in the 'illegal' statistic and reported through
 * LWIP_MEM_ILLEGAL_FREE rather than corrupting the heap.
 *
 * @param rmem payload pointer previously returned by mem_malloc(); NULL is a no-op
 */
void
mem_free(void *rmem)
{
struct mem *mem;
LWIP_MEM_FREE_DECL_PROTECT();
if (rmem == NULL) {
LWIP_DEBUGF(MEM_DEBUG | LWIP_DBG_TRACE | LWIP_DBG_LEVEL_SERIOUS, ("mem_free(p == NULL) was called.\n"));
return;
}if (rmem == NULL) { ... }
/* the pointer must be aligned the way mem_malloc() hands memory out */
if ((((mem_ptr_t)rmem) & (MEM_ALIGNMENT - 1)) != 0) {
LWIP_MEM_ILLEGAL_FREE("mem_free: sanity check alignment");
LWIP_DEBUGF(MEM_DEBUG | LWIP_DBG_LEVEL_SEVERE, ("mem_free: sanity check alignment\n"));
MEM_STATS_INC_LOCKED(illegal);
return;
}if ((((mem_ptr_t)rmem) & (MEM_ALIGNMENT - 1)) != 0) { ... }
/* step back from the payload to the struct mem header */
mem = (struct mem *)(void *)((u8_t *)rmem - (SIZEOF_STRUCT_MEM + MEM_SANITY_OFFSET));
if ((u8_t *)mem < ram || (u8_t *)rmem + MIN_SIZE_ALIGNED > (u8_t *)ram_end) {
LWIP_MEM_ILLEGAL_FREE("mem_free: illegal memory");
LWIP_DEBUGF(MEM_DEBUG | LWIP_DBG_LEVEL_SEVERE, ("mem_free: illegal memory\n"));
MEM_STATS_INC_LOCKED(illegal);
return;
}if ((u8_t *)mem < ram || (u8_t *)rmem + MIN_SIZE_ALIGNED > (u8_t *)ram_end) { ... }
#if MEM_OVERFLOW_CHECK
mem_overflow_check_element(mem);
#endif
/* protect the heap from concurrent access */
LWIP_MEM_FREE_PROTECT();
/* freeing an element that is not in use indicates a double free */
if (!mem->used) {
LWIP_MEM_ILLEGAL_FREE("mem_free: illegal memory: double free");
LWIP_MEM_FREE_UNPROTECT();
LWIP_DEBUGF(MEM_DEBUG | LWIP_DBG_LEVEL_SEVERE, ("mem_free: illegal memory: double free?\n"));
MEM_STATS_INC_LOCKED(illegal);
return;
}if (!mem->used) { ... }
if (!mem_link_valid(mem)) {
LWIP_MEM_ILLEGAL_FREE("mem_free: illegal memory: non-linked: double free");
LWIP_MEM_FREE_UNPROTECT();
LWIP_DEBUGF(MEM_DEBUG | LWIP_DBG_LEVEL_SEVERE, ("mem_free: illegal memory: non-linked: double free?\n"));
MEM_STATS_INC_LOCKED(illegal);
return;
}if (!mem_link_valid(mem)) { ... }
/* mark the element unused */
mem->used = 0;
/* track the lowest free element for the allocator's search start */
if (mem < lfree) {
lfree = mem;
}if (mem < lfree) { ... }
MEM_STATS_DEC_USED(used, mem->next - (mem_size_t)(((u8_t *)mem - ram)));
/* merge with adjacent free blocks */
plug_holes(mem);
MEM_SANITY();
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
/* signal a concurrently running mem_malloc() that the heap changed */
mem_free_count = 1;
#endif
LWIP_MEM_FREE_UNPROTECT();
}{ ... }
/* ... */
/**
 * Shrink an allocation in place to 'new_size' bytes, returning the freed
 * tail to the heap. The returned pointer equals 'rmem'; growing is not
 * supported (asserts and returns NULL if newsize > current size).
 * The tail is released either by extending the following free block
 * backwards, or by splitting off a new free element if there is room for
 * a struct mem plus MIN_SIZE_ALIGNED bytes.
 *
 * @param rmem payload pointer previously returned by mem_malloc()
 * @param new_size new (smaller) payload size in bytes
 * @return rmem on success, NULL if newsize is invalid or too large
 */
void *
mem_trim(void *rmem, mem_size_t new_size)
{
mem_size_t size, newsize;
mem_size_t ptr, ptr2;
struct mem *mem, *mem2;
LWIP_MEM_FREE_DECL_PROTECT();
/* ... */
/* round the requested size up to the alignment and enforce the minimum */
newsize = (mem_size_t)LWIP_MEM_ALIGN_SIZE(new_size);
if (newsize < MIN_SIZE_ALIGNED) {
newsize = MIN_SIZE_ALIGNED;
}if (newsize < MIN_SIZE_ALIGNED) { ... }
#if MEM_OVERFLOW_CHECK
newsize += MEM_SANITY_REGION_BEFORE_ALIGNED + MEM_SANITY_REGION_AFTER_ALIGNED;
#endif
/* reject sizes beyond the heap or that overflowed during rounding */
if ((newsize > MEM_SIZE_ALIGNED) || (newsize < new_size)) {
return NULL;
}if ((newsize > MEM_SIZE_ALIGNED) || (newsize < new_size)) { ... }
LWIP_ASSERT("mem_trim: legal memory", (u8_t *)rmem >= (u8_t *)ram &&
(u8_t *)rmem < (u8_t *)ram_end);
if ((u8_t *)rmem < (u8_t *)ram || (u8_t *)rmem >= (u8_t *)ram_end) {
LWIP_DEBUGF(MEM_DEBUG | LWIP_DBG_LEVEL_SEVERE, ("mem_trim: illegal memory\n"));
MEM_STATS_INC_LOCKED(illegal);
return rmem;
}if ((u8_t *)rmem < (u8_t *)ram || (u8_t *)rmem >= (u8_t *)ram_end) { ... }
/* step back from the payload to the struct mem header */
mem = (struct mem *)(void *)((u8_t *)rmem - (SIZEOF_STRUCT_MEM + MEM_SANITY_OFFSET));
#if MEM_OVERFLOW_CHECK
mem_overflow_check_element(mem);
#endif
ptr = mem_to_ptr(mem);
/* current usable payload size of this element */
size = (mem_size_t)((mem_size_t)(mem->next - ptr) - (SIZEOF_STRUCT_MEM + MEM_SANITY_OVERHEAD));
LWIP_ASSERT("mem_trim can only shrink memory", newsize <= size);
if (newsize > size) {
return NULL;
}if (newsize > size) { ... }
if (newsize == size) {
/* nothing to do */
return rmem;
}if (newsize == size) { ... }
/* protect the heap from concurrent access */
LWIP_MEM_FREE_PROTECT();
mem2 = ptr_to_mem(mem->next);
if (mem2->used == 0) {
/* the next element is free: extend it backwards over the trimmed tail */
mem_size_t next;
LWIP_ASSERT("invalid next ptr", mem->next != MEM_SIZE_ALIGNED);
/* remember the link beyond mem2 before moving its header */
next = mem2->next;
ptr2 = (mem_size_t)(ptr + SIZEOF_STRUCT_MEM + newsize);
if (lfree == mem2) {
lfree = ptr_to_mem(ptr2);
}if (lfree == mem2) { ... }
mem2 = ptr_to_mem(ptr2);
mem2->used = 0;
mem2->next = next;
mem2->prev = ptr;
mem->next = ptr2;
/* ... */
/* fix the back-link of the element after the moved free block */
if (mem2->next != MEM_SIZE_ALIGNED) {
ptr_to_mem(mem2->next)->prev = ptr2;
}if (mem2->next != MEM_SIZE_ALIGNED) { ... }
MEM_STATS_DEC_USED(used, (size - newsize));
}if (mem2->used == 0) { ... } else if (newsize + SIZEOF_STRUCT_MEM + MIN_SIZE_ALIGNED <= size) {
/* ... */
/* the next element is used, but the trimmed tail is big enough to
   carry its own struct mem header: split off a new free element */
ptr2 = (mem_size_t)(ptr + SIZEOF_STRUCT_MEM + newsize);
LWIP_ASSERT("invalid next ptr", mem->next != MEM_SIZE_ALIGNED);
mem2 = ptr_to_mem(ptr2);
if (mem2 < lfree) {
lfree = mem2;
}if (mem2 < lfree) { ... }
mem2->used = 0;
mem2->next = mem->next;
mem2->prev = ptr;
mem->next = ptr2;
if (mem2->next != MEM_SIZE_ALIGNED) {
ptr_to_mem(mem2->next)->prev = ptr2;
}if (mem2->next != MEM_SIZE_ALIGNED) { ... }
MEM_STATS_DEC_USED(used, (size - newsize));
}else if (newsize + SIZEOF_STRUCT_MEM + MIN_SIZE_ALIGNED <= size) { ... }
/* ... */
#if MEM_OVERFLOW_CHECK
mem_overflow_init_element(mem, new_size);
#endif
MEM_SANITY();
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
/* signal a concurrently running mem_malloc() that the heap changed */
mem_free_count = 1;
#endif
LWIP_MEM_FREE_UNPROTECT();
return rmem;
}{ ... }
/* ... */
/**
 * Allocate a block from the custom heap using first-fit, starting the search
 * at 'lfree' (the lowest free element). A found free element is split when
 * the remainder can hold a struct mem plus MIN_SIZE_ALIGNED bytes; otherwise
 * the whole element is used. With LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT the
 * search is restarted whenever a concurrent mem_free() signalled a change via
 * 'mem_free_count'.
 *
 * @param size_in number of bytes requested by the caller
 * @return aligned payload pointer, or NULL if no fitting element was found
 */
void *
mem_malloc(mem_size_t size_in)
{
mem_size_t ptr, ptr2, size;
struct mem *mem, *mem2;
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
u8_t local_mem_free_count = 0;
#endif
LWIP_MEM_ALLOC_DECL_PROTECT();
if (size_in == 0) {
return NULL;
}if (size_in == 0) { ... }
/* ... */
/* round the requested size up to the alignment and enforce the minimum */
size = (mem_size_t)LWIP_MEM_ALIGN_SIZE(size_in);
if (size < MIN_SIZE_ALIGNED) {
size = MIN_SIZE_ALIGNED;
}if (size < MIN_SIZE_ALIGNED) { ... }
#if MEM_OVERFLOW_CHECK
size += MEM_SANITY_REGION_BEFORE_ALIGNED + MEM_SANITY_REGION_AFTER_ALIGNED;
#endif
/* reject sizes beyond the heap or that overflowed during rounding */
if ((size > MEM_SIZE_ALIGNED) || (size < size_in)) {
return NULL;
}if ((size > MEM_SIZE_ALIGNED) || (size < size_in)) { ... }
/* protect the heap from concurrent access */
sys_mutex_lock(&mem_mutex);
LWIP_MEM_ALLOC_PROTECT();
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
/* retry the whole search whenever a concurrent free changed the heap */
do {
local_mem_free_count = 0;
#endif
/* ... */
/* first-fit scan, starting at the lowest free element */
for (ptr = mem_to_ptr(lfree); ptr < MEM_SIZE_ALIGNED - size;
ptr = ptr_to_mem(ptr)->next) {
mem = ptr_to_mem(ptr);
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
/* briefly open the protection window so a pending free can run */
mem_free_count = 0;
LWIP_MEM_ALLOC_UNPROTECT();
LWIP_MEM_ALLOC_PROTECT();
if (mem_free_count != 0) {
/* ... */
local_mem_free_count = 1;
break;
}if (mem_free_count != 0) { ... }
/* ... */#endif
if ((!mem->used) &&
(mem->next - (ptr + SIZEOF_STRUCT_MEM)) >= size) {
/* ... */
/* element fits; split it if the remainder can hold another element */
if (mem->next - (ptr + SIZEOF_STRUCT_MEM) >= (size + SIZEOF_STRUCT_MEM + MIN_SIZE_ALIGNED)) {
/* ... */
ptr2 = (mem_size_t)(ptr + SIZEOF_STRUCT_MEM + size);
LWIP_ASSERT("invalid next ptr",ptr2 != MEM_SIZE_ALIGNED);
/* create mem2 in the remainder and link it in after mem */
mem2 = ptr_to_mem(ptr2);
mem2->used = 0;
mem2->next = mem->next;
mem2->prev = ptr;
mem->next = ptr2;
mem->used = 1;
if (mem2->next != MEM_SIZE_ALIGNED) {
ptr_to_mem(mem2->next)->prev = ptr2;
}if (mem2->next != MEM_SIZE_ALIGNED) { ... }
MEM_STATS_INC_USED(used, (size + SIZEOF_STRUCT_MEM));
}if (mem->next - (ptr + SIZEOF_STRUCT_MEM) >= (size + SIZEOF_STRUCT_MEM + MIN_SIZE_ALIGNED)) { ... } else {
/* ... */
/* remainder too small to split: hand out the whole element */
mem->used = 1;
MEM_STATS_INC_USED(used, mem->next - mem_to_ptr(mem));
}else { ... }
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
mem_malloc_adjust_lfree:
#endif
/* if we allocated the lowest free element, advance lfree */
if (mem == lfree) {
struct mem *cur = lfree;
while (cur->used && cur != ram_end) {
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
/* briefly open the protection window so a pending free can run */
mem_free_count = 0;
LWIP_MEM_ALLOC_UNPROTECT();
LWIP_MEM_ALLOC_PROTECT();
if (mem_free_count != 0) {
/* ... */
goto mem_malloc_adjust_lfree;
}if (mem_free_count != 0) { ... }
/* ... */#endif
cur = ptr_to_mem(cur->next);
}while (cur->used && cur != ram_end) { ... }
lfree = cur;
LWIP_ASSERT("mem_malloc: !lfree->used", ((lfree == ram_end) || (!lfree->used)));
}if (mem == lfree) { ... }
LWIP_MEM_ALLOC_UNPROTECT();
sys_mutex_unlock(&mem_mutex);
LWIP_ASSERT("mem_malloc: allocated memory not above ram_end.",
(mem_ptr_t)mem + SIZEOF_STRUCT_MEM + size <= (mem_ptr_t)ram_end);
LWIP_ASSERT("mem_malloc: allocated memory properly aligned.",
((mem_ptr_t)mem + SIZEOF_STRUCT_MEM) % MEM_ALIGNMENT == 0);
LWIP_ASSERT("mem_malloc: sanity check alignment",
(((mem_ptr_t)mem) & (MEM_ALIGNMENT - 1)) == 0);
#if MEM_OVERFLOW_CHECK
mem_overflow_init_element(mem, size_in);
#endif
MEM_SANITY();
/* the payload starts after the header (and the front sanity region) */
return (u8_t *)mem + SIZEOF_STRUCT_MEM + MEM_SANITY_OFFSET;
}if ((!mem->used) && (mem->next - (ptr + SIZEOF_STRUCT_MEM)) >= size) { ... }
}for (ptr = mem_to_ptr(lfree); ptr < MEM_SIZE_ALIGNED - size; ptr = ptr_to_mem(ptr)->next) { ... }
#if LWIP_ALLOW_MEM_FREE_FROM_OTHER_CONTEXT
...} while (local_mem_free_count != 0);
#endif
/* no fitting element found */
MEM_STATS_INC(err);
LWIP_MEM_ALLOC_UNPROTECT();
sys_mutex_unlock(&mem_mutex);
LWIP_DEBUGF(MEM_DEBUG | LWIP_DBG_LEVEL_SERIOUS, ("mem_malloc: could not allocate %"S16_F" bytes\n", (s16_t)size));
return NULL;
}{ ... }
/* ... */#endif
#if MEM_LIBC_MALLOC && (!LWIP_STATS || !MEM_STATS)
/**
 * With MEM_LIBC_MALLOC and memory statistics disabled, mem_calloc maps
 * straight onto the C library's calloc(), which zero-fills and performs
 * its own count*size overflow checking.
 */
void *
mem_calloc(mem_size_t count, mem_size_t size)
{
  return mem_clib_calloc(count, size);
}
/* ... */
#else
/* ... */
/**
 * Allocate 'count' objects of 'size' bytes via mem_malloc() and zero the
 * result, so statistics account for it like any other heap allocation.
 *
 * Fix: the original only verified that the product survives a round-trip
 * through mem_size_t; if size_t is no wider than mem_size_t the size_t
 * multiplication itself can wrap undetected. A division-based check now
 * catches that case as well.
 *
 * @param count number of objects
 * @param size size of one object in bytes
 * @return zero-filled payload pointer, or NULL on overflow/allocation failure
 */
void *
mem_calloc(mem_size_t count, mem_size_t size)
{
  void *p;
  size_t alloc_size = (size_t)count * (size_t)size;

  /* detect wrap-around of the size_t multiplication itself */
  if ((count != 0) && ((alloc_size / count) != size)) {
    LWIP_DEBUGF(MEM_DEBUG | LWIP_DBG_LEVEL_SERIOUS, ("mem_calloc: could not allocate %"SZT_F" bytes\n", alloc_size));
    return NULL;
  }
  /* detect a product that does not fit into mem_size_t */
  if ((size_t)(mem_size_t)alloc_size != alloc_size) {
    LWIP_DEBUGF(MEM_DEBUG | LWIP_DBG_LEVEL_SERIOUS, ("mem_calloc: could not allocate %"SZT_F" bytes\n", alloc_size));
    return NULL;
  }

  /* allocate 'count' objects of size 'size' */
  p = mem_malloc((mem_size_t)alloc_size);
  if (p) {
    /* zero the memory */
    memset(p, 0, alloc_size);
  }
  return p;
}
#endif/* ... */