Mirror of https://github.com/fluencelabs/redis (synced 2025-06-12 08:41:21 +00:00)
deps/jemalloc/test/integration/MALLOCX_ARENA.c (vendored; 4 changed lines; Executable file → Normal file)

@@ -19,8 +19,8 @@ thd_start(void *arg)
size_t sz;

sz = sizeof(arena_ind);
assert_d_eq(mallctl("arenas.extend", (void *)&arena_ind, &sz, NULL, 0),
0, "Error in arenas.extend");
assert_d_eq(mallctl("arenas.extend", &arena_ind, &sz, NULL, 0), 0,
"Error in arenas.extend");

if (thread_ind % 4 != 3) {
size_t mib[3];

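The change repeated throughout this diff is the explicit (void *) cast on mallctl()'s oldp/newp arguments: the newer test sources cast, the older sources being restored here do not; the two spellings are equivalent because those parameters are already void *. A minimal read sketch (an illustration, not part of the diff), assuming a jemalloc 4.x build whose public symbols are unprefixed (with the usual je_ prefix the calls become je_mallctl(), etc.):

#include <stdio.h>
#include <jemalloc/jemalloc.h>

int
main(void)
{
    unsigned arena_ind;
    size_t sz = sizeof(arena_ind);

    /* oldp/oldlenp read an unsigned out of the control tree; the
     * (void *) cast is purely stylistic since oldp is a void *. */
    if (mallctl("arenas.extend", (void *)&arena_ind, &sz, NULL, 0) != 0) {
        fprintf(stderr, "arenas.extend failed\n");
        return (1);
    }
    printf("created arena %u\n", arena_ind);
    return (0);
}
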
deps/jemalloc/test/integration/aligned_alloc.c (vendored; 20 changed lines)

@@ -1,20 +1,9 @@
#include "test/jemalloc_test.h"

#define CHUNK 0x400000
#define MAXALIGN (((size_t)1) << 23)

/*
* On systems which can't merge extents, tests that call this function generate
* a lot of dirty memory very quickly. Purging between cycles mitigates
* potential OOM on e.g. 32-bit Windows.
*/
static void
purge(void)
{

assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
"Unexpected mallctl error");
}
/* #define MAXALIGN ((size_t)UINT64_C(0x80000000000)) */
#define MAXALIGN ((size_t)0x2000000LU)
#define NITER 4

TEST_BEGIN(test_alignment_errors)
{

@@ -85,7 +74,6 @@ TEST_END

TEST_BEGIN(test_alignment_and_size)
{
#define NITER 4
size_t alignment, size, total;
unsigned i;
void *ps[NITER];

@@ -122,9 +110,7 @@ TEST_BEGIN(test_alignment_and_size)
}
}
}
purge();
}
#undef NITER
}
TEST_END

deps/jemalloc/test/integration/allocated.c (vendored; 17 changed lines; Executable file → Normal file)

@@ -18,14 +18,14 @@ thd_start(void *arg)
size_t sz, usize;

sz = sizeof(a0);
if ((err = mallctl("thread.allocated", (void *)&a0, &sz, NULL, 0))) {
if ((err = mallctl("thread.allocated", &a0, &sz, NULL, 0))) {
if (err == ENOENT)
goto label_ENOENT;
test_fail("%s(): Error in mallctl(): %s", __func__,
strerror(err));
}
sz = sizeof(ap0);
if ((err = mallctl("thread.allocatedp", (void *)&ap0, &sz, NULL, 0))) {
if ((err = mallctl("thread.allocatedp", &ap0, &sz, NULL, 0))) {
if (err == ENOENT)
goto label_ENOENT;
test_fail("%s(): Error in mallctl(): %s", __func__,

@@ -36,15 +36,14 @@ thd_start(void *arg)
"storage");

sz = sizeof(d0);
if ((err = mallctl("thread.deallocated", (void *)&d0, &sz, NULL, 0))) {
if ((err = mallctl("thread.deallocated", &d0, &sz, NULL, 0))) {
if (err == ENOENT)
goto label_ENOENT;
test_fail("%s(): Error in mallctl(): %s", __func__,
strerror(err));
}
sz = sizeof(dp0);
if ((err = mallctl("thread.deallocatedp", (void *)&dp0, &sz, NULL,
0))) {
if ((err = mallctl("thread.deallocatedp", &dp0, &sz, NULL, 0))) {
if (err == ENOENT)
goto label_ENOENT;
test_fail("%s(): Error in mallctl(): %s", __func__,

@@ -58,9 +57,9 @@ thd_start(void *arg)
assert_ptr_not_null(p, "Unexpected malloc() error");

sz = sizeof(a1);
mallctl("thread.allocated", (void *)&a1, &sz, NULL, 0);
mallctl("thread.allocated", &a1, &sz, NULL, 0);
sz = sizeof(ap1);
mallctl("thread.allocatedp", (void *)&ap1, &sz, NULL, 0);
mallctl("thread.allocatedp", &ap1, &sz, NULL, 0);
assert_u64_eq(*ap1, a1,
"Dereferenced \"thread.allocatedp\" value should equal "
"\"thread.allocated\" value");

@@ -75,9 +74,9 @@ thd_start(void *arg)
free(p);

sz = sizeof(d1);
mallctl("thread.deallocated", (void *)&d1, &sz, NULL, 0);
mallctl("thread.deallocated", &d1, &sz, NULL, 0);
sz = sizeof(dp1);
mallctl("thread.deallocatedp", (void *)&dp1, &sz, NULL, 0);
mallctl("thread.deallocatedp", &dp1, &sz, NULL, 0);
assert_u64_eq(*dp1, d1,
"Dereferenced \"thread.deallocatedp\" value should equal "
"\"thread.deallocated\" value");

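For context on the controls exercised above: thread.allocated and thread.deallocated are per-thread uint64_t byte counters, and the *p variants hand back a pointer to the live counter so it can be re-read without another mallctl() call. A hedged sketch under the same symbol-prefix assumption as the earlier example, requiring a stats-enabled build:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <jemalloc/jemalloc.h>

int
main(void)
{
    uint64_t a0, *ap;
    size_t sz;

    sz = sizeof(uint64_t);
    mallctl("thread.allocated", &a0, &sz, NULL, 0);   /* one-time snapshot */
    sz = sizeof(uint64_t *);
    mallctl("thread.allocatedp", &ap, &sz, NULL, 0);  /* pointer to live counter */

    void *p = malloc(100);
    printf("snapshot=%" PRIu64 " live=%" PRIu64 "\n", a0, *ap);
    free(p);
    return (0);
}
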
deps/jemalloc/test/integration/chunk.c (vendored; 98 changed lines)

@@ -121,10 +121,6 @@ TEST_BEGIN(test_chunk)
{
void *p;
size_t old_size, new_size, large0, large1, huge0, huge1, huge2, sz;
unsigned arena_ind;
int flags;
size_t hooks_mib[3], purge_mib[3];
size_t hooks_miblen, purge_miblen;
chunk_hooks_t new_hooks = {
chunk_alloc,
chunk_dalloc,

@@ -136,21 +132,11 @@ TEST_BEGIN(test_chunk)
};
bool xallocx_success_a, xallocx_success_b, xallocx_success_c;

sz = sizeof(unsigned);
assert_d_eq(mallctl("arenas.extend", (void *)&arena_ind, &sz, NULL, 0),
0, "Unexpected mallctl() failure");
flags = MALLOCX_ARENA(arena_ind) | MALLOCX_TCACHE_NONE;

/* Install custom chunk hooks. */
hooks_miblen = sizeof(hooks_mib)/sizeof(size_t);
assert_d_eq(mallctlnametomib("arena.0.chunk_hooks", hooks_mib,
&hooks_miblen), 0, "Unexpected mallctlnametomib() failure");
hooks_mib[1] = (size_t)arena_ind;
old_size = sizeof(chunk_hooks_t);
new_size = sizeof(chunk_hooks_t);
assert_d_eq(mallctlbymib(hooks_mib, hooks_miblen, (void *)&old_hooks,
&old_size, (void *)&new_hooks, new_size), 0,
"Unexpected chunk_hooks error");
assert_d_eq(mallctl("arena.0.chunk_hooks", &old_hooks, &old_size,
&new_hooks, new_size), 0, "Unexpected chunk_hooks error");
orig_hooks = old_hooks;
assert_ptr_ne(old_hooks.alloc, chunk_alloc, "Unexpected alloc error");
assert_ptr_ne(old_hooks.dalloc, chunk_dalloc,

@@ -165,63 +151,59 @@ TEST_BEGIN(test_chunk)

/* Get large size classes. */
sz = sizeof(size_t);
assert_d_eq(mallctl("arenas.lrun.0.size", (void *)&large0, &sz, NULL,
0), 0, "Unexpected arenas.lrun.0.size failure");
assert_d_eq(mallctl("arenas.lrun.1.size", (void *)&large1, &sz, NULL,
0), 0, "Unexpected arenas.lrun.1.size failure");
assert_d_eq(mallctl("arenas.lrun.0.size", &large0, &sz, NULL, 0), 0,
"Unexpected arenas.lrun.0.size failure");
assert_d_eq(mallctl("arenas.lrun.1.size", &large1, &sz, NULL, 0), 0,
"Unexpected arenas.lrun.1.size failure");

/* Get huge size classes. */
assert_d_eq(mallctl("arenas.hchunk.0.size", (void *)&huge0, &sz, NULL,
0), 0, "Unexpected arenas.hchunk.0.size failure");
assert_d_eq(mallctl("arenas.hchunk.1.size", (void *)&huge1, &sz, NULL,
0), 0, "Unexpected arenas.hchunk.1.size failure");
assert_d_eq(mallctl("arenas.hchunk.2.size", (void *)&huge2, &sz, NULL,
0), 0, "Unexpected arenas.hchunk.2.size failure");
assert_d_eq(mallctl("arenas.hchunk.0.size", &huge0, &sz, NULL, 0), 0,
"Unexpected arenas.hchunk.0.size failure");
assert_d_eq(mallctl("arenas.hchunk.1.size", &huge1, &sz, NULL, 0), 0,
"Unexpected arenas.hchunk.1.size failure");
assert_d_eq(mallctl("arenas.hchunk.2.size", &huge2, &sz, NULL, 0), 0,
"Unexpected arenas.hchunk.2.size failure");

/* Test dalloc/decommit/purge cascade. */
purge_miblen = sizeof(purge_mib)/sizeof(size_t);
assert_d_eq(mallctlnametomib("arena.0.purge", purge_mib, &purge_miblen),
0, "Unexpected mallctlnametomib() failure");
purge_mib[1] = (size_t)arena_ind;
do_dalloc = false;
do_decommit = false;
p = mallocx(huge0 * 2, flags);
p = mallocx(huge0 * 2, 0);
assert_ptr_not_null(p, "Unexpected mallocx() error");
did_dalloc = false;
did_decommit = false;
did_purge = false;
did_split = false;
xallocx_success_a = (xallocx(p, huge0, 0, flags) == huge0);
assert_d_eq(mallctlbymib(purge_mib, purge_miblen, NULL, NULL, NULL, 0),
0, "Unexpected arena.%u.purge error", arena_ind);
xallocx_success_a = (xallocx(p, huge0, 0, 0) == huge0);
assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
"Unexpected arena.0.purge error");
if (xallocx_success_a) {
assert_true(did_dalloc, "Expected dalloc");
assert_false(did_decommit, "Unexpected decommit");
assert_true(did_purge, "Expected purge");
}
assert_true(did_split, "Expected split");
dallocx(p, flags);
dallocx(p, 0);
do_dalloc = true;

/* Test decommit/commit and observe split/merge. */
do_dalloc = false;
do_decommit = true;
p = mallocx(huge0 * 2, flags);
p = mallocx(huge0 * 2, 0);
assert_ptr_not_null(p, "Unexpected mallocx() error");
did_decommit = false;
did_commit = false;
did_split = false;
did_merge = false;
xallocx_success_b = (xallocx(p, huge0, 0, flags) == huge0);
assert_d_eq(mallctlbymib(purge_mib, purge_miblen, NULL, NULL, NULL, 0),
0, "Unexpected arena.%u.purge error", arena_ind);
xallocx_success_b = (xallocx(p, huge0, 0, 0) == huge0);
assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
"Unexpected arena.0.purge error");
if (xallocx_success_b)
assert_true(did_split, "Expected split");
xallocx_success_c = (xallocx(p, huge0 * 2, 0, flags) == huge0 * 2);
xallocx_success_c = (xallocx(p, huge0 * 2, 0, 0) == huge0 * 2);
assert_b_eq(did_decommit, did_commit, "Expected decommit/commit match");
if (xallocx_success_b && xallocx_success_c)
assert_true(did_merge, "Expected merge");
dallocx(p, flags);
dallocx(p, 0);
do_dalloc = true;
do_decommit = false;

@@ -232,43 +214,43 @@ TEST_BEGIN(test_chunk)
* successful xallocx() from size=huge2 to size=huge1 is
* guaranteed to leave trailing purgeable memory.
*/
p = mallocx(huge2, flags);
p = mallocx(huge2, 0);
assert_ptr_not_null(p, "Unexpected mallocx() error");
did_purge = false;
assert_zu_eq(xallocx(p, huge1, 0, flags), huge1,
assert_zu_eq(xallocx(p, huge1, 0, 0), huge1,
"Unexpected xallocx() failure");
assert_true(did_purge, "Expected purge");
dallocx(p, flags);
dallocx(p, 0);
}

/* Test decommit for large allocations. */
do_decommit = true;
p = mallocx(large1, flags);
p = mallocx(large1, 0);
assert_ptr_not_null(p, "Unexpected mallocx() error");
assert_d_eq(mallctlbymib(purge_mib, purge_miblen, NULL, NULL, NULL, 0),
0, "Unexpected arena.%u.purge error", arena_ind);
assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
"Unexpected arena.0.purge error");
did_decommit = false;
assert_zu_eq(xallocx(p, large0, 0, flags), large0,
assert_zu_eq(xallocx(p, large0, 0, 0), large0,
"Unexpected xallocx() failure");
assert_d_eq(mallctlbymib(purge_mib, purge_miblen, NULL, NULL, NULL, 0),
0, "Unexpected arena.%u.purge error", arena_ind);
assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
"Unexpected arena.0.purge error");
did_commit = false;
assert_zu_eq(xallocx(p, large1, 0, flags), large1,
assert_zu_eq(xallocx(p, large1, 0, 0), large1,
"Unexpected xallocx() failure");
assert_b_eq(did_decommit, did_commit, "Expected decommit/commit match");
dallocx(p, flags);
dallocx(p, 0);
do_decommit = false;

/* Make sure non-huge allocation succeeds. */
p = mallocx(42, flags);
p = mallocx(42, 0);
assert_ptr_not_null(p, "Unexpected mallocx() error");
dallocx(p, flags);
dallocx(p, 0);

/* Restore chunk hooks. */
assert_d_eq(mallctlbymib(hooks_mib, hooks_miblen, NULL, NULL,
(void *)&old_hooks, new_size), 0, "Unexpected chunk_hooks error");
assert_d_eq(mallctlbymib(hooks_mib, hooks_miblen, (void *)&old_hooks,
&old_size, NULL, 0), 0, "Unexpected chunk_hooks error");
assert_d_eq(mallctl("arena.0.chunk_hooks", NULL, NULL, &old_hooks,
new_size), 0, "Unexpected chunk_hooks error");
assert_d_eq(mallctl("arena.0.chunk_hooks", &old_hooks, &old_size,
NULL, 0), 0, "Unexpected chunk_hooks error");
assert_ptr_eq(old_hooks.alloc, orig_hooks.alloc,
"Unexpected alloc error");
assert_ptr_eq(old_hooks.dalloc, orig_hooks.dalloc,

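The hunks above also switch between the two ways of addressing the mallctl tree: by name ("arena.0.purge") or by a MIB resolved once with mallctlnametomib() and then reused with mallctlbymib(), with the arena index patched into slot 1. A hedged sketch of the MIB pattern (illustrative only, same symbol-prefix assumption as the earlier example):

#include <assert.h>
#include <jemalloc/jemalloc.h>

/* Purge dirty pages of one arena through a MIB resolved once. */
static void
arena_purge(unsigned arena_ind)
{
    size_t mib[3];
    size_t miblen = sizeof(mib) / sizeof(size_t);

    assert(mallctlnametomib("arena.0.purge", mib, &miblen) == 0);
    mib[1] = (size_t)arena_ind;   /* overwrite the "0" placeholder */
    assert(mallctlbymib(mib, miblen, NULL, NULL, NULL, 0) == 0);
}

int
main(void)
{
    arena_purge(0);
    return (0);
}
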
deps/jemalloc/test/integration/mallocx.c (vendored; 108 changed lines; Executable file → Normal file)

@@ -1,9 +1,5 @@
#include "test/jemalloc_test.h"

#ifdef JEMALLOC_FILL
const char *malloc_conf = "junk:false";
#endif

static unsigned
get_nsizes_impl(const char *cmd)
{

@@ -11,7 +7,7 @@ get_nsizes_impl(const char *cmd)
size_t z;

z = sizeof(unsigned);
assert_d_eq(mallctl(cmd, (void *)&ret, &z, NULL, 0), 0,
assert_d_eq(mallctl(cmd, &ret, &z, NULL, 0), 0,
"Unexpected mallctl(\"%s\", ...) failure", cmd);

return (ret);

@@ -37,7 +33,7 @@ get_size_impl(const char *cmd, size_t ind)
0, "Unexpected mallctlnametomib(\"%s\", ...) failure", cmd);
mib[2] = ind;
z = sizeof(size_t);
assert_d_eq(mallctlbymib(mib, miblen, (void *)&ret, &z, NULL, 0),
assert_d_eq(mallctlbymib(mib, miblen, &ret, &z, NULL, 0),
0, "Unexpected mallctlbymib([\"%s\", %zu], ...) failure", cmd, ind);

return (ret);

@@ -50,84 +46,43 @@ get_huge_size(size_t ind)
return (get_size_impl("arenas.hchunk.0.size", ind));
}

/*
* On systems which can't merge extents, tests that call this function generate
* a lot of dirty memory very quickly. Purging between cycles mitigates
* potential OOM on e.g. 32-bit Windows.
*/
static void
purge(void)
{

assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
"Unexpected mallctl error");
}

TEST_BEGIN(test_overflow)
{
size_t hugemax;

hugemax = get_huge_size(get_nhuge()-1);

assert_ptr_null(mallocx(hugemax+1, 0),
"Expected OOM for mallocx(size=%#zx, 0)", hugemax+1);

assert_ptr_null(mallocx(ZU(PTRDIFF_MAX)+1, 0),
"Expected OOM for mallocx(size=%#zx, 0)", ZU(PTRDIFF_MAX)+1);

assert_ptr_null(mallocx(SIZE_T_MAX, 0),
"Expected OOM for mallocx(size=%#zx, 0)", SIZE_T_MAX);

assert_ptr_null(mallocx(1, MALLOCX_ALIGN(ZU(PTRDIFF_MAX)+1)),
"Expected OOM for mallocx(size=1, MALLOCX_ALIGN(%#zx))",
ZU(PTRDIFF_MAX)+1);
}
TEST_END

TEST_BEGIN(test_oom)
{
size_t hugemax;
bool oom;
void *ptrs[3];
unsigned i;
size_t hugemax, size, alignment;

hugemax = get_huge_size(get_nhuge()-1);

/*
* It should be impossible to allocate three objects that each consume
* nearly half the virtual address space.
* It should be impossible to allocate two objects that each consume
* more than half the virtual address space.
*/
hugemax = get_huge_size(get_nhuge()-1);
oom = false;
for (i = 0; i < sizeof(ptrs) / sizeof(void *); i++) {
ptrs[i] = mallocx(hugemax, 0);
if (ptrs[i] == NULL)
oom = true;
{
void *p;

p = mallocx(hugemax, 0);
if (p != NULL) {
assert_ptr_null(mallocx(hugemax, 0),
"Expected OOM for mallocx(size=%#zx, 0)", hugemax);
dallocx(p, 0);
}
}
assert_true(oom,
"Expected OOM during series of calls to mallocx(size=%zu, 0)",
hugemax);
for (i = 0; i < sizeof(ptrs) / sizeof(void *); i++) {
if (ptrs[i] != NULL)
dallocx(ptrs[i], 0);
}
purge();

#if LG_SIZEOF_PTR == 3
assert_ptr_null(mallocx(0x8000000000000000ULL,
MALLOCX_ALIGN(0x8000000000000000ULL)),
"Expected OOM for mallocx()");
assert_ptr_null(mallocx(0x8000000000000000ULL,
MALLOCX_ALIGN(0x80000000)),
"Expected OOM for mallocx()");
size = ZU(0x8000000000000000);
alignment = ZU(0x8000000000000000);
#else
assert_ptr_null(mallocx(0x80000000UL, MALLOCX_ALIGN(0x80000000UL)),
"Expected OOM for mallocx()");
size = ZU(0x80000000);
alignment = ZU(0x80000000);
#endif
assert_ptr_null(mallocx(size, MALLOCX_ALIGN(alignment)),
"Expected OOM for mallocx(size=%#zx, MALLOCX_ALIGN(%#zx)", size,
alignment);
}
TEST_END

TEST_BEGIN(test_basic)
{
#define MAXSZ (((size_t)1) << 23)
#define MAXSZ (((size_t)1) << 26)
size_t sz;

for (sz = 1; sz < MAXSZ; sz = nallocx(sz, 0) + 1) {

@@ -136,28 +91,23 @@ TEST_BEGIN(test_basic)
nsz = nallocx(sz, 0);
assert_zu_ne(nsz, 0, "Unexpected nallocx() error");
p = mallocx(sz, 0);
assert_ptr_not_null(p,
"Unexpected mallocx(size=%zx, flags=0) error", sz);
assert_ptr_not_null(p, "Unexpected mallocx() error");
rsz = sallocx(p, 0);
assert_zu_ge(rsz, sz, "Real size smaller than expected");
assert_zu_eq(nsz, rsz, "nallocx()/sallocx() size mismatch");
dallocx(p, 0);

p = mallocx(sz, 0);
assert_ptr_not_null(p,
"Unexpected mallocx(size=%zx, flags=0) error", sz);
assert_ptr_not_null(p, "Unexpected mallocx() error");
dallocx(p, 0);

nsz = nallocx(sz, MALLOCX_ZERO);
assert_zu_ne(nsz, 0, "Unexpected nallocx() error");
p = mallocx(sz, MALLOCX_ZERO);
assert_ptr_not_null(p,
"Unexpected mallocx(size=%zx, flags=MALLOCX_ZERO) error",
nsz);
assert_ptr_not_null(p, "Unexpected mallocx() error");
rsz = sallocx(p, 0);
assert_zu_eq(nsz, rsz, "nallocx()/sallocx() rsize mismatch");
dallocx(p, 0);
purge();
}
#undef MAXSZ
}

@@ -165,7 +115,7 @@ TEST_END

TEST_BEGIN(test_alignment_and_size)
{
#define MAXALIGN (((size_t)1) << 23)
#define MAXALIGN (((size_t)1) << 25)
#define NITER 4
size_t nsz, rsz, sz, alignment, total;
unsigned i;

@@ -215,7 +165,6 @@ TEST_BEGIN(test_alignment_and_size)
}
}
}
purge();
}
#undef MAXALIGN
#undef NITER

@@ -227,7 +176,6 @@ main(void)
{

return (test(
test_overflow,
test_oom,
test_basic,
test_alignment_and_size));

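test_basic in this file relies on the contract that nallocx() computes, without allocating, exactly the usable size that sallocx() reports for a real allocation made with the same size and flags. A small illustrative sketch under the same assumptions as the earlier examples:

#include <stdio.h>
#include <jemalloc/jemalloc.h>

int
main(void)
{
    size_t sz = 1000;
    size_t nsz = nallocx(sz, 0);   /* predicted usable size, no allocation */
    void *p = mallocx(sz, 0);
    size_t rsz;

    if (p == NULL)
        return (1);
    rsz = sallocx(p, 0);           /* usable size actually backing p */
    /* The tests assert rsz >= sz and nsz == rsz. */
    printf("request=%zu predicted=%zu actual=%zu\n", sz, nsz, rsz);
    dallocx(p, 0);
    return (0);
}
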
deps/jemalloc/test/integration/overflow.c (vendored; 8 changed lines; Executable file → Normal file)

@@ -8,8 +8,8 @@ TEST_BEGIN(test_overflow)
void *p;

sz = sizeof(unsigned);
assert_d_eq(mallctl("arenas.nhchunks", (void *)&nhchunks, &sz, NULL, 0),
0, "Unexpected mallctl() error");
assert_d_eq(mallctl("arenas.nhchunks", &nhchunks, &sz, NULL, 0), 0,
"Unexpected mallctl() error");

miblen = sizeof(mib) / sizeof(size_t);
assert_d_eq(mallctlnametomib("arenas.hchunk.0.size", mib, &miblen), 0,

@@ -17,8 +17,8 @@ TEST_BEGIN(test_overflow)
mib[2] = nhchunks - 1;

sz = sizeof(size_t);
assert_d_eq(mallctlbymib(mib, miblen, (void *)&max_size_class, &sz,
NULL, 0), 0, "Unexpected mallctlbymib() error");
assert_d_eq(mallctlbymib(mib, miblen, &max_size_class, &sz, NULL, 0), 0,
"Unexpected mallctlbymib() error");

assert_ptr_null(malloc(max_size_class + 1),
"Expected OOM due to over-sized allocation request");

deps/jemalloc/test/integration/posix_memalign.c (vendored; 20 changed lines)

@@ -1,20 +1,9 @@
#include "test/jemalloc_test.h"

#define CHUNK 0x400000
#define MAXALIGN (((size_t)1) << 23)

/*
* On systems which can't merge extents, tests that call this function generate
* a lot of dirty memory very quickly. Purging between cycles mitigates
* potential OOM on e.g. 32-bit Windows.
*/
static void
purge(void)
{

assert_d_eq(mallctl("arena.0.purge", NULL, NULL, NULL, 0), 0,
"Unexpected mallctl error");
}
/* #define MAXALIGN ((size_t)UINT64_C(0x80000000000)) */
#define MAXALIGN ((size_t)0x2000000LU)
#define NITER 4

TEST_BEGIN(test_alignment_errors)
{

@@ -77,7 +66,6 @@ TEST_END

TEST_BEGIN(test_alignment_and_size)
{
#define NITER 4
size_t alignment, size, total;
unsigned i;
int err;

@@ -116,9 +104,7 @@ TEST_BEGIN(test_alignment_and_size)
}
}
}
purge();
}
#undef NITER
}
TEST_END

deps/jemalloc/test/integration/rallocx.c (vendored; 86 changed lines; Executable file → Normal file)

@@ -1,51 +1,5 @@
#include "test/jemalloc_test.h"

static unsigned
get_nsizes_impl(const char *cmd)
{
unsigned ret;
size_t z;

z = sizeof(unsigned);
assert_d_eq(mallctl(cmd, (void *)&ret, &z, NULL, 0), 0,
"Unexpected mallctl(\"%s\", ...) failure", cmd);

return (ret);
}

static unsigned
get_nhuge(void)
{

return (get_nsizes_impl("arenas.nhchunks"));
}

static size_t
get_size_impl(const char *cmd, size_t ind)
{
size_t ret;
size_t z;
size_t mib[4];
size_t miblen = 4;

z = sizeof(size_t);
assert_d_eq(mallctlnametomib(cmd, mib, &miblen),
0, "Unexpected mallctlnametomib(\"%s\", ...) failure", cmd);
mib[2] = ind;
z = sizeof(size_t);
assert_d_eq(mallctlbymib(mib, miblen, (void *)&ret, &z, NULL, 0),
0, "Unexpected mallctlbymib([\"%s\", %zu], ...) failure", cmd, ind);

return (ret);
}

static size_t
get_huge_size(size_t ind)
{

return (get_size_impl("arenas.hchunk.0.size", ind));
}

TEST_BEGIN(test_grow_and_shrink)
{
void *p, *q;

@@ -184,22 +138,22 @@ TEST_END
TEST_BEGIN(test_lg_align_and_zero)
{
void *p, *q;
unsigned lg_align;
size_t sz;
size_t lg_align, sz;
#define MAX_LG_ALIGN 25
#define MAX_VALIDATE (ZU(1) << 22)

lg_align = 0;
lg_align = ZU(0);
p = mallocx(1, MALLOCX_LG_ALIGN(lg_align)|MALLOCX_ZERO);
assert_ptr_not_null(p, "Unexpected mallocx() error");

for (lg_align++; lg_align <= MAX_LG_ALIGN; lg_align++) {
q = rallocx(p, 1, MALLOCX_LG_ALIGN(lg_align)|MALLOCX_ZERO);
assert_ptr_not_null(q,
"Unexpected rallocx() error for lg_align=%u", lg_align);
"Unexpected rallocx() error for lg_align=%zu", lg_align);
assert_ptr_null(
(void *)((uintptr_t)q & ((ZU(1) << lg_align)-1)),
"%p inadequately aligned for lg_align=%u", q, lg_align);
"%p inadequately aligned for lg_align=%zu",
q, lg_align);
sz = sallocx(q, 0);
if ((sz << 1) <= MAX_VALIDATE) {
assert_false(validate_fill(q, 0, 0, sz),

@@ -219,33 +173,6 @@ TEST_BEGIN(test_lg_align_and_zero)
}
TEST_END

TEST_BEGIN(test_overflow)
{
size_t hugemax;
void *p;

hugemax = get_huge_size(get_nhuge()-1);

p = mallocx(1, 0);
assert_ptr_not_null(p, "Unexpected mallocx() failure");

assert_ptr_null(rallocx(p, hugemax+1, 0),
"Expected OOM for rallocx(p, size=%#zx, 0)", hugemax+1);

assert_ptr_null(rallocx(p, ZU(PTRDIFF_MAX)+1, 0),
"Expected OOM for rallocx(p, size=%#zx, 0)", ZU(PTRDIFF_MAX)+1);

assert_ptr_null(rallocx(p, SIZE_T_MAX, 0),
"Expected OOM for rallocx(p, size=%#zx, 0)", SIZE_T_MAX);

assert_ptr_null(rallocx(p, 1, MALLOCX_ALIGN(ZU(PTRDIFF_MAX)+1)),
"Expected OOM for rallocx(p, size=1, MALLOCX_ALIGN(%#zx))",
ZU(PTRDIFF_MAX)+1);

dallocx(p, 0);
}
TEST_END

int
main(void)
{

@@ -254,6 +181,5 @@ main(void)
test_grow_and_shrink,
test_zero,
test_align,
test_lg_align_and_zero,
test_overflow));
test_lg_align_and_zero));
}

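test_lg_align_and_zero requests alignment as a base-2 logarithm via MALLOCX_LG_ALIGN(); the local lg_align changes type between unsigned and size_t in this diff, which is why the format strings switch between %u and %zu. An illustrative sketch (error handling mostly omitted), same assumptions as the earlier examples:

#include <stdint.h>
#include <stdio.h>
#include <jemalloc/jemalloc.h>

int
main(void)
{
    size_t lg_align = 20;   /* ask for 2^20 = 1 MiB alignment */
    void *p, *q;

    p = mallocx(1, MALLOCX_LG_ALIGN(lg_align) | MALLOCX_ZERO);
    if (p == NULL)
        return (1);
    q = rallocx(p, 1, MALLOCX_LG_ALIGN(lg_align + 1) | MALLOCX_ZERO);
    if (q == NULL)
        return (1);
    /* The low lg_align+1 address bits must be clear after the rallocx(). */
    printf("aligned: %d\n",
        ((uintptr_t)q & (((size_t)1 << (lg_align + 1)) - 1)) == 0);
    dallocx(q, 0);
    return (0);
}
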
deps/jemalloc/test/integration/sdallocx.c (vendored; 4 changed lines)

@@ -1,7 +1,7 @@
#include "test/jemalloc_test.h"

#define MAXALIGN (((size_t)1) << 22)
#define NITER 3
#define MAXALIGN (((size_t)1) << 25)
#define NITER 4

TEST_BEGIN(test_basic)
{

deps/jemalloc/test/integration/thread_arena.c (vendored; 10 changed lines; Executable file → Normal file)

@@ -16,8 +16,8 @@ thd_start(void *arg)
free(p);

size = sizeof(arena_ind);
if ((err = mallctl("thread.arena", (void *)&arena_ind, &size,
(void *)&main_arena_ind, sizeof(main_arena_ind)))) {
if ((err = mallctl("thread.arena", &arena_ind, &size, &main_arena_ind,
sizeof(main_arena_ind)))) {
char buf[BUFERROR_BUF];

buferror(err, buf, sizeof(buf));

@@ -25,8 +25,7 @@ thd_start(void *arg)
}

size = sizeof(arena_ind);
if ((err = mallctl("thread.arena", (void *)&arena_ind, &size, NULL,
0))) {
if ((err = mallctl("thread.arena", &arena_ind, &size, NULL, 0))) {
char buf[BUFERROR_BUF];

buferror(err, buf, sizeof(buf));

@@ -51,8 +50,7 @@ TEST_BEGIN(test_thread_arena)
assert_ptr_not_null(p, "Error in malloc()");

size = sizeof(arena_ind);
if ((err = mallctl("thread.arena", (void *)&arena_ind, &size, NULL,
0))) {
if ((err = mallctl("thread.arena", &arena_ind, &size, NULL, 0))) {
char buf[BUFERROR_BUF];

buferror(err, buf, sizeof(buf));

deps/jemalloc/test/integration/thread_tcache_enabled.c (vendored; 39 changed lines; Executable file → Normal file)

@@ -16,8 +16,7 @@ thd_start(void *arg)
bool e0, e1;

sz = sizeof(bool);
if ((err = mallctl("thread.tcache.enabled", (void *)&e0, &sz, NULL,
0))) {
if ((err = mallctl("thread.tcache.enabled", &e0, &sz, NULL, 0))) {
if (err == ENOENT) {
assert_false(config_tcache,
"ENOENT should only be returned if tcache is "

@@ -28,53 +27,53 @@ thd_start(void *arg)

if (e0) {
e1 = false;
assert_d_eq(mallctl("thread.tcache.enabled", (void *)&e0, &sz,
(void *)&e1, sz), 0, "Unexpected mallctl() error");
assert_d_eq(mallctl("thread.tcache.enabled", &e0, &sz, &e1, sz),
0, "Unexpected mallctl() error");
assert_true(e0, "tcache should be enabled");
}

e1 = true;
assert_d_eq(mallctl("thread.tcache.enabled", (void *)&e0, &sz,
(void *)&e1, sz), 0, "Unexpected mallctl() error");
assert_d_eq(mallctl("thread.tcache.enabled", &e0, &sz, &e1, sz), 0,
"Unexpected mallctl() error");
assert_false(e0, "tcache should be disabled");

e1 = true;
assert_d_eq(mallctl("thread.tcache.enabled", (void *)&e0, &sz,
(void *)&e1, sz), 0, "Unexpected mallctl() error");
assert_d_eq(mallctl("thread.tcache.enabled", &e0, &sz, &e1, sz), 0,
"Unexpected mallctl() error");
assert_true(e0, "tcache should be enabled");

e1 = false;
assert_d_eq(mallctl("thread.tcache.enabled", (void *)&e0, &sz,
(void *)&e1, sz), 0, "Unexpected mallctl() error");
assert_d_eq(mallctl("thread.tcache.enabled", &e0, &sz, &e1, sz), 0,
"Unexpected mallctl() error");
assert_true(e0, "tcache should be enabled");

e1 = false;
assert_d_eq(mallctl("thread.tcache.enabled", (void *)&e0, &sz,
(void *)&e1, sz), 0, "Unexpected mallctl() error");
assert_d_eq(mallctl("thread.tcache.enabled", &e0, &sz, &e1, sz), 0,
"Unexpected mallctl() error");
assert_false(e0, "tcache should be disabled");

free(malloc(1));
e1 = true;
assert_d_eq(mallctl("thread.tcache.enabled", (void *)&e0, &sz,
(void *)&e1, sz), 0, "Unexpected mallctl() error");
assert_d_eq(mallctl("thread.tcache.enabled", &e0, &sz, &e1, sz), 0,
"Unexpected mallctl() error");
assert_false(e0, "tcache should be disabled");

free(malloc(1));
e1 = true;
assert_d_eq(mallctl("thread.tcache.enabled", (void *)&e0, &sz,
(void *)&e1, sz), 0, "Unexpected mallctl() error");
assert_d_eq(mallctl("thread.tcache.enabled", &e0, &sz, &e1, sz), 0,
"Unexpected mallctl() error");
assert_true(e0, "tcache should be enabled");

free(malloc(1));
e1 = false;
assert_d_eq(mallctl("thread.tcache.enabled", (void *)&e0, &sz,
(void *)&e1, sz), 0, "Unexpected mallctl() error");
assert_d_eq(mallctl("thread.tcache.enabled", &e0, &sz, &e1, sz), 0,
"Unexpected mallctl() error");
assert_true(e0, "tcache should be enabled");

free(malloc(1));
e1 = false;
assert_d_eq(mallctl("thread.tcache.enabled", (void *)&e0, &sz,
(void *)&e1, sz), 0, "Unexpected mallctl() error");
assert_d_eq(mallctl("thread.tcache.enabled", &e0, &sz, &e1, sz), 0,
"Unexpected mallctl() error");
assert_false(e0, "tcache should be disabled");

free(malloc(1));

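Every assertion in this file uses the combined read-and-write form of mallctl(): the previous setting comes back through oldp while newp installs the new one for the calling thread. A minimal sketch, same assumptions as the earlier examples and requiring a build with tcache support:

#include <stdbool.h>
#include <stdio.h>
#include <jemalloc/jemalloc.h>

int
main(void)
{
    bool old, off = false;   /* disable this thread's tcache */
    size_t sz = sizeof(bool);

    if (mallctl("thread.tcache.enabled", &old, &sz, &off, sz) != 0) {
        fprintf(stderr, "toggle failed (built without tcache?)\n");
        return (1);
    }
    printf("tcache was %s, now disabled\n", old ? "enabled" : "disabled");
    return (0);
}
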
deps/jemalloc/test/integration/xallocx.c (vendored; 120 changed lines; Executable file → Normal file)

@@ -1,28 +1,5 @@
#include "test/jemalloc_test.h"

#ifdef JEMALLOC_FILL
const char *malloc_conf = "junk:false";
#endif

/*
* Use a separate arena for xallocx() extension/contraction tests so that
* internal allocation e.g. by heap profiling can't interpose allocations where
* xallocx() would ordinarily be able to extend.
*/
static unsigned
arena_ind(void)
{
static unsigned ind = 0;

if (ind == 0) {
size_t sz = sizeof(ind);
assert_d_eq(mallctl("arenas.extend", (void *)&ind, &sz, NULL,
0), 0, "Unexpected mallctl failure creating arena");
}

return (ind);
}

TEST_BEGIN(test_same_size)
{
void *p;

@@ -78,7 +55,7 @@ get_nsizes_impl(const char *cmd)
size_t z;

z = sizeof(unsigned);
assert_d_eq(mallctl(cmd, (void *)&ret, &z, NULL, 0), 0,
assert_d_eq(mallctl(cmd, &ret, &z, NULL, 0), 0,
"Unexpected mallctl(\"%s\", ...) failure", cmd);

return (ret);

@@ -118,7 +95,7 @@ get_size_impl(const char *cmd, size_t ind)
0, "Unexpected mallctlnametomib(\"%s\", ...) failure", cmd);
mib[2] = ind;
z = sizeof(size_t);
assert_d_eq(mallctlbymib(mib, miblen, (void *)&ret, &z, NULL, 0),
assert_d_eq(mallctlbymib(mib, miblen, &ret, &z, NULL, 0),
0, "Unexpected mallctlbymib([\"%s\", %zu], ...) failure", cmd, ind);

return (ret);

@@ -241,7 +218,6 @@ TEST_END

TEST_BEGIN(test_extra_large)
{
int flags = MALLOCX_ARENA(arena_ind());
size_t smallmax, large0, large1, large2, huge0, hugemax;
void *p;

@@ -253,122 +229,121 @@ TEST_BEGIN(test_extra_large)
huge0 = get_huge_size(0);
hugemax = get_huge_size(get_nhuge()-1);

p = mallocx(large2, flags);
p = mallocx(large2, 0);
assert_ptr_not_null(p, "Unexpected mallocx() error");

assert_zu_eq(xallocx(p, large2, 0, flags), large2,
assert_zu_eq(xallocx(p, large2, 0, 0), large2,
"Unexpected xallocx() behavior");
/* Test size decrease with zero extra. */
assert_zu_eq(xallocx(p, large0, 0, flags), large0,
assert_zu_eq(xallocx(p, large0, 0, 0), large0,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, smallmax, 0, flags), large0,
assert_zu_eq(xallocx(p, smallmax, 0, 0), large0,
"Unexpected xallocx() behavior");

assert_zu_eq(xallocx(p, large2, 0, flags), large2,
assert_zu_eq(xallocx(p, large2, 0, 0), large2,
"Unexpected xallocx() behavior");
/* Test size decrease with non-zero extra. */
assert_zu_eq(xallocx(p, large0, large2 - large0, flags), large2,
assert_zu_eq(xallocx(p, large0, large2 - large0, 0), large2,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, large1, large2 - large1, flags), large2,
assert_zu_eq(xallocx(p, large1, large2 - large1, 0), large2,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, large0, large1 - large0, flags), large1,
assert_zu_eq(xallocx(p, large0, large1 - large0, 0), large1,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, smallmax, large0 - smallmax, flags), large0,
assert_zu_eq(xallocx(p, smallmax, large0 - smallmax, 0), large0,
"Unexpected xallocx() behavior");

assert_zu_eq(xallocx(p, large0, 0, flags), large0,
assert_zu_eq(xallocx(p, large0, 0, 0), large0,
"Unexpected xallocx() behavior");
/* Test size increase with zero extra. */
assert_zu_eq(xallocx(p, large2, 0, flags), large2,
assert_zu_eq(xallocx(p, large2, 0, 0), large2,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, huge0, 0, flags), large2,
assert_zu_eq(xallocx(p, huge0, 0, 0), large2,
"Unexpected xallocx() behavior");

assert_zu_eq(xallocx(p, large0, 0, flags), large0,
assert_zu_eq(xallocx(p, large0, 0, 0), large0,
"Unexpected xallocx() behavior");
/* Test size increase with non-zero extra. */
assert_zu_lt(xallocx(p, large0, huge0 - large0, flags), huge0,
assert_zu_lt(xallocx(p, large0, huge0 - large0, 0), huge0,
"Unexpected xallocx() behavior");

assert_zu_eq(xallocx(p, large0, 0, flags), large0,
assert_zu_eq(xallocx(p, large0, 0, 0), large0,
"Unexpected xallocx() behavior");
/* Test size increase with non-zero extra. */
assert_zu_eq(xallocx(p, large0, large2 - large0, flags), large2,
assert_zu_eq(xallocx(p, large0, large2 - large0, 0), large2,
"Unexpected xallocx() behavior");

assert_zu_eq(xallocx(p, large2, 0, flags), large2,
assert_zu_eq(xallocx(p, large2, 0, 0), large2,
"Unexpected xallocx() behavior");
/* Test size+extra overflow. */
assert_zu_lt(xallocx(p, large2, hugemax - large2 + 1, flags), huge0,
assert_zu_lt(xallocx(p, large2, hugemax - large2 + 1, 0), huge0,
"Unexpected xallocx() behavior");

dallocx(p, flags);
dallocx(p, 0);
}
TEST_END

TEST_BEGIN(test_extra_huge)
{
int flags = MALLOCX_ARENA(arena_ind());
size_t largemax, huge1, huge2, huge3, hugemax;
size_t largemax, huge0, huge1, huge2, hugemax;
void *p;

/* Get size classes. */
largemax = get_large_size(get_nlarge()-1);
huge0 = get_huge_size(0);
huge1 = get_huge_size(1);
huge2 = get_huge_size(2);
huge3 = get_huge_size(3);
hugemax = get_huge_size(get_nhuge()-1);

p = mallocx(huge3, flags);
p = mallocx(huge2, 0);
assert_ptr_not_null(p, "Unexpected mallocx() error");

assert_zu_eq(xallocx(p, huge3, 0, flags), huge3,
assert_zu_eq(xallocx(p, huge2, 0, 0), huge2,
"Unexpected xallocx() behavior");
/* Test size decrease with zero extra. */
assert_zu_ge(xallocx(p, huge1, 0, flags), huge1,
assert_zu_ge(xallocx(p, huge0, 0, 0), huge0,
"Unexpected xallocx() behavior");
assert_zu_ge(xallocx(p, largemax, 0, flags), huge1,
assert_zu_ge(xallocx(p, largemax, 0, 0), huge0,
"Unexpected xallocx() behavior");

assert_zu_eq(xallocx(p, huge3, 0, flags), huge3,
assert_zu_eq(xallocx(p, huge2, 0, 0), huge2,
"Unexpected xallocx() behavior");
/* Test size decrease with non-zero extra. */
assert_zu_eq(xallocx(p, huge1, huge3 - huge1, flags), huge3,
assert_zu_eq(xallocx(p, huge0, huge2 - huge0, 0), huge2,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, huge2, huge3 - huge2, flags), huge3,
assert_zu_eq(xallocx(p, huge1, huge2 - huge1, 0), huge2,
"Unexpected xallocx() behavior");
assert_zu_eq(xallocx(p, huge1, huge2 - huge1, flags), huge2,
assert_zu_eq(xallocx(p, huge0, huge1 - huge0, 0), huge1,
"Unexpected xallocx() behavior");
assert_zu_ge(xallocx(p, largemax, huge1 - largemax, flags), huge1,
assert_zu_ge(xallocx(p, largemax, huge0 - largemax, 0), huge0,
"Unexpected xallocx() behavior");

assert_zu_ge(xallocx(p, huge1, 0, flags), huge1,
assert_zu_ge(xallocx(p, huge0, 0, 0), huge0,
"Unexpected xallocx() behavior");
/* Test size increase with zero extra. */
assert_zu_le(xallocx(p, huge3, 0, flags), huge3,
assert_zu_le(xallocx(p, huge2, 0, 0), huge2,
"Unexpected xallocx() behavior");
assert_zu_le(xallocx(p, hugemax+1, 0, flags), huge3,
assert_zu_le(xallocx(p, hugemax+1, 0, 0), huge2,
"Unexpected xallocx() behavior");

assert_zu_ge(xallocx(p, huge1, 0, flags), huge1,
assert_zu_ge(xallocx(p, huge0, 0, 0), huge0,
"Unexpected xallocx() behavior");
/* Test size increase with non-zero extra. */
assert_zu_le(xallocx(p, huge1, SIZE_T_MAX - huge1, flags), hugemax,
assert_zu_le(xallocx(p, huge0, SIZE_T_MAX - huge0, 0), hugemax,
"Unexpected xallocx() behavior");

assert_zu_ge(xallocx(p, huge1, 0, flags), huge1,
assert_zu_ge(xallocx(p, huge0, 0, 0), huge0,
"Unexpected xallocx() behavior");
/* Test size increase with non-zero extra. */
assert_zu_le(xallocx(p, huge1, huge3 - huge1, flags), huge3,
assert_zu_le(xallocx(p, huge0, huge2 - huge0, 0), huge2,
"Unexpected xallocx() behavior");

assert_zu_eq(xallocx(p, huge3, 0, flags), huge3,
assert_zu_eq(xallocx(p, huge2, 0, 0), huge2,
"Unexpected xallocx() behavior");
/* Test size+extra overflow. */
assert_zu_le(xallocx(p, huge3, hugemax - huge3 + 1, flags), hugemax,
assert_zu_le(xallocx(p, huge2, hugemax - huge2 + 1, 0), hugemax,
"Unexpected xallocx() behavior");

dallocx(p, flags);
dallocx(p, 0);
}
TEST_END

@@ -413,13 +388,12 @@ validate_fill(const void *p, uint8_t c, size_t offset, size_t len)
static void
test_zero(size_t szmin, size_t szmax)
{
int flags = MALLOCX_ARENA(arena_ind()) | MALLOCX_ZERO;
size_t sz, nsz;
void *p;
#define FILL_BYTE 0x7aU

sz = szmax;
p = mallocx(sz, flags);
p = mallocx(sz, MALLOCX_ZERO);
assert_ptr_not_null(p, "Unexpected mallocx() error");
assert_false(validate_fill(p, 0x00, 0, sz), "Memory not filled: sz=%zu",
sz);

@@ -434,14 +408,14 @@ test_zero(size_t szmin, size_t szmax)

/* Shrink in place so that we can expect growing in place to succeed. */
sz = szmin;
assert_zu_eq(xallocx(p, sz, 0, flags), sz,
assert_zu_eq(xallocx(p, sz, 0, MALLOCX_ZERO), sz,
"Unexpected xallocx() error");
assert_false(validate_fill(p, FILL_BYTE, 0, sz),
"Memory not filled: sz=%zu", sz);

for (sz = szmin; sz < szmax; sz = nsz) {
nsz = nallocx(sz+1, flags);
assert_zu_eq(xallocx(p, sz+1, 0, flags), nsz,
nsz = nallocx(sz+1, MALLOCX_ZERO);
assert_zu_eq(xallocx(p, sz+1, 0, MALLOCX_ZERO), nsz,
"Unexpected xallocx() failure");
assert_false(validate_fill(p, FILL_BYTE, 0, sz),
"Memory not filled: sz=%zu", sz);

@@ -452,7 +426,7 @@ test_zero(size_t szmin, size_t szmax)
"Memory not filled: nsz=%zu", nsz);
}

dallocx(p, flags);
dallocx(p, 0);
}

TEST_BEGIN(test_zero_large)

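xallocx(ptr, size, extra, flags) resizes strictly in place: it never moves the allocation, returns the resulting usable size (at least size on success), and may opportunistically consume up to extra trailing bytes, which is what the eq/ge/le/lt assertions above distinguish. An illustrative sketch under the same assumptions as the earlier examples:

#include <stdio.h>
#include <jemalloc/jemalloc.h>

int
main(void)
{
    void *p = mallocx(4096, 0);
    size_t usable;

    if (p == NULL)
        return (1);
    /* Try to grow to 8192 bytes, accepting up to 4096 extra; p never moves. */
    usable = xallocx(p, 8192, 4096, 0);
    if (usable >= 8192)
        printf("grew in place to %zu usable bytes\n", usable);
    else
        printf("could not grow in place; still %zu usable bytes\n", usable);
    dallocx(p, 0);
    return (0);
}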