@@ -128,7 +128,7 @@ void *qemu_memalign(size_t alignment, size_t size)
 void *qemu_anon_ram_alloc(size_t size, uint64_t *alignment)
 {
     size_t align = QEMU_VMALLOC_ALIGN;
-    size_t total = size + align - getpagesize();
+    size_t total = size + align;
     void *ptr = mmap(0, total, PROT_NONE, MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
     size_t offset = QEMU_ALIGN_UP((uintptr_t)ptr, align) - (uintptr_t)ptr;
     void *ptr1;
@@ -154,8 +154,8 @@ void *qemu_anon_ram_alloc(size_t size, uint64_t *alignment)
     if (offset > 0) {
         munmap(ptr - offset, offset);
     }
-    if (total > size) {
-        munmap(ptr + size, total - size);
+    if (total > size + getpagesize()) {
+        munmap(ptr + size + getpagesize(), total - size - getpagesize());
     }
 
     trace_qemu_anon_ram_alloc(size, ptr);
@@ -172,7 +172,7 @@ void qemu_anon_ram_free(void *ptr, size_t size)
 {
     trace_qemu_anon_ram_free(ptr, size);
     if (ptr) {
-        munmap(ptr, size + getpagesize());
+        munmap(ptr, size + getpagesize());
     }
 }

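For orientation, below is a minimal standalone sketch (not QEMU code) of the layout these hunks produce: over-allocate with PROT_NONE so the block can be aligned, trim the misaligned head, and trim the tail while keeping one extra PROT_NONE page past the usable region as a guard that still traps on overrun. The names guard_alloc/guard_free, the ALIGN_UP macro, and the 2 MiB alignment constant are illustrative stand-ins (not QEMU's qemu_anon_ram_alloc, QEMU_ALIGN_UP, or QEMU_VMALLOC_ALIGN), and the sketch uses mprotect to make the usable range accessible, which simplifies what the full QEMU function does.

/*
 * Sketch only: over-allocate PROT_NONE, align, then keep
 * [aligned, aligned + size + pagesize) so the last page stays a guard.
 * size is assumed page-aligned, as it is for guest RAM blocks.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <unistd.h>

#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

/* Illustrative stand-in for QEMU_VMALLOC_ALIGN. */
#define GUARD_ALLOC_ALIGN (2 * 1024 * 1024)

static void *guard_alloc(size_t size)
{
    size_t align = GUARD_ALLOC_ALIGN;
    size_t pagesz = getpagesize();
    size_t total = size + align;            /* room for alignment + guard page */
    char *base = mmap(NULL, total, PROT_NONE,
                      MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
    if (base == MAP_FAILED) {
        return NULL;
    }

    char *aligned = (char *)ALIGN_UP((uintptr_t)base, align);
    size_t head = aligned - base;

    if (head > 0) {
        munmap(base, head);                 /* drop the misaligned head */
    }
    /* Trim the tail but keep one PROT_NONE page after the usable region. */
    if (total - head > size + pagesz) {
        munmap(aligned + size + pagesz, total - head - size - pagesz);
    }
    /* Make only the usable region accessible; the guard page still faults. */
    if (mprotect(aligned, size, PROT_READ | PROT_WRITE) != 0) {
        munmap(aligned, size + pagesz);
        return NULL;
    }
    return aligned;
}

static void guard_free(void *ptr, size_t size)
{
    if (ptr) {
        munmap(ptr, size + getpagesize());  /* usable region + guard page */
    }
}

int main(void)
{
    size_t size = 16 * getpagesize();
    char *buf = guard_alloc(size);
    if (!buf) {
        return 1;
    }
    memset(buf, 0, size);                   /* writing the usable region is fine */
    /* buf[size] would fault here: it lands on the PROT_NONE guard page. */
    guard_free(buf, size);
    printf("allocated and freed %zu bytes with a trailing guard page\n", size);
    return 0;
}

This mirrors the arithmetic in the hunks above: the allocation grows from size + align - getpagesize() to size + align so a whole extra page survives the tail trim, and the free side must then unmap size + getpagesize() to release that guard page as well.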