Signed-off-by: Paul Gofman <pgofman@codeweavers.com>
---
 dlls/ntdll/unix/virtual.c | 134 ++++++++++++++++++++++++++++++--------
 1 file changed, 106 insertions(+), 28 deletions(-)
diff --git a/dlls/ntdll/unix/virtual.c b/dlls/ntdll/unix/virtual.c
index 5d626ee85c4..43c149094d7 100644
--- a/dlls/ntdll/unix/virtual.c
+++ b/dlls/ntdll/unix/virtual.c
@@ -99,6 +99,7 @@ struct file_view
 #define VPROT_WRITEWATCH 0x40
 /* per-mapping protection flags */
 #define VPROT_SYSTEM     0x0200  /* system view (underlying mmap not under our control) */
+#define VPROT_NATIVE     0x0400
 
 /* Conversion from VPROT_* to Win32 flags */
 static const BYTE VIRTUAL_Win32Flags[16] =
@@ -824,7 +825,9 @@ static void dump_view( struct file_view *view )
     BYTE prot = get_page_vprot( addr );
 
TRACE( "View: %p - %p", addr, addr + view->size - 1 ); - if (view->protect & VPROT_SYSTEM) + if (view->protect & VPROT_NATIVE) + TRACE(" (native)\n"); + else if (view->protect & VPROT_SYSTEM) TRACE( " (builtin image)\n" ); else if (view->protect & SEC_IMAGE) TRACE( " (image)\n" ); @@ -966,6 +969,16 @@ static struct file_view *find_view_range( const void *addr, size_t size ) return NULL; }
+struct alloc_area
+{
+    char *map_area_start, *map_area_end, *result;
+    size_t size;
+    ptrdiff_t step;
+    int unix_prot;
+    BOOL top_down;
+    char *native_mapped;
+    size_t native_mapped_size;
+};
 
 /***********************************************************************
  *           try_map_free_area
@@ -973,26 +986,34 @@ static struct file_view *find_view_range( const void *addr, size_t size )
  *
  * Try mmaping some expected free memory region, eventually stepping and
  * retrying inside it, and return where it actually succeeded, or NULL.
  */
-static void* try_map_free_area( void *base, void *end, ptrdiff_t step,
-                                void *start, size_t size, int unix_prot )
+static void* try_map_free_area( struct alloc_area *area, void *base, void *end, void *start )
 {
+    ptrdiff_t step = area->step;
     void *ptr;
 
-    while (start && base <= start && (char*)start + size <= (char*)end)
+    while (start && base <= start && (char*)start + area->size <= (char*)end)
     {
-        if ((ptr = wine_anon_mmap( start, size, unix_prot, MAP_FIXED_NOREPLACE )) == start)
+        if ((ptr = wine_anon_mmap( start, area->size, area->unix_prot,
+                MAP_FIXED_NOREPLACE )) == start)
             return start;
         TRACE( "Found free area is already mapped, start %p.\n", start );
         if (ptr == (void *)-1 && errno != EEXIST)
         {
             ERR( "wine_anon_mmap() error %s, range %p-%p, unix_prot %#x.\n",
-                 strerror(errno), start, (char *)start + size, unix_prot );
+                 strerror(errno), start, (char *)start + area->size, area->unix_prot );
             return NULL;
         }
         if (ptr != (void *)-1)
-            munmap( ptr, size );
+            munmap( ptr, area->size );
+
+        if (!area->native_mapped && step)
+        {
+            area->native_mapped = start;
+            area->native_mapped_size = step > 0 ? step : -step;
+            area->native_mapped_size = min(area->native_mapped_size, (char *)end - (char *)start);
+        }
         if ((step > 0 && (char *)end - (char *)start < step) ||
             (step < 0 && (char *)start - (char *)base < -step) ||
@@ -1446,15 +1467,6 @@ static inline void *unmap_extra_space( void *ptr, size_t total_size, size_t want
     return ptr;
 }
 
-struct alloc_area
-{
-    char *map_area_start, *map_area_end, *result;
-    size_t size;
-    ptrdiff_t step;
-    int unix_prot;
-    BOOL top_down;
-};
-
 static int CDECL alloc_area_in_reserved_or_between_callback( void *start, SIZE_T size, void *arg )
 {
     char *intersect_start, *intersect_end;
@@ -1480,8 +1492,8 @@ static int CDECL alloc_area_in_reserved_or_between_callback( void *start, SIZE_T
         if (alloc_start >= intersect_end)
         {
-            if ((area->result = try_map_free_area( area->map_area_start, alloc_start + size, area->step,
-                                                   alloc_start, area->size, area->unix_prot )))
+            if ((area->result = try_map_free_area( area, area->map_area_start,
+                                                   alloc_start + size, alloc_start )))
                 return 1;
         }
@@ -1515,8 +1527,8 @@ static int CDECL alloc_area_in_reserved_or_between_callback( void *start, SIZE_T
         if (intersect_start - area->map_area_start >= area->size)
         {
-            if ((area->result = try_map_free_area( area->map_area_start, intersect_start, area->step,
-                                                   area->map_area_start, area->size, area->unix_prot )))
+            if ((area->result = try_map_free_area( area, area->map_area_start,
+                                                   intersect_start, area->map_area_start )))
                 return 1;
         }
@@ -1571,8 +1583,7 @@ static void *alloc_free_area_in_range( struct alloc_area *area, char *base, char
         if (start >= area->map_area_end || start < area->map_area_start)
             return NULL;
 
-        return try_map_free_area( area->map_area_start, start + area->size, area->step,
-                                  start, area->size, area->unix_prot );
+        return try_map_free_area( area, area->map_area_start, start + area->size, start );
     }
     else
     {
@@ -1581,8 +1592,7 @@ static void *alloc_free_area_in_range( struct alloc_area *area, char *base, char
             || area->map_area_end - start < area->size)
             return NULL;
 
-        return try_map_free_area( start, area->map_area_end, area->step,
-                                  start, area->size, area->unix_prot );
+        return try_map_free_area( area, start, area->map_area_end, start );
     }
 }
@@ -1592,6 +1602,7 @@ static void *alloc_free_area( void *limit, size_t size, BOOL top_down, int unix_
     char *reserve_start, *reserve_end;
     struct alloc_area area;
     char *base, *end;
+    NTSTATUS status;
     int ranges_inc;
TRACE("limit %p, size %p, top_down %#x.\n", limit, (void *)size, top_down); @@ -1647,16 +1658,67 @@ static void *alloc_free_area( void *limit, size_t size, BOOL top_down, int unix_ { /* range is split in two by the preloader reservation, try first part. */ if ((area.result = alloc_free_area_in_range( &area, base, reserve_start ))) - return area.result; + break; /* then fall through to try second part. */ base = reserve_end; } }
 
         if ((area.result = alloc_free_area_in_range( &area, base, end )))
-            return area.result;
+            break;
     }
-    return NULL;
+
+    if (area.native_mapped)
+    {
+        char *native_mapped_start, *native_mapped_end;
+
+        TRACE("Excluding %p - %p from free list.\n",
+                area.native_mapped, (char *)area.native_mapped + area.native_mapped_size );
+
+        native_mapped_start = ROUND_ADDR(area.native_mapped, granularity_mask);
+        native_mapped_end = ROUND_ADDR((char *)area.native_mapped + area.native_mapped_size + granularity_mask,
+                granularity_mask);
+
+        if (area.result >= native_mapped_end || area.result + size < native_mapped_start)
+        /* In case of top down allocation try_map_free_area() result area can overlap the
+         * area previously marked as native if the latter was unmapped behind our back. */
+        {
+            struct file_view *prev, *next;
+
+            prev = find_view_range( native_mapped_start - 1, native_mapped_end - native_mapped_start + 2 );
+            if (prev && (char *)prev->base >= native_mapped_end)
+            {
+                next = prev;
+                prev = WINE_RB_ENTRY_VALUE( wine_rb_prev( &next->entry ), struct file_view, entry );
+            }
+            else if (prev)
+            {
+                next = WINE_RB_ENTRY_VALUE( wine_rb_next( &prev->entry ), struct file_view, entry );
+            }
+            else
+            {
+                next = NULL;
+            }
+
+            if (prev && prev->protect & VPROT_NATIVE && (char *)prev->base + prev->size >= native_mapped_start)
+            {
+                assert( (char *)prev->base + prev->size == native_mapped_start );
+                native_mapped_start = prev->base;
+                delete_view( prev );
+            }
+            if (next && next->protect & VPROT_NATIVE && native_mapped_end >= (char *)next->base)
+            {
+                assert( native_mapped_end == (char *)next->base );
+                native_mapped_end = (char *)next->base + next->size;
+                delete_view( next );
+            }
+            if ((status = create_view( &next, native_mapped_start, native_mapped_end - native_mapped_start,
+                    VPROT_SYSTEM | VPROT_NATIVE )))
+                ERR("Could not create view for natively mapped area, status %#x.\n", status);
+        }
+    }
+
+    return area.result;
 }
 
 /***********************************************************************
@@ -1716,6 +1778,17 @@ static NTSTATUS map_fixed_area( void *base, size_t size, unsigned int vprot )
     return STATUS_SUCCESS;
 }
 
+static void clear_native_views(void)
+{
+    struct file_view *view, *next_view;
+
+    WINE_RB_FOR_EACH_ENTRY_DESTRUCTOR( view, next_view, &views_tree, struct file_view, entry )
+    {
+        if (view->protect & VPROT_NATIVE)
+            delete_view( view );
+    }
+}
+
 /***********************************************************************
  *           map_view
  *
@@ -1739,7 +1812,12 @@ static NTSTATUS map_view( struct file_view **view_ret, void *base, size_t size,
     else if (!(ptr = alloc_free_area( (void*)(get_zero_bits_64_mask( zero_bits_64 )
              & (UINT_PTR)user_space_limit), size, top_down, get_unix_prot( vprot ) )))
     {
-        return STATUS_NO_MEMORY;
+        WARN("Allocation failed, clearing native views.\n");
+
+        clear_native_views();
+        if (!(ptr = alloc_free_area( (void*)(get_zero_bits_64_mask( zero_bits_64 )
+                & (UINT_PTR)user_space_limit), size, top_down, get_unix_prot( vprot ) )))
+            return STATUS_NO_MEMORY;
     }
     status = create_view( view_ret, ptr, size, vprot );
     if (status != STATUS_SUCCESS) unmap_area( ptr, size );
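
For reference, here is a small standalone sketch (not from the patch) of the bookkeeping idea behind the new VPROT_NATIVE placeholder views: the natively mapped block discovered by try_map_free_area() is rounded out to the allocation granularity and merged with any adjacent native range, so later free-area searches skip it. Everything in the sketch (native_range, add_native_range, GRANULARITY_MASK, the flat array) is hypothetical; the real code keeps struct file_view entries in the views tree and merges neighbours via delete_view()/create_view().

/* Standalone sketch, not part of the patch: illustrates rounding a natively
 * mapped block to the allocation granularity and coalescing it with adjacent
 * recorded ranges.  All names here are made up for illustration. */
#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define GRANULARITY_MASK 0xffffu    /* 64k allocation granularity, as on Windows */

/* Round down, in the spirit of ROUND_ADDR( p, granularity_mask ). */
static uintptr_t round_down( uintptr_t p ) { return p & ~(uintptr_t)GRANULARITY_MASK; }
/* Round up, in the spirit of ROUND_ADDR( p + granularity_mask, granularity_mask ). */
static uintptr_t round_up( uintptr_t p )   { return (p + GRANULARITY_MASK) & ~(uintptr_t)GRANULARITY_MASK; }

struct native_range { uintptr_t start, end; };  /* [start, end), granularity-aligned */

/* Record a natively mapped block in a sorted, non-overlapping array of ranges,
 * extending any neighbour that touches it, much like the patch extends adjacent
 * VPROT_NATIVE views before calling create_view().  The caller guarantees the
 * array has room for one more entry. */
static size_t add_native_range( struct native_range *r, size_t count, uintptr_t addr, size_t size )
{
    uintptr_t start = round_down( addr );
    uintptr_t end   = round_up( addr + size );
    size_t i = 0, j;

    while (i < count && r[i].end < start) i++;   /* first range that can touch the new one */
    j = i;
    while (j < count && r[j].start <= end)       /* every range that touches or overlaps it */
    {
        if (r[j].start < start) start = r[j].start;
        if (r[j].end   > end)   end   = r[j].end;
        j++;
    }
    if (j == i)                                  /* nothing touches: make room to insert at i */
        memmove( &r[i + 1], &r[i], (count - i) * sizeof(*r) );
    else if (j < count)                          /* collapse r[i..j) into a single slot */
        memmove( &r[i + 1], &r[j], (count - j) * sizeof(*r) );
    r[i].start = start;
    r[i].end   = end;
    return count + 1 - (j - i);
}

int main(void)
{
    struct native_range r[8];
    size_t n = 0;

    n = add_native_range( r, n, 0x7f1234, 0x2000 );  /* rounds out to 0x7f0000 - 0x800000 */
    n = add_native_range( r, n, 0x800100, 0x0100 );  /* touches the first range, coalesces */
    printf( "%zu range(s), first %#lx-%#lx\n", n,
            (unsigned long)r[0].start, (unsigned long)r[0].end );
    assert( n == 1 && r[0].start == 0x7f0000 && r[0].end == 0x810000 );
    return 0;
}

In this picture, clear_native_views() in the patch corresponds to simply dropping all recorded ranges when an allocation cannot be satisfied otherwise, then retrying the search once.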