/* Mudflap: narrow-pointer bounds-checking by tree rewriting.
   Copyright (C) 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Frank Ch. Eigler <fche@redhat.com>
   and Graydon Hoare <graydon@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

In addition to the permissions in the GNU General Public License, the
Free Software Foundation gives you unlimited permission to link the
compiled version of this file into combinations with other programs,
and to distribute those combinations without any restriction coming
from the use of this file.  (The General Public License restrictions
do apply in other respects; for example, they cover modification of
the file, and distribution when not linked into a combine
executable.)

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.
*/ 31169695Skan 32169695Skan 33169695Skan#include "config.h" 34169695Skan 35169695Skan#ifndef HAVE_SOCKLEN_T 36169695Skan#define socklen_t int 37169695Skan#endif 38169695Skan 39169695Skan 40169695Skan/* These attempt to coax various unix flavours to declare all our 41169695Skan needed tidbits in the system headers. */ 42169695Skan#if !defined(__FreeBSD__) && !defined(__APPLE__) 43169695Skan#define _POSIX_SOURCE 44169695Skan#endif /* Some BSDs break <sys/socket.h> if this is defined. */ 45169695Skan#define _GNU_SOURCE 46169695Skan#define _XOPEN_SOURCE 47169695Skan#define _BSD_TYPES 48169695Skan#define __EXTENSIONS__ 49169695Skan#define _ALL_SOURCE 50169695Skan#define _LARGE_FILE_API 51169695Skan#define _XOPEN_SOURCE_EXTENDED 1 52169695Skan 53169695Skan#include <string.h> 54169695Skan#include <stdio.h> 55169695Skan#include <stdlib.h> 56169695Skan#include <sys/time.h> 57169695Skan#include <sys/types.h> 58169695Skan#include <unistd.h> 59169695Skan#include <assert.h> 60169695Skan#include <errno.h> 61169695Skan#include <limits.h> 62169695Skan#include <time.h> 63169695Skan 64169695Skan#include "mf-runtime.h" 65169695Skan#include "mf-impl.h" 66169695Skan 67169695Skan#ifdef _MUDFLAP 68169695Skan#error "Do not compile this file with -fmudflap!" 69169695Skan#endif 70169695Skan 71169695Skan 72169695Skan/* Memory allocation related hook functions. Some of these are 73169695Skan intercepted via linker wrapping or symbol interposition. Others 74169695Skan use plain macros in mf-runtime.h. */ 75169695Skan 76169695Skan 77169695Skan#if PIC 78169695Skan/* A special bootstrap variant. */ 79169695Skanvoid * 80169695Skan__mf_0fn_malloc (size_t c) 81169695Skan{ 82169695Skan enum foo { BS = 4096, NB=10 }; 83169695Skan static char bufs[NB][BS]; 84169695Skan static unsigned bufs_used[NB]; 85169695Skan unsigned i; 86169695Skan 87169695Skan for (i=0; i<NB; i++) 88169695Skan { 89169695Skan if (! 
bufs_used[i] && c < BS) 90169695Skan { 91169695Skan bufs_used[i] = 1; 92169695Skan return & bufs[i][0]; 93169695Skan } 94169695Skan } 95169695Skan return NULL; 96169695Skan} 97169695Skan#endif 98169695Skan 99169695Skan 100169695Skan#undef malloc 101169695SkanWRAPPER(void *, malloc, size_t c) 102169695Skan{ 103169695Skan size_t size_with_crumple_zones; 104169695Skan DECLARE(void *, malloc, size_t c); 105169695Skan void *result; 106169695Skan BEGIN_PROTECT (malloc, c); 107169695Skan 108169695Skan size_with_crumple_zones = 109169695Skan CLAMPADD(c,CLAMPADD(__mf_opts.crumple_zone, 110169695Skan __mf_opts.crumple_zone)); 111169695Skan BEGIN_MALLOC_PROTECT (); 112169695Skan result = (char *) CALL_REAL (malloc, size_with_crumple_zones); 113169695Skan END_MALLOC_PROTECT (); 114169695Skan 115169695Skan if (LIKELY(result)) 116169695Skan { 117169695Skan result += __mf_opts.crumple_zone; 118169695Skan __mf_register (result, c, __MF_TYPE_HEAP, "malloc region"); 119169695Skan /* XXX: register __MF_TYPE_NOACCESS for crumple zones. */ 120169695Skan } 121169695Skan 122169695Skan return result; 123169695Skan} 124169695Skan 125169695Skan 126169695Skan#ifdef PIC 127169695Skan/* A special bootstrap variant. 
*/ 128169695Skanvoid * 129169695Skan__mf_0fn_calloc (size_t c, size_t n) 130169695Skan{ 131169695Skan return __mf_0fn_malloc (c * n); 132169695Skan} 133169695Skan#endif 134169695Skan 135169695Skan 136169695Skan#undef calloc 137169695SkanWRAPPER(void *, calloc, size_t c, size_t n) 138169695Skan{ 139169695Skan size_t size_with_crumple_zones; 140169695Skan DECLARE(void *, calloc, size_t, size_t); 141169695Skan DECLARE(void *, malloc, size_t); 142169695Skan DECLARE(void *, memset, void *, int, size_t); 143169695Skan char *result; 144169695Skan BEGIN_PROTECT (calloc, c, n); 145169695Skan 146169695Skan size_with_crumple_zones = 147169695Skan CLAMPADD((c * n), /* XXX: CLAMPMUL */ 148169695Skan CLAMPADD(__mf_opts.crumple_zone, 149169695Skan __mf_opts.crumple_zone)); 150169695Skan BEGIN_MALLOC_PROTECT (); 151169695Skan result = (char *) CALL_REAL (malloc, size_with_crumple_zones); 152169695Skan END_MALLOC_PROTECT (); 153169695Skan 154169695Skan if (LIKELY(result)) 155169695Skan memset (result, 0, size_with_crumple_zones); 156169695Skan 157169695Skan if (LIKELY(result)) 158169695Skan { 159169695Skan result += __mf_opts.crumple_zone; 160169695Skan __mf_register (result, c*n /* XXX: clamp */, __MF_TYPE_HEAP_I, "calloc region"); 161169695Skan /* XXX: register __MF_TYPE_NOACCESS for crumple zones. */ 162169695Skan } 163169695Skan 164169695Skan return result; 165169695Skan} 166169695Skan 167169695Skan 168169695Skan#if PIC 169169695Skan/* A special bootstrap variant. 
*/
/* A special bootstrap variant: before the real realloc is resolved,
   reallocation simply fails.  The incoming pointer is deliberately
   not freed -- bootstrap chunks live in __mf_0fn_malloc's static
   arena.  */
void *
__mf_0fn_realloc (void *buf, size_t c)
{
  return NULL;
}
#endif


#undef realloc
/* Wrapper for realloc: resize the underlying crumple-zone-padded
   block, then unregister the old user region and register the new
   one.  Heap wiping is suppressed across the unregister/reregister
   pair so still-live contents are not scrubbed.  */
WRAPPER(void *, realloc, void *buf, size_t c)
{
  DECLARE(void * , realloc, void *, size_t);
  size_t size_with_crumple_zones;
  char *base = buf;
  unsigned saved_wipe_heap;
  char *result;
  BEGIN_PROTECT (realloc, buf, c);

  /* Step back from the user pointer to the true start of the
     underlying allocation (before the leading crumple zone).  */
  if (LIKELY(buf))
    base -= __mf_opts.crumple_zone;

  size_with_crumple_zones =
    CLAMPADD(c, CLAMPADD(__mf_opts.crumple_zone,
                         __mf_opts.crumple_zone));
  BEGIN_MALLOC_PROTECT ();
  result = (char *) CALL_REAL (realloc, base, size_with_crumple_zones);
  END_MALLOC_PROTECT ();

  /* Ensure heap wiping doesn't occur during this peculiar
     unregister/reregister pair. */
  LOCKTH ();
  __mf_set_state (reentrant);
  saved_wipe_heap = __mf_opts.wipe_heap;
  __mf_opts.wipe_heap = 0;

  /* NOTE(review): the old region is unregistered even when the
     underlying realloc failed (result == NULL), in which case BUF is
     still a valid allocation yet no longer tracked -- confirm this is
     intended.  */
  if (LIKELY(buf))
    __mfu_unregister (buf, 0, __MF_TYPE_HEAP_I);
  /* NB: underlying region may have been __MF_TYPE_HEAP. */

  if (LIKELY(result))
    {
      result += __mf_opts.crumple_zone;
      __mfu_register (result, c, __MF_TYPE_HEAP_I, "realloc region");
      /* XXX: register __MF_TYPE_NOACCESS for crumple zones. */
    }

  /* Restore previous setting.
*/
  __mf_opts.wipe_heap = saved_wipe_heap;

  __mf_set_state (active);
  UNLOCKTH ();

  return result;
}


#if PIC
/* A special bootstrap variant.  Freeing is a no-op during bootstrap:
   chunks come from __mf_0fn_malloc's one-shot static arena.  */
void
__mf_0fn_free (void *buf)
{
  return;
}
#endif

#undef free
/* Wrapper for free: unregister the user region, then release the
   underlying allocation (user pointer minus the leading crumple
   zone).  When free_queue_length > 0, the actual release is deferred
   through a fixed-size circular queue, keeping recently freed blocks
   out of circulation for a while.  */
WRAPPER(void, free, void *buf)
{
  /* Use a circular queue to delay some number (__mf_opts.free_queue_length) of free()s. */
  static void *free_queue [__MF_FREEQ_MAX];
  static unsigned free_ptr = 0;
  static int freeq_initialized = 0;
  DECLARE(void, free, void *);

  BEGIN_PROTECT (free, buf);

  /* free(NULL) is a no-op, as in the C standard.  */
  if (UNLIKELY(buf == NULL))
    return;

  LOCKTH ();
  if (UNLIKELY(!freeq_initialized))
    {
      memset (free_queue, 0,
                     __MF_FREEQ_MAX * sizeof (void *));
      freeq_initialized = 1;
    }
  UNLOCKTH ();

  __mf_unregister (buf, 0, __MF_TYPE_HEAP_I);
  /* NB: underlying region may have been __MF_TYPE_HEAP. */

  if (UNLIKELY(__mf_opts.free_queue_length > 0))
    {
      char *freeme = NULL;
      LOCKTH ();
      /* Evict the oldest queued pointer (if any) to make room for BUF;
         the eviction's real free happens below, outside the lock.  */
      if (free_queue [free_ptr] != NULL)
        {
          freeme = free_queue [free_ptr];
          freeme -= __mf_opts.crumple_zone;
        }
      free_queue [free_ptr] = buf;
      free_ptr = (free_ptr == (__mf_opts.free_queue_length-1) ? 0 : free_ptr + 1);
      UNLOCKTH ();
      if (freeme)
        {
          if (__mf_opts.trace_mf_calls)
            {
              VERBOSE_TRACE ("freeing deferred pointer %p (crumple %u)\n",
                             (void *) freeme,
                             __mf_opts.crumple_zone);
            }
          BEGIN_MALLOC_PROTECT ();
          CALL_REAL (free, freeme);
          END_MALLOC_PROTECT ();
        }
    }
  else
    {
      /* back pointer up a bit to the beginning of crumple zone */
      char *base = (char *)buf;
      base -= __mf_opts.crumple_zone;
      if (__mf_opts.trace_mf_calls)
        {
          VERBOSE_TRACE ("freeing pointer %p = %p - %u\n",
                         (void *) base,
                         (void *) buf,
                         __mf_opts.crumple_zone);
        }
      BEGIN_MALLOC_PROTECT ();
      CALL_REAL (free, base);
      END_MALLOC_PROTECT ();
    }
}


/* We can only wrap mmap if the target supports it.  Likewise for munmap.
   We assume we have both if we have mmap. */
#ifdef HAVE_MMAP

#if PIC
/* A special bootstrap variant.
*/
/* A special bootstrap variant: mmap always fails with (void *) -1
   (i.e. MAP_FAILED) before the real function has been resolved.  */
void *
__mf_0fn_mmap (void *start, size_t l, int prot, int f, int fd, off_t off)
{
  return (void *) -1;
}
#endif


#undef mmap
/* Wrapper for mmap: perform the real mapping, then register each page
   of a successful result as a separate heap object so a later munmap
   of a sub-range can unregister individual pages.  */
WRAPPER(void *, mmap,
        void *start, size_t length, int prot,
        int flags, int fd, off_t offset)
{
  DECLARE(void *, mmap, void *, size_t, int,
          int, int, off_t);
  void *result;
  BEGIN_PROTECT (mmap, start, length, prot, flags, fd, offset);

  result = CALL_REAL (mmap, start, length, prot,
                      flags, fd, offset);

  /*
  VERBOSE_TRACE ("mmap (%08lx, %08lx, ...) => %08lx\n",
                 (uintptr_t) start, (uintptr_t) length,
                 (uintptr_t) result);
  */

  if (result != (void *)-1)
    {
      /* Register each page as a heap object.  Why not register it all
         as a single segment?  That's so that a later munmap() call
         can unmap individual pages.  XXX: would __MF_TYPE_GUESS make
         this more automatic? */
      size_t ps = getpagesize ();
      uintptr_t base = (uintptr_t) result;
      /* NB: this local OFFSET shadows the off_t parameter of the same
         name; from here on "offset" is the page-walk cursor.  */
      uintptr_t offset;

      for (offset=0; offset<length; offset+=ps)
        {
          /* XXX: We could map PROT_NONE to __MF_TYPE_NOACCESS. */
          /* XXX: Unaccessed HEAP pages are reported as leaks.  Is this
             appropriate for unaccessed mmap pages? */
          __mf_register ((void *) CLAMPADD (base, offset), ps,
                         __MF_TYPE_HEAP_I, "mmap page");
        }
    }

  return result;
}


#if PIC
/* A special bootstrap variant.
*/
/* A special bootstrap variant: munmap always fails before the real
   function has been resolved.  */
int
__mf_0fn_munmap (void *start, size_t length)
{
  return -1;
}
#endif


#undef munmap
/* Wrapper for munmap: perform the real unmapping and, on success,
   unregister each affected page (the mmap wrapper registered the
   mapping page by page).  */
WRAPPER(int , munmap, void *start, size_t length)
{
  DECLARE(int, munmap, void *, size_t);
  int result;
  BEGIN_PROTECT (munmap, start, length);

  result = CALL_REAL (munmap, start, length);

  /*
  VERBOSE_TRACE ("munmap (%08lx, %08lx, ...) => %08lx\n",
                 (uintptr_t) start, (uintptr_t) length,
                 (uintptr_t) result);
  */

  if (result == 0)
    {
      /* Unregister each page as a heap object. */
      size_t ps = getpagesize ();
      uintptr_t base = (uintptr_t) start & (~ (ps - 1)); /* page align */
      uintptr_t offset;

      for (offset=0; offset<length; offset+=ps)
        __mf_unregister ((void *) CLAMPADD (base, offset), ps, __MF_TYPE_HEAP_I);
    }
  return result;
}
#endif /* HAVE_MMAP */


/* This wrapper is a little different, as it's called indirectly from
   __mf_fini also to clean up pending allocations. */
void *
__mf_wrap_alloca_indirect (size_t c)
{
  DECLARE (void *, malloc, size_t);
  DECLARE (void, free, void *);

  /* This struct, a linked list, tracks alloca'd objects.  The newest
     object is at the head of the list.  If we detect that we've
     popped a few levels of stack, then the listed objects are freed
     as needed.  NB: The tracking struct is allocated with
     real_malloc; the user data with wrap_malloc.
  */
  struct alloca_tracking { void *ptr; void *stack; struct alloca_tracking* next; };
  static struct alloca_tracking *alloca_history = NULL;

  void *stack = __builtin_frame_address (0);
  void *result;
  struct alloca_tracking *track;

  TRACE ("%s\n", __PRETTY_FUNCTION__);
  VERBOSE_TRACE ("alloca stack level %p\n", (void *) stack);

  /* XXX: thread locking! */

  /* Free any previously alloca'd blocks that belong to deeper-nested functions,
     which must therefore have exited by now. */

/* NOTE(review): assumes a downward-growing stack; wrong direction on
   upward-growing targets, as the XXX below already observes.  */
#define DEEPER_THAN < /* XXX: for x86; steal find_stack_direction() from libiberty/alloca.c */

  while (alloca_history &&
         ((uintptr_t) alloca_history->stack DEEPER_THAN (uintptr_t) stack))
    {
      struct alloca_tracking *next = alloca_history->next;
      __mf_unregister (alloca_history->ptr, 0, __MF_TYPE_HEAP);
      BEGIN_MALLOC_PROTECT ();
      CALL_REAL (free, alloca_history->ptr);
      CALL_REAL (free, alloca_history);
      END_MALLOC_PROTECT ();
      alloca_history = next;
    }

  /* Allocate new block. */
  result = NULL;
  if (LIKELY (c > 0)) /* alloca(0) causes no allocation. */
    {
      BEGIN_MALLOC_PROTECT ();
      track = (struct alloca_tracking *) CALL_REAL (malloc,
                                                    sizeof (struct alloca_tracking));
      END_MALLOC_PROTECT ();
      if (LIKELY (track != NULL))
        {
          BEGIN_MALLOC_PROTECT ();
          result = CALL_REAL (malloc, c);
          END_MALLOC_PROTECT ();
          if (UNLIKELY (result == NULL))
            {
              BEGIN_MALLOC_PROTECT ();
              CALL_REAL (free, track);
              END_MALLOC_PROTECT ();
              /* Too bad.  XXX: What about errno? */
            }
          else
            {
              __mf_register (result, c, __MF_TYPE_HEAP, "alloca region");
              track->ptr = result;
              track->stack = stack;
              track->next = alloca_history;
              alloca_history = track;
            }
        }
    }

  return result;
}


#undef alloca
/* Public alloca wrapper; the real work -- including releasing blocks
   whose owning frames have since returned -- happens in
   __mf_wrap_alloca_indirect. */
WRAPPER(void *, alloca, size_t c)
{
  return __mf_wrap_alloca_indirect (c);
}