/* Mudflap: narrow-pointer bounds-checking by tree rewriting.
   Copyright (C) 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
   Contributed by Frank Ch. Eigler <fche@redhat.com>
   and Graydon Hoare <graydon@redhat.com>
   Splay Tree code originally by Mark Mitchell <mark@markmitchell.com>,
   adapted from libiberty.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

In addition to the permissions in the GNU General Public License, the
Free Software Foundation gives you unlimited permission to link the
compiled version of this file into combinations with other programs,
and to distribute those combinations without any restriction coming
from the use of this file.  (The General Public License restrictions
do apply in other respects; for example, they cover modification of
the file, and distribution when not linked into a combine
executable.)

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"

/* These attempt to coax various unix flavours to declare all our
   needed tidbits in the system headers.  */
#if !defined(__FreeBSD__) && !defined(__APPLE__)
#define _POSIX_SOURCE
#endif /* Some BSDs break <sys/socket.h> if this is defined. */
#define _GNU_SOURCE
#define _XOPEN_SOURCE
#define _BSD_TYPES
#define __EXTENSIONS__
#define _ALL_SOURCE
#define _LARGE_FILE_API
#define _XOPEN_SOURCE_EXTENDED 1

#include <stdio.h>
#include <stdlib.h>
#include <sys/types.h>
#include <sys/time.h>
#include <time.h>
#include <unistd.h>
#ifdef HAVE_EXECINFO_H
#include <execinfo.h>
#endif
#ifdef HAVE_SIGNAL_H
#include <signal.h>
#endif
#include <assert.h>

#include <string.h>
#include <limits.h>
#include <sys/types.h>
#include <signal.h>
#include <errno.h>
#include <ctype.h>

#include "mf-runtime.h"
#include "mf-impl.h"


/* ------------------------------------------------------------------------ */
/* Splay-tree implementation.  */

typedef uintptr_t mfsplay_tree_key;
typedef void *mfsplay_tree_value;

/* Forward declaration for a node in the tree.  */
typedef struct mfsplay_tree_node_s *mfsplay_tree_node;

/* The type of a function used to iterate over the tree.  */
typedef int (*mfsplay_tree_foreach_fn) (mfsplay_tree_node, void *);

/* The nodes in the splay tree.  */
struct mfsplay_tree_node_s
{
  /* Data.  */
  mfsplay_tree_key key;
  mfsplay_tree_value value;
  /* Children.  */
  mfsplay_tree_node left;
  mfsplay_tree_node right;
  /* XXX: The addition of a parent pointer may eliminate some recursion.  */
};

/* The splay tree itself.  */
struct mfsplay_tree_s
{
  /* The root of the tree.  */
  mfsplay_tree_node root;

  /* The last key value for which the tree has been splayed, but not
     since modified.  */
  mfsplay_tree_key last_splayed_key;
  int last_splayed_key_p;

  /* Statistics.  */
  unsigned num_keys;

  /* Traversal recursion control flags.  */
  unsigned max_depth;
  unsigned depth;
  unsigned rebalance_p;
};
typedef struct mfsplay_tree_s *mfsplay_tree;

static mfsplay_tree mfsplay_tree_new (void);
static mfsplay_tree_node mfsplay_tree_insert (mfsplay_tree, mfsplay_tree_key, mfsplay_tree_value);
static void mfsplay_tree_remove (mfsplay_tree, mfsplay_tree_key);
static mfsplay_tree_node mfsplay_tree_lookup (mfsplay_tree, mfsplay_tree_key);
static mfsplay_tree_node mfsplay_tree_predecessor (mfsplay_tree, mfsplay_tree_key);
static mfsplay_tree_node mfsplay_tree_successor (mfsplay_tree, mfsplay_tree_key);
static int mfsplay_tree_foreach (mfsplay_tree, mfsplay_tree_foreach_fn, void *);
static void mfsplay_tree_rebalance (mfsplay_tree sp);

/* ------------------------------------------------------------------------ */
/* Utility macros */

#define CTOR  __attribute__ ((constructor))
#define DTOR  __attribute__ ((destructor))


/* Codes to describe the context in which a violation occurs. */
#define __MF_VIOL_UNKNOWN 0
#define __MF_VIOL_READ 1
#define __MF_VIOL_WRITE 2
#define __MF_VIOL_REGISTER 3
#define __MF_VIOL_UNREGISTER 4
#define __MF_VIOL_WATCH 5

/* Protect against recursive calls. */

static void
begin_recursion_protect1 (const char *pf)
{
  if (__mf_get_state () == reentrant)
    {
      write (2, "mf: erroneous reentrancy detected in `", 38);
      write (2, pf, strlen(pf));
      write (2, "'\n", 2);
      abort ();
    }
  __mf_set_state (reentrant);
}

#define BEGIN_RECURSION_PROTECT() \
  begin_recursion_protect1 (__PRETTY_FUNCTION__)

#define END_RECURSION_PROTECT() \
  __mf_set_state (active)
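
/* A public entry point typically brackets its work as in __mf_check
   below:
     LOCKTH (); BEGIN_RECURSION_PROTECT ();
     ... do the real work ...
     END_RECURSION_PROTECT (); UNLOCKTH ();
   so that a reentrant call back into the runtime (for example via a
   wrapped allocation routine) is reported and aborted rather than
   allowed to corrupt internal state.  */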

/* ------------------------------------------------------------------------ */
/* Required globals.  */

#define LOOKUP_CACHE_MASK_DFL 1023
#define LOOKUP_CACHE_SIZE_MAX 65536 /* Allows max CACHE_MASK 0xFFFF */
#define LOOKUP_CACHE_SHIFT_DFL 2

struct __mf_cache __mf_lookup_cache [LOOKUP_CACHE_SIZE_MAX];
uintptr_t __mf_lc_mask = LOOKUP_CACHE_MASK_DFL;
unsigned char __mf_lc_shift = LOOKUP_CACHE_SHIFT_DFL;
#define LOOKUP_CACHE_SIZE (__mf_lc_mask + 1)
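
/* Each lookup cache slot holds the [low, high] bounds of an object that
   recently passed a check; a pointer hashes to a slot via
   __MF_CACHE_INDEX (a shift-and-mask of the address; see mf-impl.h).
   __mf_adapt_cache may later retune __mf_lc_mask and __mf_lc_shift,
   which is why the array is sized for the largest possible mask.  */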

struct __mf_options __mf_opts;
int __mf_starting_p = 1;

#ifdef LIBMUDFLAPTH
#ifdef HAVE_TLS
__thread enum __mf_state_enum __mf_state_1 = reentrant;
#endif
#else
enum __mf_state_enum __mf_state_1 = reentrant;
#endif

#ifdef LIBMUDFLAPTH
pthread_mutex_t __mf_biglock =
#ifdef PTHREAD_ERRORCHECK_MUTEX_INITIALIZER_NP
       PTHREAD_ERRORCHECK_MUTEX_INITIALIZER_NP;
#else
       PTHREAD_MUTEX_INITIALIZER;
#endif
#endif

/* Use HAVE_PTHREAD_H here instead of LIBMUDFLAPTH, so that even
   the libmudflap.la (no threading support) can diagnose whether
   the application is linked with -lpthread.  See __mf_usage() below.  */
#if HAVE_PTHREAD_H
#ifdef _POSIX_THREADS
#pragma weak pthread_join
#else
#define pthread_join NULL
#endif
#endif


/* ------------------------------------------------------------------------ */
/* stats-related globals.  */

static unsigned long __mf_count_check;
static unsigned long __mf_lookup_cache_reusecount [LOOKUP_CACHE_SIZE_MAX];
static unsigned long __mf_count_register;
static unsigned long __mf_total_register_size [__MF_TYPE_MAX+1];
static unsigned long __mf_count_unregister;
static unsigned long __mf_total_unregister_size;
static unsigned long __mf_count_violation [__MF_VIOL_WATCH+1];
static unsigned long __mf_sigusr1_received;
static unsigned long __mf_sigusr1_handled;
/* not static */ unsigned long __mf_reentrancy;
#ifdef LIBMUDFLAPTH
/* not static */ unsigned long __mf_lock_contention;
#endif


/* ------------------------------------------------------------------------ */
/* mode-check-related globals.  */

typedef struct __mf_object
{
  uintptr_t low, high; /* __mf_register parameters */
  const char *name;
  char type; /* __MF_TYPE_something */
  char watching_p; /* Trigger a VIOL_WATCH on access? */
  unsigned read_count; /* Number of times __mf_check/read was called on this object.  */
  unsigned write_count; /* Likewise for __mf_check/write.  */
  unsigned liveness; /* A measure of recent checking activity.  */
  unsigned description_epoch; /* Last epoch __mf_describe_object printed this.  */

  uintptr_t alloc_pc;
  struct timeval alloc_time;
  char **alloc_backtrace;
  size_t alloc_backtrace_size;
#ifdef LIBMUDFLAPTH
  pthread_t alloc_thread;
#endif

  int deallocated_p;
  uintptr_t dealloc_pc;
  struct timeval dealloc_time;
  char **dealloc_backtrace;
  size_t dealloc_backtrace_size;
#ifdef LIBMUDFLAPTH
  pthread_t dealloc_thread;
#endif
} __mf_object_t;

/* Live objects: splay trees, separated by type, ordered on .low (base address).  */
/* Actually stored as static vars within lookup function below.  */

/* Dead objects: circular arrays; _MIN_CEM .. _MAX_CEM only */
static unsigned __mf_object_dead_head[__MF_TYPE_MAX_CEM+1]; /* next empty spot */
static __mf_object_t *__mf_object_cemetary[__MF_TYPE_MAX_CEM+1][__MF_PERSIST_MAX];
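
/* __mfu_unregister (below) retires objects into these per-type circular
   buffers when persistent_count > 0: __mf_object_dead_head[type] names
   the next plot to recycle, and the previous occupant of that plot is
   the object that finally gets freed.  Keeping dead objects around lets
   a later violation near a freed region still be described.  */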


/* ------------------------------------------------------------------------ */
/* Forward function declarations */

void __mf_init () CTOR;
static void __mf_sigusr1_respond ();
static unsigned __mf_find_objects (uintptr_t ptr_low, uintptr_t ptr_high,
                                   __mf_object_t **objs, unsigned max_objs);
static unsigned __mf_find_objects2 (uintptr_t ptr_low, uintptr_t ptr_high,
                                    __mf_object_t **objs, unsigned max_objs, int type);
static unsigned __mf_find_dead_objects (uintptr_t ptr_low, uintptr_t ptr_high,
                                        __mf_object_t **objs, unsigned max_objs);
static void __mf_adapt_cache ();
static void __mf_describe_object (__mf_object_t *obj);
static unsigned __mf_watch_or_not (void *ptr, size_t sz, char flag);
static mfsplay_tree __mf_object_tree (int type);
static void __mf_link_object (__mf_object_t *node);
static void __mf_unlink_object (__mf_object_t *node);


/* ------------------------------------------------------------------------ */
/* Configuration engine */

static void
__mf_set_default_options ()
{
  memset (& __mf_opts, 0, sizeof (__mf_opts));

  __mf_opts.adapt_cache = 1000003;
  __mf_opts.abbreviate = 1;
  __mf_opts.verbose_violations = 1;
  __mf_opts.free_queue_length = 4;
  __mf_opts.persistent_count = 100;
  __mf_opts.crumple_zone = 32;
  __mf_opts.backtrace = 4;
  __mf_opts.timestamps = 1;
  __mf_opts.mudflap_mode = mode_check;
  __mf_opts.violation_mode = viol_nop;
  __mf_opts.heur_std_data = 1;
#ifdef LIBMUDFLAPTH
  __mf_opts.thread_stack = 0;
#endif
}

static struct option
{
  char *name;
  char *description;
  enum
    {
      set_option,
      read_integer_option,
    } type;
  unsigned value;
  unsigned *target;
}
options [] =
  {
    {"mode-nop",
     "mudflaps do nothing",
     set_option, (unsigned)mode_nop, (unsigned *)&__mf_opts.mudflap_mode},
    {"mode-populate",
     "mudflaps populate object tree",
     set_option, (unsigned)mode_populate, (unsigned *)&__mf_opts.mudflap_mode},
    {"mode-check",
     "mudflaps check for memory violations",
     set_option, (unsigned)mode_check, (unsigned *)&__mf_opts.mudflap_mode},
    {"mode-violate",
     "mudflaps always cause violations (diagnostic)",
     set_option, (unsigned)mode_violate, (unsigned *)&__mf_opts.mudflap_mode},

    {"viol-nop",
     "violations do not change program execution",
     set_option, (unsigned)viol_nop, (unsigned *)&__mf_opts.violation_mode},
    {"viol-abort",
     "violations cause a call to abort()",
     set_option, (unsigned)viol_abort, (unsigned *)&__mf_opts.violation_mode},
    {"viol-segv",
     "violations are promoted to SIGSEGV signals",
     set_option, (unsigned)viol_segv, (unsigned *)&__mf_opts.violation_mode},
    {"viol-gdb",
     "violations fork a gdb process attached to current program",
     set_option, (unsigned)viol_gdb, (unsigned *)&__mf_opts.violation_mode},
    {"trace-calls",
     "trace calls to mudflap runtime library",
     set_option, 1, &__mf_opts.trace_mf_calls},
    {"verbose-trace",
     "trace internal events within mudflap runtime library",
     set_option, 1, &__mf_opts.verbose_trace},
    {"collect-stats",
     "collect statistics on mudflap's operation",
     set_option, 1, &__mf_opts.collect_stats},
#ifdef SIGUSR1
    {"sigusr1-report",
     "print report upon SIGUSR1",
     set_option, 1, &__mf_opts.sigusr1_report},
#endif
    {"internal-checking",
     "perform more expensive internal checking",
     set_option, 1, &__mf_opts.internal_checking},
    {"print-leaks",
     "print any memory leaks at program shutdown",
     set_option, 1, &__mf_opts.print_leaks},
    {"check-initialization",
     "detect uninitialized object reads",
     set_option, 1, &__mf_opts.check_initialization},
    {"verbose-violations",
     "print verbose messages when memory violations occur",
     set_option, 1, &__mf_opts.verbose_violations},
    {"abbreviate",
     "abbreviate repetitive listings",
     set_option, 1, &__mf_opts.abbreviate},
    {"timestamps",
     "track object lifetime timestamps",
     set_option, 1, &__mf_opts.timestamps},
    {"ignore-reads",
     "ignore read accesses - assume okay",
     set_option, 1, &__mf_opts.ignore_reads},
    {"wipe-stack",
     "wipe stack objects at unwind",
     set_option, 1, &__mf_opts.wipe_stack},
    {"wipe-heap",
     "wipe heap objects at free",
     set_option, 1, &__mf_opts.wipe_heap},
    {"heur-proc-map",
     "support /proc/self/map heuristics",
     set_option, 1, &__mf_opts.heur_proc_map},
    {"heur-stack-bound",
     "enable a simple upper stack bound heuristic",
     set_option, 1, &__mf_opts.heur_stack_bound},
    {"heur-start-end",
     "support _start.._end heuristics",
     set_option, 1, &__mf_opts.heur_start_end},
    {"heur-stdlib",
     "register standard library data (argv, errno, stdin, ...)",
     set_option, 1, &__mf_opts.heur_std_data},
    {"free-queue-length",
     "queue N deferred free() calls before performing them",
     read_integer_option, 0, &__mf_opts.free_queue_length},
    {"persistent-count",
     "keep a history of N unregistered regions",
     read_integer_option, 0, &__mf_opts.persistent_count},
    {"crumple-zone",
     "surround allocations with crumple zones of N bytes",
     read_integer_option, 0, &__mf_opts.crumple_zone},
    /* XXX: not type-safe.
    {"lc-mask",
     "set lookup cache size mask to N (2**M - 1)",
     read_integer_option, 0, (int *)(&__mf_lc_mask)},
    {"lc-shift",
     "set lookup cache pointer shift",
     read_integer_option, 0, (int *)(&__mf_lc_shift)},
    */
    {"lc-adapt",
     "adapt mask/shift parameters after N cache misses",
     read_integer_option, 1, &__mf_opts.adapt_cache},
    {"backtrace",
     "keep an N-level stack trace of each call context",
     read_integer_option, 0, &__mf_opts.backtrace},
#ifdef LIBMUDFLAPTH
    {"thread-stack",
     "override thread stacks allocation: N kB",
     read_integer_option, 0, &__mf_opts.thread_stack},
#endif
    {0, 0, set_option, 0, NULL}
  };
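
/* The table above is consulted by __mfu_set_options, which parses the
   MUDFLAP_OPTIONS environment variable (see __mf_init), e.g.
     MUDFLAP_OPTIONS='-mode-check -viol-abort -backtrace=8'
   A `-no-OPTION' prefix clears a flag; `-OPTION=N' sets an integer.  */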

static void
__mf_usage ()
{
  struct option *opt;

  fprintf (stderr,
           "This is a %s%sGCC \"mudflap\" memory-checked binary.\n"
           "Mudflap is Copyright (C) 2002-2004 Free Software Foundation, Inc.\n"
           "\n"
           "The mudflap code can be controlled by an environment variable:\n"
           "\n"
           "$ export MUDFLAP_OPTIONS='<options>'\n"
           "$ <mudflapped_program>\n"
           "\n"
           "where <options> is a space-separated list of \n"
           "any of the following options.  Use `-no-OPTION' to disable options.\n"
           "\n",
#if HAVE_PTHREAD_H
           (pthread_join ? "multi-threaded " : "single-threaded "),
#else
           "",
#endif
#if LIBMUDFLAPTH
           "thread-aware "
#else
           "thread-unaware "
#endif
            );
  /* XXX: The multi-threaded thread-unaware combination is bad.  */

  for (opt = options; opt->name; opt++)
    {
      int default_p = (opt->value == * opt->target);

      switch (opt->type)
        {
          char buf[128];
        case set_option:
          fprintf (stderr, "-%-23.23s %s", opt->name, opt->description);
          if (default_p)
            fprintf (stderr, " [active]\n");
          else
            fprintf (stderr, "\n");
          break;
        case read_integer_option:
          strncpy (buf, opt->name, 128);
          strncpy (buf + strlen (opt->name), "=N", 2);
          fprintf (stderr, "-%-23.23s %s", buf, opt->description);
          fprintf (stderr, " [%d]\n", * opt->target);
          break;
        default: abort();
        }
    }

  fprintf (stderr, "\n");
}


int
__mf_set_options (const char *optstr)
{
  int rc;
  LOCKTH ();
  BEGIN_RECURSION_PROTECT ();
  rc = __mfu_set_options (optstr);
  /* XXX: It's not really that easy.  A change to a bunch of parameters
     can require updating auxiliary state or risk crashing:
     free_queue_length, crumple_zone ... */
  END_RECURSION_PROTECT ();
  UNLOCKTH ();
  return rc;
}


int
__mfu_set_options (const char *optstr)
{
  struct option *opts = 0;
  char *nxt = 0;
  long tmp = 0;
  int rc = 0;
  const char *saved_optstr = optstr;

  /* XXX: bounds-check for optstr! */

  while (*optstr)
    {
      switch (*optstr) {
      case ' ':
      case '\t':
      case '\n':
        optstr++;
        break;

      case '-':
        if (*(optstr+1))
          {
            int negate = 0;
            optstr++;

            if (*optstr == '?' ||
                strncmp (optstr, "help", 4) == 0)
              {
                /* Caller will print help and exit.  */
                return -1;
              }

            if (strncmp (optstr, "no-", 3) == 0)
              {
                negate = 1;
                optstr = & optstr[3];
              }

            for (opts = options; opts->name; opts++)
              {
                if (strncmp (optstr, opts->name, strlen (opts->name)) == 0)
                  {
                    optstr += strlen (opts->name);
                    assert (opts->target);
                    switch (opts->type)
                      {
                      case set_option:
                        if (negate)
                          *(opts->target) = 0;
                        else
                          *(opts->target) = opts->value;
                        break;
                      case read_integer_option:
                        if (! negate && (*optstr == '=' && *(optstr+1)))
                          {
                            optstr++;
                            tmp = strtol (optstr, &nxt, 10);
                            if ((optstr != nxt) && (tmp != LONG_MAX))
                              {
                                optstr = nxt;
                                *(opts->target) = (int)tmp;
                              }
                          }
                        else if (negate)
                          * opts->target = 0;
                        break;
                      }
                  }
              }
          }
        break;

      default:
        fprintf (stderr,
                 "warning: unrecognized string '%s' in mudflap options\n",
                 optstr);
        optstr += strlen (optstr);
        rc = -1;
        break;
      }
    }

  /* Special post-processing: bound __mf_lc_mask and free_queue_length for security. */
  __mf_lc_mask &= (LOOKUP_CACHE_SIZE_MAX - 1);
  __mf_opts.free_queue_length &= (__MF_FREEQ_MAX - 1);

  /* Clear the lookup cache, in case the parameters got changed.  */
  /* XXX: race */
  memset (__mf_lookup_cache, 0, sizeof(__mf_lookup_cache));
  /* void slot 0 */
  __mf_lookup_cache[0].low = MAXPTR;

  TRACE ("set options from `%s'\n", saved_optstr);

  /* Call this unconditionally, in case -sigusr1-report was toggled. */
  __mf_sigusr1_respond ();

  return rc;
}


#ifdef PIC

void
__mf_resolve_single_dynamic (struct __mf_dynamic_entry *e)
{
  char *err;

  assert (e);
  if (e->pointer) return;

#if HAVE_DLVSYM
  if (e->version != NULL && e->version[0] != '\0') /* non-null/empty */
    e->pointer = dlvsym (RTLD_NEXT, e->name, e->version);
  else
#endif
    e->pointer = dlsym (RTLD_NEXT, e->name);

  err = dlerror ();

  if (err)
    {
      fprintf (stderr, "mf: error in dlsym(\"%s\"): %s\n",
               e->name, err);
      abort ();
    }
  if (! e->pointer)
    {
      fprintf (stderr, "mf: dlsym(\"%s\") = NULL\n", e->name);
      abort ();
    }
}


static void
__mf_resolve_dynamics ()
{
  int i;
  for (i = 0; i < dyn_INITRESOLVE; i++)
    __mf_resolve_single_dynamic (& __mf_dynamic[i]);
}


/* NB: order must match enums in mf-impl.h */
struct __mf_dynamic_entry __mf_dynamic [] =
{
  {NULL, "calloc", NULL},
  {NULL, "free", NULL},
  {NULL, "malloc", NULL},
  {NULL, "mmap", NULL},
  {NULL, "munmap", NULL},
  {NULL, "realloc", NULL},
  {NULL, "DUMMY", NULL}, /* dyn_INITRESOLVE */
#ifdef LIBMUDFLAPTH
  {NULL, "pthread_create", PTHREAD_CREATE_VERSION},
  {NULL, "pthread_join", NULL},
  {NULL, "pthread_exit", NULL}
#endif
};

#endif /* PIC */



/* ------------------------------------------------------------------------ */

/* Lookup & manage automatic initialization of the five or so splay trees.  */
static mfsplay_tree
__mf_object_tree (int type)
{
  static mfsplay_tree trees [__MF_TYPE_MAX+1];
  assert (type >= 0 && type <= __MF_TYPE_MAX);
  if (UNLIKELY (trees[type] == NULL))
    trees[type] = mfsplay_tree_new ();
  return trees[type];
}


/* not static */void
__mf_init ()
{
  char *ov = 0;

  /* Return if initialization has already been done. */
  if (LIKELY (__mf_starting_p == 0))
    return;

  /* This initial bootstrap phase requires that __mf_starting_p = 1. */
#ifdef PIC
  __mf_resolve_dynamics ();
#endif
  __mf_starting_p = 0;

  __mf_set_state (active);

  __mf_set_default_options ();

  ov = getenv ("MUDFLAP_OPTIONS");
  if (ov)
    {
      int rc = __mfu_set_options (ov);
      if (rc < 0)
        {
          __mf_usage ();
          exit (1);
        }
    }

  /* Initialize to a non-zero description epoch. */
  __mf_describe_object (NULL);

#define REG_RESERVED(obj) \
  __mf_register (& obj, sizeof(obj), __MF_TYPE_NOACCESS, # obj)

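  /* Register the runtime's own lookup-cache state as __MF_TYPE_NOACCESS,
     so that an instrumented program which strays into these arrays
     triggers a violation (any NOACCESS hit is judged bad in __mfu_check)
     instead of silently corrupting the checker.  */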
  REG_RESERVED (__mf_lookup_cache);
  REG_RESERVED (__mf_lc_mask);
  REG_RESERVED (__mf_lc_shift);
  /* XXX: others of our statics?  */

  /* Prevent access to *NULL. */
  __mf_register (MINPTR, 1, __MF_TYPE_NOACCESS, "NULL");
  __mf_lookup_cache[0].low = (uintptr_t) -1;
}



int
__wrap_main (int argc, char* argv[])
{
  extern char **environ;
  extern int main ();
  extern int __real_main ();
  static int been_here = 0;

  if (__mf_opts.heur_std_data && ! been_here)
    {
      unsigned i;

      been_here = 1;
      __mf_register (argv, sizeof(char *)*(argc+1), __MF_TYPE_STATIC, "argv[]");
      for (i=0; i<argc; i++)
        {
          unsigned j = strlen (argv[i]);
          __mf_register (argv[i], j+1, __MF_TYPE_STATIC, "argv element");
        }

      for (i=0; ; i++)
        {
          char *e = environ[i];
          unsigned j;
          if (e == NULL) break;
          j = strlen (environ[i]);
          __mf_register (environ[i], j+1, __MF_TYPE_STATIC, "environ element");
        }
      __mf_register (environ, sizeof(char *)*(i+1), __MF_TYPE_STATIC, "environ[]");

      __mf_register (& errno, sizeof (errno), __MF_TYPE_STATIC, "errno area");

      __mf_register (stdin,  sizeof (*stdin),  __MF_TYPE_STATIC, "stdin");
      __mf_register (stdout, sizeof (*stdout), __MF_TYPE_STATIC, "stdout");
      __mf_register (stderr, sizeof (*stderr), __MF_TYPE_STATIC, "stderr");

      /* Make some effort to register ctype.h static arrays.  */
      /* XXX: e.g., on Solaris, may need to register __ctype, _ctype, __ctype_mask, __toupper, etc. */
      /* On modern Linux GLIBC, these are thread-specific and changeable, and are dealt
         with in mf-hooks2.c.  */
    }

#ifdef PIC
  return main (argc, argv, environ);
#else
  return __real_main (argc, argv, environ);
#endif
}



extern void __mf_fini () DTOR;
void __mf_fini ()
{
  TRACE ("__mf_fini\n");
  __mfu_report ();

#ifndef PIC
/* Since we didn't populate the tree for allocations in constructors
   before __mf_init, we cannot check destructors after __mf_fini.  */
  __mf_opts.mudflap_mode = mode_nop;
#endif
}



/* ------------------------------------------------------------------------ */
/* __mf_check */

void __mf_check (void *ptr, size_t sz, int type, const char *location)
{
  LOCKTH ();
  BEGIN_RECURSION_PROTECT ();
  __mfu_check (ptr, sz, type, location);
  END_RECURSION_PROTECT ();
  UNLOCKTH ();
}


void __mfu_check (void *ptr, size_t sz, int type, const char *location)
{
  unsigned entry_idx = __MF_CACHE_INDEX (ptr);
  struct __mf_cache *entry = & __mf_lookup_cache [entry_idx];
  int judgement = 0; /* 0=undecided; <0=violation; >0=okay */
  uintptr_t ptr_low = (uintptr_t) ptr;
  uintptr_t ptr_high = CLAMPSZ (ptr, sz);
  struct __mf_cache old_entry = *entry;

  if (UNLIKELY (__mf_opts.sigusr1_report))
    __mf_sigusr1_respond ();
  if (UNLIKELY (__mf_opts.ignore_reads && type == 0))
    return;

  TRACE ("check ptr=%p b=%u size=%lu %s location=`%s'\n",
         ptr, entry_idx, (unsigned long)sz,
         (type == 0 ? "read" : "write"), location);

  switch (__mf_opts.mudflap_mode)
    {
    case mode_nop:
      /* It is tempting to poison the cache here similarly to
         mode_populate.  However that eliminates a valuable
         distinction between these two modes.  mode_nop is useful to
         let a user count & trace every single check / registration
         call.  mode_populate is useful to let a program run fast
         while unchecked.
      */
      judgement = 1;
      break;

    case mode_populate:
      entry->low = ptr_low;
      entry->high = ptr_high;
      judgement = 1;
      break;

    case mode_check:
      {
        unsigned heuristics = 0;

        /* Advance aging/adaptation counters.  */
        static unsigned adapt_count;
        adapt_count ++;
        if (UNLIKELY (__mf_opts.adapt_cache > 0 &&
                      adapt_count > __mf_opts.adapt_cache))
          {
            adapt_count = 0;
            __mf_adapt_cache ();
          }

        /* Looping only occurs if heuristics were triggered.  */
        while (judgement == 0)
          {
            DECLARE (void, free, void *p);
            __mf_object_t* ovr_obj[1];
            unsigned obj_count;
            __mf_object_t** all_ovr_obj = NULL;
            __mf_object_t** dealloc_me = NULL;
            unsigned i;

            /* Find all overlapping objects.  Be optimistic that there is just one.  */
            obj_count = __mf_find_objects (ptr_low, ptr_high, ovr_obj, 1);
            if (UNLIKELY (obj_count > 1))
              {
                /* Allocate a real buffer and do the search again.  */
                DECLARE (void *, malloc, size_t c);
                unsigned n;
                all_ovr_obj = CALL_REAL (malloc, (sizeof (__mf_object_t *) *
                                                   obj_count));
                if (all_ovr_obj == NULL) abort ();
                n = __mf_find_objects (ptr_low, ptr_high, all_ovr_obj, obj_count);
                assert (n == obj_count);
                dealloc_me = all_ovr_obj;
              }
            else
              {
                all_ovr_obj = ovr_obj;
                dealloc_me = NULL;
              }

            /* Update object statistics.  */
            for (i = 0; i < obj_count; i++)
              {
                __mf_object_t *obj = all_ovr_obj[i];
                assert (obj != NULL);
                if (type == __MF_CHECK_READ)
                  obj->read_count ++;
                else
                  obj->write_count ++;
                obj->liveness ++;
              }

            /* Iterate over the various objects.  There are a number of special cases.  */
            for (i = 0; i < obj_count; i++)
              {
                __mf_object_t *obj = all_ovr_obj[i];

                /* Any __MF_TYPE_NOACCESS hit is bad.  */
                if (UNLIKELY (obj->type == __MF_TYPE_NOACCESS))
                  judgement = -1;

                /* Any object with a watch flag is bad.  */
                if (UNLIKELY (obj->watching_p))
                  judgement = -2; /* trigger VIOL_WATCH */

                /* A read from an uninitialized object is bad. */
                if (UNLIKELY (__mf_opts.check_initialization
                              /* reading */
                              && type == __MF_CHECK_READ
                              /* not written */
                              && obj->write_count == 0
                              /* uninitialized (heap) */
                              && obj->type == __MF_TYPE_HEAP))
                  judgement = -1;
              }

            /* We now know that the access spans no invalid objects.  */
            if (LIKELY (judgement >= 0))
              for (i = 0; i < obj_count; i++)
                {
                  __mf_object_t *obj = all_ovr_obj[i];

                  /* Is this access entirely contained within this object?  */
                  if (LIKELY (ptr_low >= obj->low && ptr_high <= obj->high))
                    {
                      /* Valid access.  */
                      entry->low = obj->low;
                      entry->high = obj->high;
                      judgement = 1;
                    }
                }

            /* This access runs off the end of one valid object.  That
               could be okay, if other valid objects fill in all the
               holes.  We allow this only for HEAP and GUESS type
               objects.  Accesses to STATIC and STACK variables
               should not be allowed to span.  */
            if (UNLIKELY ((judgement == 0) && (obj_count > 1)))
              {
                unsigned uncovered = 0;
                for (i = 0; i < obj_count; i++)
                  {
                    __mf_object_t *obj = all_ovr_obj[i];
                    int j, uncovered_low_p, uncovered_high_p;
                    uintptr_t ptr_lower, ptr_higher;

                    uncovered_low_p = ptr_low < obj->low;
                    ptr_lower = CLAMPSUB (obj->low, 1);
                    uncovered_high_p = ptr_high > obj->high;
                    ptr_higher = CLAMPADD (obj->high, 1);

                    for (j = 0; j < obj_count; j++)
                      {
                        __mf_object_t *obj2 = all_ovr_obj[j];

                        if (i == j) continue;

                        /* Filter out objects that cannot be spanned across.  */
                        if (obj2->type == __MF_TYPE_STACK
                            || obj2->type == __MF_TYPE_STATIC)
                          continue;

                        /* Consider a side "covered" if obj2 includes
                           the next byte on that side.  */
                        if (uncovered_low_p
                            && (ptr_lower >= obj2->low && ptr_lower <= obj2->high))
                          uncovered_low_p = 0;
                        if (uncovered_high_p
                            && (ptr_high >= obj2->low && ptr_higher <= obj2->high))
                          uncovered_high_p = 0;
                      }

                    if (uncovered_low_p || uncovered_high_p)
                      uncovered ++;
                  }

                /* Success if no overlapping objects are uncovered.  */
                if (uncovered == 0)
                  judgement = 1;
              }


            if (dealloc_me != NULL)
              CALL_REAL (free, dealloc_me);

            /* If the judgement is still unknown at this stage, loop
               around at most one more time.  */
            if (judgement == 0)
              {
                if (heuristics++ < 2) /* XXX parametrize this number? */
                  judgement = __mf_heuristic_check (ptr_low, ptr_high);
                else
                  judgement = -1;
              }
          }

      }
      break;

    case mode_violate:
      judgement = -1;
      break;
    }

  if (__mf_opts.collect_stats)
    {
      __mf_count_check ++;

      if (LIKELY (old_entry.low != entry->low || old_entry.high != entry->high))
        /* && (old_entry.low != 0) && (old_entry.high != 0)) */
        __mf_lookup_cache_reusecount [entry_idx] ++;
    }

  if (UNLIKELY (judgement < 0))
    __mf_violation (ptr, sz,
                    (uintptr_t) __builtin_return_address (0), location,
                    ((judgement == -1) ?
                     (type == __MF_CHECK_READ ? __MF_VIOL_READ : __MF_VIOL_WRITE) :
                     __MF_VIOL_WATCH));
}
1033169695Skan
1034169695Skan
1035169695Skanstatic __mf_object_t *
1036169695Skan__mf_insert_new_object (uintptr_t low, uintptr_t high, int type,
1037169695Skan                        const char *name, uintptr_t pc)
1038169695Skan{
1039169695Skan  DECLARE (void *, calloc, size_t c, size_t n);
1040169695Skan
1041169695Skan  __mf_object_t *new_obj;
1042169695Skan  new_obj = CALL_REAL (calloc, 1, sizeof(__mf_object_t));
1043169695Skan  new_obj->low = low;
1044169695Skan  new_obj->high = high;
1045169695Skan  new_obj->type = type;
1046169695Skan  new_obj->name = name;
1047169695Skan  new_obj->alloc_pc = pc;
1048169695Skan#if HAVE_GETTIMEOFDAY
1049169695Skan  if (__mf_opts.timestamps)
1050169695Skan    gettimeofday (& new_obj->alloc_time, NULL);
1051169695Skan#endif
1052169695Skan#if LIBMUDFLAPTH
1053169695Skan  new_obj->alloc_thread = pthread_self ();
1054169695Skan#endif
1055169695Skan
1056169695Skan  if (__mf_opts.backtrace > 0 && (type == __MF_TYPE_HEAP || type == __MF_TYPE_HEAP_I))
1057169695Skan    new_obj->alloc_backtrace_size =
1058169695Skan      __mf_backtrace (& new_obj->alloc_backtrace,
1059169695Skan                      (void *) pc, 2);
1060169695Skan
1061169695Skan  __mf_link_object (new_obj);
1062169695Skan  return new_obj;
1063169695Skan}
1064169695Skan
1065169695Skan
1066169695Skanstatic void
1067169695Skan__mf_uncache_object (__mf_object_t *old_obj)
1068169695Skan{
1069169695Skan  /* Remove any low/high pointers for this object from the lookup cache.  */
1070169695Skan
1071169695Skan  /* Can it possibly exist in the cache?  */
1072169695Skan  if (LIKELY (old_obj->read_count + old_obj->write_count))
1073169695Skan    {
1074169695Skan      /* As reported by Herman ten Brugge, we need to scan the entire
1075169695Skan         cache for entries that may hit this object. */
1076169695Skan      uintptr_t low = old_obj->low;
1077169695Skan      uintptr_t high = old_obj->high;
1078169695Skan      struct __mf_cache *entry = & __mf_lookup_cache [0];
1079169695Skan      unsigned i;
1080169695Skan      for (i = 0; i <= __mf_lc_mask; i++, entry++)
1081169695Skan        {
1082169695Skan          /* NB: the "||" in the following test permits this code to
1083169695Skan             tolerate the situation introduced by __mf_check over
1084169695Skan             contiguous objects, where a cache entry spans several
1085169695Skan             objects.  */
1086169695Skan          if (entry->low == low || entry->high == high)
1087169695Skan            {
1088169695Skan              entry->low = MAXPTR;
1089169695Skan              entry->high = MINPTR;
1090169695Skan            }
1091169695Skan        }
1092169695Skan    }
1093169695Skan}
1094169695Skan
1095169695Skan
1096169695Skanvoid
1097169695Skan__mf_register (void *ptr, size_t sz, int type, const char *name)
1098169695Skan{
1099169695Skan  LOCKTH ();
1100169695Skan  BEGIN_RECURSION_PROTECT ();
1101169695Skan  __mfu_register (ptr, sz, type, name);
1102169695Skan  END_RECURSION_PROTECT ();
1103169695Skan  UNLOCKTH ();
1104169695Skan}
1105169695Skan
1106169695Skan
1107169695Skanvoid
1108169695Skan__mfu_register (void *ptr, size_t sz, int type, const char *name)
1109169695Skan{
1110169695Skan  TRACE ("register ptr=%p size=%lu type=%x name='%s'\n",
1111169695Skan         ptr, (unsigned long) sz, type, name ? name : "");
1112169695Skan
1113169695Skan  if (__mf_opts.collect_stats)
1114169695Skan    {
1115169695Skan      __mf_count_register ++;
1116169695Skan      __mf_total_register_size [(type < 0) ? 0 :
1117169695Skan                                (type > __MF_TYPE_MAX) ? 0 :
1118169695Skan                                type] += sz;
1119169695Skan    }
1120169695Skan
1121169695Skan  if (UNLIKELY (__mf_opts.sigusr1_report))
1122169695Skan    __mf_sigusr1_respond ();
1123169695Skan
1124169695Skan  switch (__mf_opts.mudflap_mode)
1125169695Skan    {
1126169695Skan    case mode_nop:
1127169695Skan      break;
1128169695Skan
1129169695Skan    case mode_violate:
1130169695Skan      __mf_violation (ptr, sz, (uintptr_t) __builtin_return_address (0), NULL,
1131169695Skan                      __MF_VIOL_REGISTER);
1132169695Skan      break;
1133169695Skan
1134169695Skan    case mode_populate:
1135169695Skan      /* Clear the cache.  */
1136169695Skan      /* XXX: why the entire cache? */
1137169695Skan      /* XXX: race */
1138169695Skan      memset (__mf_lookup_cache, 0, sizeof(__mf_lookup_cache));
1139169695Skan      /* void slot 0 */
1140169695Skan      __mf_lookup_cache[0].low = MAXPTR;
1141169695Skan      break;
1142169695Skan
1143169695Skan    case mode_check:
1144169695Skan      {
1145169695Skan        __mf_object_t *ovr_objs [1];
1146169695Skan        unsigned num_overlapping_objs;
1147169695Skan        uintptr_t low = (uintptr_t) ptr;
1148169695Skan        uintptr_t high = CLAMPSZ (ptr, sz);
1149169695Skan        uintptr_t pc = (uintptr_t) __builtin_return_address (0);
1150169695Skan
1151169695Skan        /* Treat unknown size indication as 1.  */
1152169695Skan        if (UNLIKELY (sz == 0)) sz = 1;
1153169695Skan
1154169695Skan        /* Look for objects only of the same type.  This will e.g. permit a registration
1155169695Skan           of a STATIC overlapping with a GUESS, and a HEAP with a NOACCESS.  At
1156169695Skan           __mf_check time however harmful overlaps will be detected. */
1157169695Skan        num_overlapping_objs = __mf_find_objects2 (low, high, ovr_objs, 1, type);
1158169695Skan
1159169695Skan        /* Handle overlaps.  */
1160169695Skan        if (UNLIKELY (num_overlapping_objs > 0))
1161169695Skan          {
1162169695Skan            __mf_object_t *ovr_obj = ovr_objs[0];
1163169695Skan
1164169695Skan            /* Accept certain specific duplication pairs.  */
1165169695Skan            if (((type == __MF_TYPE_STATIC) || (type == __MF_TYPE_GUESS))
1166169695Skan                && ovr_obj->low == low
1167169695Skan                && ovr_obj->high == high
1168169695Skan                && ovr_obj->type == type)
1169169695Skan              {
1170169695Skan                /* Duplicate registration for static objects may come
1171169695Skan                   from distinct compilation units.  */
1172169695Skan                VERBOSE_TRACE ("harmless duplicate reg %p-%p `%s'\n",
1173169695Skan                               (void *) low, (void *) high,
1174169695Skan                               (ovr_obj->name ? ovr_obj->name : ""));
1175169695Skan                break;
1176169695Skan              }
1177169695Skan
1178169695Skan            /* Alas, a genuine violation.  */
1179169695Skan            else
1180169695Skan              {
1181169695Skan                /* Two or more *real* mappings here. */
1182169695Skan                __mf_violation ((void *) ptr, sz,
1183169695Skan                                (uintptr_t) __builtin_return_address (0), NULL,
1184169695Skan                                __MF_VIOL_REGISTER);
1185169695Skan              }
1186169695Skan          }
1187169695Skan        else /* No overlapping objects: AOK.  */
1188169695Skan          __mf_insert_new_object (low, high, type, name, pc);
1189169695Skan
1190169695Skan        /* We could conceivably call __mf_check() here to prime the cache,
1191169695Skan           but then the read_count/write_count field is not reliable.  */
1192169695Skan        break;
1193169695Skan      }
1194169695Skan    } /* end switch (__mf_opts.mudflap_mode) */
1195169695Skan}
1196169695Skan
1197169695Skan
1198169695Skanvoid
1199169695Skan__mf_unregister (void *ptr, size_t sz, int type)
1200169695Skan{
1201169695Skan  LOCKTH ();
1202169695Skan  BEGIN_RECURSION_PROTECT ();
1203169695Skan  __mfu_unregister (ptr, sz, type);
1204169695Skan  END_RECURSION_PROTECT ();
1205169695Skan  UNLOCKTH ();
1206169695Skan}
1207169695Skan
1208169695Skan
1209169695Skanvoid
1210169695Skan__mfu_unregister (void *ptr, size_t sz, int type)
1211169695Skan{
1212169695Skan  DECLARE (void, free, void *ptr);
1213169695Skan
1214169695Skan  if (UNLIKELY (__mf_opts.sigusr1_report))
1215169695Skan    __mf_sigusr1_respond ();
1216169695Skan
1217169695Skan  TRACE ("unregister ptr=%p size=%lu type=%x\n", ptr, (unsigned long) sz, type);
1218169695Skan
1219169695Skan  switch (__mf_opts.mudflap_mode)
1220169695Skan    {
1221169695Skan    case mode_nop:
1222169695Skan      break;
1223169695Skan
1224169695Skan    case mode_violate:
1225169695Skan      __mf_violation (ptr, sz,
1226169695Skan                      (uintptr_t) __builtin_return_address (0), NULL,
1227169695Skan                      __MF_VIOL_UNREGISTER);
1228169695Skan      break;
1229169695Skan
1230169695Skan    case mode_populate:
1231169695Skan      /* Clear the cache.  */
1232169695Skan      /* XXX: race */
1233169695Skan      memset (__mf_lookup_cache, 0, sizeof(__mf_lookup_cache));
1234169695Skan      /* void slot 0 */
1235169695Skan      __mf_lookup_cache[0].low = MAXPTR;
1236169695Skan      break;
1237169695Skan
1238169695Skan    case mode_check:
1239169695Skan      {
1240169695Skan        __mf_object_t *old_obj = NULL;
1241169695Skan        __mf_object_t *del_obj = NULL;  /* Object to actually delete. */
1242169695Skan        __mf_object_t *objs[1] = {NULL};
1243169695Skan        unsigned num_overlapping_objs;
1244169695Skan
1245169695Skan        num_overlapping_objs = __mf_find_objects2 ((uintptr_t) ptr,
1246169695Skan                                                   CLAMPSZ (ptr, sz), objs, 1, type);
1247169695Skan
1248169695Skan        /* Special case for HEAP_I - see free & realloc hook.  They don't
1249169695Skan           know whether the input region was HEAP or HEAP_I before
1250169695Skan           unmapping it.  Here we give HEAP a try in case HEAP_I
1251169695Skan           failed.  */
1252169695Skan        if ((type == __MF_TYPE_HEAP_I) && (num_overlapping_objs == 0))
1253169695Skan          {
1254169695Skan            num_overlapping_objs = __mf_find_objects2 ((uintptr_t) ptr,
1255169695Skan                                                       CLAMPSZ (ptr, sz), objs, 1, __MF_TYPE_HEAP);
1256169695Skan          }
1257169695Skan
1258169695Skan        old_obj = objs[0];
1259169695Skan        if (UNLIKELY ((num_overlapping_objs != 1) /* more than one overlap */
1260169695Skan                      || ((sz == 0) ? 0 : (sz != (old_obj->high - old_obj->low + 1))) /* size mismatch */
1261169695Skan                      || ((uintptr_t) ptr != old_obj->low))) /* base mismatch */
1262169695Skan          {
1263169695Skan            __mf_violation (ptr, sz,
1264169695Skan                            (uintptr_t) __builtin_return_address (0), NULL,
1265169695Skan                            __MF_VIOL_UNREGISTER);
1266169695Skan            break;
1267169695Skan          }
1268169695Skan
1269169695Skan        __mf_unlink_object (old_obj);
1270169695Skan        __mf_uncache_object (old_obj);
1271169695Skan
1272169695Skan        /* Wipe buffer contents if desired.  */
1273169695Skan        if ((__mf_opts.wipe_stack && old_obj->type == __MF_TYPE_STACK)
1274169695Skan            || (__mf_opts.wipe_heap && (old_obj->type == __MF_TYPE_HEAP
1275169695Skan                                        || old_obj->type == __MF_TYPE_HEAP_I)))
1276169695Skan          {
1277169695Skan            memset ((void *) old_obj->low,
1278169695Skan                    0,
1279169695Skan                    (size_t) (old_obj->high - old_obj->low + 1));
1280169695Skan          }
1281169695Skan
1282169695Skan        /* Manage the object cemetary.  */
1283169695Skan        if (__mf_opts.persistent_count > 0
1284169695Skan	    && (unsigned) old_obj->type <= __MF_TYPE_MAX_CEM)
1285169695Skan          {
1286169695Skan            old_obj->deallocated_p = 1;
1287169695Skan            old_obj->dealloc_pc = (uintptr_t) __builtin_return_address (0);
1288169695Skan#if HAVE_GETTIMEOFDAY
1289169695Skan            if (__mf_opts.timestamps)
1290169695Skan              gettimeofday (& old_obj->dealloc_time, NULL);
1291169695Skan#endif
1292169695Skan#ifdef LIBMUDFLAPTH
1293169695Skan            old_obj->dealloc_thread = pthread_self ();
1294169695Skan#endif
1295169695Skan
1296169695Skan            if (__mf_opts.backtrace > 0 && old_obj->type == __MF_TYPE_HEAP)
1297169695Skan              old_obj->dealloc_backtrace_size =
1298169695Skan                __mf_backtrace (& old_obj->dealloc_backtrace,
1299169695Skan                                NULL, 2);
1300169695Skan
1301169695Skan            /* Encourage this object to be displayed again in the current epoch.  */
1302169695Skan            old_obj->description_epoch --;
1303169695Skan
1304169695Skan            /* Put this object into the cemetery.  This may require the plot to
1305169695Skan               be recycled, and the previous resident to be designated del_obj.  */
1306169695Skan            {
1307169695Skan              unsigned row = old_obj->type;
1308169695Skan              unsigned plot = __mf_object_dead_head [row];
1309169695Skan
1310169695Skan              del_obj = __mf_object_cemetary [row][plot];
1311169695Skan              __mf_object_cemetary [row][plot] = old_obj;
1312169695Skan              plot ++;
1313169695Skan              if (plot == __mf_opts.persistent_count) plot = 0;
1314169695Skan              __mf_object_dead_head [row] = plot;
1315169695Skan            }
1316169695Skan          }
1317169695Skan        else
1318169695Skan          del_obj = old_obj;
1319169695Skan
1320169695Skan        if (__mf_opts.print_leaks)
1321169695Skan          {
1322169695Skan            if ((old_obj->read_count + old_obj->write_count) == 0 &&
1323169695Skan                (old_obj->type == __MF_TYPE_HEAP
1324169695Skan                 || old_obj->type == __MF_TYPE_HEAP_I))
1325169695Skan              {
1326169695Skan                fprintf (stderr,
1327169695Skan                         "*******\n"
1328169695Skan                         "mudflap warning: unaccessed registered object:\n");
1329169695Skan                __mf_describe_object (old_obj);
1330169695Skan              }
1331169695Skan          }
1332169695Skan
1333169695Skan        if (del_obj != NULL) /* May or may not equal old_obj.  */
1334169695Skan          {
1335169695Skan            if (__mf_opts.backtrace > 0)
1336169695Skan              {
1337169695Skan                CALL_REAL(free, del_obj->alloc_backtrace);
1338169695Skan                if (__mf_opts.persistent_count > 0)
1339169695Skan                  {
1340169695Skan                    CALL_REAL(free, del_obj->dealloc_backtrace);
1341169695Skan                  }
1342169695Skan              }
1343169695Skan            CALL_REAL(free, del_obj);
1344169695Skan          }
1345169695Skan
1346169695Skan        break;
1347169695Skan      }
1348169695Skan    } /* end switch (__mf_opts.mudflap_mode) */
1349169695Skan
1350169695Skan
1351169695Skan  if (__mf_opts.collect_stats)
1352169695Skan    {
1353169695Skan      __mf_count_unregister ++;
1354169695Skan      __mf_total_unregister_size += sz;
1355169695Skan    }
1356169695Skan}
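
/* Worked example of the cemetery ring above (an illustrative sketch using
   only names defined in this file): with __mf_opts.persistent_count == 4,
   each row of __mf_object_cemetary is a circular buffer of four plots and
   __mf_object_dead_head[row] points at the oldest plot.  Unregistering five
   same-typed objects A, B, C, D, E in that order leaves the row holding
   {E, B, C, D}; A was the evicted resident (del_obj above) and its storage
   has been handed back to the real free().  */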
1357169695Skan
1358169695Skan
1359169695Skan
1360169695Skanstruct tree_stats
1361169695Skan{
1362169695Skan  unsigned obj_count;
1363169695Skan  unsigned long total_size;
1364169695Skan  unsigned live_obj_count;
1365169695Skan  double total_weight;
1366169695Skan  double weighted_size;
1367169695Skan  unsigned long weighted_address_bits [sizeof (uintptr_t) * 8][2];
1368169695Skan};
1369169695Skan
1370169695Skan
1371169695Skan
1372169695Skanstatic int
1373169695Skan__mf_adapt_cache_fn (mfsplay_tree_node n, void *param)
1374169695Skan{
1375169695Skan  __mf_object_t *obj = (__mf_object_t *) n->value;
1376169695Skan  struct tree_stats *s = (struct tree_stats *) param;
1377169695Skan
1378169695Skan  assert (obj != NULL && s != NULL);
1379169695Skan
1380169695Skan  /* Exclude never-accessed objects.  */
1381169695Skan  if (obj->read_count + obj->write_count)
1382169695Skan    {
1383169695Skan      s->obj_count ++;
1384169695Skan      s->total_size += (obj->high - obj->low + 1);
1385169695Skan
1386169695Skan      if (obj->liveness)
1387169695Skan        {
1388169695Skan          unsigned i;
1389169695Skan          uintptr_t addr;
1390169695Skan
1391169695Skan          /* VERBOSE_TRACE ("analyze low=%p live=%u name=`%s'\n",
1392169695Skan             (void *) obj->low, obj->liveness, obj->name); */
1393169695Skan
1394169695Skan          s->live_obj_count ++;
1395169695Skan          s->total_weight += (double) obj->liveness;
1396169695Skan          s->weighted_size +=
1397169695Skan            (double) (obj->high - obj->low + 1) *
1398169695Skan            (double) obj->liveness;
1399169695Skan
1400169695Skan          addr = obj->low;
1401169695Skan          for (i=0; i<sizeof(uintptr_t) * 8; i++)
1402169695Skan            {
1403169695Skan              unsigned bit = addr & 1;
1404169695Skan              s->weighted_address_bits[i][bit] += obj->liveness;
1405169695Skan              addr = addr >> 1;
1406169695Skan            }
1407169695Skan
1408169695Skan          /* Age the liveness value.  */
1409169695Skan          obj->liveness >>= 1;
1410169695Skan        }
1411169695Skan    }
1412169695Skan
1413169695Skan  return 0;
1414169695Skan}
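
/* Illustrative sketch of the histogram maintained above: an object at
   low == 0x30 with liveness 3 adds 3 to weighted_address_bits[4][1] and
   weighted_address_bits[5][1] (the two set bits of 0x30), and 3 to the
   [i][0] tally of every other bit position.  A bit position whose [0] and
   [1] tallies are both large splits the live objects roughly in half,
   which is exactly what __mf_adapt_cache looks for below.  */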
1415169695Skan
1416169695Skan
1417169695Skanstatic void
1418169695Skan__mf_adapt_cache ()
1419169695Skan{
1420169695Skan  struct tree_stats s;
1421169695Skan  uintptr_t new_mask = 0;
1422169695Skan  unsigned char new_shift;
1423169695Skan  float cache_utilization;
1424169695Skan  float max_value;
1425169695Skan  static float smoothed_new_shift = -1.0;
1426169695Skan  unsigned i;
1427169695Skan
1428169695Skan  memset (&s, 0, sizeof (s));
1429169695Skan
1430169695Skan  mfsplay_tree_foreach (__mf_object_tree (__MF_TYPE_HEAP), __mf_adapt_cache_fn, (void *) & s);
1431169695Skan  mfsplay_tree_foreach (__mf_object_tree (__MF_TYPE_HEAP_I), __mf_adapt_cache_fn, (void *) & s);
1432169695Skan  mfsplay_tree_foreach (__mf_object_tree (__MF_TYPE_STACK), __mf_adapt_cache_fn, (void *) & s);
1433169695Skan  mfsplay_tree_foreach (__mf_object_tree (__MF_TYPE_STATIC), __mf_adapt_cache_fn, (void *) & s);
1434169695Skan  mfsplay_tree_foreach (__mf_object_tree (__MF_TYPE_GUESS), __mf_adapt_cache_fn, (void *) & s);
1435169695Skan
1436169695Skan  /* Maybe we're dealing with funny aging/adaptation parameters, or an
1437169695Skan     empty tree.  Just leave the cache alone in such cases, rather
1438169695Skan     than risk dying by division-by-zero.  */
1439169695Skan  if (! (s.obj_count > 0 && s.live_obj_count > 0 && s.total_weight > 0.0))
1440169695Skan    return;
1441169695Skan
1442169695Skan  /* Guess a good value for the shift parameter by finding an address bit that is a
1443169695Skan     good discriminant of lively objects.  */
1444169695Skan  max_value = 0.0;
1445169695Skan  for (i=0; i<sizeof (uintptr_t)*8; i++)
1446169695Skan    {
1447169695Skan      float value = (float) s.weighted_address_bits[i][0] * (float) s.weighted_address_bits[i][1];
1448169695Skan      if (max_value < value) max_value = value;
1449169695Skan    }
1450169695Skan  for (i=0; i<sizeof (uintptr_t)*8; i++)
1451169695Skan    {
1452169695Skan      float shoulder_factor = 0.7;  /* Include slightly less popular bits too.  */
1453169695Skan      float value = (float) s.weighted_address_bits[i][0] * (float) s.weighted_address_bits[i][1];
1454169695Skan      if (value >= max_value * shoulder_factor)
1455169695Skan        break;
1456169695Skan    }
1457169695Skan  if (smoothed_new_shift < 0) smoothed_new_shift = __mf_lc_shift;
1458169695Skan  /* Converge toward this slowly to reduce flapping. */
1459169695Skan  smoothed_new_shift = 0.9*smoothed_new_shift + 0.1*i;
1460169695Skan  new_shift = (unsigned) (smoothed_new_shift + 0.5);
1461169695Skan  assert (new_shift < sizeof (uintptr_t)*8);
1462169695Skan
1463169695Skan  /* Count number of used buckets.  */
1464169695Skan  cache_utilization = 0.0;
1465169695Skan  for (i = 0; i < (1 + __mf_lc_mask); i++)
1466169695Skan    if (__mf_lookup_cache[i].low != 0 || __mf_lookup_cache[i].high != 0)
1467169695Skan      cache_utilization += 1.0;
1468169695Skan  cache_utilization /= (1 + __mf_lc_mask);
1469169695Skan
1470169695Skan  new_mask |= 0xffff; /* XXX: force a large cache.  */
1471169695Skan  new_mask &= (LOOKUP_CACHE_SIZE_MAX - 1);
1472169695Skan
1473169695Skan  VERBOSE_TRACE ("adapt cache obj=%u/%u sizes=%lu/%.0f/%.0f => "
1474169695Skan                 "util=%u%% m=%p s=%u\n",
1475169695Skan                 s.obj_count, s.live_obj_count, s.total_size, s.total_weight, s.weighted_size,
1476169695Skan                 (unsigned)(cache_utilization*100.0), (void *) new_mask, new_shift);
1477169695Skan
1478169695Skan  /* Reinitialize the cache if its parameters have changed.  */
1479169695Skan  if (new_mask != __mf_lc_mask ||
1480169695Skan      new_shift != __mf_lc_shift)
1481169695Skan    {
1482169695Skan      __mf_lc_mask = new_mask;
1483169695Skan      __mf_lc_shift = new_shift;
1484169695Skan      /* XXX: race */
1485169695Skan      memset (__mf_lookup_cache, 0, sizeof(__mf_lookup_cache));
1486169695Skan      /* void slot 0 */
1487169695Skan      __mf_lookup_cache[0].low = MAXPTR;
1488169695Skan    }
1489169695Skan}
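
/* Sketch of how the parameters chosen above are consumed (hedged: the exact
   cache-index expression lives with the __mf_check fast path, not here): the
   lookup cache is indexed by roughly

       ((uintptr_t) ptr >> __mf_lc_shift) & __mf_lc_mask

   With new_shift == 3 and new_mask == 0xffff, pointers 0x1000 and 0x1007
   share one slot while 0x1008 lands in the next, so the chosen shift sets
   the effective granularity of a cache entry.  */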
1490169695Skan
1491169695Skan
1492169695Skan
1493169695Skan/* __mf_find_object[s] */
1494169695Skan
1495169695Skan/* Find live objects overlapping [low,high].  Return up to
1496169695Skan   max_objs of their pointers in objs[].  Return the total count of
1497169695Skan   overlaps (which may exceed max_objs).  */
1498169695Skan
1499169695Skanunsigned
1500169695Skan__mf_find_objects2 (uintptr_t ptr_low, uintptr_t ptr_high,
1501169695Skan                    __mf_object_t **objs, unsigned max_objs, int type)
1502169695Skan{
1503169695Skan  unsigned count = 0;
1504169695Skan  mfsplay_tree t = __mf_object_tree (type);
1505169695Skan  mfsplay_tree_key k = (mfsplay_tree_key) ptr_low;
1506169695Skan  int direction;
1507169695Skan
1508169695Skan  mfsplay_tree_node n = mfsplay_tree_lookup (t, k);
1509169695Skan  /* An exact match for base address implies a hit.  */
1510169695Skan  if (n != NULL)
1511169695Skan    {
1512169695Skan      if (count < max_objs)
1513169695Skan        objs[count] = (__mf_object_t *) n->value;
1514169695Skan      count ++;
1515169695Skan    }
1516169695Skan
1517169695Skan  /* Iterate forward (successors) then backward (predecessors) from this key value to find all overlapping objects.  */
1518169695Skan  for (direction = 0; direction < 2; direction ++)
1519169695Skan    {
1520169695Skan      /* Reset search origin.  */
1521169695Skan      k = (mfsplay_tree_key) ptr_low;
1522169695Skan
1523169695Skan      while (1)
1524169695Skan        {
1525169695Skan          __mf_object_t *obj;
1526169695Skan
1527169695Skan          n = (direction == 0 ? mfsplay_tree_successor (t, k) : mfsplay_tree_predecessor (t, k));
1528169695Skan          if (n == NULL) break;
1529169695Skan          obj = (__mf_object_t *) n->value;
1530169695Skan
1531169695Skan          if (! (obj->low <= ptr_high && obj->high >= ptr_low)) /* No overlap? */
1532169695Skan            break;
1533169695Skan
1534169695Skan          if (count < max_objs)
1535169695Skan            objs[count] = (__mf_object_t *) n->value;
1536169695Skan          count ++;
1537169695Skan
1538169695Skan          k = (mfsplay_tree_key) obj->low;
1539169695Skan        }
1540169695Skan    }
1541169695Skan
1542169695Skan  return count;
1543169695Skan}
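
/* Worked example: suppose objects [0x1000,0x10ff] and [0x1100,0x11ff] are
   registered and the query is ptr_low == 0x10f0, ptr_high == 0x110f.  No
   node is keyed exactly 0x10f0, so the exact-match probe misses; the
   successor walk finds the node keyed 0x1100 (it overlaps) and stops at the
   following node, and the predecessor walk finds the node keyed 0x1000
   (its high of 0x10ff still reaches 0x10f0).  The returned count is 2 even
   if max_objs is smaller, which is how callers detect truncation.  */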
1544169695Skan
1545169695Skan
1546169695Skanunsigned
1547169695Skan__mf_find_objects (uintptr_t ptr_low, uintptr_t ptr_high,
1548169695Skan                   __mf_object_t **objs, unsigned max_objs)
1549169695Skan{
1550169695Skan  int type;
1551169695Skan  unsigned count = 0;
1552169695Skan
1553169695Skan  /* Search each splay tree for overlaps.  */
1554169695Skan  for (type = __MF_TYPE_NOACCESS; type <= __MF_TYPE_GUESS; type++)
1555169695Skan    {
1556169695Skan      unsigned c = __mf_find_objects2 (ptr_low, ptr_high, objs, max_objs, type);
1557169695Skan      if (c > max_objs)
1558169695Skan        {
1559169695Skan          max_objs = 0;
1560169695Skan          objs = NULL;
1561169695Skan        }
1562169695Skan      else /* NB: C may equal 0 */
1563169695Skan        {
1564169695Skan          max_objs -= c;
1565169695Skan          objs += c;
1566169695Skan        }
1567169695Skan      count += c;
1568169695Skan    }
1569169695Skan
1570169695Skan  return count;
1571169695Skan}
1572169695Skan
1573169695Skan
1574169695Skan
1575169695Skan/* __mf_link_object */
1576169695Skan
1577169695Skanstatic void
1578169695Skan__mf_link_object (__mf_object_t *node)
1579169695Skan{
1580169695Skan  mfsplay_tree t = __mf_object_tree (node->type);
1581169695Skan  mfsplay_tree_insert (t, (mfsplay_tree_key) node->low, (mfsplay_tree_value) node);
1582169695Skan}
1583169695Skan
1584169695Skan/* __mf_unlink_object */
1585169695Skan
1586169695Skanstatic void
1587169695Skan__mf_unlink_object (__mf_object_t *node)
1588169695Skan{
1589169695Skan  mfsplay_tree t = __mf_object_tree (node->type);
1590169695Skan  mfsplay_tree_remove (t, (mfsplay_tree_key) node->low);
1591169695Skan}
1592169695Skan
1593169695Skan/* __mf_find_dead_objects */
1594169695Skan
1595169695Skan/* Find dead objects overlapping [low,high].  Return up to
1596169695Skan   max_objs of their pointers in objs[].  Return the total count of
1597169695Skan   overlaps (which may exceed max_objs).  */
1598169695Skan
1599169695Skanstatic unsigned
1600169695Skan__mf_find_dead_objects (uintptr_t low, uintptr_t high,
1601169695Skan                        __mf_object_t **objs, unsigned max_objs)
1602169695Skan{
1603169695Skan  if (__mf_opts.persistent_count > 0)
1604169695Skan    {
1605169695Skan      unsigned count = 0;
1606169695Skan      unsigned recollection = 0;
1607169695Skan      unsigned row = 0;
1608169695Skan
1609169695Skan      assert (low <= high);
1610169695Skan      assert (max_objs == 0 || objs != NULL);
1611169695Skan
1612169695Skan      /* Widen the search from the most recent plots in each row, looking
1613169695Skan         backward in time.  */
1614169695Skan      recollection = 0;
1615169695Skan      while (recollection < __mf_opts.persistent_count)
1616169695Skan        {
1617169695Skan          count = 0;
1618169695Skan
1619169695Skan          for (row = 0; row <= __MF_TYPE_MAX_CEM; row ++)
1620169695Skan            {
1621169695Skan              unsigned plot;
1622169695Skan              unsigned i;
1623169695Skan
1624169695Skan              plot = __mf_object_dead_head [row];
1625169695Skan              for (i = 0; i <= recollection; i ++)
1626169695Skan                {
1627169695Skan                  __mf_object_t *obj;
1628169695Skan
1629169695Skan                  /* Look backward through row: it's a circular buffer.  */
1630169695Skan                  if (plot > 0) plot --;
1631169695Skan                  else plot = __mf_opts.persistent_count - 1;
1632169695Skan
1633169695Skan                  obj = __mf_object_cemetary [row][plot];
1634169695Skan                  if (obj && obj->low <= high && obj->high >= low)
1635169695Skan                    {
1636169695Skan                      /* Found an overlapping dead object!  */
1637169695Skan                      if (count < max_objs)
1638169695Skan                        objs [count] = obj;
1639169695Skan                      count ++;
1640169695Skan                    }
1641169695Skan                }
1642169695Skan            }
1643169695Skan
1644169695Skan          if (count)
1645169695Skan            break;
1646169695Skan
1647169695Skan          /* Look farther back in time.  */
1648169695Skan          recollection = (recollection * 2) + 1;
1649169695Skan        }
1650169695Skan
1651169695Skan      return count;
1652169695Skan    }
1653169695Skan  else
1654169695Skan    return 0;
1655169695Skan}
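
/* Note on the widening schedule above: recollection grows as 0, 1, 3, 7,
   15, ... (recollection * 2 + 1), so each pass scans roughly twice as many
   of the most recently buried plots per row, until an overlapping zombie
   is found or the whole cemetery has been examined.  */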
1656169695Skan
1657169695Skan/* __mf_describe_object */
1658169695Skan
1659169695Skanstatic void
1660169695Skan__mf_describe_object (__mf_object_t *obj)
1661169695Skan{
1662169695Skan  static unsigned epoch = 0;
1663169695Skan  if (obj == NULL)
1664169695Skan    {
1665169695Skan      epoch ++;
1666169695Skan      return;
1667169695Skan    }
1668169695Skan
1669169695Skan  if (__mf_opts.abbreviate && obj->description_epoch == epoch)
1670169695Skan    {
1671169695Skan      fprintf (stderr,
1672169695Skan               "mudflap %sobject %p: name=`%s'\n",
1673169695Skan               (obj->deallocated_p ? "dead " : ""),
1674169695Skan               (void *) obj, (obj->name ? obj->name : ""));
1675169695Skan      return;
1676169695Skan    }
1677169695Skan  else
1678169695Skan    obj->description_epoch = epoch;
1679169695Skan
1680169695Skan  fprintf (stderr,
1681169695Skan           "mudflap %sobject %p: name=`%s'\n"
1682169695Skan           "bounds=[%p,%p] size=%lu area=%s check=%ur/%uw liveness=%u%s\n"
1683169695Skan           "alloc time=%lu.%06lu pc=%p"
1684169695Skan#ifdef LIBMUDFLAPTH
1685169695Skan           " thread=%u"
1686169695Skan#endif
1687169695Skan           "\n",
1688169695Skan           (obj->deallocated_p ? "dead " : ""),
1689169695Skan           (void *) obj, (obj->name ? obj->name : ""),
1690169695Skan           (void *) obj->low, (void *) obj->high,
1691169695Skan           (unsigned long) (obj->high - obj->low + 1),
1692169695Skan           (obj->type == __MF_TYPE_NOACCESS ? "no-access" :
1693169695Skan            obj->type == __MF_TYPE_HEAP ? "heap" :
1694169695Skan            obj->type == __MF_TYPE_HEAP_I ? "heap-init" :
1695169695Skan            obj->type == __MF_TYPE_STACK ? "stack" :
1696169695Skan            obj->type == __MF_TYPE_STATIC ? "static" :
1697169695Skan            obj->type == __MF_TYPE_GUESS ? "guess" :
1698169695Skan            "unknown"),
1699169695Skan           obj->read_count, obj->write_count, obj->liveness,
1700169695Skan           obj->watching_p ? " watching" : "",
1701169695Skan           obj->alloc_time.tv_sec, obj->alloc_time.tv_usec,
1702169695Skan           (void *) obj->alloc_pc
1703169695Skan#ifdef LIBMUDFLAPTH
1704169695Skan           , (unsigned) obj->alloc_thread
1705169695Skan#endif
1706169695Skan           );
1707169695Skan
1708169695Skan  if (__mf_opts.backtrace > 0)
1709169695Skan  {
1710169695Skan    unsigned i;
1711169695Skan    for (i=0; i<obj->alloc_backtrace_size; i++)
1712169695Skan      fprintf (stderr, "      %s\n", obj->alloc_backtrace[i]);
1713169695Skan  }
1714169695Skan
1715169695Skan  if (__mf_opts.persistent_count > 0)
1716169695Skan    {
1717169695Skan      if (obj->deallocated_p)
1718169695Skan        {
1719169695Skan          fprintf (stderr, "dealloc time=%lu.%06lu pc=%p"
1720169695Skan#ifdef LIBMUDFLAPTH
1721169695Skan                   " thread=%u"
1722169695Skan#endif
1723169695Skan                   "\n",
1724169695Skan                   obj->dealloc_time.tv_sec, obj->dealloc_time.tv_usec,
1725169695Skan                   (void *) obj->dealloc_pc
1726169695Skan#ifdef LIBMUDFLAPTH
1727169695Skan                   , (unsigned) obj->dealloc_thread
1728169695Skan#endif
1729169695Skan                   );
1730169695Skan
1731169695Skan
1732169695Skan          if (__mf_opts.backtrace > 0)
1733169695Skan          {
1734169695Skan            unsigned i;
1735169695Skan            for (i=0; i<obj->dealloc_backtrace_size; i++)
1736169695Skan              fprintf (stderr, "      %s\n", obj->dealloc_backtrace[i]);
1737169695Skan          }
1738169695Skan        }
1739169695Skan    }
1740169695Skan}
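
/* Note on the epoch machinery above: __mf_describe_object (NULL) merely
   bumps the static epoch counter, so a caller such as __mfu_report below
   uses it to force full re-description of every object even when
   __mf_opts.abbreviate is set; otherwise an already-described object
   prints only its one-line header.  */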
1741169695Skan
1742169695Skan
1743169695Skanstatic int
1744169695Skan__mf_report_leaks_fn (mfsplay_tree_node n, void *param)
1745169695Skan{
1746169695Skan  __mf_object_t *node = (__mf_object_t *) n->value;
1747169695Skan  unsigned *count = (unsigned *) param;
1748169695Skan
1749169695Skan  if (count != NULL)
1750169695Skan    (*count) ++;
1751169695Skan
1752169695Skan  fprintf (stderr, "Leaked object %u:\n", (count == NULL ? 0 : *count));
1753169695Skan  __mf_describe_object (node);
1754169695Skan
1755169695Skan  return 0;
1756169695Skan}
1757169695Skan
1758169695Skan
1759169695Skanstatic unsigned
1760169695Skan__mf_report_leaks ()
1761169695Skan{
1762169695Skan  unsigned count = 0;
1763169695Skan
1764169695Skan  (void) mfsplay_tree_foreach (__mf_object_tree (__MF_TYPE_HEAP),
1765169695Skan                             __mf_report_leaks_fn, & count);
1766169695Skan  (void) mfsplay_tree_foreach (__mf_object_tree (__MF_TYPE_HEAP_I),
1767169695Skan                             __mf_report_leaks_fn, & count);
1768169695Skan
1769169695Skan  return count;
1770169695Skan}
1771169695Skan
1772169695Skan/* ------------------------------------------------------------------------ */
1773169695Skan/* __mf_report */
1774169695Skan
1775169695Skanvoid
1776169695Skan__mf_report ()
1777169695Skan{
1778169695Skan  LOCKTH ();
1779169695Skan  BEGIN_RECURSION_PROTECT ();
1780169695Skan  __mfu_report ();
1781169695Skan  END_RECURSION_PROTECT ();
1782169695Skan  UNLOCKTH ();
1783169695Skan}
1784169695Skan
1785169695Skanvoid
1786169695Skan__mfu_report ()
1787169695Skan{
1788169695Skan  if (__mf_opts.collect_stats)
1789169695Skan    {
1790169695Skan      fprintf (stderr,
1791169695Skan               "*******\n"
1792169695Skan               "mudflap stats:\n"
1793169695Skan               "calls to __mf_check: %lu\n"
1794169695Skan               "         __mf_register: %lu [%luB, %luB, %luB, %luB, %luB]\n"
1795169695Skan               "         __mf_unregister: %lu [%luB]\n"
1796169695Skan               "         __mf_violation: [%lu, %lu, %lu, %lu, %lu]\n",
1797169695Skan               __mf_count_check,
1798169695Skan               __mf_count_register,
1799169695Skan               __mf_total_register_size[0], __mf_total_register_size[1],
1800169695Skan               __mf_total_register_size[2], __mf_total_register_size[3],
1801169695Skan               __mf_total_register_size[4], /* XXX */
1802169695Skan               __mf_count_unregister, __mf_total_unregister_size,
1803169695Skan               __mf_count_violation[0], __mf_count_violation[1],
1804169695Skan               __mf_count_violation[2], __mf_count_violation[3],
1805169695Skan               __mf_count_violation[4]);
1806169695Skan
1807169695Skan      fprintf (stderr,
1808169695Skan               "calls with reentrancy: %lu\n", __mf_reentrancy);
1809169695Skan#ifdef LIBMUDFLAPTH
1810169695Skan      fprintf (stderr,
1811169695Skan               "           lock contention: %lu\n", __mf_lock_contention);
1812169695Skan#endif
1813169695Skan
1814169695Skan      /* Lookup cache stats.  */
1815169695Skan      {
1816169695Skan        unsigned i;
1817169695Skan        unsigned max_reuse = 0;
1818169695Skan        unsigned num_used = 0;
1819169695Skan        unsigned num_unused = 0;
1820169695Skan
1821169695Skan        for (i = 0; i < LOOKUP_CACHE_SIZE; i++)
1822169695Skan          {
1823169695Skan            if (__mf_lookup_cache_reusecount[i])
1824169695Skan              num_used ++;
1825169695Skan            else
1826169695Skan              num_unused ++;
1827169695Skan            if (max_reuse < __mf_lookup_cache_reusecount[i])
1828169695Skan              max_reuse = __mf_lookup_cache_reusecount[i];
1829169695Skan          }
1830169695Skan        fprintf (stderr, "lookup cache slots used: %u  unused: %u  peak-reuse: %u\n",
1831169695Skan                 num_used, num_unused, max_reuse);
1832169695Skan      }
1833169695Skan
1834169695Skan      {
1835169695Skan        unsigned live_count;
1836169695Skan        live_count = __mf_find_objects (MINPTR, MAXPTR, NULL, 0);
1837169695Skan        fprintf (stderr, "number of live objects: %u\n", live_count);
1838169695Skan      }
1839169695Skan
1840169695Skan      if (__mf_opts.persistent_count > 0)
1841169695Skan        {
1842169695Skan          unsigned dead_count = 0;
1843169695Skan          unsigned row, plot;
1844169695Skan          for (row = 0; row <= __MF_TYPE_MAX_CEM; row ++)
1845169695Skan            for (plot = 0 ; plot < __mf_opts.persistent_count; plot ++)
1846169695Skan              if (__mf_object_cemetary [row][plot] != 0)
1847169695Skan                dead_count ++;
1848169695Skan          fprintf (stderr, "          zombie objects: %u\n", dead_count);
1849169695Skan        }
1850169695Skan    }
1851169695Skan  if (__mf_opts.print_leaks && (__mf_opts.mudflap_mode == mode_check))
1852169695Skan    {
1853169695Skan      unsigned l;
1854169695Skan      extern void * __mf_wrap_alloca_indirect (size_t c);
1855169695Skan
1856169695Skan      /* Free up any remaining alloca()'d blocks.  */
1857169695Skan      __mf_wrap_alloca_indirect (0);
1858169695Skan      __mf_describe_object (NULL); /* Reset description epoch.  */
1859169695Skan      l = __mf_report_leaks ();
1860169695Skan      fprintf (stderr, "number of leaked objects: %u\n", l);
1861169695Skan    }
1862169695Skan}
1863169695Skan
1864169695Skan/* __mf_backtrace */
1865169695Skan
1866169695Skansize_t
1867169695Skan__mf_backtrace (char ***symbols, void *guess_pc, unsigned guess_omit_levels)
1868169695Skan{
1869169695Skan  void ** pc_array;
1870169695Skan  unsigned pc_array_size = __mf_opts.backtrace + guess_omit_levels;
1871169695Skan  unsigned remaining_size;
1872169695Skan  unsigned omitted_size = 0;
1873169695Skan  unsigned i;
1874169695Skan  DECLARE (void, free, void *ptr);
1875169695Skan  DECLARE (void *, calloc, size_t c, size_t n);
1876169695Skan  DECLARE (void *, malloc, size_t n);
1877169695Skan
1878169695Skan  pc_array = CALL_REAL (calloc, pc_array_size, sizeof (void *) );
1879169695Skan#ifdef HAVE_BACKTRACE
1880169695Skan  pc_array_size = backtrace (pc_array, pc_array_size);
1881169695Skan#else
1882169695Skan#define FETCH(n) do { if (pc_array_size >= n) { \
1883169695Skan                 pc_array[n] = __builtin_return_address(n); \
1884169695Skan                 if (pc_array[n] == 0) pc_array_size = n; } } while (0)
1885169695Skan
1886169695Skan  /* Unroll some calls to __builtin_return_address because that builtin
1887169695Skan     only accepts a literal integer argument.  */
1888169695Skan  FETCH (0);
1889169695Skan#if 0
1890169695Skan  /* XXX: __builtin_return_address sometimes crashes (!) on >0 arguments,
1891169695Skan     rather than simply returning 0.  :-(  */
1892169695Skan  FETCH (1);
1893169695Skan  FETCH (2);
1894169695Skan  FETCH (3);
1895169695Skan  FETCH (4);
1896169695Skan  FETCH (5);
1897169695Skan  FETCH (6);
1898169695Skan  FETCH (7);
1899169695Skan  FETCH (8);
1900169695Skan  if (pc_array_size > 8) pc_array_size = 9;
1901169695Skan#else
1902169695Skan  if (pc_array_size > 0) pc_array_size = 1;
1903169695Skan#endif
1904169695Skan
1905169695Skan#undef FETCH
1906169695Skan#endif
1907169695Skan
1908169695Skan  /* We want to trim the first few levels of the stack traceback,
1909169695Skan     since they contain libmudflap wrappers and junk.  If pc_array[]
1910169695Skan     ends up containing a non-NULL guess_pc, then trim everything
1911169695Skan     before that.  Otherwise, omit the first guess_omit_levels
1912169695Skan     entries. */
1913169695Skan
1914169695Skan  if (guess_pc != NULL)
1915169695Skan    for (i=0; i<pc_array_size; i++)
1916169695Skan      if (pc_array [i] == guess_pc)
1917169695Skan        omitted_size = i;
1918169695Skan
1919169695Skan  if (omitted_size == 0) /* No match? */
1920169695Skan    if (pc_array_size > guess_omit_levels)
1921169695Skan      omitted_size = guess_omit_levels;
1922169695Skan
1923169695Skan  remaining_size = pc_array_size - omitted_size;
1924169695Skan
1925169695Skan#ifdef HAVE_BACKTRACE_SYMBOLS
1926169695Skan  *symbols = backtrace_symbols (pc_array + omitted_size, remaining_size);
1927169695Skan#else
1928169695Skan  {
1929169695Skan    /* Let's construct a buffer by hand.  It will have <remaining_size>
1930169695Skan       char*'s at the front, pointing at individual strings immediately
1931169695Skan       afterwards.  */
1932169695Skan    void *buffer;
1933169695Skan    char *chars;
1934169695Skan    char **pointers;
1935169695Skan    enum { perline = 30 };
1936169695Skan    buffer = CALL_REAL (malloc, remaining_size * (perline + sizeof(char *)));
1937169695Skan    pointers = (char **) buffer;
1938169695Skan    chars = (char *)buffer + (remaining_size * sizeof (char *));
1939169695Skan    for (i = 0; i < remaining_size; i++)
1940169695Skan      {
1941169695Skan        pointers[i] = chars;
1942169695Skan        sprintf (chars, "[0x%p]", pc_array [omitted_size + i]);
1943169695Skan        chars = chars + perline;
1944169695Skan      }
1945169695Skan    *symbols = pointers;
1946169695Skan  }
1947169695Skan#endif
1948169695Skan  CALL_REAL (free, pc_array);
1949169695Skan
1950169695Skan  return remaining_size;
1951169695Skan}
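
/* Sketch of the fallback buffer built above when backtrace_symbols() is
   unavailable: one malloc'd block laid out as

       [ char *pointers[remaining_size] ][ 30-byte string ][ 30-byte string ] ...

   where pointers[i] aims at its own fixed-width "[0x...]" string.  The
   caller releases everything with a single free(), exactly as it would
   release the array returned by backtrace_symbols().  */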
1952169695Skan
1953169695Skan/* ------------------------------------------------------------------------ */
1954169695Skan/* __mf_violation */
1955169695Skan
1956169695Skanvoid
1957169695Skan__mf_violation (void *ptr, size_t sz, uintptr_t pc,
1958169695Skan                const char *location, int type)
1959169695Skan{
1960169695Skan  char buf [128];
1961169695Skan  static unsigned violation_number;
1962169695Skan  DECLARE(void, free, void *ptr);
1963169695Skan
1964169695Skan  TRACE ("violation pc=%p location=%s type=%d ptr=%p size=%lu\n",
1965169695Skan         (void *) pc,
1966169695Skan         (location != NULL ? location : ""), type, ptr, (unsigned long) sz);
1967169695Skan
1968169695Skan  if (__mf_opts.collect_stats)
1969169695Skan    __mf_count_violation [(type < 0) ? 0 :
1970169695Skan                          (type > __MF_VIOL_WATCH) ? 0 :
1971169695Skan                          type] ++;
1972169695Skan
1973169695Skan  /* Print out a basic warning message.  */
1974169695Skan  if (__mf_opts.verbose_violations)
1975169695Skan  {
1976169695Skan    unsigned dead_p;
1977169695Skan    unsigned num_helpful = 0;
1978169695Skan    struct timeval now = { 0, 0 };
1979169695Skan#if HAVE_GETTIMEOFDAY
1980169695Skan    gettimeofday (& now, NULL);
1981169695Skan#endif
1982169695Skan
1983169695Skan    violation_number ++;
1984169695Skan    fprintf (stderr,
1985169695Skan             "*******\n"
1986169695Skan             "mudflap violation %u (%s): time=%lu.%06lu "
1987169695Skan             "ptr=%p size=%lu\npc=%p%s%s%s\n",
1988169695Skan             violation_number,
1989169695Skan             ((type == __MF_VIOL_READ) ? "check/read" :
1990169695Skan              (type == __MF_VIOL_WRITE) ? "check/write" :
1991169695Skan              (type == __MF_VIOL_REGISTER) ? "register" :
1992169695Skan              (type == __MF_VIOL_UNREGISTER) ? "unregister" :
1993169695Skan              (type == __MF_VIOL_WATCH) ? "watch" : "unknown"),
1994169695Skan             now.tv_sec, now.tv_usec,
1995169695Skan             (void *) ptr, (unsigned long)sz, (void *) pc,
1996169695Skan             (location != NULL ? " location=`" : ""),
1997169695Skan             (location != NULL ? location : ""),
1998169695Skan             (location != NULL ? "'" : ""));
1999169695Skan
2000169695Skan    if (__mf_opts.backtrace > 0)
2001169695Skan      {
2002169695Skan        char ** symbols;
2003169695Skan        unsigned i, num;
2004169695Skan
2005169695Skan        num = __mf_backtrace (& symbols, (void *) pc, 2);
2006169695Skan        /* Note: backtrace_symbols calls malloc().  But since we're in
2007169695Skan           __mf_violation and presumably __mf_check, it'll detect
2008169695Skan           recursion, and not put the new string into the database.  */
2009169695Skan
2010169695Skan        for (i=0; i<num; i++)
2011169695Skan          fprintf (stderr, "      %s\n", symbols[i]);
2012169695Skan
2013169695Skan        /* Calling free() here would trigger a violation.  */
2014169695Skan        CALL_REAL(free, symbols);
2015169695Skan      }
2016169695Skan
2017169695Skan
2018169695Skan    /* Look for nearby objects.  For this, we start with s_low/s_high
2019169695Skan       pointing to the given area, looking for overlapping objects.
2020169695Skan       If none show up, widen the search area and keep looking. */
2021169695Skan
2022169695Skan    if (sz == 0) sz = 1;
2023169695Skan
2024169695Skan    for (dead_p = 0; dead_p <= 1; dead_p ++) /* for dead_p in 0 1 */
2025169695Skan      {
2026169695Skan        enum {max_objs = 3}; /* magic */
2027169695Skan        __mf_object_t *objs[max_objs];
2028169695Skan        unsigned num_objs = 0;
2029169695Skan        uintptr_t s_low, s_high;
2030169695Skan        unsigned tries = 0;
2031169695Skan        unsigned i;
2032169695Skan
2033169695Skan        s_low = (uintptr_t) ptr;
2034169695Skan        s_high = CLAMPSZ (ptr, sz);
2035169695Skan
2036169695Skan        while (tries < 16) /* magic */
2037169695Skan          {
2038169695Skan            if (dead_p)
2039169695Skan              num_objs = __mf_find_dead_objects (s_low, s_high, objs, max_objs);
2040169695Skan            else
2041169695Skan              num_objs = __mf_find_objects (s_low, s_high, objs, max_objs);
2042169695Skan
2043169695Skan            if (num_objs) /* good enough */
2044169695Skan              break;
2045169695Skan
2046169695Skan            tries ++;
2047169695Skan
2048169695Skan            /* XXX: tune this search strategy.  It's too dependent on
2049169695Skan             sz, which can vary from 1 to very large numbers (e.g. when
2050169695Skan             checking array indexing).  */
2051169695Skan            s_low = CLAMPSUB (s_low, (sz * tries * tries));
2052169695Skan            s_high = CLAMPADD (s_high, (sz * tries * tries));
2053169695Skan          }
2054169695Skan
2055169695Skan        for (i = 0; i < min (num_objs, max_objs); i++)
2056169695Skan          {
2057169695Skan            __mf_object_t *obj = objs[i];
2058169695Skan            uintptr_t low = (uintptr_t) ptr;
2059169695Skan            uintptr_t high = CLAMPSZ (ptr, sz);
2060169695Skan            unsigned before1 = (low < obj->low) ? obj->low - low : 0;
2061169695Skan            unsigned after1 = (low > obj->high) ? low - obj->high : 0;
2062169695Skan            unsigned into1 = (high >= obj->low && low <= obj->high) ? low - obj->low : 0;
2063169695Skan            unsigned before2 = (high < obj->low) ? obj->low - high : 0;
2064169695Skan            unsigned after2 = (high > obj->high) ? high - obj->high : 0;
2065169695Skan            unsigned into2 = (high >= obj->low && low <= obj->high) ? high - obj->low : 0;
2066169695Skan
2067169695Skan            fprintf (stderr, "Nearby object %u: checked region begins %uB %s and ends %uB %s\n",
2068169695Skan                     num_helpful + i + 1,
2069169695Skan                     (before1 ? before1 : after1 ? after1 : into1),
2070169695Skan                     (before1 ? "before" : after1 ? "after" : "into"),
2071169695Skan                     (before2 ? before2 : after2 ? after2 : into2),
2072169695Skan                     (before2 ? "before" : after2 ? "after" : "into"));
2073169695Skan            __mf_describe_object (obj);
2074169695Skan          }
2075169695Skan        num_helpful += num_objs;
2076169695Skan      }
2077169695Skan
2078169695Skan    fprintf (stderr, "number of nearby objects: %u\n", num_helpful);
2079169695Skan  }
2080169695Skan
2081169695Skan  /* How to finally handle this violation?  */
2082169695Skan  switch (__mf_opts.violation_mode)
2083169695Skan    {
2084169695Skan    case viol_nop:
2085169695Skan      break;
2086169695Skan    case viol_segv:
2087169695Skan      kill (getpid(), SIGSEGV);
2088169695Skan      break;
2089169695Skan    case viol_abort:
2090169695Skan      abort ();
2091169695Skan      break;
2092169695Skan    case viol_gdb:
2093169695Skan
2094169695Skan      snprintf (buf, 128, "gdb --pid=%u", (unsigned) getpid ());
2095169695Skan      system (buf);
2096169695Skan      /* XXX: should probably fork() && sleep(GDB_WAIT_PARAMETER)
2097169695Skan      instead, and let the forked child execlp() gdb.  That way, this
2098169695Skan      subject process can be resumed under the supervision of gdb.
2099169695Skan      This can't happen now, since system() only returns when gdb
2100169695Skan      dies.  In that case, we need to beware of starting a second
2101169695Skan      concurrent gdb child upon the next violation.  (But if the first
2102169695Skan      gdb dies, then starting a new one is appropriate.)  */
2103169695Skan      break;
2104169695Skan    }
2105169695Skan}
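
/* Usage note (option spellings taken from the mudflap documentation, not
   from this file): the branch of the switch above is selected at startup
   from the MUDFLAP_OPTIONS environment string, e.g. -viol-nop, -viol-segv,
   -viol-abort or -viol-gdb; MUDFLAP_OPTIONS="-viol-gdb" therefore makes a
   violation spawn an attached gdb via the system() call above.  */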
2106169695Skan
2107169695Skan/* ------------------------------------------------------------------------ */
2108169695Skan
2109169695Skan
2110169695Skanunsigned __mf_watch (void *ptr, size_t sz)
2111169695Skan{
2112169695Skan  unsigned rc;
2113169695Skan  LOCKTH ();
2114169695Skan  BEGIN_RECURSION_PROTECT ();
2115169695Skan  rc = __mf_watch_or_not (ptr, sz, 1);
2116169695Skan  END_RECURSION_PROTECT ();
2117169695Skan  UNLOCKTH ();
2118169695Skan  return rc;
2119169695Skan}
2120169695Skan
2121169695Skanunsigned __mf_unwatch (void *ptr, size_t sz)
2122169695Skan{
2123169695Skan  unsigned rc;
2124169695Skan  LOCKTH ();
2125169695Skan  rc = __mf_watch_or_not (ptr, sz, 0);
2126169695Skan  UNLOCKTH ();
2127169695Skan  return rc;
2128169695Skan}
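
/* Illustrative use of the two entry points above (a sketch only, assuming
   mode_check):

     char buf[16];
     __mf_watch (buf, sizeof buf);
     ...                               -- a checked access to buf here
                                          raises a __MF_VIOL_WATCH report
     __mf_unwatch (buf, sizeof buf);

   The return value is the number of overlapping registered objects whose
   watching_p flag actually changed.  */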
2129169695Skan
2130169695Skan
2131169695Skanstatic unsigned
2132169695Skan__mf_watch_or_not (void *ptr, size_t sz, char flag)
2133169695Skan{
2134169695Skan  uintptr_t ptr_high = CLAMPSZ (ptr, sz);
2135169695Skan  uintptr_t ptr_low = (uintptr_t) ptr;
2136169695Skan  unsigned count = 0;
2137169695Skan
2138169695Skan  TRACE ("%s ptr=%p size=%lu\n",
2139169695Skan         (flag ? "watch" : "unwatch"), ptr, (unsigned long) sz);
2140169695Skan
2141169695Skan  switch (__mf_opts.mudflap_mode)
2142169695Skan    {
2143169695Skan    case mode_nop:
2144169695Skan    case mode_populate:
2145169695Skan    case mode_violate:
2146169695Skan      count = 0;
2147169695Skan      break;
2148169695Skan
2149169695Skan    case mode_check:
2150169695Skan      {
2151169695Skan        __mf_object_t **all_ovr_objs;
2152169695Skan        unsigned obj_count;
2153169695Skan        unsigned n;
2154169695Skan        DECLARE (void *, malloc, size_t c);
2155169695Skan        DECLARE (void, free, void *p);
2156169695Skan
2157169695Skan        obj_count = __mf_find_objects (ptr_low, ptr_high, NULL, 0);
2158169695Skan        VERBOSE_TRACE (" %u:", obj_count);
2159169695Skan
2160169695Skan        all_ovr_objs = CALL_REAL (malloc, (sizeof (__mf_object_t *) * obj_count));
2161169695Skan        if (all_ovr_objs == NULL) abort ();
2162169695Skan        n = __mf_find_objects (ptr_low, ptr_high, all_ovr_objs, obj_count);
2163169695Skan        assert (n == obj_count);
2164169695Skan
2165169695Skan        for (n = 0; n < obj_count; n ++)
2166169695Skan          {
2167169695Skan            __mf_object_t *obj = all_ovr_objs[n];
2168169695Skan
2169169695Skan            VERBOSE_TRACE (" [%p]", (void *) obj);
2170169695Skan            if (obj->watching_p != flag)
2171169695Skan              {
2172169695Skan                obj->watching_p = flag;
2173169695Skan                count ++;
2174169695Skan
2175169695Skan                /* Remove object from cache, to ensure next access
2176169695Skan                   goes through __mf_check().  */
2177169695Skan                if (flag)
2178169695Skan                  __mf_uncache_object (obj);
2179169695Skan              }
2180169695Skan          }
2181169695Skan        CALL_REAL (free, all_ovr_objs);
2182169695Skan      }
2183169695Skan      break;
2184169695Skan    }
2185169695Skan
2186169695Skan  return count;
2187169695Skan}
2188169695Skan
2189169695Skan
2190169695Skanvoid
2191169695Skan__mf_sigusr1_handler (int num)
2192169695Skan{
2193169695Skan  __mf_sigusr1_received ++;
2194169695Skan}
2195169695Skan
2196169695Skan/* Install or remove SIGUSR1 handler as necessary.
2197169695Skan   Also, respond to a received pending SIGUSR1.  */
2198169695Skanvoid
2199169695Skan__mf_sigusr1_respond ()
2200169695Skan{
2201169695Skan  static int handler_installed;
2202169695Skan
2203169695Skan#ifdef SIGUSR1
2204169695Skan  /* Manage handler */
2205169695Skan  if (__mf_opts.sigusr1_report && ! handler_installed)
2206169695Skan    {
2207169695Skan      signal (SIGUSR1, __mf_sigusr1_handler);
2208169695Skan      handler_installed = 1;
2209169695Skan    }
2210169695Skan  else if(! __mf_opts.sigusr1_report && handler_installed)
2211169695Skan    {
2212169695Skan      signal (SIGUSR1, SIG_DFL);
2213169695Skan      handler_installed = 0;
2214169695Skan    }
2215169695Skan#endif
2216169695Skan
2217169695Skan  /* Manage enqueued signals */
2218169695Skan  if (__mf_sigusr1_received > __mf_sigusr1_handled)
2219169695Skan    {
2220169695Skan      __mf_sigusr1_handled ++;
2221169695Skan      assert (__mf_get_state () == reentrant);
2222169695Skan      __mfu_report ();
2223169695Skan      handler_installed = 0; /* Force re-installation next time: a SysV-style signal() may have reset the handler on delivery.  */
2224169695Skan    }
2225169695Skan}
2226169695Skan
2227169695Skan
2228169695Skan/* XXX: provide an alternative __assert_fail function that cannot
2229169695Skan   fail due to libmudflap infinite recursion.  */
2230169695Skan#ifndef NDEBUG
2231169695Skan
2232169695Skanstatic void
2233169695Skanwrite_itoa (int fd, unsigned n)
2234169695Skan{
2235169695Skan  enum x { bufsize = sizeof(n)*4 };
2236169695Skan  char buf [bufsize];
2237169695Skan  unsigned i;
2238169695Skan
2239169695Skan  for (i=0; i<bufsize-1; i++)
2240169695Skan    {
2241169695Skan      unsigned digit = n % 10;
2242169695Skan      buf[bufsize-2-i] = digit + '0';
2243169695Skan      n /= 10;
2244169695Skan      if (n == 0)
2245169695Skan        {
2246169695Skan          char *m = & buf [bufsize-2-i];
2247169695Skan          buf[bufsize-1] = '\0';
2248169695Skan          write (fd, m, strlen(m));
2249169695Skan          break;
2250169695Skan        }
2251169695Skan    }
2252169695Skan}
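
/* Example: write_itoa (2, 1234) emits the four characters "1234" on file
   descriptor 2 using only write(), so it remains safe to call from the
   assertion path below even when malloc or stdio might recurse back into
   libmudflap.  */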
2253169695Skan
2254169695Skan
2255169695Skanvoid
2256169695Skan__assert_fail (const char *msg, const char *file, unsigned line, const char *func)
2257169695Skan{
2258169695Skan#define write2(string) write (2, (string), strlen ((string)));
2259169695Skan  write2("mf");
2260169695Skan#ifdef LIBMUDFLAPTH
2261169695Skan  write2("(");
2262169695Skan  write_itoa (2, (unsigned) pthread_self ());
2263169695Skan  write2(")");
2264169695Skan#endif
2265169695Skan  write2(": assertion failure: `");
2266169695Skan  write (2, msg, strlen (msg));
2267169695Skan  write2("' in ");
2268169695Skan  write (2, func, strlen (func));
2269169695Skan  write2(" at ");
2270169695Skan  write (2, file, strlen (file));
2271169695Skan  write2(":");
2272169695Skan  write_itoa (2, line);
2273169695Skan  write2("\n");
2274169695Skan#undef write2
2275169695Skan  abort ();
2276169695Skan}
2277169695Skan
2278169695Skan
2279169695Skan#endif
2280169695Skan
2281169695Skan
2282169695Skan
2283169695Skan/* Adapted splay tree code, originally from libiberty.  It has been
2284169695Skan   specialized for libmudflap as requested by RMS.  */
2285169695Skan
2286169695Skanstatic void
2287169695Skanmfsplay_tree_free (void *p)
2288169695Skan{
2289169695Skan  DECLARE (void, free, void *p);
2290169695Skan  CALL_REAL (free, p);
2291169695Skan}
2292169695Skan
2293169695Skanstatic void *
2294169695Skanmfsplay_tree_xmalloc (size_t s)
2295169695Skan{
2296169695Skan  DECLARE (void *, malloc, size_t s);
2297169695Skan  return CALL_REAL (malloc, s);
2298169695Skan}
2299169695Skan
2300169695Skan
2301169695Skanstatic void mfsplay_tree_splay (mfsplay_tree, mfsplay_tree_key);
2302169695Skanstatic mfsplay_tree_node mfsplay_tree_splay_helper (mfsplay_tree,
2303169695Skan                                                mfsplay_tree_key,
2304169695Skan                                                mfsplay_tree_node *,
2305169695Skan                                                mfsplay_tree_node *,
2306169695Skan                                                mfsplay_tree_node *);
2307169695Skan
2308169695Skan
2309169695Skan/* Help splay SP around KEY.  PARENT and GRANDPARENT are the parent
2310169695Skan   and grandparent, respectively, of NODE.  */
2311169695Skan
2312169695Skanstatic mfsplay_tree_node
2313169695Skanmfsplay_tree_splay_helper (mfsplay_tree sp,
2314169695Skan                         mfsplay_tree_key key,
2315169695Skan                         mfsplay_tree_node * node,
2316169695Skan                         mfsplay_tree_node * parent,
2317169695Skan                         mfsplay_tree_node * grandparent)
2318169695Skan{
2319169695Skan  mfsplay_tree_node *next;
2320169695Skan  mfsplay_tree_node n;
2321169695Skan  int comparison;
2322169695Skan
2323169695Skan  n = *node;
2324169695Skan
2325169695Skan  if (!n)
2326169695Skan    return *parent;
2327169695Skan
2328169695Skan  comparison = ((key > n->key) ? 1 : ((key < n->key) ? -1 : 0));
2329169695Skan
2330169695Skan  if (comparison == 0)
2331169695Skan    /* We've found the target.  */
2332169695Skan    next = 0;
2333169695Skan  else if (comparison < 0)
2334169695Skan    /* The target is to the left.  */
2335169695Skan    next = &n->left;
2336169695Skan  else
2337169695Skan    /* The target is to the right.  */
2338169695Skan    next = &n->right;
2339169695Skan
2340169695Skan  if (next)
2341169695Skan    {
2342169695Skan      /* Check whether our recursion depth is too high.  Abort this search,
2343169695Skan         and signal that a rebalance is required to continue.  */
2344169695Skan      if (sp->depth > sp->max_depth)
2345169695Skan        {
2346169695Skan          sp->rebalance_p = 1;
2347169695Skan          return n;
2348169695Skan         }
2349169695Skan
2350169695Skan      /* Continue down the tree.  */
2351169695Skan      sp->depth ++;
2352169695Skan      n = mfsplay_tree_splay_helper (sp, key, next, node, parent);
2353169695Skan      sp->depth --;
2354169695Skan
2355169695Skan      /* The recursive call will change the place to which NODE
2356169695Skan         points.  */
2357169695Skan      if (*node != n || sp->rebalance_p)
2358169695Skan        return n;
2359169695Skan    }
2360169695Skan
2361169695Skan  if (!parent)
2362169695Skan    /* NODE is the root.  We are done.  */
2363169695Skan    return n;
2364169695Skan
2365169695Skan  /* First, handle the case where there is no grandparent (i.e.,
2366169695Skan   *PARENT is the root of the tree.)  */
2367169695Skan  if (!grandparent)
2368169695Skan    {
2369169695Skan      if (n == (*parent)->left)
2370169695Skan        {
2371169695Skan          *node = n->right;
2372169695Skan          n->right = *parent;
2373169695Skan        }
2374169695Skan      else
2375169695Skan        {
2376169695Skan          *node = n->left;
2377169695Skan          n->left = *parent;
2378169695Skan        }
2379169695Skan      *parent = n;
2380169695Skan      return n;
2381169695Skan    }
2382169695Skan
2383169695Skan  /* Next handle the cases where both N and *PARENT are left children,
2384169695Skan     or where both are right children.  */
2385169695Skan  if (n == (*parent)->left && *parent == (*grandparent)->left)
2386169695Skan    {
2387169695Skan      mfsplay_tree_node p = *parent;
2388169695Skan
2389169695Skan      (*grandparent)->left = p->right;
2390169695Skan      p->right = *grandparent;
2391169695Skan      p->left = n->right;
2392169695Skan      n->right = p;
2393169695Skan      *grandparent = n;
2394169695Skan      return n;
2395169695Skan    }
2396169695Skan  else if (n == (*parent)->right && *parent == (*grandparent)->right)
2397169695Skan    {
2398169695Skan      mfsplay_tree_node p = *parent;
2399169695Skan
2400169695Skan      (*grandparent)->right = p->left;
2401169695Skan      p->left = *grandparent;
2402169695Skan      p->right = n->left;
2403169695Skan      n->left = p;
2404169695Skan      *grandparent = n;
2405169695Skan      return n;
2406169695Skan    }
2407169695Skan
2408169695Skan  /* Finally, deal with the case where N is a left child, but *PARENT
2409169695Skan     is a right child, or vice versa.  */
2410169695Skan  if (n == (*parent)->left)
2411169695Skan    {
2412169695Skan      (*parent)->left = n->right;
2413169695Skan      n->right = *parent;
2414169695Skan      (*grandparent)->right = n->left;
2415169695Skan      n->left = *grandparent;
2416169695Skan      *grandparent = n;
2417169695Skan      return n;
2418169695Skan    }
2419169695Skan  else
2420169695Skan    {
2421169695Skan      (*parent)->right = n->left;
2422169695Skan      n->left = *parent;
2423169695Skan      (*grandparent)->left = n->right;
2424169695Skan      n->right = *grandparent;
2425169695Skan      *grandparent = n;
2426169695Skan      return n;
2427169695Skan    }
2428169695Skan}
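
/* Shape of the rotations realized above, left-hand variants only (the
   right-hand variants mirror them).  Zig-zig, where both N and *PARENT are
   left children:

             g                     n
            / \                   / \
           p   D                 A   p
          / \         ==>           / \
         n   C                     B   g
        / \                           / \
       A   B                         C   D

   The zig case (no grandparent) and the zig-zag case (N and *PARENT are
   children on opposite sides) reattach the same subtrees A..D in the
   analogous order.  */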
2429169695Skan
2430169695Skan
2431169695Skan
2432169695Skanstatic int
2433169695Skanmfsplay_tree_rebalance_helper1 (mfsplay_tree_node n, void *array_ptr)
2434169695Skan{
2435169695Skan  mfsplay_tree_node **p = array_ptr;
2436169695Skan  *(*p) = n;
2437169695Skan  (*p)++;
2438169695Skan  return 0;
2439169695Skan}
2440169695Skan
2441169695Skan
2442169695Skanstatic mfsplay_tree_node
2443169695Skanmfsplay_tree_rebalance_helper2 (mfsplay_tree_node * array, unsigned low,
2444169695Skan                              unsigned high)
2445169695Skan{
2446169695Skan  unsigned middle = low + (high - low) / 2;
2447169695Skan  mfsplay_tree_node n = array[middle];
2448169695Skan
2449169695Skan  /* Note that since we're producing a balanced binary tree, it is not a problem
2450169695Skan     that this function is recursive.  */
2451169695Skan  if (low + 1 <= middle)
2452169695Skan    n->left = mfsplay_tree_rebalance_helper2 (array, low, middle - 1);
2453169695Skan  else
2454169695Skan    n->left = NULL;
2455169695Skan
2456169695Skan  if (middle + 1 <= high)
2457169695Skan    n->right = mfsplay_tree_rebalance_helper2 (array, middle + 1, high);
2458169695Skan  else
2459169695Skan    n->right = NULL;
2460169695Skan
2461169695Skan  return n;
2462169695Skan}
2463169695Skan
2464169695Skan
2465169695Skan/* Rebalance the entire tree.  Do this by copying all the node
2466169695Skan   pointers into an array, then cleverly re-linking them.  */
2467169695Skanstatic void
2468169695Skanmfsplay_tree_rebalance (mfsplay_tree sp)
2469169695Skan{
2470169695Skan  mfsplay_tree_node *all_nodes, *all_nodes_1;
2471169695Skan
2472169695Skan  if (sp->num_keys <= 2)
2473169695Skan    return;
2474169695Skan
2475169695Skan  all_nodes = mfsplay_tree_xmalloc (sizeof (mfsplay_tree_node) * sp->num_keys);
2476169695Skan
2477169695Skan  /* Traverse all nodes to copy their addresses into this array.  */
2478169695Skan  all_nodes_1 = all_nodes;
2479169695Skan  mfsplay_tree_foreach (sp, mfsplay_tree_rebalance_helper1,
2480169695Skan                      (void *) &all_nodes_1);
2481169695Skan
2482169695Skan  /* Relink all the nodes.  */
2483169695Skan  sp->root = mfsplay_tree_rebalance_helper2 (all_nodes, 0, sp->num_keys - 1);
2484169695Skan
2485169695Skan  mfsplay_tree_free (all_nodes);
2486169695Skan}
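
/* Worked example: with seven keys the in-order node array a[0..6] becomes
   the perfectly balanced tree rooted at a[3], whose children are a[1] and
   a[5], whose children in turn are the leaves a[0], a[2], a[4] and a[6];
   the recursion in the helper is O(log num_keys) deep rather than as deep
   as the old tree.  */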
2487169695Skan
2488169695Skan
2489169695Skan/* Splay SP around KEY.  */
2490169695Skanstatic void
2491169695Skanmfsplay_tree_splay (mfsplay_tree sp, mfsplay_tree_key key)
2492169695Skan{
2493169695Skan  if (sp->root == 0)
2494169695Skan    return;
2495169695Skan
2496169695Skan  /* If we just splayed the tree with the same key, do nothing.  */
2497169695Skan  if (sp->last_splayed_key_p &&
2498169695Skan      (sp->last_splayed_key == key))
2499169695Skan    return;
2500169695Skan
2501169695Skan  /* Compute a maximum recursion depth for a splay tree with NUM nodes.
2502169695Skan     The idea is to limit excessive stack usage if we're facing
2503169695Skan     degenerate access patterns.  Unfortunately such patterns can occur
2504169695Skan     e.g. during static initialization, where many static objects might
2505169695Skan     be registered in increasing address sequence, or during a case where
2506169695Skan     large tree-like heap data structures are allocated quickly.
2507169695Skan
2508169695Skan     On x86, this corresponds to roughly 200K of stack usage.
2509169695Skan     XXX: For libmudflapth, this could be a function of __mf_opts.thread_stack.  */
2510169695Skan  sp->max_depth = 2500;
2511169695Skan  sp->rebalance_p = sp->depth = 0;
2512169695Skan
2513169695Skan  mfsplay_tree_splay_helper (sp, key, &sp->root, NULL, NULL);
2514169695Skan  if (sp->rebalance_p)
2515169695Skan    {
2516169695Skan      mfsplay_tree_rebalance (sp);
2517169695Skan
2518169695Skan      sp->rebalance_p = sp->depth = 0;
2519169695Skan      mfsplay_tree_splay_helper (sp, key, &sp->root, NULL, NULL);
2520169695Skan
2521169695Skan      if (sp->rebalance_p)
2522169695Skan        abort ();
2523169695Skan    }
2524169695Skan
2525169695Skan
2526169695Skan  /* Cache this splay key. */
2527169695Skan  sp->last_splayed_key = key;
2528169695Skan  sp->last_splayed_key_p = 1;
2529169695Skan}
2530169695Skan
2531169695Skan
2532169695Skan
2533169695Skan/* Allocate a new splay tree.  */
2534169695Skanstatic mfsplay_tree
2535169695Skanmfsplay_tree_new ()
2536169695Skan{
2537169695Skan  mfsplay_tree sp = mfsplay_tree_xmalloc (sizeof (struct mfsplay_tree_s));
2538169695Skan  sp->root = NULL;
2539169695Skan  sp->last_splayed_key_p = 0;
2540169695Skan  sp->num_keys = 0;
2541169695Skan
2542169695Skan  return sp;
2543169695Skan}
2544169695Skan
2545169695Skan
2546169695Skan
2547169695Skan/* Insert a new node (associating KEY with VALUE) into SP.  If a
2548169695Skan   previous node with the indicated KEY exists, its value is replaced
2549169695Skan   with the new one.  Returns the node holding KEY.  */
2550169695Skanstatic mfsplay_tree_node
2551169695Skanmfsplay_tree_insert (mfsplay_tree sp, mfsplay_tree_key key, mfsplay_tree_value value)
2552169695Skan{
2553169695Skan  int comparison = 0;
2554169695Skan
2555169695Skan  mfsplay_tree_splay (sp, key);
2556169695Skan
2557169695Skan  if (sp->root)
2558169695Skan    comparison = ((sp->root->key > key) ? 1 :
2559169695Skan                  ((sp->root->key < key) ? -1 : 0));
2560169695Skan
2561169695Skan  if (sp->root && comparison == 0)
2562169695Skan    {
2563169695Skan      /* If the root of the tree already has the indicated KEY, just
2564169695Skan         replace the value with VALUE.  */
2565169695Skan      sp->root->value = value;
2566169695Skan    }
2567169695Skan  else
2568169695Skan    {
2569169695Skan      /* Create a new node, and insert it at the root.  */
2570169695Skan      mfsplay_tree_node node;
2571169695Skan
2572169695Skan      node = mfsplay_tree_xmalloc (sizeof (struct mfsplay_tree_node_s));
2573169695Skan      node->key = key;
2574169695Skan      node->value = value;
2575169695Skan      sp->num_keys++;
2576169695Skan      if (!sp->root)
2577169695Skan        node->left = node->right = 0;
2578169695Skan      else if (comparison < 0)
2579169695Skan        {
2580169695Skan          node->left = sp->root;
2581169695Skan          node->right = node->left->right;
2582169695Skan          node->left->right = 0;
2583169695Skan        }
2584169695Skan      else
2585169695Skan        {
2586169695Skan          node->right = sp->root;
2587169695Skan          node->left = node->right->left;
2588169695Skan          node->right->left = 0;
2589169695Skan        }
2590169695Skan
2591169695Skan      sp->root = node;
2592169695Skan      sp->last_splayed_key_p = 0;
2593169695Skan    }
2594169695Skan
2595169695Skan  return sp->root;
2596169695Skan}

/* Remove KEY from SP.  It is not an error if KEY is not present.  */

static void
mfsplay_tree_remove (mfsplay_tree sp, mfsplay_tree_key key)
{
  mfsplay_tree_splay (sp, key);
  sp->last_splayed_key_p = 0;
  if (sp->root && (sp->root->key == key))
    {
      mfsplay_tree_node left, right;
      left = sp->root->left;
      right = sp->root->right;
      /* Delete the root node itself.  */
      mfsplay_tree_free (sp->root);
      sp->num_keys--;
      /* One of the children is now the root.  Doesn't matter much
         which, so long as we preserve the properties of the tree.  */
      if (left)
        {
          sp->root = left;
          /* If there was a right child as well, hang it off the
             right-most leaf of the left child.  */
          if (right)
            {
              while (left->right)
                left = left->right;
              left->right = right;
            }
        }
      else
        sp->root = right;
    }
}

/* Look up KEY in SP, returning the node that holds it if present,
   and NULL otherwise.  */

static mfsplay_tree_node
mfsplay_tree_lookup (mfsplay_tree sp, mfsplay_tree_key key)
{
  mfsplay_tree_splay (sp, key);
  if (sp->root && (sp->root->key == key))
    return sp->root;
  else
    return 0;
}
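
/* Illustrative sketch (not part of the original runtime): lookup
   returns the node rather than the value, so callers read
   node->value, and removing an absent key is a harmless no-op.
   The function name is hypothetical.  Guarded out so it never
   affects compilation.  */
#if 0
static void
mfsplay_tree_lookup_sketch (mfsplay_tree sp, mfsplay_tree_key key)
{
  mfsplay_tree_node n;

  n = mfsplay_tree_lookup (sp, key);
  if (n != NULL)
    {
      void *value = n->value;        /* Payload associated with KEY.  */
      (void) value;
      mfsplay_tree_remove (sp, key); /* Key present: node is freed.  */
    }

  mfsplay_tree_remove (sp, key);     /* Key now absent: no-op.  */
}
#endif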


/* Return the node whose key is the immediate predecessor of KEY, or
   NULL if there is no predecessor.  KEY need not be present in the
   tree.  */

static mfsplay_tree_node
mfsplay_tree_predecessor (mfsplay_tree sp, mfsplay_tree_key key)
{
  int comparison;
  mfsplay_tree_node node;
  /* If the tree is empty, there is certainly no predecessor.  */
  if (!sp->root)
    return NULL;
  /* Splay the tree around KEY.  That will leave either the KEY
     itself, its predecessor, or its successor at the root.  */
  mfsplay_tree_splay (sp, key);
  comparison = ((sp->root->key > key) ? 1 :
                ((sp->root->key < key) ? -1 : 0));

  /* If the predecessor is at the root, just return it.  */
  if (comparison < 0)
    return sp->root;
  /* Otherwise, find the rightmost element of the left subtree.  */
  node = sp->root->left;
  if (node)
    while (node->right)
      node = node->right;
  return node;
}

/* Return the node whose key is the immediate successor of KEY, or
   NULL if there is no successor.  KEY need not be present in the
   tree.  */

static mfsplay_tree_node
mfsplay_tree_successor (mfsplay_tree sp, mfsplay_tree_key key)
{
  int comparison;
  mfsplay_tree_node node;
  /* If the tree is empty, there is certainly no successor.  */
  if (!sp->root)
    return NULL;
  /* Splay the tree around KEY.  That will leave either the KEY
     itself, its predecessor, or its successor at the root.  */
  mfsplay_tree_splay (sp, key);
  comparison = ((sp->root->key > key) ? 1 :
                ((sp->root->key < key) ? -1 : 0));
  /* If the successor is at the root, just return it.  */
  if (comparison > 0)
    return sp->root;
  /* Otherwise, find the leftmost element of the right subtree.  */
  node = sp->root->right;
  if (node)
    while (node->left)
      node = node->left;
  return node;
}
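
/* Illustrative sketch (not part of the original runtime): the
   predecessor/successor queries let a caller find the tracked keys
   nearest to an arbitrary address even when that address is not
   itself a key in the tree.  The function name is hypothetical.
   Guarded out so it never affects compilation.  */
#if 0
static void
mfsplay_tree_neighbor_sketch (mfsplay_tree sp, mfsplay_tree_key addr)
{
  mfsplay_tree_node below = mfsplay_tree_predecessor (sp, addr);
  mfsplay_tree_node above = mfsplay_tree_successor (sp, addr);

  if (below)
    fprintf (stderr, "nearest key below %p is %p\n",
             (void *) addr, (void *) below->key);
  if (above)
    fprintf (stderr, "nearest key above %p is %p\n",
             (void *) addr, (void *) above->key);
}
#endif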

/* Call FN, passing it the DATA, for every node in SP, following an
   in-order traversal.  If FN ever returns a non-zero value, the
   iteration stops immediately and that value is returned.
   Otherwise, this function returns 0.

   This function simulates recursion with explicitly managed,
   dynamically allocated stacks, since it may be called from
   mfsplay_tree_rebalance(), at which point the tree may already be
   too deep for the available call-stack space.  */
static int
mfsplay_tree_foreach (mfsplay_tree st, mfsplay_tree_foreach_fn fn, void *data)
{
  mfsplay_tree_node *stack1;
  char *stack2;
  unsigned sp;
  int val = 0;
  enum s { s_left, s_here, s_right, s_up };

  if (st->root == NULL) /* => num_keys == 0 */
    return 0;

  stack1 = mfsplay_tree_xmalloc (sizeof (mfsplay_tree_node) * st->num_keys);
  stack2 = mfsplay_tree_xmalloc (sizeof (char) * st->num_keys);

  sp = 0;
  stack1 [sp] = st->root;
  stack2 [sp] = s_left;

  while (1)
    {
      mfsplay_tree_node n;
      enum s s;

      n = stack1 [sp];
      s = stack2 [sp];

      /* Handle each of the four possible states separately.  */

      /* 1: We're here to traverse the left subtree (if any).  */
      if (s == s_left)
        {
          stack2 [sp] = s_here;
          if (n->left != NULL)
            {
              sp ++;
              stack1 [sp] = n->left;
              stack2 [sp] = s_left;
            }
        }

      /* 2: We're here to traverse this node.  */
      else if (s == s_here)
        {
          stack2 [sp] = s_right;
          val = (*fn) (n, data);
          if (val) break;
        }

      /* 3: We're here to traverse the right subtree (if any).  */
      else if (s == s_right)
        {
          stack2 [sp] = s_up;
          if (n->right != NULL)
            {
              sp ++;
              stack1 [sp] = n->right;
              stack2 [sp] = s_left;
            }
        }

      /* 4: We're here after both subtrees (if any) have been traversed.  */
      else if (s == s_up)
        {
          /* Pop the stack.  */
          if (sp == 0) break; /* Popping off the root node: we're finished!  */
          sp --;
        }

      else
        abort ();
    }

  mfsplay_tree_free (stack1);
  mfsplay_tree_free (stack2);
  return val;
}
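
/* Illustrative sketch (not part of the original runtime): a foreach
   callback that counts nodes.  Returning zero keeps the in-order walk
   going; a non-zero return would stop it and be propagated back as
   the result of mfsplay_tree_foreach().  The names are hypothetical.
   Guarded out so it never affects compilation.  */
#if 0
static int
mfsplay_tree_count_fn (mfsplay_tree_node n, void *data)
{
  (void) n;
  (*(unsigned *) data)++;
  return 0;   /* Keep iterating.  */
}

static unsigned
mfsplay_tree_count_sketch (mfsplay_tree sp)
{
  unsigned count = 0;
  mfsplay_tree_foreach (sp, mfsplay_tree_count_fn, &count);
  return count;
}
#endif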