1/* 2 3 Copyright (C) 2000,2002,2004,2005 Silicon Graphics, Inc. All Rights Reserved. 4 5 This program is free software; you can redistribute it and/or modify it 6 under the terms of version 2.1 of the GNU Lesser General Public License 7 as published by the Free Software Foundation. 8 9 This program is distributed in the hope that it would be useful, but 10 WITHOUT ANY WARRANTY; without even the implied warranty of 11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 12 13 Further, this software is distributed without any warranty that it is 14 free of the rightful claim of any third person regarding infringement 15 or the like. Any license provided herein, whether implied or 16 otherwise, applies only to this software file. Patent licenses, if 17 any, provided herein do not apply to combinations of this program with 18 other software, or any other product whatsoever. 19 20 You should have received a copy of the GNU Lesser General Public 21 License along with this program; if not, write the Free Software 22 Foundation, Inc., 59 Temple Place - Suite 330, Boston MA 02111-1307, 23 USA. 24 25 Contact information: Silicon Graphics, Inc., 1500 Crittenden Lane, 26 Mountain View, CA 94043, or: 27 28 http://www.sgi.com 29 30 For further information regarding this notice, see: 31 32 http://oss.sgi.com/projects/GenInfo/NoticeExplan 33 34*/ 35#undef DEBUG 36 37#include "config.h" 38#include "dwarf_incl.h" 39#include <sys/types.h> 40 41#include <stdlib.h> 42#include <stdio.h> 43#include "malloc_check.h" 44 45/* 46 These files are included to get the sizes 47 of structs to set the ah_bytes_one_struct field 48 of the Dwarf_Alloc_Hdr_s structs for each 49 allocation type. 
*/
#include "dwarf_line.h"
#include "dwarf_global.h"
#include "dwarf_arange.h"
#include "dwarf_abbrev.h"
#include "dwarf_die_deliv.h"
#include "dwarf_frame.h"
#include "dwarf_loc.h"
#include "dwarf_funcs.h"
#include "dwarf_types.h"
#include "dwarf_vars.h"
#include "dwarf_weaks.h"

/* Frees the special no-dbg error record made by
   _dwarf_special_no_dbg_error_malloc() (defined near end of file). */
static void _dwarf_free_special_error(Dwarf_Ptr space);

#ifdef DWARF_SIMPLE_MALLOC
/* In DWARF_SIMPLE_MALLOC mode every allocation is a plain malloc
   tracked on a per-dbg list; these maintain that list. */
static void _dwarf_simple_malloc_add_to_list(Dwarf_Debug dbg,
                                             Dwarf_Ptr addr,
                                             unsigned long size,
                                             short alloc_type);
static void _dwarf_simple_malloc_delete_from_list(Dwarf_Debug dbg,
                                                  Dwarf_Ptr space,
                                                  short alloc_type);
void _dwarf_simple_malloc_botch(int err);
#endif /* DWARF_SIMPLE_MALLOC */

/*
    This macro adds the size of a pointer to the size of a
    struct that is given to it. It rounds up the size to
    be a multiple of the size of a pointer. This is done
    so that every struct returned by _dwarf_get_alloc()
    can be preceded by a pointer to the chunk it came from.
    Before allocating, it checks if the size of struct is less than
    the size of a pointer. If yes, it returns the size
    of 2 pointers. The returned size should be at least
    the size of 2 pointers, since the first points to the
    chunk the struct was allocated from, and the second
    is used to link the free list.

    We want DW_RESERVE to be at least the size of
    a long long and at least the size of a pointer because
    our struct has a long long and we want that aligned right.
    Now Standard C defines long long as 8 bytes, so lets
    make that standard. It will become unworkable when
    long long or pointer grows beyond 8 bytes.
    Unclear what to do with weird requirements, like
    36 bit pointers.
*/
#define DW_RESERVE 8

/* Round size up to the next multiple of DW_RESERVE bytes.
*/
#define ROUND_SIZE(inputsize)                 \
        (((inputsize) % (DW_RESERVE)) == 0 ?  \
            (inputsize):                      \
            ((inputsize)  +                   \
               (DW_RESERVE) - ((inputsize) % (DW_RESERVE)) ))

#define ROUND_SIZE_WITH_POINTER(i_size) (ROUND_SIZE(i_size) + DW_RESERVE)

/* SMALL_ALLOC is for trivia where allocation is a waste.
   Things that should be removed, really. */
#define SMALL_ALLOC 2

/* BASE_ALLOC is where a basic allocation makes sense, but 'not too large'.
   No thorough evaluation of this value has been done, though
   it was found wasteful of memory to have BASE_ALLOC be as large as
   BIG_ALLOC. */
#define BASE_ALLOC 64

/* BIG_ALLOC is where a larger-than-BASE_ALLOC
   allocation makes sense, but still 'not too large'.
   No thorough evaluation of this value has been done. */
#define BIG_ALLOC 128

/* This translates into de_alloc_hdr index
** the 0,1,1 entries are special: they don't use the
** table values at all.
** Rearranging the DW_DLA values would break binary compatibility
** so that is not an option.
*/
struct ial_s {
    int ia_al_num;              /* Index into de_alloc_hdr table. */

    /* In bytes, one struct instance. This does not account for extra
       space needed per block, but that (DW_RESERVE) will be added in
       later where it is needed (DW_RESERVE space never added in here).
     */
    int ia_struct_size;

    /* Number of instances per alloc block. MUST be > 0. */
    int ia_base_count;

    /* Optional per-type hooks run on allocation/deallocation
       (only DW_DLA_FRAME uses them at present). */
    int (*specialconstructor) (Dwarf_Debug, void *);
    void (*specialdestructor) (void *);
};

/* Indexed by DW_DLA_* allocation-type value; each entry names the
   pool (de_alloc_hdr slot), the per-struct size, and the pool growth
   count. {0,1,1,0,0} entries are either directly-malloc'ed types
   or unused type codes. */
static const
struct ial_s index_into_allocated[ALLOC_AREA_INDEX_TABLE_MAX] = {
    {0, 1, 1, 0, 0},            /* none */
    {0, 1, 1, 0, 0},            /* 1 DW_DLA_STRING */
    {1, sizeof(Dwarf_Loc), BASE_ALLOC, 0, 0},   /* 2 DW_DLA_LOC */
    {2, sizeof(Dwarf_Locdesc), BASE_ALLOC, 0, 0},       /* 3 DW_DLA_LOCDESC */
    {0, 1, 1, 0, 0},            /* not used */ /* 4 DW_DLA_ELLIST */
    {0, 1, 1, 0, 0},            /* not used */ /* 5 DW_DLA_BOUNDS */
    {3, sizeof(Dwarf_Block), BASE_ALLOC, 0, 0}, /* 6 DW_DLA_BLOCK */
    {0, 1, 1, 0, 0},            /* the actual dwarf_debug structure */ /* 7 DW_DLA_DEBUG */
    {4, sizeof(struct Dwarf_Die_s), BIG_ALLOC, 0, 0},   /* 8 DW_DLA_DIE */
    {5, sizeof(struct Dwarf_Line_s), BIG_ALLOC, 0, 0},  /* 9 DW_DLA_LINE */
    {6, sizeof(struct Dwarf_Attribute_s), BIG_ALLOC * 2, 0, 0},
    /* 10 DW_DLA_ATTR */
    {0, 1, 1, 0, 0},            /* not used */ /* 11 DW_DLA_TYPE */
    {0, 1, 1, 0, 0},            /* not used */ /* 12 DW_DLA_SUBSCR */
    {7, sizeof(struct Dwarf_Global_s), BASE_ALLOC, 0, 0},       /* 13 DW_DLA_GLOBAL */
    {8, sizeof(struct Dwarf_Error_s), BASE_ALLOC, 0, 0},        /* 14 DW_DLA_ERROR */
    {0, 1, 1, 0, 0},            /* 15 DW_DLA_LIST */
    {0, 1, 1, 0, 0},            /* not used */ /* 16 DW_DLA_LINEBUF */
    {9, sizeof(struct Dwarf_Arange_s), BASE_ALLOC, 0, 0},       /* 17 DW_DLA_ARANGE */
    {10, sizeof(struct Dwarf_Abbrev_s), BIG_ALLOC, 0, 0},       /* 18 DW_DLA_ABBREV */
    {11, sizeof(Dwarf_Frame_Op), BIG_ALLOC, 0, 0},      /* 19 DW_DLA_FRAME_OP */
    {12, sizeof(struct Dwarf_Cie_s), BASE_ALLOC, 0, 0}, /* 20 DW_DLA_CIE */
    {13, sizeof(struct Dwarf_Fde_s), BASE_ALLOC, 0, 0}, /* 21 DW_DLA_FDE */
    {0, 1, 1, 0, 0},            /* 22 DW_DLA_LOC_BLOCK */
    {0, 1, 1, 0, 0},            /* 23 DW_DLA_FRAME_BLOCK */
    {14, sizeof(struct Dwarf_Global_s), BASE_ALLOC, 0, 0},      /* 24 DW_DLA_FUNC UNUSED */
    {15, sizeof(struct Dwarf_Global_s), BASE_ALLOC, 0, 0},      /* 25 DW_DLA_TYPENAME UNUSED */
    {16, sizeof(struct Dwarf_Global_s), BASE_ALLOC, 0, 0},      /* 26 DW_DLA_VAR UNUSED */
    {17, sizeof(struct Dwarf_Global_s), BASE_ALLOC, 0, 0},      /* 27 DW_DLA_WEAK UNUSED */
    {0, 1, 1, 0, 0},            /* 28 DW_DLA_ADDR */
    {18, sizeof(struct Dwarf_Abbrev_List_s), BIG_ALLOC, 0, 0},
    /* 29 DW_DLA_ABBREV_LIST */
    {19, sizeof(struct Dwarf_Chain_s), BIG_ALLOC, 0, 0},        /* 30 DW_DLA_CHAIN */
    {20, sizeof(struct Dwarf_CU_Context_s), BASE_ALLOC, 0, 0},
    /* 31 DW_DLA_CU_CONTEXT */
    {21, sizeof(struct Dwarf_Frame_s), BASE_ALLOC,
     _dwarf_frame_constructor,
     _dwarf_frame_destructor},  /* 32 DW_DLA_FRAME */
    {22, sizeof(struct Dwarf_Global_Context_s), BASE_ALLOC, 0, 0},
    /* 33 DW_DLA_GLOBAL_CONTEXT */
    {23, sizeof(struct Dwarf_File_Entry_s), BASE_ALLOC, 0, 0},
    /* 34 DW_DLA_FILE_ENTRY */
    {24, sizeof(struct Dwarf_Line_Context_s), BASE_ALLOC, 0, 0},
    /* 35 DW_DLA_LINE_CONTEXT */
    {25, sizeof(struct Dwarf_Loc_Chain_s), BASE_ALLOC, 0, 0},
    /* 36 DW_DLA_LOC_CHAIN */

    /* See use of ABBREV_HASH_TABLE_SIZE below for final dealloc.
       One DW_DLA_HASH_TABLE allocation is a whole array of
       Dwarf_Hash_Table_s entries, not a single struct. */
    {26, ABBREV_HASH_TABLE_SIZE * sizeof(struct Dwarf_Hash_Table_s),
     BASE_ALLOC, 0, 0},         /* 37 DW_DLA_HASH_TABLE */

/* The following really use Global struct: used to be unique struct
   per type, but now merged (11/99). The opaque types
   are visible in the interface. The types for
   DW_DLA_FUNC,
   DW_DLA_TYPENAME, DW_DLA_VAR, DW_DLA_WEAK also use
   the global types.
*/
    {27, sizeof(struct Dwarf_Global_Context_s), BASE_ALLOC, 0, 0},
    /* 38 DW_DLA_FUNC_CONTEXT */
    {28, sizeof(struct Dwarf_Global_Context_s), BASE_ALLOC, 0, 0},
    /* 39 DW_DLA_TYPENAME_CONTEXT */
    {29, sizeof(struct Dwarf_Global_Context_s), BASE_ALLOC, 0, 0},
    /* 40 DW_DLA_VAR_CONTEXT */
    {30, sizeof(struct Dwarf_Global_Context_s), BASE_ALLOC, 0, 0},
    /* 41 DW_DLA_WEAK_CONTEXT */
    {31, sizeof(struct Dwarf_Global_Context_s), BASE_ALLOC, 0, 0},
    /* 42 DW_DLA_PUBTYPES_CONTEXT DWARF3 */
};

#ifndef DWARF_SIMPLE_MALLOC

/*
    This function is given a pointer to the header
    structure that is used to allocate 1 struct of
    the type given by alloc_type. It first checks
    if a struct is available in its free list. If
    not, it checks if 1 is available in its blob,
    which is a chunk of memory that is reserved for
    its use. If not, it malloc's a chunk. The
    initial part of it is used to store the end
    address of the chunk, and also to keep track
    of the number of free structs in that chunk.
    This information is used for freeing the chunk
    when all the structs in it are free.

    Assume all input arguments have been validated.

    This function can be used only to allocate 1
    struct of the given type.

    It returns a pointer to the struct that the
    user can use. It returns NULL only when it
    is out of free structs, and cannot malloc
    any more. The struct returned is zero-ed.

    A pointer to the chunk that the struct belongs
    to is stored in the bytes preceding the
    returned address. Since this pointer is
    never overwritten, when a struct is allocated
    from the free_list this pointer does not
    have to be written. In the 2 other cases,
    where the struct is allocated from a new
    chunk, or the blob, a pointer to the chunk
    is written.
*/
static Dwarf_Ptr
_dwarf_find_memory(Dwarf_Alloc_Hdr alloc_hdr)
{
    /* Pointer to the struct allocated. */
    Dwarf_Small *ret_mem = 0;

    /* Pointer to info about chunks allocated. */
    Dwarf_Alloc_Area alloc_area;

    /* Size of chunk malloc'ed when no free structs left. */
    Dwarf_Signed mem_block_size;

    /* Pointer to block malloc'ed. */
    Dwarf_Small *mem_block;

    /*
       Check the alloc_area from which the last allocation was made
       (most recent new block). If that is not successful, then search
       the list of alloc_area's from alloc_header. */
    alloc_area = alloc_hdr->ah_last_alloc_area;
    if (alloc_area == NULL || alloc_area->aa_free_structs_in_chunk == 0)
        for (alloc_area = alloc_hdr->ah_alloc_area_head;
             alloc_area != NULL; alloc_area = alloc_area->aa_next) {

            if (alloc_area->aa_free_structs_in_chunk > 0) {
                break;          /* found a free entry! */
            }

        }

    if (alloc_area != NULL) {
        alloc_area->aa_free_structs_in_chunk--;

        if (alloc_area->aa_free_list != NULL) {
            ret_mem = alloc_area->aa_free_list;

            /*
               Update the free list. The initial part of the struct is
               used to hold a pointer to the next struct on the free
               list. In this way, the free list chain is maintained at
               0 memory cost. */
            alloc_area->aa_free_list =
                ((Dwarf_Free_List) ret_mem)->fl_next;
        } else if (alloc_area->aa_blob_start < alloc_area->aa_blob_end) {
            ret_mem = alloc_area->aa_blob_start;

            /*
               Store pointer to chunk this struct belongs to in the
               first few bytes. Return pointer to bytes after this
               pointer storage. */
            *(Dwarf_Alloc_Area *) ret_mem = alloc_area;
            ret_mem += DW_RESERVE;

            alloc_area->aa_blob_start += alloc_hdr->ah_bytes_one_struct;
        } else {
            /* else fall thru, though it should be impossible to fall
               thru. And represents a disastrous programming error if
               we get here. */
#ifdef DEBUG
            fprintf(stderr, "libdwarf Internal error start %x end %x\n",
                    (int) alloc_area->aa_blob_start,
                    (int) alloc_area->aa_blob_end);
#endif
        }
    }

    /* New memory has to be malloc'ed since there are no free structs. */
    if (ret_mem == 0) {
        Dwarf_Word rounded_area_hdr_size;

        alloc_hdr->ah_chunks_allocated++;

        {                       /* this nonsense avoids a warning */
            /* CONSTCOND would be better */
            unsigned long v = sizeof(struct Dwarf_Alloc_Area_s);

            rounded_area_hdr_size = ROUND_SIZE(v);
        }

        /*
           Allocate memory to contain the required number of structs
           and the Dwarf_Alloc_Area_s to control it. */
        mem_block_size = alloc_hdr->ah_bytes_malloc_per_chunk +
            rounded_area_hdr_size;

        mem_block = malloc(mem_block_size);
        if (mem_block == NULL) {
            return (NULL);
        }


        /*
           Attach the Dwarf_Alloc_Area_s struct to the list of chunks
           malloc'ed for this struct type. Also initialize the fields
           of the Dwarf_Alloc_Area_s. */
        alloc_area = (Dwarf_Alloc_Area) mem_block;
        alloc_area->aa_prev = 0;
        if (alloc_hdr->ah_alloc_area_head != NULL) {
            alloc_hdr->ah_alloc_area_head->aa_prev = alloc_area;
        }
        alloc_area->aa_free_list = 0;
        alloc_area->aa_next = alloc_hdr->ah_alloc_area_head;
        alloc_hdr->ah_alloc_area_head = alloc_area;

        alloc_area->aa_alloc_hdr = alloc_hdr;
        alloc_area->aa_free_structs_in_chunk =
            (Dwarf_Sword) alloc_hdr->ah_structs_per_chunk - 1;
        if (alloc_area->aa_free_structs_in_chunk < 1) {
            /* If we get here, there is a disastrous programming error
               somewhere. */
#ifdef DEBUG
            fprintf(stderr,
                    "libdwarf Internal error: free structs in chunk %d\n",
                    (int) alloc_area->aa_free_structs_in_chunk);
#endif
            return NULL;
        }

        /*
           The struct returned begins immediately after the
           Dwarf_Alloc_Area_s struct. */
        ret_mem = mem_block + rounded_area_hdr_size;
        alloc_area->aa_blob_start =
            ret_mem + alloc_hdr->ah_bytes_one_struct;
        alloc_area->aa_blob_end = mem_block + mem_block_size;

        /*
           Store pointer to chunk this struct belongs to in the first
           few bytes. Return pointer to bytes after this pointer
           storage. */
        *(Dwarf_Alloc_Area *) ret_mem = alloc_area;
        ret_mem += DW_RESERVE;
    }

    alloc_hdr->ah_last_alloc_area = alloc_area;
    alloc_hdr->ah_struct_user_holds++;
    /* Zero only the caller-visible struct, not the hidden
       DW_RESERVE prefix holding the chunk back-pointer. */
    memset(ret_mem, 0, alloc_hdr->ah_bytes_one_struct - DW_RESERVE);
    return (ret_mem);
}

#endif /* ndef DWARF_SIMPLE_MALLOC */

/*
    This function returns a pointer to a region
    of memory. For alloc_types that are not
    strings or lists of pointers, only 1 struct
    can be requested at a time. This is indicated
    by an input count of 1. For strings, count
    equals the length of the string it will
    contain, i.e it is the length of the string
    plus 1 for the terminating null. For lists
    of pointers, count is equal to the number of
    pointers. For DW_DLA_FRAME_BLOCK, and
    DW_DLA_LOC_BLOCK allocation types also, count
    is the count of the number of structs needed.

    This function cannot be used to allocate a
    Dwarf_Debug_s struct.
*/
Dwarf_Ptr
_dwarf_get_alloc(Dwarf_Debug dbg,
                 Dwarf_Small alloc_type, Dwarf_Unsigned count)
{
    Dwarf_Alloc_Hdr alloc_hdr;

    Dwarf_Ptr ret_mem;

    Dwarf_Signed size = 0;
    unsigned int index;
    unsigned int type = alloc_type;

    if (dbg == NULL) {
        return (NULL);
    }

    if (type >= ALLOC_AREA_INDEX_TABLE_MAX) {
        /* internal error */
        return NULL;
    }
    index = index_into_allocated[type].ia_al_num;
    /* zero also illegal but not tested for */

    /* If the Dwarf_Debug is not fully set up, we will get index 0 for
       any type and must do something. 'Not fully set up' can only
       happen for DW_DLA_ERROR, I (davea) believe, and for that we call
       special code here.. */

    if (index == 0) {
        if (alloc_type == DW_DLA_STRING) {
            size = count;
        } else if (alloc_type == DW_DLA_LIST) {
            size = count * sizeof(Dwarf_Ptr);
        } else if (alloc_type == DW_DLA_FRAME_BLOCK) {
            size = count * sizeof(Dwarf_Frame_Op);
        } else if (alloc_type == DW_DLA_LOC_BLOCK) {
            size = count * sizeof(Dwarf_Loc);
        } else if (alloc_type == DW_DLA_ADDR) {
            /* Big enough to hold either an address or an offset. */
            size = count *
                (sizeof(Dwarf_Addr) > sizeof(Dwarf_Off) ?
                 sizeof(Dwarf_Addr) : sizeof(Dwarf_Off));
        } else if (alloc_type == DW_DLA_ERROR) {
            void *m = _dwarf_special_no_dbg_error_malloc();

            dwarf_malloc_check_alloc_data(m, DW_DLA_ERROR);
            return m;

        } else {
            /* If we get here, there is a disastrous programming error
               somewhere. NOTE(review): size stays 0 here and the
               code falls through to malloc(0) below. */
#ifdef DEBUG
            fprintf(stderr,
                    "libdwarf Internal error: type %d unexpected\n",
                    (int) type);
#endif
        }
    } else {
        alloc_hdr = &dbg->de_alloc_hdr[index];
        if (alloc_hdr->ah_bytes_one_struct > 0) {
#ifdef DWARF_SIMPLE_MALLOC
            size = alloc_hdr->ah_bytes_one_struct;
#else
            {
                void *m = _dwarf_find_memory(alloc_hdr);

                dwarf_malloc_check_alloc_data(m, type);
                if (index_into_allocated[type].specialconstructor) {
                    int res =
                        index_into_allocated[type].
                        specialconstructor(dbg, m);
                    if (res != DW_DLV_OK) {
                        /* We leak what we allocated in
                           _dwarf_find_memory when constructor fails. */
                        return NULL;
                    }
                }
                return m;
            }
#endif

        } else {
            /* Special case: should not really happen at all. */
            if (type == DW_DLA_ERROR) {
                /* dwarf_init failure. Because dbg is incomplete we
                   won't use it to record the malloc. */
                void *m = _dwarf_special_no_dbg_error_malloc();

                dwarf_malloc_check_alloc_data(m, DW_DLA_ERROR);
                return m;
            } else {
                /* If we get here, there is a disastrous programming
                   error somewhere. */
#ifdef DWARF_SIMPLE_MALLOC
                _dwarf_simple_malloc_botch(3);
#endif
#ifdef DEBUG
                fprintf(stderr,
                        "libdwarf Internal error: Type %d unexpected\n",
                        (int) type);
#endif
            }
        }
    }

    ret_mem = malloc(size);
#ifdef DWARF_SIMPLE_MALLOC
    _dwarf_simple_malloc_add_to_list(dbg, ret_mem, (unsigned long) size,
                                     type);
#endif
    if (ret_mem != NULL)
        memset(ret_mem, 0, size);

    dwarf_malloc_check_alloc_data(ret_mem, type);
    if (index_into_allocated[type].specialconstructor) {
        int res =
            index_into_allocated[type].specialconstructor(dbg, ret_mem);
        if (res != DW_DLV_OK) {
            /* We leak what we allocated just above when the
               constructor fails. */
            return NULL;
        }
    }

    return (ret_mem);
}



/*
    This function is used to deallocate a region of memory
    that was obtained by a call to _dwarf_get_alloc. Note
    that though dwarf_dealloc() is a public function,
    _dwarf_get_alloc() isn't.

    For lists, typically arrays of pointers, it is assumed
    that the space was allocated by a direct call to malloc,
    and so a straight free() is done. This is also the case
    for variable length blocks such as DW_DLA_FRAME_BLOCK
    and DW_DLA_LOC_BLOCK.

    For strings, the pointer might point to a string in
    .debug_info or .debug_string. After this is checked,
    and if found not to be the case, a free() is done,
    again on the assumption that a malloc was used to
    obtain the space.

    For other types of structs, a pointer to the chunk that
    the struct was allocated out of, is present in the bytes
    preceding the pointer passed in. For this chunk it is
    checked whether all the structs in that chunk are now free.
    If so, the entire chunk is free_ed. Otherwise, the space
    is added to the free list for that chunk, and the free count
    incremented.

    This function does not return anything.
*/
void
dwarf_dealloc(Dwarf_Debug dbg,
              Dwarf_Ptr space, Dwarf_Unsigned alloc_type)
{
    Dwarf_Alloc_Hdr alloc_hdr;
    Dwarf_Alloc_Area alloc_area;
    unsigned int type = alloc_type;
    unsigned int index;

    if (space == NULL) {
        return;
    }
    if (type == DW_DLA_ERROR) {
        /* Get pointer to Dwarf_Alloc_Area this struct came from. See
           dwarf_alloc.h ROUND_SIZE_WITH_POINTER stuff */
        alloc_area =
            *(Dwarf_Alloc_Area *) ((char *) space - DW_RESERVE);
        if (alloc_area == 0) {
            /* This is the special case of a failed dwarf_init(). Also
               (and more significantly) there are a variety of other
               situations where libdwarf does not *know* what dbg is
               involved (because of a libdwarf-caller-error) so
               libdwarf uses NULL as the dbg. Those too wind up here. */
            _dwarf_free_special_error(space);
            dwarf_malloc_check_dealloc_data(space, type);
            return;
        }

    }
    if (dbg == NULL) {
        /* App error, or an app that failed to succeed in a
           dwarf_init() call. */
        return;
    }
    if (type >= ALLOC_AREA_INDEX_TABLE_MAX) {
        /* internal or user app error */
        return;
    }

    index = index_into_allocated[type].ia_al_num;
    /*
       A string pointer may point into .debug_info or .debug_string.
       Otherwise, they are directly malloc'ed. */
    dwarf_malloc_check_dealloc_data(space, type);
    if (index == 0) {
        if (type == DW_DLA_STRING) {
            /* Pointers into mapped sections were never malloc'ed;
               silently ignore them rather than free()-ing. */
            if ((Dwarf_Small *) space >= dbg->de_debug_info &&
                (Dwarf_Small *) space <
                dbg->de_debug_info + dbg->de_debug_info_size)
                return;

            if (dbg->de_debug_line != NULL &&
                (Dwarf_Small *) space >= dbg->de_debug_line &&
                (Dwarf_Small *) space <
                dbg->de_debug_line + dbg->de_debug_line_size)
                return;

            if (dbg->de_debug_pubnames != NULL &&
                (Dwarf_Small *) space >= dbg->de_debug_pubnames &&
                (Dwarf_Small *) space <
                dbg->de_debug_pubnames + dbg->de_debug_pubnames_size)
                return;

            if (dbg->de_debug_frame != NULL &&
                (Dwarf_Small *) space >= dbg->de_debug_frame &&
                (Dwarf_Small *) space <
                dbg->de_debug_frame + dbg->de_debug_frame_size)
                return;

            if (dbg->de_debug_str != NULL &&
                (Dwarf_Small *) space >= dbg->de_debug_str &&
                (Dwarf_Small *) space <
                dbg->de_debug_str + dbg->de_debug_str_size)
                return;

            if (dbg->de_debug_funcnames != NULL &&
                (Dwarf_Small *) space >= dbg->de_debug_funcnames &&
                (Dwarf_Small *) space <
                dbg->de_debug_funcnames + dbg->de_debug_funcnames_size)
                return;

            if (dbg->de_debug_typenames != NULL &&
                (Dwarf_Small *) space >= dbg->de_debug_typenames &&
                (Dwarf_Small *) space <
                dbg->de_debug_typenames + dbg->de_debug_typenames_size)
                return;
            if (dbg->de_debug_pubtypes != NULL &&
                (Dwarf_Small *) space >= dbg->de_debug_pubtypes &&
                (Dwarf_Small *) space <
                dbg->de_debug_pubtypes + dbg->de_debug_pubtypes_size)
                return;

            if (dbg->de_debug_varnames != NULL &&
                (Dwarf_Small *) space >= dbg->de_debug_varnames &&
                (Dwarf_Small *) space <
                dbg->de_debug_varnames + dbg->de_debug_varnames_size)
                return;

            if (dbg->de_debug_weaknames != NULL &&
                (Dwarf_Small *) space >= dbg->de_debug_weaknames &&
                (Dwarf_Small *) space <
                dbg->de_debug_weaknames + dbg->de_debug_weaknames_size)
                return;

            free(space);
            return;
        }

        if (type == DW_DLA_LIST ||
            type == DW_DLA_FRAME_BLOCK ||
            type == DW_DLA_LOC_BLOCK || type == DW_DLA_ADDR) {

            free(space);
            return;
        }
        /* else is an alloc type that is not used */
        /* app or internal error */
#ifdef DWARF_SIMPLE_MALLOC
        _dwarf_simple_malloc_botch(4);
#endif
        return;

    }
    if (index_into_allocated[type].specialdestructor) {
        index_into_allocated[type].specialdestructor(space);
    }
#ifdef DWARF_SIMPLE_MALLOC
    _dwarf_simple_malloc_delete_from_list(dbg, space, type);
    free(space);
#else /* !DWARF_SIMPLE_MALLOC */
    alloc_hdr = &dbg->de_alloc_hdr[index];

    /* Get pointer to Dwarf_Alloc_Area this struct came from. See
       dwarf_alloc.h ROUND_SIZE_WITH_POINTER stuff */
    alloc_area = *(Dwarf_Alloc_Area *) ((char *) space - DW_RESERVE);

    /* ASSERT: alloc_area != NULL. If NULL we could abort, let it
       coredump below, or return, pretending all is well. We go on,
       letting program crash. Is caller error. */

    /*
       Check that the alloc_hdr field of the alloc_area we have is
       pointing to the right alloc_hdr. This is used to catch use of
       incorrect deallocation code by the user. */
    if (alloc_area->aa_alloc_hdr != alloc_hdr) {
        /* If we get here, the user has called dwarf_dealloc wrongly or
           there is some other disastrous error. By leaking mem here we
           try to be safe... */
#ifdef DEBUG
        fprintf(stderr,
                "libdwarf Internal error: type %d hdr mismatch %lx %lx "
                "area ptr %lx\n",
                (int) type,
                (long) alloc_area->aa_alloc_hdr,
                (long) alloc_hdr, (long) alloc_area);
#endif
        return;
    }

    alloc_hdr->ah_struct_user_holds--;
    alloc_area->aa_free_structs_in_chunk++;

    /*
       Give chunk back to malloc only when every struct is freed */
    if (alloc_area->aa_free_structs_in_chunk ==
        alloc_hdr->ah_structs_per_chunk) {
        if (alloc_area->aa_prev != NULL) {
            alloc_area->aa_prev->aa_next = alloc_area->aa_next;
        } else {
            alloc_hdr->ah_alloc_area_head = alloc_area->aa_next;
        }

        if (alloc_area->aa_next != NULL) {
            alloc_area->aa_next->aa_prev = alloc_area->aa_prev;
        }

        alloc_hdr->ah_chunks_allocated--;

        if (alloc_area == alloc_hdr->ah_last_alloc_area) {
            alloc_hdr->ah_last_alloc_area = NULL;
        }
        memset(alloc_area, 0, sizeof(*alloc_area));
        free(alloc_area);
    }

    else {
        /* Push the struct onto this chunk's free list, reusing the
           struct's own first bytes as the link. */
        ((Dwarf_Free_List) space)->fl_next = alloc_area->aa_free_list;
        alloc_area->aa_free_list = space;
    }
#endif /* !DWARF_SIMPLE_MALLOC */
}


/*
    Allocates space for a Dwarf_Debug_s struct,
    since one does not exist.
*/
Dwarf_Debug
_dwarf_get_debug(void
    )
{
    Dwarf_Debug dbg;

    dbg = (Dwarf_Debug) malloc(sizeof(struct Dwarf_Debug_s));
    if (dbg == NULL)
        return (NULL);
    else
        memset(dbg, 0, sizeof(struct Dwarf_Debug_s));

    return (dbg);
}


/*
    Sets up the Dwarf_Debug_s struct for all the
    allocation types currently defined.
    Allocation types DW_DLA_STRING, DW_DLA_LIST,
    DW_DLA_FRAME_BLOCK, DW_DLA_LOC_BLOCK are
    malloc'ed directly.

    This routine should be called after _dwarf_setup(),
    so that information about the sizes of the Dwarf
    sections can be used to decide the number of
    structs of each type malloc'ed.
837 838 Also DW_DLA_ELLIST, DW_DLA_BOUNDS, DW_DLA_TYPE, 839 DW_DLA_SUBSCR, DW_DLA_LINEBUF allocation types 840 are currently not used. 841 The ah_bytes_one_struct and ah_structs_per_chunk fields for 842 these types have been set to 1 for efficiency 843 in dwarf_get_alloc(). 844 845 Ah_alloc_num should be greater than 1 for all 846 types that are currently being used. 847 848 Therefore, for these allocation types the 849 ah_bytes_one_struct, and ah_structs_per_chunk fields do not 850 need to be initialized. 851 852 Being an internal routine, assume proper dbg. 853 854 855 856 857*/ 858/* 859** Set up all the Dwarf_Alloc_Hdr records. 860*/ 861 862Dwarf_Debug 863_dwarf_setup_debug(Dwarf_Debug dbg) 864{ 865 int i; 866 867 for (i = 1; i <= MAX_DW_DLA; i++) { 868 const struct ial_s *ialp = &index_into_allocated[i]; 869 unsigned int hdr_index = ialp->ia_al_num; 870 Dwarf_Word str_size = ialp->ia_struct_size; 871 Dwarf_Word str_count = ialp->ia_base_count; 872 Dwarf_Word rnded_size = ROUND_SIZE_WITH_POINTER(str_size); 873 874 Dwarf_Alloc_Hdr alloc_hdr = &dbg->de_alloc_hdr[hdr_index]; 875 876 alloc_hdr->ah_bytes_one_struct = (Dwarf_Half) rnded_size; 877 878 /* ah_structs_per_chunk must be >0 else we are in trouble */ 879 alloc_hdr->ah_structs_per_chunk = str_count; 880 alloc_hdr->ah_bytes_malloc_per_chunk = rnded_size * str_count; 881 } 882 return (dbg); 883} 884 885/* 886 This function prints out the statistics 887 collected on allocation of memory chunks. 888*/ 889void 890dwarf_print_memory_stats(Dwarf_Debug dbg) 891{ 892 Dwarf_Alloc_Hdr alloc_hdr; 893 Dwarf_Shalf i; 894 895 /* 896 Alloc types start at 1, not 0. Hence, the first NULL string, and 897 also a size of MAX_DW_DLA + 1. 
*/ 898 char *alloc_type_name[MAX_DW_DLA + 1] = { 899 "", 900 "DW_DLA_STRING", 901 "DW_DLA_LOC", 902 "DW_DLA_LOCDESC", 903 "DW_DLA_ELLIST", 904 "DW_DLA_BOUNDS", 905 "DW_DLA_BLOCK", 906 "DW_DLA_DEBUG", 907 "DW_DLA_DIE", 908 "DW_DLA_LINE", 909 "DW_DLA_ATTR", 910 "DW_DLA_TYPE", 911 "DW_DLA_SUBSCR", 912 "DW_DLA_GLOBAL", 913 "DW_DLA_ERROR", 914 "DW_DLA_LIST", 915 "DW_DLA_LINEBUF", 916 "DW_DLA_ARANGE", 917 "DW_DLA_ABBREV", 918 "DW_DLA_FRAME_OP", 919 "DW_DLA_CIE", 920 "DW_DLA_FDE", 921 "DW_DLA_LOC_BLOCK", 922 "DW_DLA_FRAME_BLOCK", 923 "DW_DLA_FUNC", 924 "DW_DLA_TYPENAME", 925 "DW_DLA_VAR", 926 "DW_DLA_WEAK", 927 "DW_DLA_ADDR", 928 "DW_DLA_ABBREV_LIST", 929 "DW_DLA_CHAIN", 930 "DW_DLA_CU_CONTEXT", 931 "DW_DLA_FRAME", 932 "DW_DLA_GLOBAL_CONTEXT", 933 "DW_DLA_FILE_ENTRY", 934 "DW_DLA_LINE_CONTEXT", 935 "DW_DLA_LOC_CHAIN", 936 "DW_DLA_HASH_TABLE", 937 "DW_DLA_FUNC_CONTEXT", 938 "DW_DLA_TYPENAME_CONTEXT", 939 "DW_DLA_VAR_CONTEXT", 940 "DW_DLA_WEAK_CONTEXT" "DW_DLA_PUBTYPES_CONTEXT" 941 }; 942 943 if (dbg == NULL) 944 return; 945 946 printf("Size of Dwarf_Debug %4ld bytes\n", 947 (long) sizeof(*dbg)); 948 printf("Size of Dwarf_Alloc_Hdr_s %4ld bytes\n", 949 (long) sizeof(struct Dwarf_Alloc_Hdr_s)); 950 printf("size of Dwarf_Alloc_Area_s %4ld bytes\n", 951 (long) sizeof(struct Dwarf_Alloc_Area_s)); 952 953 printf(" Alloc Type Curr Structs byt str\n"); 954 printf(" ---------- ---- ------- per per\n"); 955 for (i = 1; i <= MAX_DW_DLA; i++) { 956 int indx = index_into_allocated[i].ia_al_num; 957 958 alloc_hdr = &dbg->de_alloc_hdr[indx]; 959 if (alloc_hdr->ah_bytes_one_struct != 1) { 960 printf("%2d %-25s %6d %8d %6d %6d\n", 961 (int) i, 962 alloc_type_name[i], 963 (int) alloc_hdr->ah_chunks_allocated, 964 (int) alloc_hdr->ah_struct_user_holds, 965 (int) alloc_hdr->ah_bytes_malloc_per_chunk, 966 (int) alloc_hdr->ah_structs_per_chunk); 967 } 968 } 969} 970 971 972#ifndef DWARF_SIMPLE_MALLOC 973/* 974 This function is used to recursively 975 free the chunks still allocated, and 976 
    forward chained through the aa_next
    pointer.
*/
static void
_dwarf_recursive_free(Dwarf_Alloc_Area alloc_area)
{
    /* Recurse to the end of the chain first so frees happen
       tail-to-head; each frame frees exactly its own node. */
    if (alloc_area->aa_next != NULL) {
        _dwarf_recursive_free(alloc_area->aa_next);
    }

    alloc_area->aa_next = 0;
    alloc_area->aa_prev = 0;
    free(alloc_area);
}
#endif

/*
    Used to free all space allocated for this Dwarf_Debug.
    The caller should assume that the Dwarf_Debug pointer
    itself is no longer valid upon return from this function.

    In case of difficulty, this function simply returns quietly.
*/
int
_dwarf_free_all_of_one_debug(Dwarf_Debug dbg)
{
    Dwarf_Alloc_Hdr alloc_hdr;
    Dwarf_Shalf i;
    Dwarf_CU_Context context = 0;
    Dwarf_CU_Context nextcontext = 0;

    if (dbg == NULL)
        return (DW_DLV_ERROR);

    /* To do complete validation that we have no surprising missing or
       erroneous deallocs it is advisable to do the dwarf_deallocs here
       that are not things the user can otherwise request.
       Housecleaning. */

    for (context = dbg->de_cu_context_list;
         context; context = nextcontext) {
        Dwarf_Hash_Table hash_table = context->cc_abbrev_hash_table;

        /* A Hash Table is an array with ABBREV_HASH_TABLE_SIZE struct
           Dwarf_Hash_Table_s entries in the array. */
        int hashnum = 0;

        for (; hashnum < ABBREV_HASH_TABLE_SIZE; ++hashnum) {
            struct Dwarf_Abbrev_List_s *abbrev = 0;
            struct Dwarf_Abbrev_List_s *nextabbrev = 0;

            abbrev = hash_table[hashnum].at_head;
            for (; abbrev; abbrev = nextabbrev) {
                nextabbrev = abbrev->ab_next;
                dwarf_dealloc(dbg, abbrev, DW_DLA_ABBREV_LIST);
            }
        }
        /* Save the next link before deallocating the context that
           holds it. */
        nextcontext = context->cc_next;
        dwarf_dealloc(dbg, hash_table, DW_DLA_HASH_TABLE);
        dwarf_dealloc(dbg, context, DW_DLA_CU_CONTEXT);
    }

    /* Housecleaning done. Now really free all the space. */

#ifdef DWARF_SIMPLE_MALLOC
    if (dbg->de_simple_malloc_base) {
        struct simple_malloc_record_s *smp = dbg->de_simple_malloc_base;

        while (smp) {
            int i;
            struct simple_malloc_record_s *prev_smp = 0;

            for (i = 0; i < smp->sr_used; ++i) {
                struct simple_malloc_entry_s *cur;

                cur = &smp->sr_entry[i];
                if (cur->se_addr != 0) {
                    free(cur->se_addr);
                    cur->se_addr = 0;
                }
            }
            prev_smp = smp;
            smp = smp->sr_next;
            free(prev_smp);
        }
        dbg->de_simple_malloc_base = 0;
        dbg->de_simple_malloc_current = 0;
    }
#else
    for (i = 1; i < ALLOC_AREA_REAL_TABLE_MAX; i++) {
        int indx = i;

        alloc_hdr = &dbg->de_alloc_hdr[indx];
        if (alloc_hdr->ah_alloc_area_head != NULL) {
            _dwarf_recursive_free(alloc_hdr->ah_alloc_area_head);
        }
    }

#endif

    memset(dbg, 0, sizeof(*dbg));       /* prevent accidental use later 
                                         */
    free(dbg);
    return (DW_DLV_OK);
}

/* A special case: we have no dbg, no alloc header etc.
   So create something out of thin air that we can recognize
   in dwarf_dealloc.
   Something with the prefix (prefix space hidden from caller).

   Only applies to DW_DLA_ERROR, making up an error record.
1088*/ 1089 1090struct Dwarf_Error_s * 1091_dwarf_special_no_dbg_error_malloc(void) 1092{ 1093 /* the union unused things are to guarantee proper alignment */ 1094 union u { 1095 Dwarf_Alloc_Area ptr_not_used; 1096 struct Dwarf_Error_s base_not_used; 1097 char data_space[sizeof(struct Dwarf_Error_s) + 1098 (DW_RESERVE * 2)]; 1099 }; 1100 char *mem; 1101 1102 mem = malloc(sizeof(union u)); 1103 1104 if (mem == 0) { 1105 return 0; 1106 1107 } 1108 memset(mem, 0, sizeof(union u)); 1109 mem += DW_RESERVE; 1110 return (struct Dwarf_Error_s *) mem; 1111} 1112 1113/* The free side of _dwarf_special_no_dbg_error_malloc() 1114*/ 1115static void 1116_dwarf_free_special_error(Dwarf_Ptr space) 1117{ 1118 char *mem = (char *) space; 1119 1120 mem -= DW_RESERVE; 1121 free(mem); 1122} 1123 1124 1125#ifdef DWARF_SIMPLE_MALLOC 1126/* here solely for planting a breakpoint. */ 1127/* ARGSUSED */ 1128void 1129_dwarf_simple_malloc_botch(int err) 1130{ 1131} 1132static void 1133_dwarf_simple_malloc_add_to_list(Dwarf_Debug dbg, 1134 Dwarf_Ptr addr, 1135 unsigned long size, short alloc_type) 1136{ 1137 struct simple_malloc_record_s *cur; 1138 struct simple_malloc_entry_s *newentry; 1139 1140 if (!dbg->de_simple_malloc_current) { 1141 /* First entry to this routine. */ 1142 dbg->de_simple_malloc_current = 1143 malloc(sizeof(struct simple_malloc_record_s)); 1144 if (!dbg->de_simple_malloc_current) { 1145 return; /* no memory, give up */ 1146 } 1147 memset(dbg->de_simple_malloc_current, 1148 0, sizeof(struct simple_malloc_record_s)); 1149 dbg->de_simple_malloc_base = dbg->de_simple_malloc_current; 1150 } 1151 cur = dbg->de_simple_malloc_current; 1152 1153 if (cur->sr_used >= DSM_BLOCK_COUNT) { 1154 /* better not be > than as that means chaos */ 1155 1156 /* Create a new block to link at the head. 
*/ 1157 1158 struct simple_malloc_record_s *newblock = 1159 malloc(sizeof(struct simple_malloc_record_s)); 1160 if (!newblock) { 1161 return; /* Can do nothing, out of memory */ 1162 } 1163 memset(newblock, 0, sizeof(struct simple_malloc_record_s)); 1164 /* Link the new block at the head of the chain, and make it 1165 'current' */ 1166 dbg->de_simple_malloc_current = newblock; 1167 newblock->sr_next = cur; 1168 cur = newblock; 1169 } 1170 newentry = &cur->sr_entry[cur->sr_used]; 1171 newentry->se_addr = addr; 1172 newentry->se_size = size; 1173 newentry->se_type = alloc_type; 1174 ++cur->sr_used; 1175} 1176 1177/* 1178 DWARF_SIMPLE_MALLOC is for testing the hypothesis that the existing 1179 complex malloc scheme in libdwarf is pointless complexity. 1180 1181 DWARF_SIMPLE_MALLOC also makes it easy for a malloc-tracing 1182 tool to verify libdwarf malloc has no botches (though of course 1183 such does not test the complicated standard-libdwarf-alloc code). 1184 1185 To properly answer the question, the simple-malloc allocate 1186 and delete should be something other than a simple list. 1187 Perhaps a heap, or perhaps a red-black tree. 1188 1189*/ 1190static void 1191_dwarf_simple_malloc_delete_from_list(Dwarf_Debug dbg, 1192 Dwarf_Ptr space, short alloc_type) 1193{ 1194 if (space == 0) { 1195 _dwarf_simple_malloc_botch(6); 1196 } 1197 if (dbg->de_simple_malloc_base) { 1198 struct simple_malloc_record_s *smp = dbg->de_simple_malloc_base; 1199 1200 while (smp) { 1201 int i; 1202 1203 for (i = 0; i < smp->sr_used; ++i) { 1204 struct simple_malloc_entry_s *cur; 1205 1206 cur = &smp->sr_entry[i]; 1207 if (cur->se_addr == space) { 1208 if (cur->se_type != alloc_type) { 1209 _dwarf_simple_malloc_botch(0); 1210 } 1211 cur->se_addr = 0; 1212 return; 1213 } 1214 } 1215 smp = smp->sr_next; 1216 } 1217 } 1218 /* Never found the space */ 1219 _dwarf_simple_malloc_botch(1); 1220 return; 1221 1222} 1223#endif 1224