/*
 * Copyright (c) 2011 Apple Inc. All rights reserved.
 *
 * @APPLE_APACHE_LICENSE_HEADER_START@
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * @APPLE_APACHE_LICENSE_HEADER_END@
 */
/*
    auto_gdb_interface.cpp
    Routines called by gdb to implement its info gc-references and gc-roots commands.
    Copyright (c) 2007-2011 Apple Inc. All rights reserved.
 */

#include <vector>
#include <deque>
#include "auto_gdb_interface.h"
#include "auto_impl_utilities.h"
#include "Zone.h"
#include "BlockIterator.h"
#include "ReferenceIterator.h"

namespace Auto {
    template <typename ReferenceIterator> class GDBPendingStack {
        typedef std::vector<uintptr_t, AuxAllocator<uintptr_t> > uintptr_vector;
        uintptr_vector _small_stack, _large_stack;
    public:
        const PendingStackHint hints() { return PendingStackWantsEagerScanning; }

        void push(Subzone *subzone, usword_t q) {
            // pack the subzone pointer and the quantum index into a single word; the asserts verify that
            // the quantum index fits entirely within the subzone's alignment bits.
            assert(q <= 65536);
            assert(uintptr_t(subzone) == (uintptr_t(subzone) & ~0x1FFFF));
            _small_stack.push_back(uintptr_t(subzone) | q);     // 1024 * 1024 / 16 == 65536 == 0x10000
        }

        void push(Large *large) {
            _large_stack.push_back(uintptr_t(large));
        }

        static bool mark(Subzone *subzone, usword_t q) { return subzone->test_and_set_mark(q); }
        static bool mark(Large *large) { return large->test_and_set_mark(); }

        static bool is_marked(Subzone *subzone, usword_t q) { return subzone->is_marked(q); }
        static bool is_marked(Large *large) { return large->is_marked(); }

        void scan(ReferenceIterator &scanner) {
            for (;;) {
                // prefer scanning small blocks to large blocks, to keep the stacks shallow.
                if (_small_stack.size()) {
                    uintptr_t back = _small_stack.back();
                    _small_stack.pop_back();
                    Subzone *subzone = reinterpret_cast<Subzone *>(back & ~0x1FFFF);
                    usword_t q = back & 0x1FFFF;
                    scanner.scan(subzone, q);
                } else if (_large_stack.size()) {
                    Large *large = reinterpret_cast<Large *>(_large_stack.back());
                    _large_stack.pop_back();
                    scanner.scan(large);
                } else {
                    return;
                }
            }
        }

        template <typename U> struct rebind { typedef GDBPendingStack<U> other; };
    };

    template <typename ReferenceIterator> class GDBScanningStrategy : public FullScanningStrategy<ReferenceIterator> {
    public:
        // provide a way to rebind this template type to another, just like STL allocators can do.
        template <typename U> struct rebind { typedef GDBScanningStrategy<U> other; };

        // Could use this to customize the scanning strategy. For now, it could just as easily be a typedef.
    };

    typedef std::vector<auto_memory_reference_t, AuxAllocator<auto_memory_reference_t> > RefVector;

    class GDBReferenceRecorder {
    private:
        Zone *_zone;
        void *_block;
        void *_stack_bottom;
        RefVector _refs;

        struct Configuration;
        typedef ReferenceIterator<Configuration> GDBReferenceIterator;

        struct Configuration {
            typedef GDBReferenceRecorder ReferenceVisitor;
            typedef GDBPendingStack<GDBReferenceIterator> PendingStack;
            typedef GDBScanningStrategy<GDBReferenceIterator> ScanningStrategy;
        };

    public:
        GDBReferenceRecorder(Zone *zone, void *block, void *stack_bottom) : _zone(zone), _block(block), _stack_bottom(stack_bottom) {}

        void visit(const ReferenceInfo &info, void **slot, void *block) {
            if (block == _block) {
                auto_memory_reference_t ref = { NULL };
                switch (info.kind()) {
                    case kRootReference:
                        ref.address = slot;
                        ref.offset = 0;
                        ref.kind = auto_memory_block_global;
                        break;
                    case kStackReference:
                        ref.address = info.thread().stack_base();
                        ref.offset = (intptr_t)slot - (intptr_t)ref.address;
                        ref.kind = auto_memory_block_stack;
                        break;
                    case kConservativeHeapReference:
                    case kAllPointersHeapReference:
                        ref.address = _zone->block_start((void *)slot);
                        ref.offset = (intptr_t)slot - (intptr_t)ref.address;
                        ref.kind = auto_memory_block_bytes;
                        ref.retainCount = auto_zone_retain_count((auto_zone_t *)_zone, ref.address);
                        break;
                    case kExactHeapReference:
                        ref.address = _zone->block_start((void *)slot);
                        ref.offset = (intptr_t)slot - (intptr_t)ref.address;
                        ref.kind = auto_memory_block_object;
                        ref.retainCount = auto_zone_retain_count((auto_zone_t *)_zone, ref.address);
                        break;
                    case kAssociativeReference:
                        ref.address = info.object();
                        ref.offset = (intptr_t)info.key();
                        ref.kind = auto_memory_block_association;
                        ref.retainCount = auto_zone_retain_count((auto_zone_t *)_zone, ref.address);
                        break;
                    default:
                        break;
                }
                if (ref.address) _refs.push_back(ref);
            }
        }

        void visit(const ReferenceInfo &info, void **slot, Subzone *subzone, usword_t q) {
            visit(info, slot, subzone->quantum_address(q));
        }

        void visit(const ReferenceInfo &info, void **slot, Large *large) {
            visit(info, slot, large->address());
        }

        void scan() {
            Configuration::PendingStack stack;
            GDBReferenceIterator scanner(_zone, *this, stack, _stack_bottom);
            scanner.scan();
        }

        auto_memory_reference_list_t *copy_refs() {
            uint32_t count = _refs.size();
            auto_memory_reference_list_t *result = (auto_memory_reference_list_t *)aux_malloc(sizeof(auto_memory_reference_list_t) + count * sizeof(auto_memory_reference_t));
            result->count = count;
            std::copy(_refs.begin(), _refs.end(), result->references);
            return result;
        }
    };

    class GDBRootFinder {
    private:
        struct Configuration;
        typedef ReferenceIterator<Configuration> GDBRootIterator;
        struct Configuration {
            typedef GDBRootFinder ReferenceVisitor;
            typedef GDBPendingStack<GDBRootIterator> PendingStack;
            typedef GDBScanningStrategy<GDBRootIterator> ScanningStrategy;
        };

        struct Node;
        typedef std::vector<Node *, AuxAllocator<Node *> > NodeVector;
        typedef std::deque<Node *, AuxAllocator<Node *> > NodeQueue;
        typedef __gnu_cxx::hash_map<void *, Node *, AuxPointerHash, AuxPointerEqual, AuxAllocator<void *> > NodeSet;
        typedef __gnu_cxx::hash_map<Node *, auto_memory_reference_t, AuxPointerHash, AuxPointerEqual, AuxAllocator<Node *> > NodeRefMap;     // Node * -> auto_memory_reference_t

        //
        // Node - Represents each object in the roots sub-graph. Contains a set that represents all of the unique pointers FROM other nodes to
        // this node. Currently, only one reference from a given node is represented; this is sufficient for nodes that represent stack
        // and global references, because there can be only one from a given Node. On the other hand, since an object may contain multiple pointers,
        // we might want to change the representation to be a set of pair<ref, Node>, where there is a unique entry for every distinct reference
        // to an object. This would provide a more comprehensive set of references, and may be necessary for understanding the complete picture
        // of a root set.
        //
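        // For example, if a global variable g points to heap object A, and a slot inside A points to the target block B,
        // then B's Node records a heap reference keyed by A's Node (the slot within A), and A's Node records a global
        // reference keyed by g's Node; copy_roots() below reports that chain as the shortest path g -> A -> B.
        //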
        struct Node {
            void *_address;                         // base address of this node.
            Node *_target;                          // used by shortest path algorithm.
            NodeRefMap _references;                 // references to THIS Node, keyed by the referring (slot-owning) Node.
            enum Color { White = 0, Gray, Black };  // states a Node can be in during predecessor discovery.
            Color _color : 2;
            char _is_thread_local : 1;              // true if this node is thread local.
            char _is_retained : 1;                  // true if this node has a retain count.

            Node(void *address) : _address(address), _target(NULL), _references(), _color(White), _is_thread_local(0), _is_retained(0) {}

            Color darken() { if (_color < Black) _color = (Color)(_color + 1); return _color; }

            void addRef(Zone *zone, Node *slotNode, const ReferenceInfo &info, void **slot) {
                if (_references.find(slotNode) == _references.end()) {
                    auto_memory_reference_t ref = { NULL };
                    switch (info.kind()) {
                        case kRootReference:
                            ref.address = slotNode->_address;
                            ref.offset = 0;
                            ref.kind = auto_memory_block_global;
                            break;
                        case kStackReference:
                            ref.address = info.thread().stack_base();
                            ref.offset = (intptr_t)slot - (intptr_t)ref.address;
                            ref.kind = auto_memory_block_stack;
                            break;
                        case kConservativeHeapReference:
                        case kAllPointersHeapReference:
                            ref.address = slotNode->_address;
                            ref.offset = (intptr_t)slot - (intptr_t)ref.address;
                            ref.kind = auto_memory_block_bytes;
                            ref.retainCount = auto_zone_retain_count((auto_zone_t *)zone, ref.address);
                            break;
                        case kExactHeapReference:
                            ref.address = slotNode->_address;
                            ref.offset = (intptr_t)slot - (intptr_t)ref.address;
                            ref.kind = auto_memory_block_object;
                            ref.retainCount = auto_zone_retain_count((auto_zone_t *)zone, ref.address);
                            break;
                        case kAssociativeReference:
                            ref.address = slotNode->_address;
                            ref.offset = (intptr_t)info.key();
                            ref.kind = auto_memory_block_association;
                            ref.retainCount = auto_zone_retain_count((auto_zone_t *)zone, ref.address);
                            break;
                        default:
                            return;
                    }
                    _references[slotNode] = ref;
                }
            }

            typedef void (^ref_visitor_t) (Node *targetNode, Node *slotNode, auto_memory_reference_t &ref);

            void visitRefs(ref_visitor_t visitor) {
                NodeQueue queue;
                queue.push_back(this);
                this->_color = Black;
                while (!queue.empty()) {
                    Node *node = queue.front();
                    queue.pop_front();
                    for (NodeRefMap::iterator i = node->_references.begin(), end = node->_references.end(); i != end; ++i) {
                        Node *child = i->first;
                        if (child->_color == White) {
                            child->_color = Black;
                            visitor(node, child, i->second);
                            queue.push_back(child);
                        }
                    }
                }
            }
        };

        Zone *_zone;
        Node *_blockNode;
        auto_memory_reference_t _blockRef;
        void *_stack_bottom;
        NodeSet _nodes;
        NodeSet _nodesToExplore;

    public:
        GDBRootFinder(Zone *zone, void *block, void *stack_bottom) : _zone(zone), _blockNode(new Node(block)), _stack_bottom(stack_bottom) {
            _nodes[_blockNode->_address] = _blockNode;
            _nodesToExplore[_blockNode->_address] = _blockNode;

            auto_block_info_sieve<AUTO_BLOCK_INFO_REFCOUNT|AUTO_BLOCK_INFO_LAYOUT> block_info(_zone, _blockNode->_address);
            _blockRef = (auto_memory_reference_t) { _blockNode->_address, 0, is_object(block_info.layout()) ? auto_memory_block_object : auto_memory_block_bytes, block_info.refcount() };
        }

        ~GDBRootFinder() {
            for (NodeSet::iterator i = _nodes.begin(), end = _nodes.end(); i != end; ++i) {
                delete i->second;
            }
        }

        Node *nodeForSlot(const ReferenceInfo &info, void **slot) {
            Node *node = NULL;
            NodeSet::iterator i;
            switch (info.kind()) {
                case kRootReference:
                case kStackReference:
                    i = _nodes.find(slot);
                    if (i != _nodes.end()) {
                        node = i->second;
                    } else {
                        node = new Node(slot);
                        _nodes[slot] = node;
                    }
                    break;
                case kAssociativeReference:
                    i = _nodes.find(slot);
                    if (i != _nodes.end()) {
                        node = i->second;
                    } else {
                        node = new Node(slot);
                        _nodes[slot] = node;
                        _nodesToExplore[slot] = node;
                    }
                    break;
                case kConservativeHeapReference:
                case kAllPointersHeapReference:
                case kExactHeapReference:
                {
                    void *start = _zone->block_start(slot);
                    i = _nodes.find(start);
                    if (i != _nodes.end()) {
                        node = i->second;
                    } else {
                        node = new Node(start);
                        _nodes[start] = node;
                        _nodesToExplore[start] = node;
                    }
                }
                    break;
                default:
                    break;
            }
            return node;
        }

        // New algorithm idea: build a sub-graph starting from the specified object, connected to all of its roots. The sub-graph is built effectively in parallel, by considering more
        // than one node at a time. When a new node is added to the tree, the current pass through the heap won't necessarily visit all of the children of the new node, so
        // subsequent passes will be needed if new nodes are added. A node is known to have been fully explored once it has been through a complete pass, and it can then be
        // removed from the set of nodes still being explored. When this set becomes empty, the sub-graph is complete.

        void visit(const ReferenceInfo &info, void **slot, void *block) {
            NodeSet::iterator i = _nodesToExplore.find(block);
            if (i != _nodesToExplore.end()) {
                Node *node = i->second;
                switch (info.kind()) {
                    case kThreadLocalReference:
                        node->_is_thread_local = 1;
                        break;
                    case kRetainedReference:
                        node->_is_retained = 1;
                        break;
                    case kAssociativeReference:
                        // slot points into the associations table.
                        slot = (void **)info.object();
                    default:
                        // otherwise, this is a reference that comes from a slot in memory.
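                        // (the kAssociativeReference case above falls through to here, so the retargeted slot
                        // is recorded like any other in-memory reference.)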
                        Node *slotNode = nodeForSlot(info, slot);
                        if (slotNode) node->addRef(_zone, slotNode, info, slot);
                }
            }
        }

        void visit(const ReferenceInfo &info, void **slot, Subzone *subzone, usword_t q) {
            visit(info, slot, subzone->quantum_address(q));
        }

        void visit(const ReferenceInfo &info, void **slot, Large *large) {
            visit(info, slot, large->address());
        }

        bool darkenExploredNodes() {
            __block NodeVector blackNodes;
            std::for_each(_nodesToExplore.begin(), _nodesToExplore.end(), ^(NodeSet::value_type &value) {
                if (value.second->darken() == Node::Black) blackNodes.push_back(value.second);
            });
            std::for_each(blackNodes.begin(), blackNodes.end(), ^(Node *node) { _nodesToExplore.erase(node->_address); });
            return (_nodesToExplore.size() != 0);
        }

        void scan() {
            Configuration::PendingStack stack;
            GDBRootIterator scanner(_zone, *this, stack, _stack_bottom);
            while (darkenExploredNodes()) {
                scanner.scan();
                _zone->reset_all_marks();
            }
            // NodeSet::value_type is a std::pair<void *, Node *>.
            std::for_each(_nodes.begin(), _nodes.end(), ^(NodeSet::value_type &value) { value.second->_color = Node::White; });
        }

        typedef std::vector<RefVector, AuxAllocator<RefVector> > PathsVector;

        void addPath(PathsVector &paths, Node *rootNode) {
            paths.resize(paths.size() + 1);
            RefVector &path = paths.back();
            Node *slotNode = rootNode;
            while (slotNode != _blockNode) {
                Node *targetNode = slotNode->_target;
                assert(targetNode != NULL);
                auto_memory_reference_t &ref = targetNode->_references[slotNode];
                path.push_back(ref);
                slotNode = targetNode;
            }
            path.push_back(_blockRef);
        }

        auto_root_list_t *copy_roots() {
            auto_root_list_t *result = NULL;

            // use breadth-first search (Dijkstra's algorithm with unit edge weights) to discover the shortest path to each root.
            __block PathsVector paths;
            __block bool considerThreadLocalGarbage = false;

            Node::ref_visitor_t visitor = ^(Node *targetNode, Node *slotNode, auto_memory_reference_t &ref) {
                assert(slotNode->_target == NULL);
                slotNode->_target = targetNode;
                switch (ref.kind) {
                    case auto_memory_block_global:
                    case auto_memory_block_stack:
                        // these are both roots.
                        addPath(paths, slotNode);
                        break;
                    case auto_memory_block_bytes:
                    case auto_memory_block_object:
                        // retained and thread-local blocks are roots too.
                        if (slotNode->_is_retained) {
                            addPath(paths, slotNode);
                        } else if (considerThreadLocalGarbage && slotNode->_is_thread_local) {
                            addPath(paths, slotNode);
                        }
                        break;
                    case auto_memory_block_association:
                        // include associations with non-blocks (or retained blocks) as roots.
                        if (!_zone->is_block(slotNode->_address) || slotNode->_is_retained) {
                            addPath(paths, slotNode);
                        }
                        break;
                    default:
                        break;
                }
            };
            _blockNode->visitRefs(visitor);

            // <rdar://problem/6426033>: If block is retained, it roots itself.
            if (_blockRef.retainCount) addPath(paths, _blockNode);

            // <rdar://problem/8026966>: If no roots were found, consider thread local garbage.
            if (paths.empty()) {
                // reset all nodes to run another search, collecting paths from thread-local objects.
                std::for_each(_nodes.begin(), _nodes.end(), ^(NodeSet::value_type &value) {
                    value.second->_color = Node::White;
                    value.second->_target = NULL;
                });
                considerThreadLocalGarbage = true;
                _blockNode->visitRefs(visitor);
            }

            size_t count = paths.size();
            size_t list_size = sizeof(auto_root_list_t) + count * sizeof(auto_memory_reference_list_t);
            for (usword_t i = 0; i < count; i++) list_size += paths[i].size() * sizeof(auto_memory_reference_t);
            result = (auto_root_list_t *)aux_malloc(list_size);
            result->count = count;
            auto_memory_reference_list_t *list = result->roots;
            for (usword_t i = 0; i < count; i++) {
                const RefVector &refs = paths[i];
                list->count = refs.size();
                std::copy(refs.begin(), refs.end(), list->references);
                list = (auto_memory_reference_list_t *)displace(list, sizeof(auto_root_list_t) + list->count * sizeof(auto_memory_reference_t));
            }
            return result;
        }
    };
};

using namespace Auto;

// <rdar://problem/6614079> - To avoid deadlocks with malloc stack logging, this class inhibits the logger when called
// from the debugger.

struct MallocLoggerInhibitor {
    malloc_logger_t *_old_logger;
    static void inhibited_logger(uint32_t, uintptr_t, uintptr_t, uintptr_t, uintptr_t, uint32_t) {}
    MallocLoggerInhibitor() : _old_logger(malloc_logger) { if (_old_logger) malloc_logger = &inhibited_logger; }
    ~MallocLoggerInhibitor() { if (_old_logger) malloc_logger = _old_logger; }
};

auto_memory_reference_list_t *auto_gdb_enumerate_references(auto_zone_t *zone, void *address, void *stack_base) {
    auto_memory_reference_list_t *result = NULL;
    Zone *azone = (Zone *)zone;
    if (azone && azone->block_collector()) {
        MallocLoggerInhibitor inhibitor;
        GDBReferenceRecorder recorder(azone, address, stack_base);
        recorder.scan();
        azone->reset_all_marks();
        result = recorder.copy_refs();
        azone->unblock_collector();
    }
    return result;
}

auto_root_list_t *auto_gdb_enumerate_roots(auto_zone_t *zone, void *address, void *stack_base) {
    auto_root_list_t *result = NULL;
    Zone *azone = (Zone *)zone;
    if (azone && azone->block_collector()) {
        MallocLoggerInhibitor inhibitor;
        GDBRootFinder finder(azone, address, stack_base);
        finder.scan();
        result = finder.copy_roots();
        azone->unblock_collector();
    }
    return result;
}

extern "C" bool gdb_is_local(void *address) {
    Zone *azone = (Zone *)auto_zone_from_pointer(address);
    if (azone && azone->in_subzone_memory(address)) {
        Subzone *subzone = Subzone::subzone(address);
        return subzone->is_live_thread_local(subzone->quantum_index(address));
    }
    return false;
}

#if DEBUG

extern "C" void gdb_refs(void *address) {
    auto_zone_t *zone = auto_zone_from_pointer(address);
    if (zone) {
        auto_memory_reference_list_t *refs = auto_gdb_enumerate_references(zone, address, (void *)auto_get_sp());
        if (refs) aux_free(refs);
    }
}

extern "C" void gdb_roots(void *address) {
    auto_zone_t *zone = auto_zone_from_pointer(address);
    if (zone) {
        auto_root_list_t *roots = auto_gdb_enumerate_roots(zone, address, (void *)auto_get_sp());
        if (roots) aux_free(roots);
    }
}

extern "C" bool gdb_is_root(auto_zone_t *zone, void *address) {
    Zone *azone = (Zone *)zone;
    return azone->is_root(address);
}

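// Illustrative helper (a sketch, not part of the original gdb command set; the name gdb_print_refs is hypothetical):
// dumps the references returned by auto_gdb_enumerate_references(), interpreting each field the way
// GDBReferenceRecorder::visit() fills it in above.
extern "C" void gdb_print_refs(void *address) {
    auto_zone_t *zone = auto_zone_from_pointer(address);
    if (zone) {
        auto_memory_reference_list_t *refs = auto_gdb_enumerate_references(zone, address, (void *)auto_get_sp());
        if (refs) {
            for (uint32_t i = 0; i < refs->count; i++) {
                const auto_memory_reference_t &ref = refs->references[i];
                printf("reference %u: %p + %ld (kind = %d, rc = %u)\n", i, ref.address, (long)ref.offset, (int)ref.kind, (unsigned)ref.retainCount);
            }
            aux_free(refs);
        }
    }
}
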
// Prototype of Template-Based Heap Scanner.
// This is basically sample code showing how the template-based scanner works.

struct RetainedBlocksVisitor {
    Zone *_zone;

    struct Configuration;
    typedef ReferenceIterator<Configuration> Iterator;
    struct Configuration {
        typedef RetainedBlocksVisitor ReferenceVisitor;
        typedef GDBPendingStack<Iterator> PendingStack;
        typedef FullScanningStrategy<Iterator> ScanningStrategy;
    };

    RetainedBlocksVisitor(Zone *zone) : _zone(zone) {}

    void visit(const ReferenceInfo &info, void **ref, Subzone *subzone, usword_t q) {
        if (subzone->has_refcount(q)) {
            void *block = subzone->quantum_address(q);
            SubzoneBlockRef blockRef(subzone, q);
            printf("small/medium block %p (sz = %lu, rc = %d)\n", block, subzone->size(q), (int)blockRef.refcount());
        }
    }

    void visit(const ReferenceInfo &info, void **ref, Large *large) {
        if (large->refcount()) {
            printf("large block %p (sz = %lu, rc = %lu)\n", large->address(), large->size(), large->refcount());
        } else if (info.kind() == kAssociativeReference) {
            printf("large block %p associatively referenced\n", large->address());
        }
    }
};

extern "C" void gdb_print_retained_blocks(auto_zone_t *zone) {
    Zone *azone = (Zone *)zone;
    if (azone->block_collector()) {
        RetainedBlocksVisitor visitor(azone);
        RetainedBlocksVisitor::Configuration::PendingStack pending_stack;
        RetainedBlocksVisitor::Iterator scanner(azone, visitor, pending_stack, (void *)auto_get_sp());
        scanner.scan();
        azone->reset_all_marks();
        azone->unblock_collector();
    }
}

struct NewBlocksVisitor {
    struct Configuration;
    typedef ReferenceIterator<Configuration> Iterator;
    struct Configuration {
        typedef NewBlocksVisitor ReferenceVisitor;
        typedef GDBPendingStack<Iterator> PendingStack;
        typedef GenerationalScanningStrategy<Iterator> ScanningStrategy;
    };
    size_t _small_count, _large_count;

    NewBlocksVisitor() : _small_count(0), _large_count(0) {}

    void visit(const ReferenceInfo &info, void **ref, Subzone *subzone, usword_t q) {
        if (subzone->is_new(q)) {
            ++_small_count;
            // printf("small/medium block %p (sz = %lu, age = %lu)\n", subzone->quantum_address(q), subzone->size(q), subzone->age(q));
        }
    }

    void visit(const ReferenceInfo &info, void **ref, Large *large) {
        if (large->is_new()) {
            ++_large_count;
            // printf("large block %p (sz = %lu, age = %lu)\n", large->address(), large->size(), large->age());
        }
    }
};

extern "C" void gdb_print_new_blocks(auto_zone_t *zone) {
    Zone *azone = (Zone *)zone;
    if (azone->block_collector()) {
        NewBlocksVisitor visitor;
        NewBlocksVisitor::Configuration::PendingStack pending_stack;
        NewBlocksVisitor::Iterator scanner(azone, visitor, pending_stack, (void *)auto_get_sp());
        scanner.scan();
        printf("new blocks: %lu small/medium, %lu large\n", visitor._small_count, visitor._large_count);

        azone->reset_all_marks();
        azone->unblock_collector();
    }
}

extern "C" void gdb_print_large_blocks(auto_zone_t *zone) {
    Zone *azone = (Zone *)zone;
    SpinLock lock(azone->large_lock());
    if (azone->large_list()) {
        printf("global large blocks:\n");
        for (Large *large = azone->large_list(); large != NULL; large = large->next()) {
            printf("large block %p: size = %lu, rc = %lu\n", large->address(), large->size(), large->refcount());
        }
    }
}

#endif
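
// Illustrative sketch (not part of the original file; the name gdb_print_root_paths is hypothetical): shows how a
// client might walk the nested, variable-length auto_root_list_t produced by GDBRootFinder::copy_roots(). The
// traversal mirrors the displace() arithmetic used there.
#if DEBUG
extern "C" void gdb_print_root_paths(void *address) {
    auto_zone_t *zone = auto_zone_from_pointer(address);
    if (!zone) return;
    auto_root_list_t *roots = auto_gdb_enumerate_roots(zone, address, (void *)auto_get_sp());
    if (!roots) return;
    auto_memory_reference_list_t *list = roots->roots;
    for (uint32_t i = 0; i < roots->count; i++) {
        printf("path %u (%u references):\n", i, (unsigned)list->count);
        for (uint32_t j = 0; j < list->count; j++) {
            const auto_memory_reference_t &ref = list->references[j];
            printf("  %p + %ld (kind = %d, rc = %u)\n", ref.address, (long)ref.offset, (int)ref.kind, (unsigned)ref.retainCount);
        }
        // advance to the next variable-length reference list, exactly as copy_roots() laid them out.
        list = (auto_memory_reference_list_t *)displace(list, sizeof(auto_root_list_t) + list->count * sizeof(auto_memory_reference_t));
    }
    aux_free(roots);
}
#endif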