Searched refs:Large (Results 1 - 25 of 34) sorted by relevance


/macosx-10.10.1/bmalloc-7600.1.17/bmalloc/
ObjectType.cpp
    41: return Large;
ObjectType.h
    34: enum ObjectType { Small, Medium, Large, XLarge };    (enumerator in enum:bmalloc::ObjectType)
bmalloc.h
    66: case Large: {
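
The bmalloc hits above show a four-way size classification (Small, Medium, Large, XLarge) and a switch on the resulting ObjectType. Below is a minimal sketch of that pattern; the thresholds and the objectType() helper are illustrative assumptions, not bmalloc's actual cutoffs or API.

    #include <cstddef>

    namespace sketch {

    enum ObjectType { Small, Medium, Large, XLarge };

    // Hypothetical thresholds; bmalloc's real cutoffs differ.
    inline ObjectType objectType(std::size_t size) {
        if (size <= 256)         return Small;
        if (size <= 1024)        return Medium;
        if (size <= 1024 * 1024) return Large;
        return XLarge;
    }

    // Callers then dispatch on the classification, as in the bmalloc.h hit above.
    inline const char *allocatorFor(std::size_t size) {
        switch (objectType(size)) {
            case Small:  return "small allocator";
            case Medium: return "medium allocator";
            case Large:  return "large allocator";
            case XLarge: return "direct vm allocation";
        }
        return "unreachable";
    }

    } // namespace sketch
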
/macosx-10.10.1/libauto-186/
Large.h
    22: Large Block Support
    43: //----- Large -----//
    49: class Large {    (class in namespace:Auto)
    56: Large *_prev;      // previous large block or NULL if head of list
    57: Large *_next;      // next large block or NULL if tail of list
    70: Large(Zone *zone, usword_t vm_size, usword_t size, usword_t layout, usword_t refcount, usword_t age, const WriteBarrier &wb);
    101: // Returns the Large of the specified large quantum.
    103: static inline Large *quantum_large(const usword_t q, void *arena) { return (Large *)((usword_t)arena + (q << allocate_quantum_large_log2)); }
    111: static inline usword_t side_data_size() { return align2(sizeof(Large), allocate_quantum_small_log
    [all...]
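
The Large.h hits show the shape of libauto's large-block bookkeeping: each block carries intrusive _prev/_next pointers for a doubly linked list, and quantum_large() recovers a Large* from a quantum index by shifting into an arena. Here is a compilable sketch of that pattern; the quantum size is a made-up value and the real class carries much more state (layout, refcount, age, write barrier).

    #include <cstdint>

    namespace sketch {

    using usword_t = std::uintptr_t;

    // Illustrative stand-in for allocate_quantum_large_log2 (real value differs).
    constexpr usword_t quantum_large_log2 = 16;   // 64 KiB quanta, assumption only

    class Large {
        Large *_prev = nullptr;   // previous large block, or NULL if head of list
        Large *_next = nullptr;   // next large block, or NULL if tail of list

    public:
        Large *next() const { return _next; }

        // Push this block onto the front of a zone's large list (intrusive link).
        void link_front(Large *&head) {
            _next = head;
            _prev = nullptr;
            if (head) head->_prev = this;
            head = this;
        }

        // Mirror of quantum_large(): quantum index -> block address within an arena.
        static Large *quantum_large(usword_t q, void *arena) {
            return reinterpret_cast<Large *>(
                reinterpret_cast<usword_t>(arena) + (q << quantum_large_log2));
        }
    };

    } // namespace sketch
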
Large.cpp
    21: Large.cpp
    22: Large Block Support
    26: #include "Large.h"
    33: //----- Large -----//
    41: At the quantum boundary we establish the "Large" data structure.
    50: Large::Large(Zone *zone, usword_t vm_size, usword_t size, usword_t layout, usword_t refcount, usword_t age, const WriteBarrier &wb)    (function in class:Auto::Large)
    61: Large *Large::allocate(Zone *zone, const usword_t size, usword_t layout, bool refcount_is_one) {
    92: // construct the WriteBarrier here, to simplify the Large constructo
    [all...]
InUseEnumerator.cpp
    29: #include "Large.h"
    102: for (Large *large = zone->large_list(); large; large = large->next()) {
    103: record(large, sizeof(Large), MALLOC_ADMIN_REGION_RANGE_TYPE);
    104: Large *largeReader = (Large *)read(large, sizeof(Large));
    109: record(displace(large, sizeof(Large) + largeReader->size()), largeReader->vm_size() - (sizeof(Large) + largeReader->size()), MALLOC_ADMIN_REGION_RANGE_TYPE);
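
The InUseEnumerator hits walk the zone's large list and report ranges per block: the Large header as admin space, and the slop between header-plus-payload and the full VM reservation as admin space again. A hedged sketch of that traversal shape follows; Zone is omitted, and the record() callback and range-type constants are placeholders, not the malloc enumeration API.

    #include <cstddef>
    #include <functional>

    namespace sketch {

    struct Large {
        Large *next;          // intrusive list link
        std::size_t size;     // client-visible bytes
        std::size_t vm_size;  // total bytes reserved: header + payload + slop
    };

    enum RangeType { AdminRange, DataRange };   // placeholders for the MALLOC_* constants

    // Walk a large list, reporting header, payload, and trailing admin space.
    // 'record' stands in for the enumerator callback used by the real code.
    inline void enumerate_large(Large *large_list,
                                const std::function<void(void *, std::size_t, RangeType)> &record) {
        for (Large *large = large_list; large != nullptr; large = large->next) {
            // The Large header itself is allocator bookkeeping.
            record(large, sizeof(Large), AdminRange);
            // The client block follows the header.
            record(reinterpret_cast<char *>(large) + sizeof(Large), large->size, DataRange);
            // Anything past header + payload up to vm_size is also admin space.
            std::size_t used = sizeof(Large) + large->size;
            if (large->vm_size > used)
                record(reinterpret_cast<char *>(large) + used, large->vm_size - used, AdminRange);
        }
    }

    } // namespace sketch
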
ZoneCollectionChecking.cpp
    57: Large *large = _large_list;
    77: Large *large = block_start_large(pointer);
    97: Large *l = Large::large(block);
    122: inline bool visit(Zone *zone, Large *large) {
    188: inline bool visit(Zone *zone, Large *large) {
BlockIterator.h
    33: #include "Large.h"
    64: for (Large *large = zone->large_list(); large != NULL; large = large->next()) {
    78: // void visit(Zone *, Large *) - visit a Large
    87: Large *_current_large;    // the next large to visit
    95: // without requiring logic that picks the Large/Subzone to be customized by the Visitor.
    97: // Visit a Large. If the visitor wants to process them all at once, visit them all.
    98: void visit_large(Large *large_to_visit) {
    128: Large *large_to_visit = NULL;
    215: for (Large *larg
    [all...]
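
BlockIterator.h documents a visitor protocol: the iterator walks the zone's large list and hands each block to visit(Zone *, Large *). A minimal sketch of that shape, with a trivial counting visitor; the Zone and Large types here are cut-down placeholders for illustration only.

    #include <cstddef>

    namespace sketch {

    struct Large { Large *next; };
    struct Zone  { Large *large_list; };

    // Walk every Large in the zone, letting the Visitor decide what to do.
    // Mirrors the "void visit(Zone *, Large *) - visit a Large" contract above.
    template <typename Visitor>
    bool visit_large_blocks(Zone *zone, Visitor &visitor) {
        for (Large *large = zone->large_list; large != nullptr; large = large->next) {
            if (!visitor.visit(zone, large))
                return false;    // visitor can stop the walk early
        }
        return true;
    }

    // Trivial example visitor: count the large blocks.
    struct LargeCounter {
        std::size_t count = 0;
        bool visit(Zone *, Large *) { ++count; return true; }
    };

    } // namespace sketch
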
ZoneCollectors.cpp
    40: Large *_scan_large;
    66: void push(Large *large) {
    72: static bool mark(Large *large) { return large->test_and_set_mark(); }
    74: bool is_marked(Large *large) { return large->is_marked(); }
    76: void visit(Zone *zone, Large *large) {
    151: void visit(const ReferenceInfo &info, void **slot, Large *large) {}
    174: void visit(const ReferenceInfo &info, void **slot, Large *large) {}
ReferenceIterator.h
    33: #include "Large.h"
    128: // void visit(const ReferenceInfo &info, void **slot, Large *large);
    133: // The block being referenced is either a small/medium block, represented by the subzone / quantum pair, or a Large block.
    144: // void push(Large *large);
    151: // scanner.scan(Subzone *subzone, usword_t) or scanner.scan(Large *large).
    192: void push(Large *large) { _pending_stack.push(large); }
    195: bool mark(Large *large) { return _pending_stack.mark(large); }
    198: bool is_marked(Large *large) { return _pending_stack.is_marked(large); }
    201: static bool should_scan(Large *large) { return Configuration::ScanningStrategy::should_scan(large); }
    204: static bool should_mark(Large *larg
    [all...]
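
The ReferenceIterator and ZoneCollectors hits describe a pluggable pending-stack interface for large blocks: push(Large *), mark(Large *) as a test-and-set, and is_marked(Large *). Below is one way such a stack could be satisfied, sketched with a plain vector and a non-atomic mark flag; the real code uses shared mark bits and is thread-aware, so this is illustrative only.

    #include <vector>

    namespace sketch {

    struct Large {
        Large *next = nullptr;
        bool marked = false;
        // Real code: atomic test-and-set on shared mark bits.
        bool test_and_set_mark() { bool was = marked; marked = true; return was; }
        bool is_marked() const { return marked; }
    };

    // Minimal stand-in for the pending-stack contract shown above.
    class PendingStack {
        std::vector<Large *> _pending;
    public:
        void push(Large *large)             { _pending.push_back(large); }
        static bool mark(Large *large)      { return large->test_and_set_mark(); }
        static bool is_marked(Large *large) { return large->is_marked(); }

        // Drain loop: pop blocks and hand them to a scanner functor.
        template <typename Scanner>
        void drain(Scanner &&scan) {
            while (!_pending.empty()) {
                Large *large = _pending.back();
                _pending.pop_back();
                scan(large);
            }
        }
    };

    } // namespace sketch
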
WriteBarrierIterator.h
    22: Write Barrier Iteration of Subzones and Large blocks
    33: #include "Large.h"
    65: for (Large *large = zone->large_list(); large != NULL; large = large->next()) {
BlockRef.h
    27: #include "Large.h"
    76: Large * const _large;
    79: LargeBlockRef(Large *large) : _large(large) {}
    81: inline Large *large() const { return _large; }
    99: inline void get_description(char *buf, usword_t bufsz) const { snprintf(buf, bufsz, "Large=%p", _large); }
    138: LargeBlockRef block(Large::large((void *)ptr));
    171: Large *large = zone->block_start_large((void *)ptr);
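
BlockRef.h wraps a Large* in a small value type (LargeBlockRef) so callers manipulate blocks through a uniform handle, with get_description() formatting the pointer via snprintf. A sketch of that wrapper shape; anything beyond the members visible in the hits is an assumption.

    #include <cstdio>
    #include <cstddef>

    namespace sketch {

    struct Large { };   // placeholder for Auto::Large

    // Thin handle around a Large*, in the spirit of LargeBlockRef.
    class LargeBlockRef {
        Large *const _large;
    public:
        explicit LargeBlockRef(Large *large) : _large(large) {}

        Large *large() const { return _large; }

        // Human-readable description, as in the get_description() hit above.
        void get_description(char *buf, std::size_t bufsz) const {
            std::snprintf(buf, bufsz, "Large=%p", static_cast<void *>(_large));
        }
    };

    } // namespace sketch
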
Zone.cpp
    32: #include "Large.h"
    268: for (Large *large = _large_list; large; ) {
    269: Large *next = large->next();
    411: Large *large = Large::allocate(this, size, layout, refcount_is_one);
    425: _in_large.set_bit(Large::quantum_index(address));
    465: void Zone::deallocate_large(Large *large, void *block) {
    470: void Zone::deallocate_large_internal(Large *large, void *block) {
    475: _in_large.clear_bit(Large::quantum_index(block));
    709: Large *larg
    [all...]
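
The Zone.cpp hits show the bookkeeping around large allocation: when a Large is allocated, its starting quantum is flagged in an _in_large bitmap, and deallocation clears the bit again, so a pointer lookup can tell whether an address begins a live large block. A sketch of that bitmap discipline using std::vector<bool>; quantum_index() and the quantum size here are assumptions, not libauto's definitions.

    #include <vector>
    #include <cstdint>

    namespace sketch {

    using usword_t = std::uintptr_t;
    constexpr usword_t quantum_large_log2 = 16;   // illustrative quantum size only

    // Address -> quantum index within the arena (stands in for Large::quantum_index()).
    inline usword_t quantum_index(const void *address, const void *arena) {
        return (reinterpret_cast<usword_t>(address) -
                reinterpret_cast<usword_t>(arena)) >> quantum_large_log2;
    }

    // Tracks which quanta are the start of a live large block.
    class InLargeMap {
        std::vector<bool> _bits;
        const void *_arena;
    public:
        InLargeMap(const void *arena, usword_t quanta) : _bits(quanta, false), _arena(arena) {}

        void set_bit(const void *block)   { _bits[quantum_index(block, _arena)] = true;  }   // on allocate
        void clear_bit(const void *block) { _bits[quantum_index(block, _arena)] = false; }   // on deallocate
        bool bit(const void *block) const { return _bits[quantum_index(block, _arena)]; }    // block-start query
    };

    } // namespace sketch
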
ZoneCompaction.cpp
    133: void push(Large *large) {
    138: static bool mark(Large *large) { return large->test_and_set_mark(); }
    141: static bool is_marked(Large *large) { return large->is_marked(); }
    153: Large *large = reinterpret_cast<Large *>(_large_stack.back());
    170: void push(Large *large) {}
    173: static bool mark(Large *large) { return false; }
    175: static bool is_marked(Large *large) { return false; }
    261: }, ^(Large *slot_large) {
    285: void visit(const ReferenceInfo &info, void **slot, Large *larg
    [all...]
ZoneDump.cpp
    32: #include "Large.h"
    65: inline bool visit(Zone *zone, Large *large) {
    157: inline bool visit(Zone *zone, Large *large) {
Zone.h
    37: #include "Large.h"
    127: pthread_mutex_t _mark_bits_mutex;    // protects the per-Region and Large block mark bits.
    134: Large *_large_list;                  // doubly linked list of large allocations
    181: // thread safe Large deallocation routines.
    183: void deallocate_large(Large *large, void *block);
    184: void deallocate_large_internal(Large *large, void *block);
    208: inline Large *find_large(void *block) { return Large::large(block); }
    338: inline Large *large_list() { return _large_list; }
    502: // since vm_allocate() returns addresses in arbitrary locations, can only really tell by calling Large
    [all...]
auto_gdb_interface.cpp
    47: void push(Large *large) {
    52: static bool mark(Large *large) { return large->test_and_set_mark(); }
    55: static bool is_marked(Large *large) { return large->is_marked(); }
    67: Large *large = reinterpret_cast<Large *>(_large_stack.back());
    152: void visit(const ReferenceInfo &info, void **slot, Large *large) {
    367: void visit(const ReferenceInfo &info, void **slot, Large *large) {
    570: void visit(const ReferenceInfo &info, void **ref, Large *large) {
    610: void visit(const ReferenceInfo &info, void **ref, Large *large) {
    637: for (Large *larg
    [all...]
/macosx-10.10.1/llvmCore-3425.0.34/include/llvm/Support/
CodeGen.h
    27: enum Model { Default, JITDefault, Small, Kernel, Medium, Large };    (enumerator in enum:llvm::CodeModel::Model)
/macosx-10.10.1/llvmCore-3425.0.34/lib/Target/
TargetMachineC.cpp
    95: CM = CodeModel::Large;
/macosx-10.10.1/llvmCore-3425.0.34/lib/Target/X86/
X86Subtarget.cpp
    69: // Large model never uses stubs.
    70: if (TM.getCodeModel() == CodeModel::Large)
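
The LLVM hits show CodeModel::Model being selected (TargetMachineC.cpp, llc, llvm-mc) and consulted when deciding how references are emitted; the X86Subtarget.cpp hit notes that the large code model never uses stubs, since symbols may lie beyond the reach of a 32-bit displacement. A small sketch of that kind of decision; canUseStubs() below is a made-up helper, not an LLVM API, and the real logic in X86Subtarget is considerably richer.

    namespace sketch {

    // Mirrors llvm::CodeModel::Model from the CodeGen.h hit above (LLVM 3.2-era).
    enum Model { Default, JITDefault, Small, Kernel, Medium, Large };

    // Hypothetical helper: decide whether an external reference may go
    // through a lazy-binding stub.
    inline bool canUseStubs(Model codeModel) {
        // Large code model: code and data may be farther apart than a 32-bit
        // displacement can express, so stubs are never used (see the
        // X86Subtarget.cpp hit above).
        if (codeModel == Large)
            return false;
        return true;
    }

    } // namespace sketch
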
/macosx-10.10.1/tcl-105/tcl_ext/mk4tcl/metakit/tests/
tresize.cpp
    147: B(r02, Large inserts and removes, 0)W(r02a);
/macosx-10.10.1/vim-55/src/
Make_w16.mak
    171: -ml    ; Large memory model
/macosx-10.10.1/llvmCore-3425.0.34/tools/llc/
llc.cpp
    107: clEnumValN(CodeModel::Large, "large",
    108:            "Large code model"),
/macosx-10.10.1/llvmCore-3425.0.34/tools/llvm-mc/
llvm-mc.cpp
    142: clEnumValN(CodeModel::Large, "large",
    143:            "Large code model"),
/macosx-10.10.1/vim-55/runtime/indent/
php.vim
    364: echom 'Large indenting detected, speed optimizations engaged (v1.33)'
    371: echom 'Large indenting deactivated'

Completed in 246 milliseconds
