1/* Vector API for GNU compiler. 2 Copyright (C) 2004, 2005, 2007, 2008, 2009, 2010 3 Free Software Foundation, Inc. 4 Contributed by Nathan Sidwell <nathan@codesourcery.com> 5 6This file is part of GCC. 7 8GCC is free software; you can redistribute it and/or modify it under 9the terms of the GNU General Public License as published by the Free 10Software Foundation; either version 3, or (at your option) any later 11version. 12 13GCC is distributed in the hope that it will be useful, but WITHOUT ANY 14WARRANTY; without even the implied warranty of MERCHANTABILITY or 15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 16for more details. 17 18You should have received a copy of the GNU General Public License 19along with GCC; see the file COPYING3. If not see 20<http://www.gnu.org/licenses/>. */ 21 22#ifndef GCC_VEC_H 23#define GCC_VEC_H 24 25#include "statistics.h" /* For MEM_STAT_DECL. */ 26 27/* The macros here implement a set of templated vector types and 28 associated interfaces. These templates are implemented with 29 macros, as we're not in C++ land. The interface functions are 30 typesafe and use static inline functions, sometimes backed by 31 out-of-line generic functions. The vectors are designed to 32 interoperate with the GTY machinery. 33 34 Because of the different behavior of structure objects, scalar 35 objects and of pointers, there are three flavors, one for each of 36 these variants. Both the structure object and pointer variants 37 pass pointers to objects around -- in the former case the pointers 38 are stored into the vector and in the latter case the pointers are 39 dereferenced and the objects copied into the vector. The scalar 40 object variant is suitable for int-like objects, and the vector 41 elements are returned by value. 42 43 There are both 'index' and 'iterate' accessors. The iterator 44 returns a boolean iteration condition and updates the iteration 45 variable passed by reference. 
Because the iterator will be 46 inlined, the address-of can be optimized away. 47 48 The vectors are implemented using the trailing array idiom, thus 49 they are not resizeable without changing the address of the vector 50 object itself. This means you cannot have variables or fields of 51 vector type -- always use a pointer to a vector. The one exception 52 is the final field of a structure, which could be a vector type. 53 You will have to use the embedded_size & embedded_init calls to 54 create such objects, and they will probably not be resizeable (so 55 don't use the 'safe' allocation variants). The trailing array 56 idiom is used (rather than a pointer to an array of data), because, 57 if we allow NULL to also represent an empty vector, empty vectors 58 occupy minimal space in the structure containing them. 59 60 Each operation that increases the number of active elements is 61 available in 'quick' and 'safe' variants. The former presumes that 62 there is sufficient allocated space for the operation to succeed 63 (it dies if there is not). The latter will reallocate the 64 vector, if needed. Reallocation causes an exponential increase in 65 vector size. If you know you will be adding N elements, it would 66 be more efficient to use the reserve operation before adding the 67 elements with the 'quick' operation. This will ensure there are at 68 least as many elements as you ask for; it will exponentially 69 increase if there are too few spare slots. If you want to reserve a 70 specific number of slots, but do not want the exponential increase 71 (for instance, you know this is the last allocation), use the 72 reserve_exact operation. You can also create a vector of a 73 specific size from the get go. 74 75 You should prefer the push and pop operations, as they append and 76 remove from the end of the vector. If you need to remove several 77 items in one go, use the truncate operation. 
The insert and remove 78 operations allow you to change elements in the middle of the 79 vector. There are two remove operations, one which preserves the 80 element ordering 'ordered_remove', and one which does not 81 'unordered_remove'. The latter function copies the end element 82 into the removed slot, rather than invoking a memmove operation. The 83 'lower_bound' function will determine where to place an item in the 84 array, using insert, so as to maintain sorted order. 85 86 When a vector type is defined, first a non-memory managed version 87 is created. You can then define either or both garbage collected 88 and heap allocated versions. The allocation mechanism is specified 89 when the type is defined, and is therefore part of the type. If 90 you need both gc'd and heap allocated versions, you still must have 91 *exactly* one definition of the common non-memory managed base vector. 92 93 If you need to directly manipulate a vector, then the 'address' 94 accessor will return the address of the start of the vector. Also 95 the 'space' predicate will tell you whether there is spare capacity 96 in the vector. You will not normally need to use these two functions. 97 98 Vector types are defined using a DEF_VEC_{O,P,I}(TYPEDEF) macro, to 99 get the non-memory allocation version, and then a 100 DEF_VEC_ALLOC_{O,P,I}(TYPEDEF,ALLOC) macro to get memory managed 101 vectors. Variables of vector type are declared using a 102 VEC(TYPEDEF,ALLOC) macro. The ALLOC argument specifies the 103 allocation strategy, and can be either 'gc' or 'heap' for garbage 104 collected and heap allocated respectively. It can be 'none' to get 105 a vector that must be explicitly allocated (for instance as a 106 trailing array of another structure). The characters O, P and I 107 indicate whether TYPEDEF is a pointer (P), object (O) or integral 108 (I) type. Be careful to pick the correct one, as you'll get an 109 awkward and inefficient API if you use the wrong one. 
There is a 110 check, which results in a compile-time warning, for the P and I 111 versions, but there is no check for the O versions, as that is not 112 possible in plain C. Due to the way GTY works, you must annotate 113 any structures you wish to insert or reference from a vector with a 114 GTY(()) tag. You need to do this even if you never declare the GC 115 allocated variants. 116 117 An example of their use would be, 118 119 DEF_VEC_P(tree); // non-managed tree vector. 120 DEF_VEC_ALLOC_P(tree,gc); // gc'd vector of tree pointers. This must 121 // appear at file scope. 122 123 struct my_struct { 124 VEC(tree,gc) *v; // A (pointer to) a vector of tree pointers. 125 }; 126 127 struct my_struct *s; 128 129 if (VEC_length(tree,s->v)) { we have some contents } 130 VEC_safe_push(tree,gc,s->v,decl); // append some decl onto the end 131 for (ix = 0; VEC_iterate(tree,s->v,ix,elt); ix++) 132 { do something with elt } 133 134*/ 135 136/* Macros to invoke API calls. A single macro works for both pointer 137 and object vectors, but the argument and return types might well be 138 different. In each macro, T is the typedef of the vector elements, 139 and A is the allocation strategy. The allocation strategy is only 140 present when it is required. Some of these macros pass the vector, 141 V, by reference (by taking its address), this is noted in the 142 descriptions. */ 143 144/* Length of vector 145 unsigned VEC_T_length(const VEC(T) *v); 146 147 Return the number of active elements in V. V can be NULL, in which 148 case zero is returned. */ 149 150#define VEC_length(T,V) (VEC_OP(T,base,length)(VEC_BASE(V))) 151 152 153/* Check if vector is empty 154 int VEC_T_empty(const VEC(T) *v); 155 156 Return nonzero if V is an empty vector (or V is NULL), zero otherwise. */ 157 158#define VEC_empty(T,V) (VEC_length (T,V) == 0) 159 160 161/* Get the final element of the vector. 
162 T VEC_T_last(VEC(T) *v); // Integer 163 T VEC_T_last(VEC(T) *v); // Pointer 164 T *VEC_T_last(VEC(T) *v); // Object 165 166 Return the final element. V must not be empty. */ 167 168#define VEC_last(T,V) (VEC_OP(T,base,last)(VEC_BASE(V) VEC_CHECK_INFO)) 169 170/* Index into vector 171 T VEC_T_index(VEC(T) *v, unsigned ix); // Integer 172 T VEC_T_index(VEC(T) *v, unsigned ix); // Pointer 173 T *VEC_T_index(VEC(T) *v, unsigned ix); // Object 174 175 Return the IX'th element. IX must be in the domain of V. */ 176 177#define VEC_index(T,V,I) (VEC_OP(T,base,index)(VEC_BASE(V),I VEC_CHECK_INFO)) 178 179/* Iterate over vector 180 int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Integer 181 int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Pointer 182 int VEC_T_iterate(VEC(T) *v, unsigned ix, T *&ptr); // Object 183 184 Return iteration condition and update PTR to point to the IX'th 185 element. At the end of iteration, sets PTR to NULL. Use this to 186 iterate over the elements of a vector as follows, 187 188 for (ix = 0; VEC_iterate(T,v,ix,ptr); ix++) 189 continue; */ 190 191#define VEC_iterate(T,V,I,P) (VEC_OP(T,base,iterate)(VEC_BASE(V),I,&(P))) 192 193/* Convenience macro for forward iteration. */ 194 195#define FOR_EACH_VEC_ELT(T, V, I, P) \ 196 for (I = 0; VEC_iterate (T, (V), (I), (P)); ++(I)) 197 198/* Convenience macro for reverse iteration. */ 199 200#define FOR_EACH_VEC_ELT_REVERSE(T,V,I,P) \ 201 for (I = VEC_length (T, (V)) - 1; \ 202 VEC_iterate (T, (V), (I), (P)); \ 203 (I)--) 204 205/* Allocate new vector. 206 VEC(T,A) *VEC_T_A_alloc(int reserve); 207 208 Allocate a new vector with space for RESERVE objects. If RESERVE 209 is zero, NO vector is created. */ 210 211#define VEC_alloc(T,A,N) (VEC_OP(T,A,alloc)(N MEM_STAT_INFO)) 212 213/* Free a vector. 214 void VEC_T_A_free(VEC(T,A) *&); 215 216 Free a vector and set it to NULL. 
*/ 217 218#define VEC_free(T,A,V) (VEC_OP(T,A,free)(&V)) 219 220/* Use these to determine the required size and initialization of a 221 vector embedded within another structure (as the final member). 222 223 size_t VEC_T_embedded_size(int reserve); 224 void VEC_T_embedded_init(VEC(T) *v, int reserve); 225 226 These allow the caller to perform the memory allocation. */ 227 228#define VEC_embedded_size(T,N) (VEC_OP(T,base,embedded_size)(N)) 229#define VEC_embedded_init(T,O,N) (VEC_OP(T,base,embedded_init)(VEC_BASE(O),N)) 230 231/* Copy a vector. 232 VEC(T,A) *VEC_T_A_copy(VEC(T) *); 233 234 Copy the live elements of a vector into a new vector. The new and 235 old vectors need not be allocated by the same mechanism. */ 236 237#define VEC_copy(T,A,V) (VEC_OP(T,A,copy)(VEC_BASE(V) MEM_STAT_INFO)) 238 239/* Determine if a vector has additional capacity. 240 241 int VEC_T_space (VEC(T) *v,int reserve) 242 243 If V has space for RESERVE additional entries, return nonzero. You 244 usually only need to use this if you are doing your own vector 245 reallocation, for instance on an embedded vector. This returns 246 nonzero in exactly the same circumstances that VEC_T_reserve 247 will. */ 248 249#define VEC_space(T,V,R) \ 250 (VEC_OP(T,base,space)(VEC_BASE(V),R VEC_CHECK_INFO)) 251 252/* Reserve space. 253 int VEC_T_A_reserve(VEC(T,A) *&v, int reserve); 254 255 Ensure that V has at least RESERVE slots available. This will 256 create additional headroom. Note this can cause V to be 257 reallocated. Returns nonzero iff reallocation actually 258 occurred. */ 259 260#define VEC_reserve(T,A,V,R) \ 261 (VEC_OP(T,A,reserve)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO)) 262 263/* Reserve space exactly. 264 int VEC_T_A_reserve_exact(VEC(T,A) *&v, int reserve); 265 266 Ensure that V has at least RESERVE slots available. This will not 267 create additional headroom. Note this can cause V to be 268 reallocated. Returns nonzero iff reallocation actually 269 occurred. 
*/ 270 271#define VEC_reserve_exact(T,A,V,R) \ 272 (VEC_OP(T,A,reserve_exact)(&(V),R VEC_CHECK_INFO MEM_STAT_INFO)) 273 274/* Copy elements with no reallocation 275 void VEC_T_splice (VEC(T) *dst, VEC(T) *src); // Integer 276 void VEC_T_splice (VEC(T) *dst, VEC(T) *src); // Pointer 277 void VEC_T_splice (VEC(T) *dst, VEC(T) *src); // Object 278 279 Copy the elements in SRC to the end of DST as if by memcpy. DST and 280 SRC need not be allocated with the same mechanism, although they most 281 often will be. DST is assumed to have sufficient headroom 282 available. */ 283 284#define VEC_splice(T,DST,SRC) \ 285 (VEC_OP(T,base,splice)(VEC_BASE(DST), VEC_BASE(SRC) VEC_CHECK_INFO)) 286 287/* Copy elements with reallocation 288 void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src); // Integer 289 void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src); // Pointer 290 void VEC_T_safe_splice (VEC(T,A) *&dst, VEC(T) *src); // Object 291 292 Copy the elements in SRC to the end of DST as if by memcpy. DST and 293 SRC need not be allocated with the same mechanism, although they most 294 often will be. DST need not have sufficient headroom and will be 295 reallocated if needed. */ 296 297#define VEC_safe_splice(T,A,DST,SRC) \ 298 (VEC_OP(T,A,safe_splice)(&(DST), VEC_BASE(SRC) VEC_CHECK_INFO MEM_STAT_INFO)) 299 300/* Push object with no reallocation 301 T *VEC_T_quick_push (VEC(T) *v, T obj); // Integer 302 T *VEC_T_quick_push (VEC(T) *v, T obj); // Pointer 303 T *VEC_T_quick_push (VEC(T) *v, T *obj); // Object 304 305 Push a new element onto the end, returns a pointer to the slot 306 filled in. For object vectors, the new value can be NULL, in which 307 case NO initialization is performed. There must 308 be sufficient space in the vector. 
*/ 309 310#define VEC_quick_push(T,V,O) \ 311 (VEC_OP(T,base,quick_push)(VEC_BASE(V),O VEC_CHECK_INFO)) 312 313/* Push object with reallocation 314 T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj); // Integer 315 T *VEC_T_A_safe_push (VEC(T,A) *&v, T obj); // Pointer 316 T *VEC_T_A_safe_push (VEC(T,A) *&v, T *obj); // Object 317 318 Push a new element onto the end, returns a pointer to the slot 319 filled in. For object vectors, the new value can be NULL, in which 320 case NO initialization is performed. Reallocates V, if needed. */ 321 322#define VEC_safe_push(T,A,V,O) \ 323 (VEC_OP(T,A,safe_push)(&(V),O VEC_CHECK_INFO MEM_STAT_INFO)) 324 325/* Pop element off end 326 T VEC_T_pop (VEC(T) *v); // Integer 327 T VEC_T_pop (VEC(T) *v); // Pointer 328 void VEC_T_pop (VEC(T) *v); // Object 329 330 Pop the last element off the end. Returns the element popped, for 331 pointer vectors. */ 332 333#define VEC_pop(T,V) (VEC_OP(T,base,pop)(VEC_BASE(V) VEC_CHECK_INFO)) 334 335/* Truncate to specific length 336 void VEC_T_truncate (VEC(T) *v, unsigned len); 337 338 Set the length as specified. The new length must be less than or 339 equal to the current length. This is an O(1) operation. */ 340 341#define VEC_truncate(T,V,I) \ 342 (VEC_OP(T,base,truncate)(VEC_BASE(V),I VEC_CHECK_INFO)) 343 344/* Grow to a specific length. 345 void VEC_T_A_safe_grow (VEC(T,A) *&v, int len); 346 347 Grow the vector to a specific length. The LEN must be as 348 long or longer than the current length. The new elements are 349 uninitialized. */ 350 351#define VEC_safe_grow(T,A,V,I) \ 352 (VEC_OP(T,A,safe_grow)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO)) 353 354/* Grow to a specific length. 355 void VEC_T_A_safe_grow_cleared (VEC(T,A) *&v, int len); 356 357 Grow the vector to a specific length. The LEN must be as 358 long or longer than the current length. The new elements are 359 initialized to zero. 
*/ 360 361#define VEC_safe_grow_cleared(T,A,V,I) \ 362 (VEC_OP(T,A,safe_grow_cleared)(&(V),I VEC_CHECK_INFO MEM_STAT_INFO)) 363 364/* Replace element 365 T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Integer 366 T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Pointer 367 T *VEC_T_replace (VEC(T) *v, unsigned ix, T *val); // Object 368 369 Replace the IXth element of V with a new value, VAL. For pointer 370 vectors returns the original value. For object vectors returns a 371 pointer to the new value. For object vectors the new value can be 372 NULL, in which case no overwriting of the slot is actually 373 performed. */ 374 375#define VEC_replace(T,V,I,O) \ 376 (VEC_OP(T,base,replace)(VEC_BASE(V),I,O VEC_CHECK_INFO)) 377 378/* Insert object with no reallocation 379 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Integer 380 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Pointer 381 T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T *val); // Object 382 383 Insert an element, VAL, at the IXth position of V. Return a pointer 384 to the slot created. For vectors of object, the new value can be 385 NULL, in which case no initialization of the inserted slot takes 386 place. There must be sufficient space. */ 387 388#define VEC_quick_insert(T,V,I,O) \ 389 (VEC_OP(T,base,quick_insert)(VEC_BASE(V),I,O VEC_CHECK_INFO)) 390 391/* Insert object with reallocation 392 T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Integer 393 T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Pointer 394 T *VEC_T_A_safe_insert (VEC(T,A) *&v, unsigned ix, T *val); // Object 395 396 Insert an element, VAL, at the IXth position of V. Return a pointer 397 to the slot created. For vectors of object, the new value can be 398 NULL, in which case no initialization of the inserted slot takes 399 place. Reallocate V, if necessary. 
*/ 400 401#define VEC_safe_insert(T,A,V,I,O) \ 402 (VEC_OP(T,A,safe_insert)(&(V),I,O VEC_CHECK_INFO MEM_STAT_INFO)) 403 404/* Remove element retaining order 405 T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Integer 406 T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Pointer 407 void VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Object 408 409 Remove an element from the IXth position of V. Ordering of 410 remaining elements is preserved. For pointer vectors returns the 411 removed object. This is an O(N) operation due to a memmove. */ 412 413#define VEC_ordered_remove(T,V,I) \ 414 (VEC_OP(T,base,ordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO)) 415 416/* Remove element destroying order 417 T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Integer 418 T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Pointer 419 void VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Object 420 421 Remove an element from the IXth position of V. Ordering of 422 remaining elements is destroyed. For pointer vectors returns the 423 removed object. This is an O(1) operation. */ 424 425#define VEC_unordered_remove(T,V,I) \ 426 (VEC_OP(T,base,unordered_remove)(VEC_BASE(V),I VEC_CHECK_INFO)) 427 428/* Remove a block of elements 429 void VEC_T_block_remove (VEC(T) *v, unsigned ix, unsigned len); 430 431 Remove LEN elements starting at the IXth. Ordering is retained. 432 This is an O(N) operation due to memmove. */ 433 434#define VEC_block_remove(T,V,I,L) \ 435 (VEC_OP(T,base,block_remove)(VEC_BASE(V),I,L VEC_CHECK_INFO)) 436 437/* Get the address of the array of elements 438 T *VEC_T_address (VEC(T) v) 439 440 If you need to directly manipulate the array (for instance, you 441 want to feed it to qsort), use this accessor. */ 442 443#define VEC_address(T,V) (VEC_OP(T,base,address)(VEC_BASE(V))) 444 445/* Conveniently sort the contents of the vector with qsort. 
446 void VEC_qsort (VEC(T) *v, int (*cmp_func)(const void *, const void *)) */ 447 448#define VEC_qsort(T,V,CMP) qsort(VEC_address (T,V), VEC_length(T,V), \ 449 sizeof (T), CMP) 450 451/* Find the first index in the vector not less than the object. 452 unsigned VEC_T_lower_bound (VEC(T) *v, const T val, 453 bool (*lessthan) (const T, const T)); // Integer 454 unsigned VEC_T_lower_bound (VEC(T) *v, const T val, 455 bool (*lessthan) (const T, const T)); // Pointer 456 unsigned VEC_T_lower_bound (VEC(T) *v, const T *val, 457 bool (*lessthan) (const T*, const T*)); // Object 458 459 Find the first position in which VAL could be inserted without 460 changing the ordering of V. LESSTHAN is a function that returns 461 true if the first argument is strictly less than the second. */ 462 463#define VEC_lower_bound(T,V,O,LT) \ 464 (VEC_OP(T,base,lower_bound)(VEC_BASE(V),O,LT VEC_CHECK_INFO)) 465 466/* Reallocate an array of elements with prefix. */ 467extern void *vec_gc_p_reserve (void *, int MEM_STAT_DECL); 468extern void *vec_gc_p_reserve_exact (void *, int MEM_STAT_DECL); 469extern void *vec_gc_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL); 470extern void *vec_gc_o_reserve_exact (void *, int, size_t, size_t 471 MEM_STAT_DECL); 472extern void ggc_free (void *); 473#define vec_gc_free(V) ggc_free (V) 474extern void *vec_heap_p_reserve (void *, int MEM_STAT_DECL); 475extern void *vec_heap_p_reserve_exact (void *, int MEM_STAT_DECL); 476extern void *vec_heap_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL); 477extern void *vec_heap_o_reserve_exact (void *, int, size_t, size_t 478 MEM_STAT_DECL); 479extern void dump_vec_loc_statistics (void); 480#ifdef GATHER_STATISTICS 481void vec_heap_free (void *); 482#else 483/* Avoid problems with frontends that #define free(x). 
*/ 484#define vec_heap_free(V) (free) (V) 485#endif 486 487#if ENABLE_CHECKING 488#define VEC_CHECK_INFO ,__FILE__,__LINE__,__FUNCTION__ 489#define VEC_CHECK_DECL ,const char *file_,unsigned line_,const char *function_ 490#define VEC_CHECK_PASS ,file_,line_,function_ 491 492#define VEC_ASSERT(EXPR,OP,T,A) \ 493 (void)((EXPR) ? 0 : (VEC_ASSERT_FAIL(OP,VEC(T,A)), 0)) 494 495extern void vec_assert_fail (const char *, const char * VEC_CHECK_DECL) 496 ATTRIBUTE_NORETURN; 497#define VEC_ASSERT_FAIL(OP,VEC) vec_assert_fail (OP,#VEC VEC_CHECK_PASS) 498#else 499#define VEC_CHECK_INFO 500#define VEC_CHECK_DECL 501#define VEC_CHECK_PASS 502#define VEC_ASSERT(EXPR,OP,T,A) (void)(EXPR) 503#endif 504 505/* Note: gengtype has hardwired knowledge of the expansions of the 506 VEC, DEF_VEC_*, and DEF_VEC_ALLOC_* macros. If you change the 507 expansions of these macros you may need to change gengtype too. */ 508 509#define VEC(T,A) VEC_##T##_##A 510#define VEC_OP(T,A,OP) VEC_##T##_##A##_##OP 511 512/* Base of vector type, not user visible. */ 513#define VEC_T(T,B) \ 514typedef struct VEC(T,B) \ 515{ \ 516 unsigned num; \ 517 unsigned alloc; \ 518 T vec[1]; \ 519} VEC(T,B) 520 521#define VEC_T_GTY(T,B) \ 522typedef struct GTY(()) VEC(T,B) \ 523{ \ 524 unsigned num; \ 525 unsigned alloc; \ 526 T GTY ((length ("%h.num"))) vec[1]; \ 527} VEC(T,B) 528 529/* Derived vector type, user visible. */ 530#define VEC_TA_GTY(T,B,A,GTY) \ 531typedef struct GTY VEC(T,A) \ 532{ \ 533 VEC(T,B) base; \ 534} VEC(T,A) 535 536#define VEC_TA(T,B,A) \ 537typedef struct VEC(T,A) \ 538{ \ 539 VEC(T,B) base; \ 540} VEC(T,A) 541 542/* Convert to base type. */ 543#define VEC_BASE(P) ((P) ? &(P)->base : 0) 544 545/* Vector of integer-like object. 
*/ 546#define DEF_VEC_I(T) \ 547static inline void VEC_OP (T,must_be,integral_type) (void) \ 548{ \ 549 (void)~(T)0; \ 550} \ 551 \ 552VEC_T(T,base); \ 553VEC_TA(T,base,none); \ 554DEF_VEC_FUNC_P(T) \ 555struct vec_swallow_trailing_semi 556#define DEF_VEC_ALLOC_I(T,A) \ 557VEC_TA(T,base,A); \ 558DEF_VEC_ALLOC_FUNC_I(T,A) \ 559DEF_VEC_NONALLOC_FUNCS_I(T,A) \ 560struct vec_swallow_trailing_semi 561 562/* Vector of pointer to object. */ 563#define DEF_VEC_P(T) \ 564static inline void VEC_OP (T,must_be,pointer_type) (void) \ 565{ \ 566 (void)((T)1 == (void *)1); \ 567} \ 568 \ 569VEC_T_GTY(T,base); \ 570VEC_TA(T,base,none); \ 571DEF_VEC_FUNC_P(T) \ 572struct vec_swallow_trailing_semi 573#define DEF_VEC_ALLOC_P(T,A) \ 574VEC_TA(T,base,A); \ 575DEF_VEC_ALLOC_FUNC_P(T,A) \ 576DEF_VEC_NONALLOC_FUNCS_P(T,A) \ 577struct vec_swallow_trailing_semi 578 579#define DEF_VEC_FUNC_P(T) \ 580static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_) \ 581{ \ 582 return vec_ ? vec_->num : 0; \ 583} \ 584 \ 585static inline T VEC_OP (T,base,last) \ 586 (const VEC(T,base) *vec_ VEC_CHECK_DECL) \ 587{ \ 588 VEC_ASSERT (vec_ && vec_->num, "last", T, base); \ 589 \ 590 return vec_->vec[vec_->num - 1]; \ 591} \ 592 \ 593static inline T VEC_OP (T,base,index) \ 594 (const VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \ 595{ \ 596 VEC_ASSERT (vec_ && ix_ < vec_->num, "index", T, base); \ 597 \ 598 return vec_->vec[ix_]; \ 599} \ 600 \ 601static inline int VEC_OP (T,base,iterate) \ 602 (const VEC(T,base) *vec_, unsigned ix_, T *ptr) \ 603{ \ 604 if (vec_ && ix_ < vec_->num) \ 605 { \ 606 *ptr = vec_->vec[ix_]; \ 607 return 1; \ 608 } \ 609 else \ 610 { \ 611 *ptr = (T) 0; \ 612 return 0; \ 613 } \ 614} \ 615 \ 616static inline size_t VEC_OP (T,base,embedded_size) \ 617 (int alloc_) \ 618{ \ 619 return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T); \ 620} \ 621 \ 622static inline void VEC_OP (T,base,embedded_init) \ 623 (VEC(T,base) *vec_, int alloc_) \ 624{ \ 625 vec_->num = 0; 
\ 626 vec_->alloc = alloc_; \ 627} \ 628 \ 629static inline int VEC_OP (T,base,space) \ 630 (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL) \ 631{ \ 632 VEC_ASSERT (alloc_ >= 0, "space", T, base); \ 633 return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_; \ 634} \ 635 \ 636static inline void VEC_OP(T,base,splice) \ 637 (VEC(T,base) *dst_, VEC(T,base) *src_ VEC_CHECK_DECL) \ 638{ \ 639 if (src_) \ 640 { \ 641 unsigned len_ = src_->num; \ 642 VEC_ASSERT (dst_->num + len_ <= dst_->alloc, "splice", T, base); \ 643 \ 644 memcpy (&dst_->vec[dst_->num], &src_->vec[0], len_ * sizeof (T)); \ 645 dst_->num += len_; \ 646 } \ 647} \ 648 \ 649static inline T *VEC_OP (T,base,quick_push) \ 650 (VEC(T,base) *vec_, T obj_ VEC_CHECK_DECL) \ 651{ \ 652 T *slot_; \ 653 \ 654 VEC_ASSERT (vec_->num < vec_->alloc, "push", T, base); \ 655 slot_ = &vec_->vec[vec_->num++]; \ 656 *slot_ = obj_; \ 657 \ 658 return slot_; \ 659} \ 660 \ 661static inline T VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL) \ 662{ \ 663 T obj_; \ 664 \ 665 VEC_ASSERT (vec_->num, "pop", T, base); \ 666 obj_ = vec_->vec[--vec_->num]; \ 667 \ 668 return obj_; \ 669} \ 670 \ 671static inline void VEC_OP (T,base,truncate) \ 672 (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL) \ 673{ \ 674 VEC_ASSERT (vec_ ? 
vec_->num >= size_ : !size_, "truncate", T, base); \ 675 if (vec_) \ 676 vec_->num = size_; \ 677} \ 678 \ 679static inline T VEC_OP (T,base,replace) \ 680 (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL) \ 681{ \ 682 T old_obj_; \ 683 \ 684 VEC_ASSERT (ix_ < vec_->num, "replace", T, base); \ 685 old_obj_ = vec_->vec[ix_]; \ 686 vec_->vec[ix_] = obj_; \ 687 \ 688 return old_obj_; \ 689} \ 690 \ 691static inline T *VEC_OP (T,base,quick_insert) \ 692 (VEC(T,base) *vec_, unsigned ix_, T obj_ VEC_CHECK_DECL) \ 693{ \ 694 T *slot_; \ 695 \ 696 VEC_ASSERT (vec_->num < vec_->alloc, "insert", T, base); \ 697 VEC_ASSERT (ix_ <= vec_->num, "insert", T, base); \ 698 slot_ = &vec_->vec[ix_]; \ 699 memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T)); \ 700 *slot_ = obj_; \ 701 \ 702 return slot_; \ 703} \ 704 \ 705static inline T VEC_OP (T,base,ordered_remove) \ 706 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \ 707{ \ 708 T *slot_; \ 709 T obj_; \ 710 \ 711 VEC_ASSERT (ix_ < vec_->num, "remove", T, base); \ 712 slot_ = &vec_->vec[ix_]; \ 713 obj_ = *slot_; \ 714 memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T)); \ 715 \ 716 return obj_; \ 717} \ 718 \ 719static inline T VEC_OP (T,base,unordered_remove) \ 720 (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL) \ 721{ \ 722 T *slot_; \ 723 T obj_; \ 724 \ 725 VEC_ASSERT (ix_ < vec_->num, "remove", T, base); \ 726 slot_ = &vec_->vec[ix_]; \ 727 obj_ = *slot_; \ 728 *slot_ = vec_->vec[--vec_->num]; \ 729 \ 730 return obj_; \ 731} \ 732 \ 733static inline void VEC_OP (T,base,block_remove) \ 734 (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL) \ 735{ \ 736 T *slot_; \ 737 \ 738 VEC_ASSERT (ix_ + len_ <= vec_->num, "block_remove", T, base); \ 739 slot_ = &vec_->vec[ix_]; \ 740 vec_->num -= len_; \ 741 memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T)); \ 742} \ 743 \ 744static inline T *VEC_OP (T,base,address) \ 745 (VEC(T,base) *vec_) \ 746{ \ 747 return vec_ ? 
vec_->vec : 0; \ 748} \ 749 \ 750static inline unsigned VEC_OP (T,base,lower_bound) \ 751 (VEC(T,base) *vec_, const T obj_, \ 752 bool (*lessthan_)(const T, const T) VEC_CHECK_DECL) \ 753{ \ 754 unsigned int len_ = VEC_OP (T,base, length) (vec_); \ 755 unsigned int half_, middle_; \ 756 unsigned int first_ = 0; \ 757 while (len_ > 0) \ 758 { \ 759 T middle_elem_; \ 760 half_ = len_ >> 1; \ 761 middle_ = first_; \ 762 middle_ += half_; \ 763 middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \ 764 if (lessthan_ (middle_elem_, obj_)) \ 765 { \ 766 first_ = middle_; \ 767 ++first_; \ 768 len_ = len_ - half_ - 1; \ 769 } \ 770 else \ 771 len_ = half_; \ 772 } \ 773 return first_; \ 774} 775 776#define DEF_VEC_ALLOC_FUNC_P(T,A) \ 777static inline VEC(T,A) *VEC_OP (T,A,alloc) \ 778 (int alloc_ MEM_STAT_DECL) \ 779{ \ 780 return (VEC(T,A) *) vec_##A##_p_reserve_exact (NULL, alloc_ \ 781 PASS_MEM_STAT); \ 782} 783 784 785#define DEF_VEC_NONALLOC_FUNCS_P(T,A) \ 786static inline void VEC_OP (T,A,free) \ 787 (VEC(T,A) **vec_) \ 788{ \ 789 if (*vec_) \ 790 vec_##A##_free (*vec_); \ 791 *vec_ = NULL; \ 792} \ 793 \ 794static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \ 795{ \ 796 size_t len_ = vec_ ? 
vec_->num : 0; \ 797 VEC (T,A) *new_vec_ = NULL; \ 798 \ 799 if (len_) \ 800 { \ 801 new_vec_ = (VEC (T,A) *)(vec_##A##_p_reserve_exact \ 802 (NULL, len_ PASS_MEM_STAT)); \ 803 \ 804 new_vec_->base.num = len_; \ 805 memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \ 806 } \ 807 return new_vec_; \ 808} \ 809 \ 810static inline int VEC_OP (T,A,reserve) \ 811 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \ 812{ \ 813 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \ 814 VEC_CHECK_PASS); \ 815 \ 816 if (extend) \ 817 *vec_ = (VEC(T,A) *) vec_##A##_p_reserve (*vec_, alloc_ PASS_MEM_STAT); \ 818 \ 819 return extend; \ 820} \ 821 \ 822static inline int VEC_OP (T,A,reserve_exact) \ 823 (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL) \ 824{ \ 825 int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_ \ 826 VEC_CHECK_PASS); \ 827 \ 828 if (extend) \ 829 *vec_ = (VEC(T,A) *) vec_##A##_p_reserve_exact (*vec_, alloc_ \ 830 PASS_MEM_STAT); \ 831 \ 832 return extend; \ 833} \ 834 \ 835static inline void VEC_OP (T,A,safe_grow) \ 836 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \ 837{ \ 838 VEC_ASSERT (size_ >= 0 \ 839 && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \ 840 "grow", T, A); \ 841 VEC_OP (T,A,reserve_exact) (vec_, \ 842 size_ - (int)(*vec_ ? 
VEC_BASE(*vec_)->num : 0) \ 843 VEC_CHECK_PASS PASS_MEM_STAT); \ 844 VEC_BASE (*vec_)->num = size_; \ 845} \ 846 \ 847static inline void VEC_OP (T,A,safe_grow_cleared) \ 848 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \ 849{ \ 850 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \ 851 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \ 852 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \ 853 sizeof (T) * (size_ - oldsize)); \ 854} \ 855 \ 856static inline void VEC_OP(T,A,safe_splice) \ 857 (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL) \ 858{ \ 859 if (src_) \ 860 { \ 861 VEC_OP (T,A,reserve_exact) (dst_, src_->num \ 862 VEC_CHECK_PASS MEM_STAT_INFO); \ 863 \ 864 VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_ \ 865 VEC_CHECK_PASS); \ 866 } \ 867} \ 868 \ 869static inline T *VEC_OP (T,A,safe_push) \ 870 (VEC(T,A) **vec_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \ 871{ \ 872 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \ 873 \ 874 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \ 875} \ 876 \ 877static inline T *VEC_OP (T,A,safe_insert) \ 878 (VEC(T,A) **vec_, unsigned ix_, T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \ 879{ \ 880 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \ 881 \ 882 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \ 883 VEC_CHECK_PASS); \ 884} 885 886/* Vector of object. */ 887#define DEF_VEC_O(T) \ 888VEC_T_GTY(T,base); \ 889VEC_TA(T,base,none); \ 890DEF_VEC_FUNC_O(T) \ 891struct vec_swallow_trailing_semi 892#define DEF_VEC_ALLOC_O(T,A) \ 893VEC_TA(T,base,A); \ 894DEF_VEC_ALLOC_FUNC_O(T,A) \ 895DEF_VEC_NONALLOC_FUNCS_O(T,A) \ 896struct vec_swallow_trailing_semi 897 898#define DEF_VEC_FUNC_O(T) \ 899static inline unsigned VEC_OP (T,base,length) (const VEC(T,base) *vec_) \ 900{ \ 901 return vec_ ? 
vec_->num : 0;							\
}								\
								\
/* Return a pointer to the last element; the vector must be	\
   non-empty (asserted).  */					\
static inline T *VEC_OP (T,base,last) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
{								\
  VEC_ASSERT (vec_ && vec_->num, "last", T, base);		\
								\
  return &vec_->vec[vec_->num - 1];				\
}								\
								\
/* Return a pointer to element IX_; IX_ must be in range	\
   (asserted).  */						\
static inline T *VEC_OP (T,base,index)				\
     (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)		\
{								\
  VEC_ASSERT (vec_ && ix_ < vec_->num, "index", T, base);	\
								\
  return &vec_->vec[ix_];					\
}								\
								\
/* Iterator step: if IX_ is in range, store the address of	\
   element IX_ in *PTR and return 1; otherwise clear *PTR and	\
   return 0.  Tolerates a NULL vector.  */			\
static inline int VEC_OP (T,base,iterate)			\
     (VEC(T,base) *vec_, unsigned ix_, T **ptr)			\
{								\
  if (vec_ && ix_ < vec_->num)					\
    {								\
      *ptr = &vec_->vec[ix_];					\
      return 1;							\
    }								\
  else								\
    {								\
      *ptr = 0;							\
      return 0;							\
    }								\
}								\
								\
/* Number of bytes needed to embed an instance with ALLOC_	\
   reserved slots inside another structure (trailing-array	\
   idiom: header plus ALLOC_ elements).  */			\
static inline size_t VEC_OP (T,base,embedded_size)		\
     (int alloc_)						\
{								\
  return offsetof (VEC(T,base),vec) + alloc_ * sizeof(T);	\
}								\
								\
/* Initialize an embedded instance sized by embedded_size: no	\
   active elements, capacity ALLOC_.  */			\
static inline void VEC_OP (T,base,embedded_init)		\
     (VEC(T,base) *vec_, int alloc_)				\
{								\
  vec_->num = 0;						\
  vec_->alloc = alloc_;						\
}								\
								\
/* Return nonzero iff VEC_ has at least ALLOC_ unused slots;	\
   a NULL vector has space only for zero elements.  */		\
static inline int VEC_OP (T,base,space)				\
     (VEC(T,base) *vec_, int alloc_ VEC_CHECK_DECL)		\
{								\
  VEC_ASSERT (alloc_ >= 0, "space", T, base);			\
  return vec_ ?							\
vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_;		\
}								\
								\
/* Append the elements of SRC_ to DST_; DST_ must already have	\
   room for them (asserted).  Tolerates a NULL SRC_.  */	\
static inline void VEC_OP(T,base,splice)			\
     (VEC(T,base) *dst_, VEC(T,base) *src_ VEC_CHECK_DECL)	\
{								\
  if (src_)							\
    {								\
      unsigned len_ = src_->num;				\
      VEC_ASSERT (dst_->num + len_ <= dst_->alloc, "splice", T, base); \
								\
      memcpy (&dst_->vec[dst_->num], &src_->vec[0], len_ * sizeof (T)); \
      dst_->num += len_;					\
    }								\
}								\
								\
/* Append *OBJ_ to the end of VEC_, which must have spare	\
   capacity (asserted).  A NULL OBJ_ leaves the new slot	\
   uninitialized.  Returns the address of the new slot.  */	\
static inline T *VEC_OP (T,base,quick_push)			\
     (VEC(T,base) *vec_, const T *obj_ VEC_CHECK_DECL)		\
{								\
  T *slot_;							\
								\
  VEC_ASSERT (vec_->num < vec_->alloc, "push", T, base);	\
  slot_ = &vec_->vec[vec_->num++];				\
  if (obj_)							\
    *slot_ = *obj_;						\
								\
  return slot_;							\
}								\
								\
/* Remove the last element; the vector must be non-empty	\
   (asserted).  */						\
static inline void VEC_OP (T,base,pop) (VEC(T,base) *vec_ VEC_CHECK_DECL) \
{								\
  VEC_ASSERT (vec_->num, "pop", T, base);			\
  --vec_->num;							\
}								\
								\
/* Shrink the vector to SIZE_ elements; SIZE_ must not exceed	\
   the current length (asserted).  Tolerates a NULL vector when	\
   SIZE_ is zero.  */						\
static inline void VEC_OP (T,base,truncate)			\
     (VEC(T,base) *vec_, unsigned size_ VEC_CHECK_DECL)		\
{								\
  VEC_ASSERT (vec_ ?						\
vec_->num >= size_ : !size_, "truncate", T, base);		\
  if (vec_)							\
    vec_->num = size_;						\
}								\
								\
/* Overwrite element IX_ with *OBJ_; IX_ must be in range	\
   (asserted).  A NULL OBJ_ leaves the slot untouched.		\
   Returns the address of the slot.  */				\
static inline T *VEC_OP (T,base,replace)			\
     (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL) \
{								\
  T *slot_;							\
								\
  VEC_ASSERT (ix_ < vec_->num, "replace", T, base);		\
  slot_ = &vec_->vec[ix_];					\
  if (obj_)							\
    *slot_ = *obj_;						\
								\
  return slot_;							\
}								\
								\
/* Insert *OBJ_ at index IX_, shifting later elements up by	\
   one.  The vector must have spare capacity and IX_ must be	\
   at most the current length (both asserted).  */		\
static inline T *VEC_OP (T,base,quick_insert)			\
     (VEC(T,base) *vec_, unsigned ix_, const T *obj_ VEC_CHECK_DECL) \
{								\
  T *slot_;							\
								\
  VEC_ASSERT (vec_->num < vec_->alloc, "insert", T, base);	\
  VEC_ASSERT (ix_ <= vec_->num, "insert", T, base);		\
  slot_ = &vec_->vec[ix_];					\
  memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T));	\
  if (obj_)							\
    *slot_ = *obj_;						\
								\
  return slot_;							\
}								\
								\
/* Remove element IX_, shifting later elements down by one so	\
   relative order is preserved (O(n)).  */			\
static inline void VEC_OP (T,base,ordered_remove)		\
     (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)		\
{								\
  T *slot_;							\
								\
  VEC_ASSERT (ix_ < vec_->num, "remove", T, base);		\
  slot_ = &vec_->vec[ix_];					\
  memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T));	\
}								\
								\
/* Remove element IX_ in O(1) by moving the last element into	\
   its place; does not preserve order.  */			\
static inline void VEC_OP (T,base,unordered_remove)		\
     (VEC(T,base) *vec_, unsigned ix_ VEC_CHECK_DECL)		\
{								\
  VEC_ASSERT (ix_ < vec_->num, "remove", T, base);		\
  vec_->vec[ix_] = vec_->vec[--vec_->num];			\
}								\
								\
/* Remove LEN_ consecutive elements starting at IX_, shifting	\
   later elements down; order is preserved.  */			\
static inline void VEC_OP (T,base,block_remove)			\
     (VEC(T,base) *vec_, unsigned ix_, unsigned len_ VEC_CHECK_DECL) \
{								\
  T *slot_;							\
								\
  VEC_ASSERT (ix_ + len_ <= vec_->num, "block_remove", T, base); \
  slot_ = &vec_->vec[ix_];					\
  vec_->num -= len_;						\
  memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T)); \
}								\
								\
/* Return the address of the underlying element array, or 0 for	\
   a NULL vector.  */						\
static inline T *VEC_OP (T,base,address)			\
     (VEC(T,base) *vec_)					\
{								\
  return vec_ ?							\
vec_->vec : 0;							\
}								\
								\
/* Binary search: return the index of the first element not	\
   less than *OBJ_ under LESSTHAN_ (i.e. the insertion point	\
   that keeps a sorted vector sorted).  The vector must already	\
   be sorted by LESSTHAN_.  */					\
static inline unsigned VEC_OP (T,base,lower_bound)		\
     (VEC(T,base) *vec_, const T *obj_,				\
      bool (*lessthan_)(const T *, const T *) VEC_CHECK_DECL)	\
{								\
  unsigned int len_ = VEC_OP (T, base, length) (vec_);		\
  unsigned int half_, middle_;					\
  unsigned int first_ = 0;					\
  while (len_ > 0)						\
    {								\
      T *middle_elem_;						\
      half_ = len_ >> 1;					\
      middle_ = first_;						\
      middle_ += half_;						\
      middle_elem_ = VEC_OP (T,base,index) (vec_, middle_ VEC_CHECK_PASS); \
      if (lessthan_ (middle_elem_, obj_))			\
	{							\
	  first_ = middle_;					\
	  ++first_;						\
	  len_ = len_ - half_ - 1;				\
	}							\
      else							\
	len_ = half_;						\
    }								\
  return first_;						\
}

/* Allocator functions for object vectors: allocate a fresh
   vector with room for ALLOC_ elements via the A allocator.  */
#define DEF_VEC_ALLOC_FUNC_O(T,A)				\
static inline VEC(T,A) *VEC_OP (T,A,alloc)			\
     (int alloc_ MEM_STAT_DECL)					\
{								\
  return (VEC(T,A) *) vec_##A##_o_reserve_exact (NULL, alloc_,	\
						 offsetof (VEC(T,A),base.vec), \
						 sizeof (T)	\
						 PASS_MEM_STAT); \
}

/* Allocator-dependent operations for object vectors.  */
#define DEF_VEC_NONALLOC_FUNCS_O(T,A)				\
/* Return a freshly allocated copy of VEC_, or NULL if VEC_ is	\
   empty or NULL.  */						\
static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \
{								\
  size_t len_ = vec_ ?						\
vec_->num : 0;							\
  VEC (T,A) *new_vec_ = NULL;					\
								\
  if (len_)							\
    {								\
      /* Allocate exactly len_ slots; a copy needs no headroom.  */ \
      new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact	\
			       (NULL, len_,			\
				offsetof (VEC(T,A),base.vec), sizeof (T) \
				PASS_MEM_STAT));		\
								\
      new_vec_->base.num = len_;				\
      memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
    }								\
  return new_vec_;						\
}								\
								\
/* Release *VEC_ to the A allocator and clear the pointer.  */	\
static inline void VEC_OP (T,A,free)				\
     (VEC(T,A) **vec_)						\
{								\
  if (*vec_)							\
    vec_##A##_free (*vec_);					\
  *vec_ = NULL;							\
}								\
								\
/* Ensure space for at least ALLOC_ more elements in *VEC_,	\
   reallocating (with exponential headroom) if needed.  Returns	\
   nonzero iff a reallocation occurred.  */			\
static inline int VEC_OP (T,A,reserve)				\
     (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)	\
{								\
  int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_	\
				       VEC_CHECK_PASS);		\
								\
  if (extend)							\
    *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_,	\
					      offsetof (VEC(T,A),base.vec), \
					      sizeof (T)	\
					      PASS_MEM_STAT);	\
								\
  return extend;						\
}								\
								\
/* As reserve, but grow by exactly ALLOC_ slots -- no		\
   exponential over-allocation.  */				\
static inline int VEC_OP (T,A,reserve_exact)			\
     (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)	\
{								\
  int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_	\
				       VEC_CHECK_PASS);		\
								\
  if (extend)							\
    *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact		\
	    (*vec_, alloc_,					\
	     offsetof (VEC(T,A),base.vec),			\
	     sizeof (T) PASS_MEM_STAT);				\
								\
  return extend;						\
}								\
								\
/* Grow *VEC_ to SIZE_ elements, reallocating if needed; new	\
   elements are uninitialized.  SIZE_ must not shrink the	\
   vector (asserted).  */					\
static inline void VEC_OP (T,A,safe_grow)			\
     (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL)	\
{								\
  VEC_ASSERT (size_ >= 0					\
	      && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
	      "grow", T, A);					\
  VEC_OP (T,A,reserve_exact) (vec_,				\
			      size_ - (int)(*vec_ ?		\
VEC_BASE(*vec_)->num : 0) \ 1157 VEC_CHECK_PASS PASS_MEM_STAT); \ 1158 VEC_BASE (*vec_)->num = size_; \ 1159} \ 1160 \ 1161static inline void VEC_OP (T,A,safe_grow_cleared) \ 1162 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \ 1163{ \ 1164 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \ 1165 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \ 1166 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \ 1167 sizeof (T) * (size_ - oldsize)); \ 1168} \ 1169 \ 1170static inline void VEC_OP(T,A,safe_splice) \ 1171 (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL) \ 1172{ \ 1173 if (src_) \ 1174 { \ 1175 VEC_OP (T,A,reserve_exact) (dst_, src_->num \ 1176 VEC_CHECK_PASS MEM_STAT_INFO); \ 1177 \ 1178 VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_ \ 1179 VEC_CHECK_PASS); \ 1180 } \ 1181} \ 1182 \ 1183static inline T *VEC_OP (T,A,safe_push) \ 1184 (VEC(T,A) **vec_, const T *obj_ VEC_CHECK_DECL MEM_STAT_DECL) \ 1185{ \ 1186 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \ 1187 \ 1188 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \ 1189} \ 1190 \ 1191static inline T *VEC_OP (T,A,safe_insert) \ 1192 (VEC(T,A) **vec_, unsigned ix_, const T *obj_ \ 1193 VEC_CHECK_DECL MEM_STAT_DECL) \ 1194{ \ 1195 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \ 1196 \ 1197 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \ 1198 VEC_CHECK_PASS); \ 1199} 1200 1201#define DEF_VEC_ALLOC_FUNC_I(T,A) \ 1202static inline VEC(T,A) *VEC_OP (T,A,alloc) \ 1203 (int alloc_ MEM_STAT_DECL) \ 1204{ \ 1205 return (VEC(T,A) *) vec_##A##_o_reserve_exact \ 1206 (NULL, alloc_, offsetof (VEC(T,A),base.vec), \ 1207 sizeof (T) PASS_MEM_STAT); \ 1208} 1209 1210#define DEF_VEC_NONALLOC_FUNCS_I(T,A) \ 1211static inline VEC(T,A) *VEC_OP (T,A,copy) (VEC(T,base) *vec_ MEM_STAT_DECL) \ 1212{ \ 1213 size_t len_ = vec_ ? 
vec_->num : 0;							\
  VEC (T,A) *new_vec_ = NULL;					\
								\
  if (len_)							\
    {								\
      /* Allocate exactly len_ slots; a copy needs no headroom.  */ \
      new_vec_ = (VEC (T,A) *)(vec_##A##_o_reserve_exact	\
			       (NULL, len_,			\
				offsetof (VEC(T,A),base.vec), sizeof (T) \
				PASS_MEM_STAT));		\
								\
      new_vec_->base.num = len_;				\
      memcpy (new_vec_->base.vec, vec_->vec, sizeof (T) * len_); \
    }								\
  return new_vec_;						\
}								\
								\
/* Release *VEC_ to the A allocator and clear the pointer.  */	\
static inline void VEC_OP (T,A,free)				\
     (VEC(T,A) **vec_)						\
{								\
  if (*vec_)							\
    vec_##A##_free (*vec_);					\
  *vec_ = NULL;							\
}								\
								\
/* Ensure space for at least ALLOC_ more elements in *VEC_,	\
   reallocating (with exponential headroom) if needed.  Returns	\
   nonzero iff a reallocation occurred.  */			\
static inline int VEC_OP (T,A,reserve)				\
     (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)	\
{								\
  int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_	\
				       VEC_CHECK_PASS);		\
								\
  if (extend)							\
    *vec_ = (VEC(T,A) *) vec_##A##_o_reserve (*vec_, alloc_,	\
					      offsetof (VEC(T,A),base.vec), \
					      sizeof (T)	\
					      PASS_MEM_STAT);	\
								\
  return extend;						\
}								\
								\
/* As reserve, but grow by exactly ALLOC_ slots -- no		\
   exponential over-allocation.  */				\
static inline int VEC_OP (T,A,reserve_exact)			\
     (VEC(T,A) **vec_, int alloc_ VEC_CHECK_DECL MEM_STAT_DECL)	\
{								\
  int extend = !VEC_OP (T,base,space) (VEC_BASE(*vec_), alloc_	\
				       VEC_CHECK_PASS);		\
								\
  if (extend)							\
    *vec_ = (VEC(T,A) *) vec_##A##_o_reserve_exact		\
	    (*vec_, alloc_, offsetof (VEC(T,A),base.vec),	\
	     sizeof (T) PASS_MEM_STAT);				\
								\
  return extend;						\
}								\
								\
/* Grow *VEC_ to SIZE_ elements, reallocating if needed; new	\
   elements are uninitialized.  SIZE_ must not shrink the	\
   vector (asserted).  */					\
static inline void VEC_OP (T,A,safe_grow)			\
     (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL)	\
{								\
  VEC_ASSERT (size_ >= 0					\
	      && VEC_OP(T,base,length) VEC_BASE(*vec_) <= (unsigned)size_, \
	      "grow", T, A);					\
  VEC_OP (T,A,reserve_exact) (vec_,				\
			      size_ - (int)(*vec_ ?		\
VEC_BASE(*vec_)->num : 0) \ 1274 VEC_CHECK_PASS PASS_MEM_STAT); \ 1275 VEC_BASE (*vec_)->num = size_; \ 1276} \ 1277 \ 1278static inline void VEC_OP (T,A,safe_grow_cleared) \ 1279 (VEC(T,A) **vec_, int size_ VEC_CHECK_DECL MEM_STAT_DECL) \ 1280{ \ 1281 int oldsize = VEC_OP(T,base,length) VEC_BASE(*vec_); \ 1282 VEC_OP (T,A,safe_grow) (vec_, size_ VEC_CHECK_PASS PASS_MEM_STAT); \ 1283 memset (&(VEC_OP (T,base,address) VEC_BASE(*vec_))[oldsize], 0, \ 1284 sizeof (T) * (size_ - oldsize)); \ 1285} \ 1286 \ 1287static inline void VEC_OP(T,A,safe_splice) \ 1288 (VEC(T,A) **dst_, VEC(T,base) *src_ VEC_CHECK_DECL MEM_STAT_DECL) \ 1289{ \ 1290 if (src_) \ 1291 { \ 1292 VEC_OP (T,A,reserve_exact) (dst_, src_->num \ 1293 VEC_CHECK_PASS MEM_STAT_INFO); \ 1294 \ 1295 VEC_OP (T,base,splice) (VEC_BASE (*dst_), src_ \ 1296 VEC_CHECK_PASS); \ 1297 } \ 1298} \ 1299 \ 1300static inline T *VEC_OP (T,A,safe_push) \ 1301 (VEC(T,A) **vec_, const T obj_ VEC_CHECK_DECL MEM_STAT_DECL) \ 1302{ \ 1303 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \ 1304 \ 1305 return VEC_OP (T,base,quick_push) (VEC_BASE(*vec_), obj_ VEC_CHECK_PASS); \ 1306} \ 1307 \ 1308static inline T *VEC_OP (T,A,safe_insert) \ 1309 (VEC(T,A) **vec_, unsigned ix_, const T obj_ \ 1310 VEC_CHECK_DECL MEM_STAT_DECL) \ 1311{ \ 1312 VEC_OP (T,A,reserve) (vec_, 1 VEC_CHECK_PASS PASS_MEM_STAT); \ 1313 \ 1314 return VEC_OP (T,base,quick_insert) (VEC_BASE(*vec_), ix_, obj_ \ 1315 VEC_CHECK_PASS); \ 1316} 1317 1318/* We support a vector which starts out with space on the stack and 1319 switches to heap space when forced to reallocate. This works a 1320 little differently. Instead of DEF_VEC_ALLOC_P(TYPE, heap|gc), use 1321 DEF_VEC_ALLOC_P_STACK(TYPE). 
   This uses alloca to get the initial
   space; because alloca cannot be usefully called in an inline
   function, and because a macro cannot define a macro, you must then
   write a #define for each type:

   #define VEC_{TYPE}_stack_alloc(alloc) \
     VEC_stack_alloc({TYPE}, alloc)

   This is really a hack and perhaps can be made better.  Note that
   this macro will wind up evaluating the ALLOC parameter twice.

   Only the initial allocation will be made using alloca, so pass a
   reasonable estimate that doesn't use too much stack space; don't
   pass zero.  Don't return a VEC(TYPE,stack) vector from the function
   which allocated it.  */

/* Out-of-line helpers (defined in vec.c) that move a stack vector
   to the heap on reallocation, and free it only if it was moved.  */
extern void *vec_stack_p_reserve (void *, int MEM_STAT_DECL);
extern void *vec_stack_p_reserve_exact (void *, int MEM_STAT_DECL);
extern void *vec_stack_p_reserve_exact_1 (int, void *);
extern void *vec_stack_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
extern void *vec_stack_o_reserve_exact (void *, int, size_t, size_t
					MEM_STAT_DECL);
extern void vec_stack_free (void *);

/* alloca the embedded storage here, at the macro expansion site, and
   hand it to alloc1; the extra name/line/function parameters exist
   only to match the statistics-gathering call signature.  */
#ifdef GATHER_STATISTICS
#define VEC_stack_alloc(T,alloc,name,line,function)		\
  (VEC_OP (T,stack,alloc1)					\
   (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
#else
#define VEC_stack_alloc(T,alloc)				\
  (VEC_OP (T,stack,alloc1)					\
   (alloc, XALLOCAVAR (VEC(T,stack), VEC_embedded_size (T, alloc))))
#endif

/* Vector of pointers with initially-on-stack storage.  */
#define DEF_VEC_ALLOC_P_STACK(T)				\
VEC_TA(T,base,stack);						\
DEF_VEC_ALLOC_FUNC_P_STACK(T)					\
DEF_VEC_NONALLOC_FUNCS_P(T,stack)				\
struct vec_swallow_trailing_semi

/* alloc1 initializes the caller-provided (alloca'd) SPACE rather
   than allocating anew.  */
#define DEF_VEC_ALLOC_FUNC_P_STACK(T)				\
static inline VEC(T,stack) *VEC_OP (T,stack,alloc1)		\
     (int alloc_, VEC(T,stack)* space)				\
{								\
  return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
}

/* Vector of objects with initially-on-stack storage.  */
#define DEF_VEC_ALLOC_O_STACK(T)				\
VEC_TA(T,base,stack);						\
DEF_VEC_ALLOC_FUNC_O_STACK(T)					\
DEF_VEC_NONALLOC_FUNCS_O(T,stack)				\
struct vec_swallow_trailing_semi

/* alloc1 initializes the caller-provided (alloca'd) SPACE rather
   than allocating anew.  NOTE(review): this reuses the pointer-vector
   helper vec_stack_p_reserve_exact_1 for the object variant too --
   presumably valid because SPACE was already sized with
   VEC_embedded_size and the helper only records it; confirm against
   vec.c.  */
#define DEF_VEC_ALLOC_FUNC_O_STACK(T)				\
static inline VEC(T,stack) *VEC_OP (T,stack,alloc1)		\
     (int alloc_, VEC(T,stack)* space)				\
{								\
  return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
}

/* Vector of integer-like objects with initially-on-stack storage.  */
#define DEF_VEC_ALLOC_I_STACK(T)				\
VEC_TA(T,base,stack);						\
DEF_VEC_ALLOC_FUNC_I_STACK(T)					\
DEF_VEC_NONALLOC_FUNCS_I(T,stack)				\
struct vec_swallow_trailing_semi

/* alloc1 initializes the caller-provided (alloca'd) SPACE rather
   than allocating anew.  */
#define DEF_VEC_ALLOC_FUNC_I_STACK(T)				\
static inline VEC(T,stack) *VEC_OP (T,stack,alloc1)		\
     (int alloc_, VEC(T,stack)* space)				\
{								\
  return (VEC(T,stack) *) vec_stack_p_reserve_exact_1 (alloc_, space); \
}

#endif /* GCC_VEC_H */