]> Git Repo - binutils.git/blob - gdb/common/vec.h
update copyright year range in GDB files
[binutils.git] / gdb / common / vec.h
1 /* Vector API for GDB.
2    Copyright (C) 2004-2017 Free Software Foundation, Inc.
3    Contributed by Nathan Sidwell <[email protected]>
4
5    This file is part of GDB.
6
7    This program is free software; you can redistribute it and/or modify
8    it under the terms of the GNU General Public License as published by
9    the Free Software Foundation; either version 3 of the License, or
10    (at your option) any later version.
11
12    This program is distributed in the hope that it will be useful,
13    but WITHOUT ANY WARRANTY; without even the implied warranty of
14    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15    GNU General Public License for more details.
16
17    You should have received a copy of the GNU General Public License
18    along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
19
20 #if !defined (GDB_VEC_H)
21 #define GDB_VEC_H
22
23 /* The macros here implement a set of templated vector types and
24    associated interfaces.  These templates are implemented with
25    macros, as we're not in C++ land.  The interface functions are
26    typesafe and use static inline functions, sometimes backed by
27    out-of-line generic functions.
28
29    Because of the different behavior of structure objects, scalar
30    objects and of pointers, there are three flavors, one for each of
31    these variants.  Both the structure object and pointer variants
32    pass pointers to objects around -- in the former case the pointers
33    are stored into the vector and in the latter case the pointers are
34    dereferenced and the objects copied into the vector.  The scalar
35    object variant is suitable for int-like objects, and the vector
36    elements are returned by value.
37
38    There are both 'index' and 'iterate' accessors.  The iterator
39    returns a boolean iteration condition and updates the iteration
40    variable passed by reference.  Because the iterator will be
41    inlined, the address-of can be optimized away.
42
43    The vectors are implemented using the trailing array idiom, thus
44    they are not resizeable without changing the address of the vector
45    object itself.  This means you cannot have variables or fields of
46    vector type -- always use a pointer to a vector.  The one exception
47    is the final field of a structure, which could be a vector type.
48    You will have to use the embedded_size & embedded_init calls to
49    create such objects, and they will probably not be resizeable (so
50    don't use the 'safe' allocation variants).  The trailing array
51    idiom is used (rather than a pointer to an array of data), because,
52    if we allow NULL to also represent an empty vector, empty vectors
53    occupy minimal space in the structure containing them.
54
55    Each operation that increases the number of active elements is
56    available in 'quick' and 'safe' variants.  The former presumes that
57    there is sufficient allocated space for the operation to succeed
58    (it dies if there is not).  The latter will reallocate the
59    vector, if needed.  Reallocation causes an exponential increase in
60    vector size.  If you know you will be adding N elements, it would
61    be more efficient to use the reserve operation before adding the
62    elements with the 'quick' operation.  This will ensure there are at
63    least as many elements as you ask for, it will exponentially
64    increase if there are too few spare slots.  If you want reserve a
65    specific number of slots, but do not want the exponential increase
66    (for instance, you know this is the last allocation), use a
67    negative number for reservation.  You can also create a vector of a
68    specific size from the get go.
69
70    You should prefer the push and pop operations, as they append and
71    remove from the end of the vector.  If you need to remove several
72    items in one go, use the truncate operation.  The insert and remove
73    operations allow you to change elements in the middle of the
74    vector.  There are two remove operations, one which preserves the
75    element ordering 'ordered_remove', and one which does not
76    'unordered_remove'.  The latter function copies the end element
77    into the removed slot, rather than invoke a memmove operation.  The
78    'lower_bound' function will determine where to place an item in the
79    array using insert that will maintain sorted order.
80
81    If you need to directly manipulate a vector, then the 'address'
82    accessor will return the address of the start of the vector.  Also
83    the 'space' predicate will tell you whether there is spare capacity
84    in the vector.  You will not normally need to use these two functions.
85
86    Vector types are defined using a DEF_VEC_{O,P,I}(TYPEDEF) macro.
87    Variables of vector type are declared using a VEC(TYPEDEF) macro.
88    The characters O, P and I indicate whether TYPEDEF is a pointer
89    (P), object (O) or integral (I) type.  Be careful to pick the
90    correct one, as you'll get an awkward and inefficient API if you
91    use the wrong one.  There is a check, which results in a
92    compile-time warning, for the P and I versions, but there is no
93    check for the O versions, as that is not possible in plain C.
94
95    An example of their use would be,
96
97    DEF_VEC_P(tree);   // non-managed tree vector.
98
99    struct my_struct {
100      VEC(tree) *v;      // A (pointer to) a vector of tree pointers.
101    };
102
103    struct my_struct *s;
104
105    if (VEC_length(tree, s->v)) { we have some contents }
106    VEC_safe_push(tree, s->v, decl); // append some decl onto the end
107    for (ix = 0; VEC_iterate(tree, s->v, ix, elt); ix++)
108      { do something with elt }
109
110 */
111
112 /* Macros to invoke API calls.  A single macro works for both pointer
113    and object vectors, but the argument and return types might well be
114    different.  In each macro, T is the typedef of the vector elements.
115    Some of these macros pass the vector, V, by reference (by taking
116    its address), this is noted in the descriptions.  */
117
118 /* Length of vector
119    unsigned VEC_T_length(const VEC(T) *v);
120
121    Return the number of active elements in V.  V can be NULL, in which
122    case zero is returned.  */
123
124 #define VEC_length(T,V) (VEC_OP(T,length)(V))
125
126
127 /* Check if vector is empty
128    int VEC_T_empty(const VEC(T) *v);
129
130    Return nonzero if V is an empty vector (or V is NULL), zero otherwise.  */
131
132 #define VEC_empty(T,V)  (VEC_length (T,V) == 0)
133
134
135 /* Get the final element of the vector.
136    T VEC_T_last(VEC(T) *v); // Integer
137    T VEC_T_last(VEC(T) *v); // Pointer
138    T *VEC_T_last(VEC(T) *v); // Object
139
140    Return the final element.  V must not be empty.  */
141
142 #define VEC_last(T,V)   (VEC_OP(T,last)(V VEC_ASSERT_INFO))
143
144 /* Index into vector
145    T VEC_T_index(VEC(T) *v, unsigned ix); // Integer
146    T VEC_T_index(VEC(T) *v, unsigned ix); // Pointer
147    T *VEC_T_index(VEC(T) *v, unsigned ix); // Object
148
149    Return the IX'th element.  If IX must be in the domain of V.  */
150
151 #define VEC_index(T,V,I) (VEC_OP(T,index)(V,I VEC_ASSERT_INFO))
152
153 /* Iterate over vector
154    int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Integer
155    int VEC_T_iterate(VEC(T) *v, unsigned ix, T &ptr); // Pointer
156    int VEC_T_iterate(VEC(T) *v, unsigned ix, T *&ptr); // Object
157
158    Return iteration condition and update PTR to point to the IX'th
159    element.  At the end of iteration, sets PTR to NULL.  Use this to
160    iterate over the elements of a vector as follows,
161
162      for (ix = 0; VEC_iterate(T,v,ix,ptr); ix++)
163        continue;  */
164
165 #define VEC_iterate(T,V,I,P)    (VEC_OP(T,iterate)(V,I,&(P)))
166
167 /* Allocate new vector.
168    VEC(T,A) *VEC_T_alloc(int reserve);
169
170    Allocate a new vector with space for RESERVE objects.  If RESERVE
171    is zero, NO vector is created.  */
172
173 #define VEC_alloc(T,N)  (VEC_OP(T,alloc)(N))
174
175 /* Free a vector.
176    void VEC_T_free(VEC(T,A) *&);
177
178    Free a vector and set it to NULL.  */
179
180 #define VEC_free(T,V)   (VEC_OP(T,free)(&V))
181
182 /* A cleanup function for a vector.
183    void VEC_T_cleanup(void *);
184    
185    Clean up a vector.  */
186
187 #define VEC_cleanup(T)  (VEC_OP(T,cleanup))
188
189 /* Use these to determine the required size and initialization of a
190    vector embedded within another structure (as the final member).
191
192    size_t VEC_T_embedded_size(int reserve);
193    void VEC_T_embedded_init(VEC(T) *v, int reserve);
194
195    These allow the caller to perform the memory allocation.  */
196
197 #define VEC_embedded_size(T,N)   (VEC_OP(T,embedded_size)(N))
198 #define VEC_embedded_init(T,O,N) (VEC_OP(T,embedded_init)(VEC_BASE(O),N))
199
200 /* Copy a vector.
201    VEC(T,A) *VEC_T_copy(VEC(T) *);
202
203    Copy the live elements of a vector into a new vector.  The new and
204    old vectors need not be allocated by the same mechanism.  */
205
206 #define VEC_copy(T,V) (VEC_OP(T,copy)(V))
207
208 /* Merge two vectors.
209    VEC(T,A) *VEC_T_merge(VEC(T) *, VEC(T) *);
210
211    Copy the live elements of both vectors into a new vector.  The new
212    and old vectors need not be allocated by the same mechanism.  */
213 #define VEC_merge(T,V1,V2) (VEC_OP(T,merge)(V1, V2))
214
215 /* Determine if a vector has additional capacity.
216
217    int VEC_T_space (VEC(T) *v,int reserve)
218
219    If V has space for RESERVE additional entries, return nonzero.  You
220    usually only need to use this if you are doing your own vector
221    reallocation, for instance on an embedded vector.  This returns
222    nonzero in exactly the same circumstances that VEC_T_reserve
223    will.  */
224
225 #define VEC_space(T,V,R) (VEC_OP(T,space)(V,R VEC_ASSERT_INFO))
226
227 /* Reserve space.
228    int VEC_T_reserve(VEC(T,A) *&v, int reserve);
229
230    Ensure that V has at least abs(RESERVE) slots available.  The
231    signedness of RESERVE determines the reallocation behavior.  A
232    negative value will not create additional headroom beyond that
233    requested.  A positive value will create additional headroom.  Note
234    this can cause V to be reallocated.  Returns nonzero iff
235    reallocation actually occurred.  */
236
237 #define VEC_reserve(T,V,R) (VEC_OP(T,reserve)(&(V),R VEC_ASSERT_INFO))
238
239 /* Push object with no reallocation
240    T *VEC_T_quick_push (VEC(T) *v, T obj); // Integer
241    T *VEC_T_quick_push (VEC(T) *v, T obj); // Pointer
242    T *VEC_T_quick_push (VEC(T) *v, T *obj); // Object
243
244    Push a new element onto the end, returns a pointer to the slot
245    filled in.  For object vectors, the new value can be NULL, in which
246    case NO initialization is performed.  There must
247    be sufficient space in the vector.  */
248
249 #define VEC_quick_push(T,V,O) (VEC_OP(T,quick_push)(V,O VEC_ASSERT_INFO))
250
251 /* Push object with reallocation
252    T *VEC_T_safe_push (VEC(T,A) *&v, T obj); // Integer
253    T *VEC_T_safe_push (VEC(T,A) *&v, T obj); // Pointer
254    T *VEC_T_safe_push (VEC(T,A) *&v, T *obj); // Object
255
256    Push a new element onto the end, returns a pointer to the slot
257    filled in.  For object vectors, the new value can be NULL, in which
258    case NO initialization is performed.  Reallocates V, if needed.  */
259
260 #define VEC_safe_push(T,V,O) (VEC_OP(T,safe_push)(&(V),O VEC_ASSERT_INFO))
261
262 /* Pop element off end
263    T VEC_T_pop (VEC(T) *v);             // Integer
264    T VEC_T_pop (VEC(T) *v);             // Pointer
265    void VEC_T_pop (VEC(T) *v);          // Object
266
267    Pop the last element off the end.  Returns the element popped, for
268    pointer vectors.  */
269
270 #define VEC_pop(T,V)    (VEC_OP(T,pop)(V VEC_ASSERT_INFO))
271
272 /* Truncate to specific length
273    void VEC_T_truncate (VEC(T) *v, unsigned len);
274
275    Set the length as specified.  The new length must be less than or
276    equal to the current length.  This is an O(1) operation.  */
277
278 #define VEC_truncate(T,V,I)             \
279         (VEC_OP(T,truncate)(V,I VEC_ASSERT_INFO))
280
281 /* Grow to a specific length.
282    void VEC_T_safe_grow (VEC(T,A) *&v, int len);
283
284    Grow the vector to a specific length.  The LEN must be as
285    long or longer than the current length.  The new elements are
286    uninitialized.  */
287
288 #define VEC_safe_grow(T,V,I)            \
289         (VEC_OP(T,safe_grow)(&(V),I VEC_ASSERT_INFO))
290
291 /* Replace element
292    T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Integer
293    T VEC_T_replace (VEC(T) *v, unsigned ix, T val); // Pointer
294    T *VEC_T_replace (VEC(T) *v, unsigned ix, T *val);  // Object
295
296    Replace the IXth element of V with a new value, VAL.  For pointer
297    vectors returns the original value.  For object vectors returns a
298    pointer to the new value.  For object vectors the new value can be
299    NULL, in which case no overwriting of the slot is actually
300    performed.  */
301
302 #define VEC_replace(T,V,I,O) (VEC_OP(T,replace)(V,I,O VEC_ASSERT_INFO))
303
304 /* Insert object with no reallocation
305    T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Integer
306    T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T val); // Pointer
307    T *VEC_T_quick_insert (VEC(T) *v, unsigned ix, T *val); // Object
308
309    Insert an element, VAL, at the IXth position of V.  Return a pointer
310    to the slot created.  For vectors of object, the new value can be
311    NULL, in which case no initialization of the inserted slot takes
312    place.  There must be sufficient space.  */
313
314 #define VEC_quick_insert(T,V,I,O) \
315         (VEC_OP(T,quick_insert)(V,I,O VEC_ASSERT_INFO))
316
317 /* Insert object with reallocation
318    T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Integer
319    T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T val); // Pointer
320    T *VEC_T_safe_insert (VEC(T,A) *&v, unsigned ix, T *val); // Object
321
322    Insert an element, VAL, at the IXth position of V.  Return a pointer
323    to the slot created.  For vectors of object, the new value can be
324    NULL, in which case no initialization of the inserted slot takes
325    place.  Reallocate V, if necessary.  */
326
327 #define VEC_safe_insert(T,V,I,O)        \
328         (VEC_OP(T,safe_insert)(&(V),I,O VEC_ASSERT_INFO))
329
330 /* Remove element retaining order
331    T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Integer
332    T VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Pointer
333    void VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Object
334
335    Remove an element from the IXth position of V.  Ordering of
336    remaining elements is preserved.  For pointer vectors returns the
337    removed object.  This is an O(N) operation due to a memmove.  */
338
339 #define VEC_ordered_remove(T,V,I)       \
340         (VEC_OP(T,ordered_remove)(V,I VEC_ASSERT_INFO))
341
342 /* Remove element destroying order
343    T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Integer
344    T VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Pointer
345    void VEC_T_unordered_remove (VEC(T) *v, unsigned ix); // Object
346
347    Remove an element from the IXth position of V.  Ordering of
348    remaining elements is destroyed.  For pointer vectors returns the
349    removed object.  This is an O(1) operation.  */
350
351 #define VEC_unordered_remove(T,V,I)     \
352         (VEC_OP(T,unordered_remove)(V,I VEC_ASSERT_INFO))
353
354 /* Remove a block of elements
355    void VEC_T_block_remove (VEC(T) *v, unsigned ix, unsigned len);
356
357    Remove LEN elements starting at the IXth.  Ordering is retained.
358    This is an O(N) operation due to memmove.  */
359
360 #define VEC_block_remove(T,V,I,L)       \
361         (VEC_OP(T,block_remove)(V,I,L VEC_ASSERT_INFO))
362
363 /* Get the address of the array of elements
364    T *VEC_T_address (VEC(T) v)
365
366    If you need to directly manipulate the array (for instance, you
367    want to feed it to qsort), use this accessor.  */
368
369 #define VEC_address(T,V)                (VEC_OP(T,address)(V))
370
371 /* Find the first index in the vector not less than the object.
372    unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
373                                int (*lessthan) (const T, const T)); // Integer
374    unsigned VEC_T_lower_bound (VEC(T) *v, const T val,
375                                int (*lessthan) (const T, const T)); // Pointer
376    unsigned VEC_T_lower_bound (VEC(T) *v, const T *val,
377                                int (*lessthan) (const T*, const T*)); // Object
378
379    Find the first position in which VAL could be inserted without
380    changing the ordering of V.  LESSTHAN is a function that returns
381    true if the first argument is strictly less than the second.  */
382
383 #define VEC_lower_bound(T,V,O,LT)    \
384        (VEC_OP(T,lower_bound)(V,O,LT VEC_ASSERT_INFO))
385
386 /* Reallocate an array of elements with prefix.  */
387 extern void *vec_p_reserve (void *, int);
388 extern void *vec_o_reserve (void *, int, size_t, size_t);
389 #define vec_free_(V) xfree (V)
390
391 #define VEC_ASSERT_INFO ,__FILE__,__LINE__
392 #define VEC_ASSERT_DECL ,const char *file_,unsigned line_
393 #define VEC_ASSERT_PASS ,file_,line_
394 #define vec_assert(expr, op) \
395   ((void)((expr) ? 0 : (gdb_assert_fail (op, file_, line_, \
396                                          FUNCTION_NAME), 0)))
397
398 #define VEC(T) VEC_##T
399 #define VEC_OP(T,OP) VEC_##T##_##OP
400
401 #define VEC_T(T)                                                          \
402 typedef struct VEC(T)                                                     \
403 {                                                                         \
404   unsigned num;                                                           \
405   unsigned alloc;                                                         \
406   T vec[1];                                                               \
407 } VEC(T)
408
409 /* Vector of integer-like object.  */
410 #define DEF_VEC_I(T)                                                      \
411 static inline void VEC_OP (T,must_be_integral_type) (void)                \
412 {                                                                         \
413   (void)~(T)0;                                                            \
414 }                                                                         \
415                                                                           \
416 VEC_T(T);                                                                 \
417 DEF_VEC_FUNC_P(T)                                                         \
418 DEF_VEC_ALLOC_FUNC_I(T)                                                   \
419 struct vec_swallow_trailing_semi
420
421 /* Vector of pointer to object.  */
422 #define DEF_VEC_P(T)                                                      \
423 static inline void VEC_OP (T,must_be_pointer_type) (void)                 \
424 {                                                                         \
425   (void)((T)1 == (void *)1);                                              \
426 }                                                                         \
427                                                                           \
428 VEC_T(T);                                                                 \
429 DEF_VEC_FUNC_P(T)                                                         \
430 DEF_VEC_ALLOC_FUNC_P(T)                                                   \
431 struct vec_swallow_trailing_semi
432
433 /* Vector of object.  */
434 #define DEF_VEC_O(T)                                                      \
435 VEC_T(T);                                                                 \
436 DEF_VEC_FUNC_O(T)                                                         \
437 DEF_VEC_ALLOC_FUNC_O(T)                                                   \
438 struct vec_swallow_trailing_semi
439
440 /* Avoid offsetof (or its usual C implementation) as it triggers
441    -Winvalid-offsetof warnings with enum_flags types with G++ <= 4.4,
442    even though those types are memcpyable.  This requires allocating a
443    dummy local VEC in all routines that use this, but that has the
444    advantage that it only works if T is default constructible, which
445    is exactly a check we want, to keep C compatibility.  */
446 #define vec_offset(T, VPTR) ((size_t) ((char *) &(VPTR)->vec - (char *) VPTR))
447
448 #define DEF_VEC_ALLOC_FUNC_I(T)                                           \
449 static inline VEC(T) *VEC_OP (T,alloc)                                    \
450      (int alloc_)                                                         \
451 {                                                                         \
452   VEC(T) dummy;                                                           \
453                                                                           \
454   /* We must request exact size allocation, hence the negation.  */       \
455   return (VEC(T) *) vec_o_reserve (NULL, -alloc_,                         \
456                                    vec_offset (T, &dummy), sizeof (T));   \
457 }                                                                         \
458                                                                           \
459 static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_)                      \
460 {                                                                         \
461   size_t len_ = vec_ ? vec_->num : 0;                                     \
462   VEC (T) *new_vec_ = NULL;                                               \
463                                                                           \
464   if (len_)                                                               \
465     {                                                                     \
466       VEC(T) dummy;                                                       \
467                                                                           \
468       /* We must request exact size allocation, hence the negation.  */   \
469       new_vec_ = (VEC (T) *)                                              \
470         vec_o_reserve (NULL, -len_, vec_offset (T, &dummy), sizeof (T));        \
471                                                                           \
472       new_vec_->num = len_;                                               \
473       memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_);               \
474     }                                                                     \
475   return new_vec_;                                                        \
476 }                                                                         \
477                                                                           \
478 static inline VEC(T) *VEC_OP (T,merge) (VEC(T) *vec1_, VEC(T) *vec2_)     \
479 {                                                                         \
480   if (vec1_ && vec2_)                                                     \
481     {                                                                     \
482       VEC(T) dummy;                                                       \
483       size_t len_ = vec1_->num + vec2_->num;                              \
484       VEC (T) *new_vec_ = NULL;                                           \
485                                                                           \
486       /* We must request exact size allocation, hence the negation.  */   \
487       new_vec_ = (VEC (T) *)                                              \
488         vec_o_reserve (NULL, -len_, vec_offset (T, &dummy), sizeof (T));          \
489                                                                           \
490       new_vec_->num = len_;                                               \
491       memcpy (new_vec_->vec, vec1_->vec, sizeof (T) * vec1_->num);        \
492       memcpy (new_vec_->vec + vec1_->num, vec2_->vec,                     \
493               sizeof (T) * vec2_->num);                                   \
494                                                                           \
495       return new_vec_;                                                    \
496     }                                                                     \
497   else                                                                    \
498     return VEC_copy (T, vec1_ ? vec1_ : vec2_);                           \
499 }                                                                         \
500                                                                           \
501 static inline void VEC_OP (T,free)                                        \
502      (VEC(T) **vec_)                                                      \
503 {                                                                         \
504   if (*vec_)                                                              \
505     vec_free_ (*vec_);                                                    \
506   *vec_ = NULL;                                                           \
507 }                                                                         \
508                                                                           \
509 static inline void VEC_OP (T,cleanup)                                     \
510      (void *arg_)                                                         \
511 {                                                                         \
512   VEC(T) **vec_ = (VEC(T) **) arg_;                                       \
513   if (*vec_)                                                              \
514     vec_free_ (*vec_);                                                    \
515   *vec_ = NULL;                                                           \
516 }                                                                         \
517                                                                           \
518 static inline int VEC_OP (T,reserve)                                      \
519      (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL)                          \
520 {                                                                         \
521   VEC(T) dummy;                                                           \
522   int extend = !VEC_OP (T,space)                                          \
523         (*vec_, alloc_ < 0 ? -alloc_ : alloc_ VEC_ASSERT_PASS);           \
524                                                                           \
525   if (extend)                                                             \
526     *vec_ = (VEC(T) *) vec_o_reserve (*vec_, alloc_,                      \
527                                       vec_offset (T, &dummy), sizeof (T)); \
528                                                                           \
529   return extend;                                                          \
530 }                                                                         \
531                                                                           \
532 static inline void VEC_OP (T,safe_grow)                                   \
533      (VEC(T) **vec_, int size_ VEC_ASSERT_DECL)                           \
534 {                                                                         \
535   vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_,  \
536         "safe_grow");                                                     \
537   VEC_OP (T,reserve) (vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_       \
538                         VEC_ASSERT_PASS);                                 \
539   (*vec_)->num = size_;                                                   \
540 }                                                                         \
541                                                                           \
542 static inline T *VEC_OP (T,safe_push)                                     \
543      (VEC(T) **vec_, const T obj_ VEC_ASSERT_DECL)                        \
544 {                                                                         \
545   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);                           \
546                                                                           \
547   return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS);             \
548 }                                                                         \
549                                                                           \
550 static inline T *VEC_OP (T,safe_insert)                                   \
551      (VEC(T) **vec_, unsigned ix_, const T obj_ VEC_ASSERT_DECL)          \
552 {                                                                         \
553   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);                           \
554                                                                           \
555   return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS);      \
556 }
557
558 #define DEF_VEC_FUNC_P(T)                                                 \
559 static inline unsigned VEC_OP (T,length) (const VEC(T) *vec_)             \
560 {                                                                         \
561   return vec_ ? vec_->num : 0;                                            \
562 }                                                                         \
563                                                                           \
564 static inline T VEC_OP (T,last)                                           \
565         (const VEC(T) *vec_ VEC_ASSERT_DECL)                              \
566 {                                                                         \
567   vec_assert (vec_ && vec_->num, "last");                                 \
568                                                                           \
569   return vec_->vec[vec_->num - 1];                                        \
570 }                                                                         \
571                                                                           \
572 static inline T VEC_OP (T,index)                                          \
573      (const VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)                   \
574 {                                                                         \
575   vec_assert (vec_ && ix_ < vec_->num, "index");                          \
576                                                                           \
577   return vec_->vec[ix_];                                                  \
578 }                                                                         \
579                                                                           \
580 static inline int VEC_OP (T,iterate)                                      \
581      (const VEC(T) *vec_, unsigned ix_, T *ptr)                           \
582 {                                                                         \
583   if (vec_ && ix_ < vec_->num)                                            \
584     {                                                                     \
585       *ptr = vec_->vec[ix_];                                              \
586       return 1;                                                           \
587     }                                                                     \
588   else                                                                    \
589     {                                                                     \
590       *ptr = (T) 0;                                                       \
591       return 0;                                                           \
592     }                                                                     \
593 }                                                                         \
594                                                                           \
595 static inline size_t VEC_OP (T,embedded_size)                             \
596      (int alloc_)                                                         \
597 {                                                                         \
598   VEC(T) dummy;                                                           \
599                                                                           \
600   return vec_offset (T, &dummy) + alloc_ * sizeof(T);                     \
601 }                                                                         \
602                                                                           \
603 static inline void VEC_OP (T,embedded_init)                               \
604      (VEC(T) *vec_, int alloc_)                                           \
605 {                                                                         \
606   vec_->num = 0;                                                          \
607   vec_->alloc = alloc_;                                                   \
608 }                                                                         \
609                                                                           \
610 static inline int VEC_OP (T,space)                                        \
611      (VEC(T) *vec_, int alloc_ VEC_ASSERT_DECL)                           \
612 {                                                                         \
613   vec_assert (alloc_ >= 0, "space");                                      \
614   return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_;    \
615 }                                                                         \
616                                                                           \
617 static inline T *VEC_OP (T,quick_push)                                    \
618      (VEC(T) *vec_, T obj_ VEC_ASSERT_DECL)                               \
619 {                                                                         \
620   T *slot_;                                                               \
621                                                                           \
622   vec_assert (vec_->num < vec_->alloc, "quick_push");                     \
623   slot_ = &vec_->vec[vec_->num++];                                        \
624   *slot_ = obj_;                                                          \
625                                                                           \
626   return slot_;                                                           \
627 }                                                                         \
628                                                                           \
629 static inline T VEC_OP (T,pop) (VEC(T) *vec_ VEC_ASSERT_DECL)             \
630 {                                                                         \
631   T obj_;                                                                 \
632                                                                           \
633   vec_assert (vec_->num, "pop");                                          \
634   obj_ = vec_->vec[--vec_->num];                                          \
635                                                                           \
636   return obj_;                                                            \
637 }                                                                         \
638                                                                           \
639 static inline void VEC_OP (T,truncate)                                    \
640      (VEC(T) *vec_, unsigned size_ VEC_ASSERT_DECL)                       \
641 {                                                                         \
642   vec_assert (vec_ ? vec_->num >= size_ : !size_, "truncate");            \
643   if (vec_)                                                               \
644     vec_->num = size_;                                                    \
645 }                                                                         \
646                                                                           \
647 static inline T VEC_OP (T,replace)                                        \
648      (VEC(T) *vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL)                 \
649 {                                                                         \
650   T old_obj_;                                                             \
651                                                                           \
652   vec_assert (ix_ < vec_->num, "replace");                                \
653   old_obj_ = vec_->vec[ix_];                                              \
654   vec_->vec[ix_] = obj_;                                                  \
655                                                                           \
656   return old_obj_;                                                        \
657 }                                                                         \
658                                                                           \
659 static inline T *VEC_OP (T,quick_insert)                                  \
660      (VEC(T) *vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL)                 \
661 {                                                                         \
662   T *slot_;                                                               \
663                                                                           \
664   vec_assert (vec_->num < vec_->alloc && ix_ <= vec_->num, "quick_insert"); \
665   slot_ = &vec_->vec[ix_];                                                \
666   memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T));           \
667   *slot_ = obj_;                                                          \
668                                                                           \
669   return slot_;                                                           \
670 }                                                                         \
671                                                                           \
672 static inline T VEC_OP (T,ordered_remove)                                 \
673      (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)                         \
674 {                                                                         \
675   T *slot_;                                                               \
676   T obj_;                                                                 \
677                                                                           \
678   vec_assert (ix_ < vec_->num, "ordered_remove");                         \
679   slot_ = &vec_->vec[ix_];                                                \
680   obj_ = *slot_;                                                          \
681   memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T));           \
682                                                                           \
683   return obj_;                                                            \
684 }                                                                         \
685                                                                           \
686 static inline T VEC_OP (T,unordered_remove)                               \
687      (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)                         \
688 {                                                                         \
689   T *slot_;                                                               \
690   T obj_;                                                                 \
691                                                                           \
692   vec_assert (ix_ < vec_->num, "unordered_remove");                       \
693   slot_ = &vec_->vec[ix_];                                                \
694   obj_ = *slot_;                                                          \
695   *slot_ = vec_->vec[--vec_->num];                                        \
696                                                                           \
697   return obj_;                                                            \
698 }                                                                         \
699                                                                           \
700 static inline void VEC_OP (T,block_remove)                                \
701      (VEC(T) *vec_, unsigned ix_, unsigned len_ VEC_ASSERT_DECL)          \
702 {                                                                         \
703   T *slot_;                                                               \
704                                                                           \
705   vec_assert (ix_ + len_ <= vec_->num, "block_remove");                   \
706   slot_ = &vec_->vec[ix_];                                                \
707   vec_->num -= len_;                                                      \
708   memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T));          \
709 }                                                                         \
710                                                                           \
711 static inline T *VEC_OP (T,address)                                       \
712      (VEC(T) *vec_)                                                       \
713 {                                                                         \
714   return vec_ ? vec_->vec : 0;                                            \
715 }                                                                         \
716                                                                           \
717 static inline unsigned VEC_OP (T,lower_bound)                             \
718      (VEC(T) *vec_, const T obj_,                                         \
719       int (*lessthan_)(const T, const T) VEC_ASSERT_DECL)                 \
720 {                                                                         \
721    unsigned int len_ = VEC_OP (T, length) (vec_);                         \
722    unsigned int half_, middle_;                                           \
723    unsigned int first_ = 0;                                               \
724    while (len_ > 0)                                                       \
725      {                                                                    \
726         T middle_elem_;                                                   \
727         half_ = len_ >> 1;                                                \
728         middle_ = first_;                                                 \
729         middle_ += half_;                                                 \
730         middle_elem_ = VEC_OP (T,index) (vec_, middle_ VEC_ASSERT_PASS);  \
731         if (lessthan_ (middle_elem_, obj_))                               \
732           {                                                               \
733              first_ = middle_;                                            \
734              ++first_;                                                    \
735              len_ = len_ - half_ - 1;                                     \
736           }                                                               \
737         else                                                              \
738           len_ = half_;                                                   \
739      }                                                                    \
740    return first_;                                                         \
741 }
742
743 #define DEF_VEC_ALLOC_FUNC_P(T)                                           \
744 static inline VEC(T) *VEC_OP (T,alloc)                                    \
745      (int alloc_)                                                         \
746 {                                                                         \
747   /* We must request exact size allocation, hence the negation.  */       \
748   return (VEC(T) *) vec_p_reserve (NULL, -alloc_);                        \
749 }                                                                         \
750                                                                           \
751 static inline void VEC_OP (T,free)                                        \
752      (VEC(T) **vec_)                                                      \
753 {                                                                         \
754   if (*vec_)                                                              \
755     vec_free_ (*vec_);                                                    \
756   *vec_ = NULL;                                                           \
757 }                                                                         \
758                                                                           \
759 static inline void VEC_OP (T,cleanup)                                     \
760      (void *arg_)                                                         \
761 {                                                                         \
762   VEC(T) **vec_ = (VEC(T) **) arg_;                                       \
763   if (*vec_)                                                              \
764     vec_free_ (*vec_);                                                    \
765   *vec_ = NULL;                                                           \
766 }                                                                         \
767                                                                           \
768 static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_)                      \
769 {                                                                         \
770   size_t len_ = vec_ ? vec_->num : 0;                                     \
771   VEC (T) *new_vec_ = NULL;                                               \
772                                                                           \
773   if (len_)                                                               \
774     {                                                                     \
775       /* We must request exact size allocation, hence the negation.  */   \
776       new_vec_ = (VEC (T) *)(vec_p_reserve (NULL, -len_));                \
777                                                                           \
778       new_vec_->num = len_;                                               \
779       memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_);               \
780     }                                                                     \
781   return new_vec_;                                                        \
782 }                                                                         \
783                                                                           \
784 static inline VEC(T) *VEC_OP (T,merge) (VEC(T) *vec1_, VEC(T) *vec2_)     \
785 {                                                                         \
786   if (vec1_ && vec2_)                                                     \
787     {                                                                     \
788       size_t len_ = vec1_->num + vec2_->num;                              \
789       VEC (T) *new_vec_ = NULL;                                           \
790                                                                           \
791       /* We must request exact size allocation, hence the negation.  */   \
792       new_vec_ = (VEC (T) *)(vec_p_reserve (NULL, -len_));                \
793                                                                           \
794       new_vec_->num = len_;                                               \
795       memcpy (new_vec_->vec, vec1_->vec, sizeof (T) * vec1_->num);        \
796       memcpy (new_vec_->vec + vec1_->num, vec2_->vec,                     \
797               sizeof (T) * vec2_->num);                                   \
798                                                                           \
799       return new_vec_;                                                    \
800     }                                                                     \
801   else                                                                    \
802     return VEC_copy (T, vec1_ ? vec1_ : vec2_);                           \
803 }                                                                         \
804                                                                           \
805 static inline int VEC_OP (T,reserve)                                      \
806      (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL)                          \
807 {                                                                         \
808   int extend = !VEC_OP (T,space)                                          \
809         (*vec_, alloc_ < 0 ? -alloc_ : alloc_ VEC_ASSERT_PASS);           \
810                                                                           \
811   if (extend)                                                             \
812     *vec_ = (VEC(T) *) vec_p_reserve (*vec_, alloc_);                     \
813                                                                           \
814   return extend;                                                          \
815 }                                                                         \
816                                                                           \
817 static inline void VEC_OP (T,safe_grow)                                   \
818      (VEC(T) **vec_, int size_ VEC_ASSERT_DECL)                           \
819 {                                                                         \
820   vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_,  \
821         "safe_grow");                                                     \
822   VEC_OP (T,reserve)                                                      \
823         (vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_ VEC_ASSERT_PASS);  \
824   (*vec_)->num = size_;                                                   \
825 }                                                                         \
826                                                                           \
827 static inline T *VEC_OP (T,safe_push)                                     \
828      (VEC(T) **vec_, T obj_ VEC_ASSERT_DECL)                              \
829 {                                                                         \
830   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);                           \
831                                                                           \
832   return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS);             \
833 }                                                                         \
834                                                                           \
835 static inline T *VEC_OP (T,safe_insert)                                   \
836      (VEC(T) **vec_, unsigned ix_, T obj_ VEC_ASSERT_DECL)                \
837 {                                                                         \
838   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);                           \
839                                                                           \
840   return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS);      \
841 }
842
843 #define DEF_VEC_FUNC_O(T)                                                 \
844 static inline unsigned VEC_OP (T,length) (const VEC(T) *vec_)             \
845 {                                                                         \
846   return vec_ ? vec_->num : 0;                                            \
847 }                                                                         \
848                                                                           \
849 static inline T *VEC_OP (T,last) (VEC(T) *vec_ VEC_ASSERT_DECL)           \
850 {                                                                         \
851   vec_assert (vec_ && vec_->num, "last");                                 \
852                                                                           \
853   return &vec_->vec[vec_->num - 1];                                       \
854 }                                                                         \
855                                                                           \
856 static inline T *VEC_OP (T,index)                                         \
857      (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)                         \
858 {                                                                         \
859   vec_assert (vec_ && ix_ < vec_->num, "index");                          \
860                                                                           \
861   return &vec_->vec[ix_];                                                 \
862 }                                                                         \
863                                                                           \
864 static inline int VEC_OP (T,iterate)                                      \
865      (VEC(T) *vec_, unsigned ix_, T **ptr)                                \
866 {                                                                         \
867   if (vec_ && ix_ < vec_->num)                                            \
868     {                                                                     \
869       *ptr = &vec_->vec[ix_];                                             \
870       return 1;                                                           \
871     }                                                                     \
872   else                                                                    \
873     {                                                                     \
874       *ptr = 0;                                                           \
875       return 0;                                                           \
876     }                                                                     \
877 }                                                                         \
878                                                                           \
879 static inline size_t VEC_OP (T,embedded_size)                             \
880      (int alloc_)                                                         \
881 {                                                                         \
882   VEC(T) dummy;                                                           \
883                                                                           \
884   return vec_offset (T, &dummy) + alloc_ * sizeof(T);                     \
885 }                                                                         \
886                                                                           \
887 static inline void VEC_OP (T,embedded_init)                               \
888      (VEC(T) *vec_, int alloc_)                                           \
889 {                                                                         \
890   vec_->num = 0;                                                          \
891   vec_->alloc = alloc_;                                                   \
892 }                                                                         \
893                                                                           \
894 static inline int VEC_OP (T,space)                                        \
895      (VEC(T) *vec_, int alloc_ VEC_ASSERT_DECL)                           \
896 {                                                                         \
897   vec_assert (alloc_ >= 0, "space");                                      \
898   return vec_ ? vec_->alloc - vec_->num >= (unsigned)alloc_ : !alloc_;    \
899 }                                                                         \
900                                                                           \
901 static inline T *VEC_OP (T,quick_push)                                    \
902      (VEC(T) *vec_, const T *obj_ VEC_ASSERT_DECL)                        \
903 {                                                                         \
904   T *slot_;                                                               \
905                                                                           \
906   vec_assert (vec_->num < vec_->alloc, "quick_push");                     \
907   slot_ = &vec_->vec[vec_->num++];                                        \
908   if (obj_)                                                               \
909     *slot_ = *obj_;                                                       \
910                                                                           \
911   return slot_;                                                           \
912 }                                                                         \
913                                                                           \
914 static inline void VEC_OP (T,pop) (VEC(T) *vec_ VEC_ASSERT_DECL)          \
915 {                                                                         \
916   vec_assert (vec_->num, "pop");                                          \
917   --vec_->num;                                                            \
918 }                                                                         \
919                                                                           \
920 static inline void VEC_OP (T,truncate)                                    \
921      (VEC(T) *vec_, unsigned size_ VEC_ASSERT_DECL)                       \
922 {                                                                         \
923   vec_assert (vec_ ? vec_->num >= size_ : !size_, "truncate");            \
924   if (vec_)                                                               \
925     vec_->num = size_;                                                    \
926 }                                                                         \
927                                                                           \
928 static inline T *VEC_OP (T,replace)                                       \
929      (VEC(T) *vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL)          \
930 {                                                                         \
931   T *slot_;                                                               \
932                                                                           \
933   vec_assert (ix_ < vec_->num, "replace");                                \
934   slot_ = &vec_->vec[ix_];                                                \
935   if (obj_)                                                               \
936     *slot_ = *obj_;                                                       \
937                                                                           \
938   return slot_;                                                           \
939 }                                                                         \
940                                                                           \
941 static inline T *VEC_OP (T,quick_insert)                                  \
942      (VEC(T) *vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL)          \
943 {                                                                         \
944   T *slot_;                                                               \
945                                                                           \
946   vec_assert (vec_->num < vec_->alloc && ix_ <= vec_->num, "quick_insert"); \
947   slot_ = &vec_->vec[ix_];                                                \
948   memmove (slot_ + 1, slot_, (vec_->num++ - ix_) * sizeof (T));           \
949   if (obj_)                                                               \
950     *slot_ = *obj_;                                                       \
951                                                                           \
952   return slot_;                                                           \
953 }                                                                         \
954                                                                           \
955 static inline void VEC_OP (T,ordered_remove)                              \
956      (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)                         \
957 {                                                                         \
958   T *slot_;                                                               \
959                                                                           \
960   vec_assert (ix_ < vec_->num, "ordered_remove");                         \
961   slot_ = &vec_->vec[ix_];                                                \
962   memmove (slot_, slot_ + 1, (--vec_->num - ix_) * sizeof (T));           \
963 }                                                                         \
964                                                                           \
965 static inline void VEC_OP (T,unordered_remove)                            \
966      (VEC(T) *vec_, unsigned ix_ VEC_ASSERT_DECL)                         \
967 {                                                                         \
968   vec_assert (ix_ < vec_->num, "unordered_remove");                       \
969   vec_->vec[ix_] = vec_->vec[--vec_->num];                                \
970 }                                                                         \
971                                                                           \
972 static inline void VEC_OP (T,block_remove)                                \
973      (VEC(T) *vec_, unsigned ix_, unsigned len_ VEC_ASSERT_DECL)          \
974 {                                                                         \
975   T *slot_;                                                               \
976                                                                           \
977   vec_assert (ix_ + len_ <= vec_->num, "block_remove");                   \
978   slot_ = &vec_->vec[ix_];                                                \
979   vec_->num -= len_;                                                      \
980   memmove (slot_, slot_ + len_, (vec_->num - ix_) * sizeof (T));          \
981 }                                                                         \
982                                                                           \
983 static inline T *VEC_OP (T,address)                                       \
984      (VEC(T) *vec_)                                                       \
985 {                                                                         \
986   return vec_ ? vec_->vec : 0;                                            \
987 }                                                                         \
988                                                                           \
989 static inline unsigned VEC_OP (T,lower_bound)                             \
990      (VEC(T) *vec_, const T *obj_,                                        \
991       int (*lessthan_)(const T *, const T *) VEC_ASSERT_DECL)             \
992 {                                                                         \
993    unsigned int len_ = VEC_OP (T, length) (vec_);                         \
994    unsigned int half_, middle_;                                           \
995    unsigned int first_ = 0;                                               \
996    while (len_ > 0)                                                       \
997      {                                                                    \
998         T *middle_elem_;                                                  \
999         half_ = len_ >> 1;                                                \
1000         middle_ = first_;                                                 \
1001         middle_ += half_;                                                 \
1002         middle_elem_ = VEC_OP (T,index) (vec_, middle_ VEC_ASSERT_PASS);  \
1003         if (lessthan_ (middle_elem_, obj_))                               \
1004           {                                                               \
1005              first_ = middle_;                                            \
1006              ++first_;                                                    \
1007              len_ = len_ - half_ - 1;                                     \
1008           }                                                               \
1009         else                                                              \
1010           len_ = half_;                                                   \
1011      }                                                                    \
1012    return first_;                                                         \
1013 }
1014
1015 #define DEF_VEC_ALLOC_FUNC_O(T)                                           \
1016 static inline VEC(T) *VEC_OP (T,alloc)                                    \
1017      (int alloc_)                                                         \
1018 {                                                                         \
1019   VEC(T) dummy;                                                           \
1020                                                                           \
1021   /* We must request exact size allocation, hence the negation.  */       \
1022   return (VEC(T) *) vec_o_reserve (NULL, -alloc_,                         \
1023                                    vec_offset (T, &dummy), sizeof (T));   \
1024 }                                                                         \
1025                                                                           \
1026 static inline VEC(T) *VEC_OP (T,copy) (VEC(T) *vec_)                      \
1027 {                                                                         \
1028   size_t len_ = vec_ ? vec_->num : 0;                                     \
1029   VEC (T) *new_vec_ = NULL;                                               \
1030                                                                           \
1031   if (len_)                                                               \
1032     {                                                                     \
1033       VEC(T) dummy;                                                       \
1034                                                                           \
1035       /* We must request exact size allocation, hence the negation.  */   \
1036       new_vec_ = (VEC (T) *)                                              \
1037         vec_o_reserve  (NULL, -len_, vec_offset (T, &dummy), sizeof (T)); \
1038                                                                           \
1039       new_vec_->num = len_;                                               \
1040       memcpy (new_vec_->vec, vec_->vec, sizeof (T) * len_);               \
1041     }                                                                     \
1042   return new_vec_;                                                        \
1043 }                                                                         \
1044                                                                           \
1045 static inline VEC(T) *VEC_OP (T,merge) (VEC(T) *vec1_, VEC(T) *vec2_)     \
1046 {                                                                         \
1047   if (vec1_ && vec2_)                                                     \
1048     {                                                                     \
1049       VEC(T) dummy;                                                       \
1050       size_t len_ = vec1_->num + vec2_->num;                              \
1051       VEC (T) *new_vec_ = NULL;                                           \
1052                                                                           \
1053       /* We must request exact size allocation, hence the negation.  */   \
1054       new_vec_ = (VEC (T) *)                                              \
1055         vec_o_reserve (NULL, -len_, vec_offset (T, &dummy), sizeof (T));  \
1056                                                                           \
1057       new_vec_->num = len_;                                               \
1058       memcpy (new_vec_->vec, vec1_->vec, sizeof (T) * vec1_->num);        \
1059       memcpy (new_vec_->vec + vec1_->num, vec2_->vec,                     \
1060               sizeof (T) * vec2_->num);                                   \
1061                                                                           \
1062       return new_vec_;                                                    \
1063     }                                                                     \
1064   else                                                                    \
1065     return VEC_copy (T, vec1_ ? vec1_ : vec2_);                           \
1066 }                                                                         \
1067                                                                           \
1068 static inline void VEC_OP (T,free)                                        \
1069      (VEC(T) **vec_)                                                      \
1070 {                                                                         \
1071   if (*vec_)                                                              \
1072     vec_free_ (*vec_);                                                    \
1073   *vec_ = NULL;                                                           \
1074 }                                                                         \
1075                                                                           \
1076 static inline void VEC_OP (T,cleanup)                                     \
1077      (void *arg_)                                                         \
1078 {                                                                         \
1079   VEC(T) **vec_ = (VEC(T) **) arg_;                                       \
1080   if (*vec_)                                                              \
1081     vec_free_ (*vec_);                                                    \
1082   *vec_ = NULL;                                                           \
1083 }                                                                         \
1084                                                                           \
1085 static inline int VEC_OP (T,reserve)                                      \
1086      (VEC(T) **vec_, int alloc_ VEC_ASSERT_DECL)                          \
1087 {                                                                         \
1088   VEC(T) dummy;                                                           \
1089   int extend = !VEC_OP (T,space) (*vec_, alloc_ < 0 ? -alloc_ : alloc_    \
1090                                   VEC_ASSERT_PASS);                       \
1091                                                                           \
1092   if (extend)                                                             \
1093     *vec_ = (VEC(T) *)                                                    \
1094       vec_o_reserve (*vec_, alloc_, vec_offset (T, &dummy), sizeof (T));  \
1095                                                                           \
1096   return extend;                                                          \
1097 }                                                                         \
1098                                                                           \
1099 static inline void VEC_OP (T,safe_grow)                                   \
1100      (VEC(T) **vec_, int size_ VEC_ASSERT_DECL)                           \
1101 {                                                                         \
1102   vec_assert (size_ >= 0 && VEC_OP(T,length) (*vec_) <= (unsigned)size_,  \
1103         "safe_grow");                                                     \
1104   VEC_OP (T,reserve)                                                      \
1105         (vec_, (int)(*vec_ ? (*vec_)->num : 0) - size_ VEC_ASSERT_PASS);  \
1106   (*vec_)->num = size_;                                                   \
1107 }                                                                         \
1108                                                                           \
1109 static inline T *VEC_OP (T,safe_push)                                     \
1110      (VEC(T) **vec_, const T *obj_ VEC_ASSERT_DECL)                       \
1111 {                                                                         \
1112   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);                           \
1113                                                                           \
1114   return VEC_OP (T,quick_push) (*vec_, obj_ VEC_ASSERT_PASS);             \
1115 }                                                                         \
1116                                                                           \
1117 static inline T *VEC_OP (T,safe_insert)                                   \
1118      (VEC(T) **vec_, unsigned ix_, const T *obj_ VEC_ASSERT_DECL)         \
1119 {                                                                         \
1120   VEC_OP (T,reserve) (vec_, 1 VEC_ASSERT_PASS);                           \
1121                                                                           \
1122   return VEC_OP (T,quick_insert) (*vec_, ix_, obj_ VEC_ASSERT_PASS);      \
1123 }
1124
1125 #endif /* GDB_VEC_H */
This page took 0.102864 seconds and 4 git commands to generate.