summary | refs | log | tree | commit | diff
path: root/gcc
diff options
context:
space:
mode:
author: Diego Novillo <dnovillo@google.com> 2012-11-17 21:54:30 -0500
committer: Diego Novillo <dnovillo@gcc.gnu.org> 2012-11-17 21:54:30 -0500
commit: 9771b26396c39dfaecd5a76dd359fb65d3be4cb6 (patch)
tree: 1b9f930d315fa3e0a5ed7fa6e27ec5bd0a3436a4 /gcc
parent: 0f4119158064e271e48a14ce3f88a67e7baf14e0 (diff)
This patch rewrites the old VEC macro-based interface into a new one based on the template class 'vec'.
This patch rewrites the old VEC macro-based interface into a new one based on the template class 'vec'. The user-visible changes are described in http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec. I have tested the patch pretty extensively: - Regular bootstraps on x86_64, ppc, ia64, sparc and hppa. - Bootstraps with --enable-checking=release - Bootstraps with --enable-checking=gc,gcac - Basic builds on all targets (using contrib/config-list.mk). We no longer access the vectors via VEC_* macros. The pattern is "VEC_operation (T, A, V, args)" becomes "V.operation (args)". The only thing I could not do is create proper ctors and dtors for the vec class. Since these vectors are stored in unions, we have to keep them as PODs (C++03 does not allow non-PODs in unions). This means that creation and destruction must be explicit. There is a new method vec<type, allocation, layout>::create() and another vec<type, allocation, layout>::destroy() to allocate the internal vector. For vectors that must be pointers, there is a family of free functions that implement the operations that need to tolerate NULL vectors. These functions all start with the prefix 'vec_safe_'. See the wiki page for details. The gengtype change removes the special handling for VEC() that used to exist in gengtype. Additionally, it allows gengtype to recognize templates of more than one argument and introduces the concept of an undefined type (useful for template arguments that may or may not be types). When a TYPE_UNDEFINED is reached, gengtype will ignore it if it happens inside a type marked with GTY((user)). Otherwise, it will emit an error. Finally, gengtype rejects root types marked GTY((user)) that are not first class pointers. 2012-11-16 Diego Novillo <dnovillo@google.com> VEC API overhaul (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec) * vec.c (register_overhead): Convert it into member function of vec_prefix. (release_overhead): Likewise. (calculate_allocation): Likewise. (vec_heap_free): Remove. 
(vec_gc_o_reserve_1): Remove. (vec_heap_o_reserve_1): Remove. (vec_stack_o_reserve_1): Remove. (vec_stack_o_reserve_exact): Remove. (register_stack_vec): New. (stack_vec_register_index): New. (unregister_stack_vec): New. (vec_assert_fail): Remove. * vec.h: Conditionally include ggc.h. Document conditional hackery. Update top-level documentation. (ALONE_VEC_CHECK_INFO): Remove. (VEC_CHECK_INFO): Remove. (ALONE_VEC_CHECK_DECL): Remove. (VEC_CHECK_DECL): Remove. (ALONE_VEC_CHECK_PASS): Remove. (VEC_CHECK_PASS): Remove. (VEC_ASSERT): Remove. (vec_prefix): Add friends va_gc, va_gc_atomic, va_heap and va_stack. Mark fields alloc_ and num_ as protected. (struct vec_t): Remove. Remove all function members. (struct vl_embed): Declare. (struct vl_ptr): Declare. (free): Remove. (reserve_exact): Remove. (reserve): Remove. (safe_splice): Remove. (safe_push): Remove. (safe_grow): Remove. (safe_grow_cleared): Remove. (safe_insert): Remove. (DEF_VEC_I): Remove. (DEF_VEC_ALLOC_I): Remove. (DEF_VEC_P): Remove. (DEF_VEC_ALLOC_P): Remove. (DEF_VEC_O): Remove. (DEF_VEC_ALLOC_O): Remove. (DEF_VEC_ALLOC_P_STACK): Remove. (DEF_VEC_ALLOC_O_STACK): Remove. (DEF_VEC_ALLOC_I_STACK): Remove. (DEF_VEC_A): Remove. (DEF_VEC_ALLOC_A): Remove. (vec_stack_p_reserve_exact_1): Remove. (vec_stack_o_reserve): Remove. (vec_stack_o_reserve_exact): Remove. (VEC_length): Remove. (VEC_empty): Remove. (VEC_address): Remove. (vec_address): Remove. (VEC_last): Remove. (VEC_index): Remove. (VEC_iterate): Remove. (VEC_embedded_size): Remove. (VEC_embedded_init): Remove. (VEC_free): Remove. (VEC_copy): Remove. (VEC_space): Remove. (VEC_reserve): Remove. (VEC_reserve_exact): Remove. (VEC_splice): Remove. (VEC_safe_splice): Remove. (VEC_quick_push): Remove. (VEC_safe_push): Remove. (VEC_pop): Remove. (VEC_truncate): Remove. (VEC_safe_grow): Remove. (VEC_replace): Remove. (VEC_quick_insert): Remove. (VEC_safe_insert): Remove. (VEC_ordered_remove): Remove. (VEC_unordered_remove): Remove. (VEC_block_remove): Remove. 
(VEC_lower_bound): Remove. (VEC_alloc): Remove. (VEC_qsort): Remove. (va_heap): Declare. (va_heap::default_layout): New typedef to vl_ptr. (va_heap::reserve): New. (va_heap::release): New. (va_gc): Declare. (va_gc::default_layout): New typedef to vl_embed. (va_gc::reserve): New. (va_gc::release): New. (va_gc_atomic): Declare. Inherit from va_gc. (va_stack): Declare. (va_stack::default_layout): New typedef to vl_ptr. (va_stack::alloc): New. (va_stack::reserve): New. (va_stack::release): New. (register_stack_vec): Declare. (stack_vec_register_index): Declare. (unregister_stack_vec): Declare. (vec<T, A = va_heap, L = typename A::default_layout>): Declare empty vec template. (vec<T, A, vl_embed>): Partial specialization for embedded layout. (vec<T, A, vl_embed>::allocated): New. (vec<T, A, vl_embed>::length): New. (vec<T, A, vl_embed>::is_empty): New. (vec<T, A, vl_embed>::address): New. (vec<T, A, vl_embed>::operator[]): New. (vec<T, A, vl_embed>::last New. (vec<T, A, vl_embed>::space): New. (vec<T, A, vl_embed>::iterate): New. (vec<T, A, vl_embed>::iterate): New. (vec<T, A, vl_embed>::copy): New. (vec<T, A, vl_embed>::splice): New. (vec<T, A, vl_embed>::quick_push New. (vec<T, A, vl_embed>::pop New. (vec<T, A, vl_embed>::truncate): New. (vec<T, A, vl_embed>::quick_insert): New. (vec<T, A, vl_embed>::ordered_remove): New. (vec<T, A, vl_embed>::unordered_remove): New. (vec<T, A, vl_embed>::block_remove): New. (vec<T, A, vl_embed>::qsort): New. (vec<T, A, vl_embed>::lower_bound): New. (vec<T, A, vl_embed>::embedded_size): New. (vec<T, A, vl_embed>::embedded_init): New. (vec<T, A, vl_embed>::quick_grow): New. (vec<T, A, vl_embed>::quick_grow_cleared): New. (vec_safe_space): New. (vec_safe_length): New. (vec_safe_address): New. (vec_safe_is_empty): New. (vec_safe_reserve): New. (vec_safe_reserve_exact): New. (vec_alloc): New. (vec_free): New. (vec_safe_grow): New. (vec_safe_grow_cleared): New. (vec_safe_iterate): New. (vec_safe_push): New. (vec_safe_insert): New. 
(vec_safe_truncate): New. (vec_safe_copy): New. (vec_safe_splice): New. (vec<T, A, vl_ptr>): New partial specialization for the space efficient layout. (vec<T, A, vl_ptr>::exists): New. (vec<T, A, vl_ptr>::is_empty): New. (vec<T, A, vl_ptr>::length): New. (vec<T, A, vl_ptr>::address): New. (vec<T, A, vl_ptr>::operator[]): New. (vec<T, A, vl_ptr>::operator!=): New. (vec<T, A, vl_ptr>::operator==): New. (vec<T, A, vl_ptr>::last): New. (vec<T, A, vl_ptr>::space): New. (vec<T, A, vl_ptr>::iterate): New. (vec<T, A, vl_ptr>::copy): New. (vec<T, A, vl_ptr>::reserve): New. (vec<T, A, vl_ptr>::reserve_exact): New. (vec<T, A, vl_ptr>::splice): New. (vec<T, A, vl_ptr>::safe_splice): New. (vec<T, A, vl_ptr>::quick_push): New. (vec<T, A, vl_ptr>::safe_push): New. (vec<T, A, vl_ptr>::pop): New. (vec<T, A, vl_ptr>::truncate): New. (vec<T, A, vl_ptr>::safe_grow): New. (vec<T, A, vl_ptr>::safe_grow_cleared): New. (vec<T, A, vl_ptr>::quick_grow): New. (vec<T, A, vl_ptr>::quick_grow_cleared): New. (vec<T, A, vl_ptr>::quick_insert): New. (vec<T, A, vl_ptr>::safe_insert): New. (vec<T, A, vl_ptr>::ordered_remove): New. (vec<T, A, vl_ptr>::unordered_remove): New. (vec<T, A, vl_ptr>::block_remove): New. (vec<T, A, vl_ptr>::qsort): New. (vec<T, A, vl_ptr>::lower_bound): New. (vec_stack_alloc): Define. (FOR_EACH_VEC_SAFE_ELT): Define. * vecir.h: Remove. Update all users. * vecprim.h: Remove. Update all users. Move uchar to coretypes.h. * Makefile.in (VEC_H): Add $(GGC_H). Remove vecir.h and vecprim.h dependencies everywhere. 2012-11-16 Diego Novillo <dnovillo@google.com> * gengtype-lex.l (VEC): Remove. Add characters in the set [\!\>\.-]. * gengtype-parse.c (token_names): Remove "VEC". (require_template_declaration): Remove handling of VEC_TOKEN. (type): Likewise. Call create_user_defined_type when parsing GTY((user)). * gengtype-state.c (type_lineloc): handle TYPE_UNDEFINED. (write_state_undefined_type): New. (write_state_type): Call write_state_undefined_type for TYPE_UNDEFINED. 
(read_state_type): Call read_state_undefined_type for TYPE_UNDEFINED. * gengtype.c (dbgprint_count_type_at): Handle TYPE_UNDEFINED. (create_user_defined_type): Make extern. (type_for_name): Factor out of resolve_typedef. (create_undefined_type): New (resolve_typedef): Call it when we cannot find a previous typedef and the type is not a template. (find_structure): Accept TYPE_UNDEFINED. (set_gc_used_type): Add argument ALLOWED_UNDEFINED_TYPES, default to false. Emit an error for TYPE_UNDEFINED unless LEVEL is GC_UNUSED or ALLOWED_UNDEFINED_TYPES is set. Set ALLOWED_UNDEFINED_TYPES to true for TYPE_USER_STRUCT. (filter_type_name): Accept templates with more than one argument. (output_mangled_typename): Handle TYPE_UNDEFINED (walk_type): Likewise. (write_types_process_field): Likewise. (write_func_for_structure): If CHAIN_NEXT is set, ORIG_S should not be a user-defined type. (write_types_local_user_process_field): Handle TYPE_ARRAY, TYPE_NONE and TYPE_UNDEFINED. (write_types_local_process_field): Likewise. (contains_scalar_p): Return 0 for TYPE_USER_STRUCT. (write_root): Reject user-defined types that are not pointers. Handle TYPE_NONE, TYPE_UNDEFINED, TYPE_UNION, TYPE_LANG_STRUCT and TYPE_PARAM_STRUCT. (output_typename): Handle TYPE_NONE, TYPE_UNDEFINED, and TYPE_ARRAY. (dump_typekind): Handle TYPE_UNDEFINED. * gengtype.h (enum typekind): Add TYPE_UNDEFINED. (create_user_defined_type): Declare. (enum gty_token): Remove VEC_TOKEN. 2012-11-16 Diego Novillo <dnovillo@google.com> Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec) * coretypes.h (uchar): Define. * alias.c: Use new vec API in vec.h. * asan.c: Likewise. * attribs.c: Likewise. * basic-block.h: Likewise. * bb-reorder.c: Likewise. * builtins.c: Likewise. * calls.c: Likewise. * cfg.c: Likewise. * cfganal.c: Likewise. * cfgcleanup.c: Likewise. * cfgexpand.c: Likewise. * cfghooks.c: Likewise. * cfghooks.h: Likewise. * cfgloop.c: Likewise. * cfgloop.h: Likewise. * cfgloopanal.c: Likewise. 
* cfgloopmanip.c: Likewise. * cfgrtl.c: Likewise. * cgraph.c: Likewise. * cgraph.h: Likewise. * cgraphclones.c: Likewise. * cgraphunit.c: Likewise. * combine.c: Likewise. * compare-elim.c: Likewise. * coverage.c: Likewise. * cprop.c: Likewise. * data-streamer.h: Likewise. * dbxout.c: Likewise. * dce.c: Likewise. * df-core.c: Likewise. * df-problems.c: Likewise. * df-scan.c: Likewise. * dominance.c: Likewise. * domwalk.c: Likewise. * domwalk.h: Likewise. * dse.c: Likewise. * dwarf2cfi.c: Likewise. * dwarf2out.c: Likewise. * dwarf2out.h: Likewise. * emit-rtl.c: Likewise. * except.c: Likewise. * except.h: Likewise. * expr.c: Likewise. * expr.h: Likewise. * final.c: Likewise. * fold-const.c: Likewise. * function.c: Likewise. * function.h: Likewise. * fwprop.c: Likewise. * gcc.c: Likewise. * gcse.c: Likewise. * genattr.c: Likewise. * genattrtab.c: Likewise. * genautomata.c: Likewise. * genextract.c: Likewise. * genopinit.c: Likewise * ggc-common.c: Likewise. * ggc.h: Likewise. * gimple-low.c: Likewise. * gimple-ssa-strength-reduction.c: Likewise. * gimple-streamer-in.c: Likewise. * gimple.c: Likewise. * gimple.h: Likewise. * gimplify.c: Likewise. * graph.c: Likewise. * graphds.c: Likewise. * graphds.h: Likewise. * graphite-blocking.c: Likewise. * graphite-clast-to-gimple.c: Likewise. * graphite-dependences.c: Likewise. * graphite-interchange.c: Likewise. * graphite-optimize-isl.c: Likewise. * graphite-poly.c: Likewise. * graphite-poly.h: Likewise. * graphite-scop-detection.c: Likewise. * graphite-scop-detection.h: Likewise. * graphite-sese-to-poly.c: Likewise. * graphite.c: Likewise. * godump.c: Likewise. * haifa-sched.c: Likewise. * hw-doloop.c: Likewise. * hw-doloop.h: Likewise. * ifcvt.c: Likewise. * insn-addr.h: Likewise. * ipa-cp.c: Likewise. * ipa-inline-analysis.c: Likewise. * ipa-inline-transform.c: Likewise. * ipa-inline.c: Likewise. * ipa-inline.h: Likewise. * ipa-prop.c: Likewise. * ipa-prop.h: Likewise. * ipa-pure-const.c: Likewise. 
* ipa-ref-inline.h: Likewise. * ipa-ref.c: Likewise. * ipa-ref.h: Likewise. * ipa-reference.c: Likewise. * ipa-split.c: Likewise. * ipa-utils.c: Likewise. * ipa-utils.h: Likewise. * ipa.c: Likewise. * ira-build.c: Likewise. * ira-color.c: Likewise. * ira-emit.c: Likewise. * ira-int.h: Likewise. * ira.c: Likewise. * loop-invariant.c: Likewise. * loop-unroll.c: Likewise. * lower-subreg.c: Likewise. * lra-lives.c: Likewise. * lra.c: Likewise. * lto-cgraph.c: Likewise. * lto-section-out.c: Likewise. * lto-streamer-in.c: Likewise. * lto-streamer-out.c: Likewise. * lto-streamer.h: Likewise. * lto-symtab.c: Likewise. * mcf.c: Likewise. * modulo-sched.c: Likewise. * omp-low.c: Likewise. * opts-common.c: Likewise. * opts-global.c: Likewise. * opts.c: Likewise. * opts.h: Likewise. * passes.c: Likewise. * predict.c: Likewise. * print-tree.c: Likewise. * profile.c: Likewise. * profile.h: Likewise. * read-rtl.c: Likewise. * ree.c: Likewise. * reg-stack.c: Likewise. * regrename.c: Likewise. * regrename.h: Likewise. * reload.c: Likewise. * reload.h: Likewise. * reload1.c: Likewise. * rtl.h: Likewise. * sched-deps.c: Likewise. * sched-int.h: Likewise. * sdbout.c: Likewise. * sel-sched-dump.c: Likewise. * sel-sched-ir.c: Likewise. * sel-sched-ir.h: Likewise. * sel-sched.c: Likewise. * sese.c: Likewise. * sese.h: Likewise. * statistics.h: Likewise. * stmt.c: Likewise. * stor-layout.c: Likewise. * store-motion.c: Likewise. * tlink.c: Likewise. * toplev.c: Likewise. * trans-mem.c: Likewise. * tree-browser.c: Likewise. * tree-call-cdce.c: Likewise. * tree-cfg.c: Likewise. * tree-cfgcleanup.c: Likewise. * tree-chrec.c: Likewise. * tree-chrec.h: Likewise. * tree-complex.c: Likewise. * tree-data-ref.c: Likewise. * tree-data-ref.h: Likewise. * tree-dfa.c: Likewise. * tree-diagnostic.c: Likewise. * tree-dump.c: Likewise. * tree-eh.c: Likewise. * tree-emutls.c: Likewise. * tree-flow.h: Likewise. * tree-if-conv.c: Likewise. * tree-inline.c: Likewise. * tree-inline.h: Likewise. 
* tree-into-ssa.c: Likewise. * tree-iterator.c: Likewise. * tree-loop-distribution.c: Likewise. * tree-mudflap.c: Likewise. * tree-optimize.c: Likewise. * tree-outof-ssa.c: Likewise. * tree-parloops.c: Likewise. * tree-phinodes.c: Likewise. * tree-predcom.c: Likewise. * tree-pretty-print.c: Likewise. * tree-scalar-evolution.c: Likewise. * tree-sra.c: Likewise. * tree-ssa-address.c: Likewise. * tree-ssa-alias.c: Likewise. * tree-ssa-ccp.c: Likewise. * tree-ssa-coalesce.c: Likewise. * tree-ssa-dce.c: Likewise. * tree-ssa-dom.c: Likewise. * tree-ssa-forwprop.c: Likewise. * tree-ssa-live.c: Likewise. * tree-ssa-live.h: Likewise. * tree-ssa-loop-im.c: Likewise. * tree-ssa-loop-ivcanon.c: Likewise. * tree-ssa-loop-ivopts.c: Likewise. * tree-ssa-loop-manip.c: Likewise. * tree-ssa-loop-niter.c: Likewise. * tree-ssa-loop-prefetch.c: Likewise. * tree-ssa-math-opts.c: Likewise. * tree-ssa-operands.c: Likewise. * tree-ssa-phiopt.c: Likewise. * tree-ssa-phiprop.c: Likewise. * tree-ssa-pre.c: Likewise. * tree-ssa-propagate.c: Likewise. * tree-ssa-reassoc.c: Likewise. * tree-ssa-sccvn.c: Likewise. * tree-ssa-sccvn.h: Likewise. * tree-ssa-strlen.c: Likewise. * tree-ssa-structalias.c: Likewise. * tree-ssa-tail-merge.c: Likewise. * tree-ssa-threadedge.c: Likewise. * tree-ssa-threadupdate.c: Likewise. * tree-ssa-uncprop.c: Likewise. * tree-ssa-uninit.c: Likewise. * tree-ssa.c: Likewise. * tree-ssanames.c: Likewise. * tree-stdarg.c: Likewise. * tree-streamer-in.c: Likewise. * tree-streamer-out.c: Likewise. * tree-streamer.c: Likewise. * tree-streamer.h: Likewise. * tree-switch-conversion.c: Likewise. * tree-vect-data-refs.c: Likewise. * tree-vect-generic.c: Likewise. * tree-vect-loop-manip.c: Likewise. * tree-vect-loop.c: Likewise. * tree-vect-patterns.c: Likewise. * tree-vect-slp.c: Likewise. * tree-vect-stmts.c: Likewise. * tree-vectorizer.c: Likewise. * tree-vectorizer.h: Likewise. * tree-vrp.c: Likewise. * tree.c: Likewise. * tree.h: Likewise. * value-prof.c: Likewise. 
* value-prof.h: Likewise. * var-tracking.c: Likewise. * varasm.c: Likewise. * varpool.c: Likewise. * vmsdbgout.c: Likewise. * config/bfin/bfin.c: Likewise. * config/c6x/c6x.c: Likewise. * config/darwin.c: Likewise. * config/i386/i386.c: Likewise. * config/ia64/ia64.c: Likewise. * config/mep/mep.c: Likewise. * config/mips/mips.c: Likewise. * config/pa/pa.c: Likewise. * config/rs6000/rs6000-c.c: Likewise. * config/rs6000/rs6000.c: Likewise. * config/rx/rx.c: Likewise. * config/spu/spu-c.c: Likewise. * config/vms/vms.c: Likewise. * config/vxworks.c: Likewise. * config/epiphany/resolve-sw-modes.c: Likewise. From-SVN: r193595
Diffstat (limited to 'gcc')
-rw-r--r--gcc/ChangeLog498
-rw-r--r--gcc/Makefile.in63
-rw-r--r--gcc/ada/ChangeLog10
-rw-r--r--gcc/ada/gcc-interface/decl.c84
-rw-r--r--gcc/ada/gcc-interface/gigi.h2
-rw-r--r--gcc/ada/gcc-interface/trans.c190
-rw-r--r--gcc/ada/gcc-interface/utils.c108
-rw-r--r--gcc/ada/gcc-interface/utils2.c29
-rw-r--r--gcc/alias.c67
-rw-r--r--gcc/asan.c11
-rw-r--r--gcc/attribs.c23
-rw-r--r--gcc/basic-block.h52
-rw-r--r--gcc/bb-reorder.c18
-rw-r--r--gcc/builtins.c14
-rw-r--r--gcc/c-family/c-common.c245
-rw-r--r--gcc/c-family/c-common.h32
-rw-r--r--gcc/c-family/c-gimplify.c4
-rw-r--r--gcc/c-family/c-pragma.c75
-rw-r--r--gcc/c-family/c-pretty-print.c4
-rw-r--r--gcc/c-family/c-pretty-print.h2
-rw-r--r--gcc/c-family/c-semantics.c8
-rw-r--r--gcc/c/ChangeLog16
-rw-r--r--gcc/c/c-decl.c98
-rw-r--r--gcc/c/c-parser.c94
-rw-r--r--gcc/c/c-tree.h9
-rw-r--r--gcc/c/c-typeck.c89
-rw-r--r--gcc/calls.c22
-rw-r--r--gcc/cfg.c16
-rw-r--r--gcc/cfganal.c17
-rw-r--r--gcc/cfgcleanup.c8
-rw-r--r--gcc/cfgexpand.c60
-rw-r--r--gcc/cfghooks.c11
-rw-r--r--gcc/cfghooks.h4
-rw-r--r--gcc/cfgloop.c106
-rw-r--r--gcc/cfgloop.h51
-rw-r--r--gcc/cfgloopanal.c16
-rw-r--r--gcc/cfgloopmanip.c36
-rw-r--r--gcc/cfgrtl.c8
-rw-r--r--gcc/cgraph.c19
-rw-r--r--gcc/cgraph.h59
-rw-r--r--gcc/cgraphclones.c30
-rw-r--r--gcc/cgraphunit.c34
-rw-r--r--gcc/combine.c60
-rw-r--r--gcc/compare-elim.c15
-rw-r--r--gcc/config/bfin/bfin.c8
-rw-r--r--gcc/config/c6x/c6x.c43
-rw-r--r--gcc/config/darwin.c28
-rw-r--r--gcc/config/epiphany/resolve-sw-modes.c14
-rw-r--r--gcc/config/i386/i386.c20
-rw-r--r--gcc/config/ia64/ia64.c27
-rw-r--r--gcc/config/mep/mep.c86
-rw-r--r--gcc/config/mips/mips.c19
-rw-r--r--gcc/config/pa/pa.c47
-rw-r--r--gcc/config/rs6000/rs6000-c.c26
-rw-r--r--gcc/config/rs6000/rs6000.c16
-rw-r--r--gcc/config/rx/rx.c70
-rw-r--r--gcc/config/spu/spu-c.c6
-rw-r--r--gcc/config/vms/vms.c4
-rw-r--r--gcc/config/vxworks.c9
-rw-r--r--gcc/coretypes.h5
-rw-r--r--gcc/coverage.c26
-rw-r--r--gcc/cp/ChangeLog30
-rw-r--r--gcc/cp/Make-lang.in2
-rw-r--r--gcc/cp/call.c218
-rw-r--r--gcc/cp/class.c173
-rw-r--r--gcc/cp/cp-gimplify.c17
-rw-r--r--gcc/cp/cp-tree.h91
-rw-r--r--gcc/cp/cvt.c4
-rw-r--r--gcc/cp/decl.c126
-rw-r--r--gcc/cp/decl2.c74
-rw-r--r--gcc/cp/error.c26
-rw-r--r--gcc/cp/except.c10
-rw-r--r--gcc/cp/init.c91
-rw-r--r--gcc/cp/mangle.c28
-rw-r--r--gcc/cp/method.c16
-rw-r--r--gcc/cp/name-lookup.c186
-rw-r--r--gcc/cp/name-lookup.h18
-rw-r--r--gcc/cp/parser.c246
-rw-r--r--gcc/cp/parser.h29
-rw-r--r--gcc/cp/pt.c99
-rw-r--r--gcc/cp/repo.c6
-rw-r--r--gcc/cp/rtti.c79
-rw-r--r--gcc/cp/search.c43
-rw-r--r--gcc/cp/semantics.c226
-rw-r--r--gcc/cp/tree.c40
-rw-r--r--gcc/cp/typeck.c57
-rw-r--r--gcc/cp/typeck2.c44
-rw-r--r--gcc/cprop.c2
-rw-r--r--gcc/data-streamer.h2
-rw-r--r--gcc/dbxout.c21
-rw-r--r--gcc/dce.c10
-rw-r--r--gcc/df-core.c10
-rw-r--r--gcc/df-problems.c1
-rw-r--r--gcc/df-scan.c215
-rw-r--r--gcc/dominance.c78
-rw-r--r--gcc/domwalk.c22
-rw-r--r--gcc/domwalk.h6
-rw-r--r--gcc/dse.c39
-rw-r--r--gcc/dwarf2cfi.c144
-rw-r--r--gcc/dwarf2out.c377
-rw-r--r--gcc/dwarf2out.h5
-rw-r--r--gcc/emit-rtl.c1
-rw-r--r--gcc/except.c164
-rw-r--r--gcc/except.h17
-rw-r--r--gcc/expr.c10
-rw-r--r--gcc/expr.h2
-rw-r--r--gcc/final.c3
-rw-r--r--gcc/fold-const.c20
-rw-r--r--gcc/fortran/ChangeLog16
-rw-r--r--gcc/fortran/frontend-passes.c10
-rw-r--r--gcc/fortran/trans-array.c6
-rw-r--r--gcc/fortran/trans-common.c6
-rw-r--r--gcc/fortran/trans-decl.c28
-rw-r--r--gcc/fortran/trans-expr.c71
-rw-r--r--gcc/fortran/trans-intrinsic.c36
-rw-r--r--gcc/fortran/trans-openmp.c10
-rw-r--r--gcc/fortran/trans-stmt.c7
-rw-r--r--gcc/fortran/trans-types.c17
-rw-r--r--gcc/fortran/trans.h2
-rw-r--r--gcc/function.c161
-rw-r--r--gcc/function.h22
-rw-r--r--gcc/fwprop.c58
-rw-r--r--gcc/gcc.c133
-rw-r--r--gcc/gcse.c68
-rw-r--r--gcc/genattr.c12
-rw-r--r--gcc/genattrtab.c1
-rw-r--r--gcc/genautomata.c521
-rw-r--r--gcc/genextract.c94
-rw-r--r--gcc/gengtype-lex.l3
-rw-r--r--gcc/gengtype-parse.c26
-rw-r--r--gcc/gengtype-state.c61
-rw-r--r--gcc/gengtype.c147
-rw-r--r--gcc/gengtype.h5
-rw-r--r--gcc/genopinit.c14
-rw-r--r--gcc/ggc-common.c16
-rw-r--r--gcc/ggc.h2
-rw-r--r--gcc/gimple-low.c32
-rw-r--r--gcc/gimple-ssa-strength-reduction.c18
-rw-r--r--gcc/gimple-streamer-in.c2
-rw-r--r--gcc/gimple.c62
-rw-r--r--gcc/gimple.h27
-rw-r--r--gcc/gimplify.c153
-rw-r--r--gcc/go/ChangeLog6
-rw-r--r--gcc/go/go-lang.c13
-rw-r--r--gcc/godump.c10
-rw-r--r--gcc/graph.c2
-rw-r--r--gcc/graphds.c23
-rw-r--r--gcc/graphds.h2
-rw-r--r--gcc/graphite-blocking.c4
-rw-r--r--gcc/graphite-clast-to-gimple.c51
-rw-r--r--gcc/graphite-dependences.c45
-rw-r--r--gcc/graphite-interchange.c20
-rw-r--r--gcc/graphite-optimize-isl.c4
-rw-r--r--gcc/graphite-poly.c84
-rw-r--r--gcc/graphite-poly.h119
-rw-r--r--gcc/graphite-scop-detection.c132
-rw-r--r--gcc/graphite-scop-detection.h4
-rw-r--r--gcc/graphite-sese-to-poly.c291
-rw-r--r--gcc/graphite.c8
-rw-r--r--gcc/haifa-sched.c182
-rw-r--r--gcc/hw-doloop.c44
-rw-r--r--gcc/hw-doloop.h8
-rw-r--r--gcc/ifcvt.c17
-rw-r--r--gcc/insn-addr.h22
-rw-r--r--gcc/ipa-cp.c289
-rw-r--r--gcc/ipa-inline-analysis.c512
-rw-r--r--gcc/ipa-inline-transform.c12
-rw-r--r--gcc/ipa-inline.c20
-rw-r--r--gcc/ipa-inline.h67
-rw-r--r--gcc/ipa-prop.c254
-rw-r--r--gcc/ipa-prop.h93
-rw-r--r--gcc/ipa-pure-const.c30
-rw-r--r--gcc/ipa-ref-inline.h16
-rw-r--r--gcc/ipa-ref.c57
-rw-r--r--gcc/ipa-ref.h8
-rw-r--r--gcc/ipa-reference.c68
-rw-r--r--gcc/ipa-split.c84
-rw-r--r--gcc/ipa-utils.c38
-rw-r--r--gcc/ipa-utils.h2
-rw-r--r--gcc/ipa.c50
-rw-r--r--gcc/ira-build.c166
-rw-r--r--gcc/ira-color.c76
-rw-r--r--gcc/ira-emit.c49
-rw-r--r--gcc/ira-int.h6
-rw-r--r--gcc/ira.c16
-rw-r--r--gcc/java/ChangeLog14
-rw-r--r--gcc/java/boehm.c2
-rw-r--r--gcc/java/class.c127
-rw-r--r--gcc/java/constants.c18
-rw-r--r--gcc/java/decl.c2
-rw-r--r--gcc/java/expr.c90
-rw-r--r--gcc/java/java-tree.h36
-rw-r--r--gcc/java/jcf-parse.c37
-rw-r--r--gcc/java/resource.c8
-rw-r--r--gcc/java/verify-glue.c6
-rw-r--r--gcc/loop-invariant.c40
-rw-r--r--gcc/loop-unroll.c70
-rw-r--r--gcc/lower-subreg.c20
-rw-r--r--gcc/lra-lives.c12
-rw-r--r--gcc/lra.c49
-rw-r--r--gcc/lto-cgraph.c109
-rw-r--r--gcc/lto-section-out.c15
-rw-r--r--gcc/lto-streamer-in.c72
-rw-r--r--gcc/lto-streamer-out.c45
-rw-r--r--gcc/lto-streamer.h44
-rw-r--r--gcc/lto-symtab.c15
-rw-r--r--gcc/lto/ChangeLog9
-rw-r--r--gcc/lto/lto-lang.c8
-rw-r--r--gcc/lto/lto-partition.c19
-rw-r--r--gcc/lto/lto-partition.h4
-rw-r--r--gcc/lto/lto.c108
-rw-r--r--gcc/mcf.c18
-rw-r--r--gcc/modulo-sched.c37
-rw-r--r--gcc/objc/ChangeLog13
-rw-r--r--gcc/objc/objc-act.c30
-rw-r--r--gcc/objc/objc-act.h2
-rw-r--r--gcc/objc/objc-gnu-runtime-abi-01.c69
-rw-r--r--gcc/objc/objc-next-runtime-abi-01.c64
-rw-r--r--gcc/objc/objc-next-runtime-abi-02.c220
-rw-r--r--gcc/objc/objc-runtime-hooks.h2
-rw-r--r--gcc/objc/objc-runtime-shared-support.c14
-rw-r--r--gcc/objc/objc-runtime-shared-support.h2
-rw-r--r--gcc/omp-low.c65
-rw-r--r--gcc/opts-common.c12
-rw-r--r--gcc/opts-global.c20
-rw-r--r--gcc/opts.c12
-rw-r--r--gcc/opts.h2
-rw-r--r--gcc/passes.c65
-rw-r--r--gcc/predict.c10
-rw-r--r--gcc/print-tree.c10
-rw-r--r--gcc/profile.c18
-rw-r--r--gcc/profile.h2
-rw-r--r--gcc/read-rtl.c42
-rw-r--r--gcc/ree.c92
-rw-r--r--gcc/reg-stack.c20
-rw-r--r--gcc/regrename.c39
-rw-r--r--gcc/regrename.h6
-rw-r--r--gcc/reload.c4
-rw-r--r--gcc/reload.h18
-rw-r--r--gcc/reload1.c29
-rw-r--r--gcc/rtl.h5
-rw-r--r--gcc/sched-deps.c14
-rw-r--r--gcc/sched-int.h33
-rw-r--r--gcc/sdbout.c8
-rw-r--r--gcc/sel-sched-dump.c6
-rw-r--r--gcc/sel-sched-ir.c273
-rw-r--r--gcc/sel-sched-ir.h84
-rw-r--r--gcc/sel-sched.c132
-rw-r--r--gcc/sese.c26
-rw-r--r--gcc/sese.h16
-rw-r--r--gcc/statistics.h6
-rw-r--r--gcc/stmt.c12
-rw-r--r--gcc/stor-layout.c34
-rw-r--r--gcc/store-motion.c2
-rw-r--r--gcc/tlink.c10
-rw-r--r--gcc/toplev.c6
-rw-r--r--gcc/trans-mem.c265
-rw-r--r--gcc/tree-browser.c10
-rw-r--r--gcc/tree-call-cdce.c66
-rw-r--r--gcc/tree-cfg.c117
-rw-r--r--gcc/tree-cfgcleanup.c10
-rw-r--r--gcc/tree-chrec.c4
-rw-r--r--gcc/tree-chrec.h2
-rw-r--r--gcc/tree-complex.c44
-rw-r--r--gcc/tree-data-ref.c379
-rw-r--r--gcc/tree-data-ref.h113
-rw-r--r--gcc/tree-dfa.c23
-rw-r--r--gcc/tree-diagnostic.c10
-rw-r--r--gcc/tree-dump.c8
-rw-r--r--gcc/tree-eh.c74
-rw-r--r--gcc/tree-emutls.c49
-rw-r--r--gcc/tree-flow.h30
-rw-r--r--gcc/tree-if-conv.c44
-rw-r--r--gcc/tree-inline.c101
-rw-r--r--gcc/tree-inline.h4
-rw-r--r--gcc/tree-into-ssa.c140
-rw-r--r--gcc/tree-iterator.c8
-rw-r--r--gcc/tree-loop-distribution.c210
-rw-r--r--gcc/tree-mudflap.c8
-rw-r--r--gcc/tree-optimize.c5
-rw-r--r--gcc/tree-outof-ssa.c122
-rw-r--r--gcc/tree-parloops.c40
-rw-r--r--gcc/tree-phinodes.c13
-rw-r--r--gcc/tree-predcom.c268
-rw-r--r--gcc/tree-pretty-print.c8
-rw-r--r--gcc/tree-scalar-evolution.c20
-rw-r--r--gcc/tree-sra.c179
-rw-r--r--gcc/tree-ssa-address.c13
-rw-r--r--gcc/tree-ssa-alias.c1
-rw-r--r--gcc/tree-ssa-ccp.c3
-rw-r--r--gcc/tree-ssa-coalesce.c30
-rw-r--r--gcc/tree-ssa-dce.c28
-rw-r--r--gcc/tree-ssa-dom.c77
-rw-r--r--gcc/tree-ssa-forwprop.c17
-rw-r--r--gcc/tree-ssa-live.c8
-rw-r--r--gcc/tree-ssa-live.h3
-rw-r--r--gcc/tree-ssa-loop-im.c157
-rw-r--r--gcc/tree-ssa-loop-ivcanon.c57
-rw-r--r--gcc/tree-ssa-loop-ivopts.c70
-rw-r--r--gcc/tree-ssa-loop-manip.c30
-rw-r--r--gcc/tree-ssa-loop-niter.c91
-rw-r--r--gcc/tree-ssa-loop-prefetch.c32
-rw-r--r--gcc/tree-ssa-math-opts.c14
-rw-r--r--gcc/tree-ssa-operands.c34
-rw-r--r--gcc/tree-ssa-phiopt.c68
-rw-r--r--gcc/tree-ssa-phiprop.c6
-rw-r--r--gcc/tree-ssa-pre.c307
-rw-r--r--gcc/tree-ssa-propagate.c68
-rw-r--r--gcc/tree-ssa-reassoc.c351
-rw-r--r--gcc/tree-ssa-sccvn.c323
-rw-r--r--gcc/tree-ssa-sccvn.h18
-rw-r--r--gcc/tree-ssa-strlen.c81
-rw-r--r--gcc/tree-ssa-structalias.c593
-rw-r--r--gcc/tree-ssa-tail-merge.c87
-rw-r--r--gcc/tree-ssa-threadedge.c63
-rw-r--r--gcc/tree-ssa-threadupdate.c25
-rw-r--r--gcc/tree-ssa-uncprop.c30
-rw-r--r--gcc/tree-ssa-uninit.c278
-rw-r--r--gcc/tree-ssa.c44
-rw-r--r--gcc/tree-ssanames.c36
-rw-r--r--gcc/tree-stdarg.c12
-rw-r--r--gcc/tree-streamer-in.c16
-rw-r--r--gcc/tree-streamer-out.c6
-rw-r--r--gcc/tree-streamer.c14
-rw-r--r--gcc/tree-streamer.h4
-rw-r--r--gcc/tree-switch-conversion.c62
-rw-r--r--gcc/tree-vect-data-refs.c219
-rw-r--r--gcc/tree-vect-generic.c16
-rw-r--r--gcc/tree-vect-loop-manip.c40
-rw-r--r--gcc/tree-vect-loop.c284
-rw-r--r--gcc/tree-vect-patterns.c129
-rw-r--r--gcc/tree-vect-slp.c507
-rw-r--r--gcc/tree-vect-stmts.c527
-rw-r--r--gcc/tree-vectorizer.c2
-rw-r--r--gcc/tree-vectorizer.h104
-rw-r--r--gcc/tree-vrp.c32
-rw-r--r--gcc/tree.c131
-rw-r--r--gcc/tree.h86
-rw-r--r--gcc/value-prof.c60
-rw-r--r--gcc/value-prof.h4
-rw-r--r--gcc/var-tracking.c160
-rw-r--r--gcc/varasm.c58
-rw-r--r--gcc/varpool.c2
-rw-r--r--gcc/vec.c243
-rw-r--r--gcc/vec.h1957
-rw-r--r--gcc/vecir.h57
-rw-r--r--gcc/vecprim.h37
-rw-r--r--gcc/vmsdbgout.c27
348 files changed, 11862 insertions, 11784 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 935e3c0f908..d4d54d4cf8d 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,501 @@
+2012-11-17 Diego Novillo <dnovillo@google.com>
+
+ VEC API overhaul (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
+
+ * vec.c (register_overhead): Convert it into
+ member function of vec_prefix.
+ (release_overhead): Likewise.
+ (calculate_allocation): Likewise.
+ (vec_heap_free): Remove.
+ (vec_gc_o_reserve_1): Remove.
+ (vec_heap_o_reserve_1): Remove.
+ (vec_stack_o_reserve_1): Remove.
+ (vec_stack_o_reserve_exact): Remove.
+ (register_stack_vec): New.
+ (stack_vec_register_index): New.
+ (unregister_stack_vec): New.
+ (vec_assert_fail): Remove.
+ * vec.h: Conditionally include ggc.h. Document conditional
+ hackery.
+ Update top-level documentation.
+ (ALONE_VEC_CHECK_INFO): Remove.
+ (VEC_CHECK_INFO): Remove.
+ (ALONE_VEC_CHECK_DECL): Remove.
+ (VEC_CHECK_DECL): Remove.
+ (ALONE_VEC_CHECK_PASS): Remove.
+ (VEC_CHECK_PASS): Remove.
+ (VEC_ASSERT): Remove.
+ (vec_prefix): Add friends va_gc, va_gc_atomic, va_heap and
+ va_stack.
+ Mark fields alloc_ and num_ as protected.
+ (struct vec_t): Remove. Remove all function members.
+ (struct vl_embed): Declare.
+ (struct vl_ptr): Declare.
+ (free): Remove.
+ (reserve_exact): Remove.
+ (reserve): Remove.
+ (safe_splice): Remove.
+ (safe_push): Remove.
+ (safe_grow): Remove.
+ (safe_grow_cleared): Remove.
+ (safe_insert): Remove.
+ (DEF_VEC_I): Remove.
+ (DEF_VEC_ALLOC_I): Remove.
+ (DEF_VEC_P): Remove.
+ (DEF_VEC_ALLOC_P): Remove.
+ (DEF_VEC_O): Remove.
+ (DEF_VEC_ALLOC_O): Remove.
+ (DEF_VEC_ALLOC_P_STACK): Remove.
+ (DEF_VEC_ALLOC_O_STACK): Remove.
+ (DEF_VEC_ALLOC_I_STACK): Remove.
+ (DEF_VEC_A): Remove.
+ (DEF_VEC_ALLOC_A): Remove.
+ (vec_stack_p_reserve_exact_1): Remove.
+ (vec_stack_o_reserve): Remove.
+ (vec_stack_o_reserve_exact): Remove.
+ (VEC_length): Remove.
+ (VEC_empty): Remove.
+ (VEC_address): Remove.
+ (vec_address): Remove.
+ (VEC_last): Remove.
+ (VEC_index): Remove.
+ (VEC_iterate): Remove.
+ (VEC_embedded_size): Remove.
+ (VEC_embedded_init): Remove.
+ (VEC_free): Remove.
+ (VEC_copy): Remove.
+ (VEC_space): Remove.
+ (VEC_reserve): Remove.
+ (VEC_reserve_exact): Remove.
+ (VEC_splice): Remove.
+ (VEC_safe_splice): Remove.
+ (VEC_quick_push): Remove.
+ (VEC_safe_push): Remove.
+ (VEC_pop): Remove.
+ (VEC_truncate): Remove.
+ (VEC_safe_grow): Remove.
+ (VEC_replace): Remove.
+ (VEC_quick_insert): Remove.
+ (VEC_safe_insert): Remove.
+ (VEC_ordered_remove): Remove.
+ (VEC_unordered_remove): Remove.
+ (VEC_block_remove): Remove.
+ (VEC_lower_bound): Remove.
+ (VEC_alloc): Remove.
+ (VEC_qsort): Remove.
+
+ (va_heap): Declare.
+ (va_heap::default_layout): New typedef to vl_ptr.
+ (va_heap::reserve): New.
+ (va_heap::release): New.
+ (va_gc): Declare.
+ (va_gc::default_layout): New typedef to vl_embed.
+ (va_gc::reserve): New.
+ (va_gc::release): New.
+ (va_gc_atomic): Declare. Inherit from va_gc.
+ (va_stack): Declare.
+ (va_stack::default_layout): New typedef to vl_ptr.
+ (va_stack::alloc): New.
+ (va_stack::reserve): New.
+ (va_stack::release): New.
+ (register_stack_vec): Declare.
+ (stack_vec_register_index): Declare.
+ (unregister_stack_vec): Declare.
+
+ (vec<T, A = va_heap, L = typename A::default_layout>): Declare
+ empty vec template.
+ (vec<T, A, vl_embed>): Partial specialization for embedded
+ layout.
+ (vec<T, A, vl_embed>::allocated): New.
+ (vec<T, A, vl_embed>::length): New.
+ (vec<T, A, vl_embed>::is_empty): New.
+ (vec<T, A, vl_embed>::address): New.
+ (vec<T, A, vl_embed>::operator[]): New.
+ (vec<T, A, vl_embed>::last): New.
+ (vec<T, A, vl_embed>::space): New.
+ (vec<T, A, vl_embed>::iterate): New.
+ (vec<T, A, vl_embed>::iterate): New.
+ (vec<T, A, vl_embed>::copy): New.
+ (vec<T, A, vl_embed>::splice): New.
+ (vec<T, A, vl_embed>::quick_push): New.
+ (vec<T, A, vl_embed>::pop): New.
+ (vec<T, A, vl_embed>::truncate): New.
+ (vec<T, A, vl_embed>::quick_insert): New.
+ (vec<T, A, vl_embed>::ordered_remove): New.
+ (vec<T, A, vl_embed>::unordered_remove): New.
+ (vec<T, A, vl_embed>::block_remove): New.
+ (vec<T, A, vl_embed>::qsort): New.
+ (vec<T, A, vl_embed>::lower_bound): New.
+ (vec<T, A, vl_embed>::embedded_size): New.
+ (vec<T, A, vl_embed>::embedded_init): New.
+ (vec<T, A, vl_embed>::quick_grow): New.
+ (vec<T, A, vl_embed>::quick_grow_cleared): New.
+ (vec_safe_space): New.
+ (vec_safe_length): New.
+ (vec_safe_address): New.
+ (vec_safe_is_empty): New.
+ (vec_safe_reserve): New.
+ (vec_safe_reserve_exact): New.
+ (vec_alloc): New.
+ (vec_free): New.
+ (vec_safe_grow): New.
+ (vec_safe_grow_cleared): New.
+ (vec_safe_iterate): New.
+ (vec_safe_push): New.
+ (vec_safe_insert): New.
+ (vec_safe_truncate): New.
+ (vec_safe_copy): New.
+ (vec_safe_splice): New.
+
+ (vec<T, A, vl_ptr>): New partial specialization for the space
+ efficient layout.
+ (vec<T, A, vl_ptr>::exists): New.
+ (vec<T, A, vl_ptr>::is_empty): New.
+ (vec<T, A, vl_ptr>::length): New.
+ (vec<T, A, vl_ptr>::address): New.
+ (vec<T, A, vl_ptr>::operator[]): New.
+ (vec<T, A, vl_ptr>::operator!=): New.
+ (vec<T, A, vl_ptr>::operator==): New.
+ (vec<T, A, vl_ptr>::last): New.
+ (vec<T, A, vl_ptr>::space): New.
+ (vec<T, A, vl_ptr>::iterate): New.
+ (vec<T, A, vl_ptr>::copy): New.
+ (vec<T, A, vl_ptr>::reserve): New.
+ (vec<T, A, vl_ptr>::reserve_exact): New.
+ (vec<T, A, vl_ptr>::splice): New.
+ (vec<T, A, vl_ptr>::safe_splice): New.
+ (vec<T, A, vl_ptr>::quick_push): New.
+ (vec<T, A, vl_ptr>::safe_push): New.
+ (vec<T, A, vl_ptr>::pop): New.
+ (vec<T, A, vl_ptr>::truncate): New.
+ (vec<T, A, vl_ptr>::safe_grow): New.
+ (vec<T, A, vl_ptr>::safe_grow_cleared): New.
+ (vec<T, A, vl_ptr>::quick_grow): New.
+ (vec<T, A, vl_ptr>::quick_grow_cleared): New.
+ (vec<T, A, vl_ptr>::quick_insert): New.
+ (vec<T, A, vl_ptr>::safe_insert): New.
+ (vec<T, A, vl_ptr>::ordered_remove): New.
+ (vec<T, A, vl_ptr>::unordered_remove): New.
+ (vec<T, A, vl_ptr>::block_remove): New.
+ (vec<T, A, vl_ptr>::qsort): New.
+ (vec<T, A, vl_ptr>::lower_bound): New.
+ (vec_stack_alloc): Define.
+ (FOR_EACH_VEC_SAFE_ELT): Define.
+ * vecir.h: Remove. Update all users.
+ * vecprim.h: Remove. Update all users.
+ Move uchar to coretypes.h.
+
+ * Makefile.in (VEC_H): Add $(GGC_H).
+ Remove vecir.h and vecprim.h dependencies everywhere.
+
+2012-11-16 Diego Novillo <dnovillo@google.com>
+
+ * gengtype-lex.l (VEC): Remove.
+ Add characters in the set [\!\>\.-].
+ * gengtype-parse.c (token_names): Remove "VEC".
+ (require_template_declaration): Remove handling of VEC_TOKEN.
+ (type): Likewise.
+ Call create_user_defined_type when parsing GTY((user)).
+ * gengtype-state.c (type_lineloc): Handle TYPE_UNDEFINED.
+ (write_state_undefined_type): New.
+ (write_state_type): Call write_state_undefined_type for
+ TYPE_UNDEFINED.
+ (read_state_type): Call read_state_undefined_type for
+ TYPE_UNDEFINED.
+ * gengtype.c (dbgprint_count_type_at): Handle TYPE_UNDEFINED.
+ (create_user_defined_type): Make extern.
+ (type_for_name): Factor out of resolve_typedef.
+ (create_undefined_type): New.
+ (resolve_typedef): Call it when we cannot find a previous
+ typedef and the type is not a template.
+ (find_structure): Accept TYPE_UNDEFINED.
+ (set_gc_used_type): Add argument ALLOWED_UNDEFINED_TYPES,
+ default to false.
+ Emit an error for TYPE_UNDEFINED unless LEVEL is GC_UNUSED or
+ ALLOWED_UNDEFINED_TYPES is set.
+ Set ALLOWED_UNDEFINED_TYPES to true for TYPE_USER_STRUCT.
+ (filter_type_name): Accept templates with more than one
+ argument.
+ (output_mangled_typename): Handle TYPE_UNDEFINED.
+ (walk_type): Likewise.
+ (write_types_process_field): Likewise.
+ (write_func_for_structure): If CHAIN_NEXT is set, ORIG_S
+ should not be a user-defined type.
+ (write_types_local_user_process_field): Handle TYPE_ARRAY,
+ TYPE_NONE and TYPE_UNDEFINED.
+ (write_types_local_process_field): Likewise.
+ (contains_scalar_p): Return 0 for TYPE_USER_STRUCT.
+ (write_root): Reject user-defined types that are not pointers.
+ Handle TYPE_NONE, TYPE_UNDEFINED, TYPE_UNION, TYPE_LANG_STRUCT
+ and TYPE_PARAM_STRUCT.
+ (output_typename): Handle TYPE_NONE, TYPE_UNDEFINED, and
+ TYPE_ARRAY.
+ (dump_typekind): Handle TYPE_UNDEFINED.
+ * gengtype.h (enum typekind): Add TYPE_UNDEFINED.
+ (create_user_defined_type): Declare.
+ (enum gty_token): Remove VEC_TOKEN.
+
+2012-11-16 Diego Novillo <dnovillo@google.com>
+
+ Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
+
+ * coretypes.h (uchar): Define.
+ * alias.c: Use new vec API in vec.h.
+ * asan.c: Likewise.
+ * attribs.c: Likewise.
+ * basic-block.h: Likewise.
+ * bb-reorder.c: Likewise.
+ * builtins.c: Likewise.
+ * calls.c: Likewise.
+ * cfg.c: Likewise.
+ * cfganal.c: Likewise.
+ * cfgcleanup.c: Likewise.
+ * cfgexpand.c: Likewise.
+ * cfghooks.c: Likewise.
+ * cfghooks.h: Likewise.
+ * cfgloop.c: Likewise.
+ * cfgloop.h: Likewise.
+ * cfgloopanal.c: Likewise.
+ * cfgloopmanip.c: Likewise.
+ * cfgrtl.c: Likewise.
+ * cgraph.c: Likewise.
+ * cgraph.h: Likewise.
+ * cgraphclones.c: Likewise.
+ * cgraphunit.c: Likewise.
+ * combine.c: Likewise.
+ * compare-elim.c: Likewise.
+ * coverage.c: Likewise.
+ * cprop.c: Likewise.
+ * data-streamer.h: Likewise.
+ * dbxout.c: Likewise.
+ * dce.c: Likewise.
+ * df-core.c: Likewise.
+ * df-problems.c: Likewise.
+ * df-scan.c: Likewise.
+ * dominance.c: Likewise.
+ * domwalk.c: Likewise.
+ * domwalk.h: Likewise.
+ * dse.c: Likewise.
+ * dwarf2cfi.c: Likewise.
+ * dwarf2out.c: Likewise.
+ * dwarf2out.h: Likewise.
+ * emit-rtl.c: Likewise.
+ * except.c: Likewise.
+ * except.h: Likewise.
+ * expr.c: Likewise.
+ * expr.h: Likewise.
+ * final.c: Likewise.
+ * fold-const.c: Likewise.
+ * function.c: Likewise.
+ * function.h: Likewise.
+ * fwprop.c: Likewise.
+ * gcc.c: Likewise.
+ * gcse.c: Likewise.
+ * genattr.c: Likewise.
+ * genattrtab.c: Likewise.
+ * genautomata.c: Likewise.
+ * genextract.c: Likewise.
+ * genopinit.c: Likewise.
+ * ggc-common.c: Likewise.
+ * ggc.h: Likewise.
+ * gimple-low.c: Likewise.
+ * gimple-ssa-strength-reduction.c: Likewise.
+ * gimple-streamer-in.c: Likewise.
+ * gimple.c: Likewise.
+ * gimple.h: Likewise.
+ * gimplify.c: Likewise.
+ * graph.c: Likewise.
+ * graphds.c: Likewise.
+ * graphds.h: Likewise.
+ * graphite-blocking.c: Likewise.
+ * graphite-clast-to-gimple.c: Likewise.
+ * graphite-dependences.c: Likewise.
+ * graphite-interchange.c: Likewise.
+ * graphite-optimize-isl.c: Likewise.
+ * graphite-poly.c: Likewise.
+ * graphite-poly.h: Likewise.
+ * graphite-scop-detection.c: Likewise.
+ * graphite-scop-detection.h: Likewise.
+ * graphite-sese-to-poly.c: Likewise.
+ * graphite.c: Likewise.
+ * godump.c: Likewise.
+ * haifa-sched.c: Likewise.
+ * hw-doloop.c: Likewise.
+ * hw-doloop.h: Likewise.
+ * ifcvt.c: Likewise.
+ * insn-addr.h: Likewise.
+ * ipa-cp.c: Likewise.
+ * ipa-inline-analysis.c: Likewise.
+ * ipa-inline-transform.c: Likewise.
+ * ipa-inline.c: Likewise.
+ * ipa-inline.h: Likewise.
+ * ipa-prop.c: Likewise.
+ * ipa-prop.h: Likewise.
+ * ipa-pure-const.c: Likewise.
+ * ipa-ref-inline.h: Likewise.
+ * ipa-ref.c: Likewise.
+ * ipa-ref.h: Likewise.
+ * ipa-reference.c: Likewise.
+ * ipa-split.c: Likewise.
+ * ipa-utils.c: Likewise.
+ * ipa-utils.h: Likewise.
+ * ipa.c: Likewise.
+ * ira-build.c: Likewise.
+ * ira-color.c: Likewise.
+ * ira-emit.c: Likewise.
+ * ira-int.h: Likewise.
+ * ira.c: Likewise.
+ * loop-invariant.c: Likewise.
+ * loop-unroll.c: Likewise.
+ * lower-subreg.c: Likewise.
+ * lra-lives.c: Likewise.
+ * lra.c: Likewise.
+ * lto-cgraph.c: Likewise.
+ * lto-section-out.c: Likewise.
+ * lto-streamer-in.c: Likewise.
+ * lto-streamer-out.c: Likewise.
+ * lto-streamer.h: Likewise.
+ * lto-symtab.c: Likewise.
+ * mcf.c: Likewise.
+ * modulo-sched.c: Likewise.
+ * omp-low.c: Likewise.
+ * opts-common.c: Likewise.
+ * opts-global.c: Likewise.
+ * opts.c: Likewise.
+ * opts.h: Likewise.
+ * passes.c: Likewise.
+ * predict.c: Likewise.
+ * print-tree.c: Likewise.
+ * profile.c: Likewise.
+ * profile.h: Likewise.
+ * read-rtl.c: Likewise.
+ * ree.c: Likewise.
+ * reg-stack.c: Likewise.
+ * regrename.c: Likewise.
+ * regrename.h: Likewise.
+ * reload.c: Likewise.
+ * reload.h: Likewise.
+ * reload1.c: Likewise.
+ * rtl.h: Likewise.
+ * sched-deps.c: Likewise.
+ * sched-int.h: Likewise.
+ * sdbout.c: Likewise.
+ * sel-sched-dump.c: Likewise.
+ * sel-sched-ir.c: Likewise.
+ * sel-sched-ir.h: Likewise.
+ * sel-sched.c: Likewise.
+ * sese.c: Likewise.
+ * sese.h: Likewise.
+ * statistics.h: Likewise.
+ * stmt.c: Likewise.
+ * stor-layout.c: Likewise.
+ * store-motion.c: Likewise.
+ * tlink.c: Likewise.
+ * toplev.c: Likewise.
+ * trans-mem.c: Likewise.
+ * tree-browser.c: Likewise.
+ * tree-call-cdce.c: Likewise.
+ * tree-cfg.c: Likewise.
+ * tree-cfgcleanup.c: Likewise.
+ * tree-chrec.c: Likewise.
+ * tree-chrec.h: Likewise.
+ * tree-complex.c: Likewise.
+ * tree-data-ref.c: Likewise.
+ * tree-data-ref.h: Likewise.
+ * tree-dfa.c: Likewise.
+ * tree-diagnostic.c: Likewise.
+ * tree-dump.c: Likewise.
+ * tree-eh.c: Likewise.
+ * tree-emutls.c: Likewise.
+ * tree-flow.h: Likewise.
+ * tree-if-conv.c: Likewise.
+ * tree-inline.c: Likewise.
+ * tree-inline.h: Likewise.
+ * tree-into-ssa.c: Likewise.
+ * tree-iterator.c: Likewise.
+ * tree-loop-distribution.c: Likewise.
+ * tree-mudflap.c: Likewise.
+ * tree-optimize.c: Likewise.
+ * tree-outof-ssa.c: Likewise.
+ * tree-parloops.c: Likewise.
+ * tree-phinodes.c: Likewise.
+ * tree-predcom.c: Likewise.
+ * tree-pretty-print.c: Likewise.
+ * tree-scalar-evolution.c: Likewise.
+ * tree-sra.c: Likewise.
+ * tree-ssa-address.c: Likewise.
+ * tree-ssa-alias.c: Likewise.
+ * tree-ssa-ccp.c: Likewise.
+ * tree-ssa-coalesce.c: Likewise.
+ * tree-ssa-dce.c: Likewise.
+ * tree-ssa-dom.c: Likewise.
+ * tree-ssa-forwprop.c: Likewise.
+ * tree-ssa-live.c: Likewise.
+ * tree-ssa-live.h: Likewise.
+ * tree-ssa-loop-im.c: Likewise.
+ * tree-ssa-loop-ivcanon.c: Likewise.
+ * tree-ssa-loop-ivopts.c: Likewise.
+ * tree-ssa-loop-manip.c: Likewise.
+ * tree-ssa-loop-niter.c: Likewise.
+ * tree-ssa-loop-prefetch.c: Likewise.
+ * tree-ssa-math-opts.c: Likewise.
+ * tree-ssa-operands.c: Likewise.
+ * tree-ssa-phiopt.c: Likewise.
+ * tree-ssa-phiprop.c: Likewise.
+ * tree-ssa-pre.c: Likewise.
+ * tree-ssa-propagate.c: Likewise.
+ * tree-ssa-reassoc.c: Likewise.
+ * tree-ssa-sccvn.c: Likewise.
+ * tree-ssa-sccvn.h: Likewise.
+ * tree-ssa-strlen.c: Likewise.
+ * tree-ssa-structalias.c: Likewise.
+ * tree-ssa-tail-merge.c: Likewise.
+ * tree-ssa-threadedge.c: Likewise.
+ * tree-ssa-threadupdate.c: Likewise.
+ * tree-ssa-uncprop.c: Likewise.
+ * tree-ssa-uninit.c: Likewise.
+ * tree-ssa.c: Likewise.
+ * tree-ssanames.c: Likewise.
+ * tree-stdarg.c: Likewise.
+ * tree-streamer-in.c: Likewise.
+ * tree-streamer-out.c: Likewise.
+ * tree-streamer.c: Likewise.
+ * tree-streamer.h: Likewise.
+ * tree-switch-conversion.c: Likewise.
+ * tree-vect-data-refs.c: Likewise.
+ * tree-vect-generic.c: Likewise.
+ * tree-vect-loop-manip.c: Likewise.
+ * tree-vect-loop.c: Likewise.
+ * tree-vect-patterns.c: Likewise.
+ * tree-vect-slp.c: Likewise.
+ * tree-vect-stmts.c: Likewise.
+ * tree-vectorizer.c: Likewise.
+ * tree-vectorizer.h: Likewise.
+ * tree-vrp.c: Likewise.
+ * tree.c: Likewise.
+ * tree.h: Likewise.
+ * value-prof.c: Likewise.
+ * value-prof.h: Likewise.
+ * var-tracking.c: Likewise.
+ * varasm.c: Likewise.
+ * varpool.c: Likewise.
+ * vmsdbgout.c: Likewise.
+ * config/bfin/bfin.c: Likewise.
+ * config/c6x/c6x.c: Likewise.
+ * config/darwin.c: Likewise.
+ * config/i386/i386.c: Likewise.
+ * config/ia64/ia64.c: Likewise.
+ * config/mep/mep.c: Likewise.
+ * config/mips/mips.c: Likewise.
+ * config/pa/pa.c: Likewise.
+ * config/rs6000/rs6000-c.c: Likewise.
+ * config/rs6000/rs6000.c: Likewise.
+ * config/rx/rx.c: Likewise.
+ * config/spu/spu-c.c: Likewise.
+ * config/vms/vms.c: Likewise.
+ * config/vxworks.c: Likewise.
+ * config/epiphany/resolve-sw-modes.c: Likewise.
+
2012-11-17 Jakub Jelinek <jakub@redhat.com>
PR tree-optimization/55236
diff --git a/gcc/Makefile.in b/gcc/Makefile.in
index ec13e2d5d1f..0a46425a409 100644
--- a/gcc/Makefile.in
+++ b/gcc/Makefile.in
@@ -835,9 +835,9 @@ REVISION_s := "\"\""
endif
# Shorthand variables for dependency lists.
-VEC_H = vec.h statistics.h
+VEC_H = vec.h statistics.h $(GGC_H)
HASH_TABLE_H = $(HASHTAB_H) hash-table.h
-EXCEPT_H = except.h $(HASHTAB_H) vecprim.h vecir.h
+EXCEPT_H = except.h $(HASHTAB_H)
TARGET_DEF = target.def target-hooks-macros.h
C_TARGET_DEF = c-family/c-target.def target-hooks-macros.h
COMMON_TARGET_DEF = common/common-target.def target-hooks-macros.h
@@ -857,7 +857,7 @@ RTL_BASE_H = coretypes.h rtl.h rtl.def $(MACHMODE_H) reg-notes.def \
insn-notes.def $(INPUT_H) $(REAL_H) statistics.h $(VEC_H) \
$(FIXED_VALUE_H) alias.h $(HASHTAB_H)
FIXED_VALUE_H = fixed-value.h $(MACHMODE_H) double-int.h
-RTL_H = $(RTL_BASE_H) $(FLAGS_H) genrtl.h vecir.h
+RTL_H = $(RTL_BASE_H) $(FLAGS_H) genrtl.h
RTL_ERROR_H = rtl-error.h $(RTL_H) $(DIAGNOSTIC_CORE_H)
READ_MD_H = $(OBSTACK_H) $(HASHTAB_H) read-md.h
PARAMS_H = params.h params.def
@@ -868,13 +868,13 @@ INTERNAL_FN_H = internal-fn.h $(INTERNAL_FN_DEF)
TREE_H = coretypes.h tree.h all-tree.def tree.def c-family/c-common.def \
$(lang_tree_files) $(MACHMODE_H) tree-check.h $(BUILTINS_DEF) \
$(INPUT_H) statistics.h $(VEC_H) treestruct.def $(HASHTAB_H) \
- double-int.h alias.h $(SYMTAB_H) $(FLAGS_H) vecir.h \
+ double-int.h alias.h $(SYMTAB_H) $(FLAGS_H) \
$(REAL_H) $(FIXED_VALUE_H)
REGSET_H = regset.h $(BITMAP_H) hard-reg-set.h
BASIC_BLOCK_H = basic-block.h $(PREDICT_H) $(VEC_H) $(FUNCTION_H) \
cfg-flags.def cfghooks.h
GIMPLE_H = gimple.h gimple.def gsstruct.def pointer-set.h $(VEC_H) \
- vecir.h $(GGC_H) $(BASIC_BLOCK_H) $(TREE_H) tree-ssa-operands.h \
+ $(GGC_H) $(BASIC_BLOCK_H) $(TREE_H) tree-ssa-operands.h \
tree-ssa-alias.h $(INTERNAL_FN_H)
TRANS_MEM_H = trans-mem.h
GCOV_IO_H = gcov-io.h gcov-iov.h auto-host.h
@@ -885,17 +885,17 @@ ALIAS_H = alias.h
EMIT_RTL_H = emit-rtl.h
FLAGS_H = flags.h flag-types.h $(OPTIONS_H)
OPTIONS_H = options.h flag-types.h $(OPTIONS_H_EXTRA)
-FUNCTION_H = function.h $(HASHTAB_H) vecprim.h $(TM_H) hard-reg-set.h \
- $(VEC_H) vecir.h $(INPUT_H) $(MACHMODE_H)
+FUNCTION_H = function.h $(HASHTAB_H) $(TM_H) hard-reg-set.h \
+ $(VEC_H) $(INPUT_H) $(MACHMODE_H)
EXPR_H = expr.h insn-config.h $(FUNCTION_H) $(RTL_H) $(FLAGS_H) $(TREE_H) $(MACHMODE_H) $(EMIT_RTL_H)
OPTABS_H = optabs.h insn-codes.h insn-opinit.h
REGS_H = regs.h $(MACHMODE_H) hard-reg-set.h
SCHED_INT_H = sched-int.h $(INSN_ATTR_H) $(BASIC_BLOCK_H) $(RTL_H) $(DF_H) \
- vecprim.h $(REGSET_H)
+ $(REGSET_H)
SEL_SCHED_IR_H = sel-sched-ir.h $(INSN_ATTR_H) $(BASIC_BLOCK_H) $(RTL_H) \
- $(GGC_H) $(BITMAP_H) vecprim.h $(SCHED_INT_H) $(CFGLOOP_H) $(REGSET_H)
+ $(GGC_H) $(BITMAP_H) $(SCHED_INT_H) $(CFGLOOP_H) $(REGSET_H)
SEL_SCHED_DUMP_H = sel-sched-dump.h $(SEL_SCHED_IR_H)
-CFGLOOP_H = cfgloop.h $(BASIC_BLOCK_H) vecprim.h double-int.h \
+CFGLOOP_H = cfgloop.h $(BASIC_BLOCK_H) double-int.h \
$(BITMAP_H) sbitmap.h
IPA_UTILS_H = ipa-utils.h $(TREE_H) $(CGRAPH_H)
IPA_REFERENCE_H = ipa-reference.h $(BITMAP_H) $(TREE_H)
@@ -912,7 +912,7 @@ GGC_H = ggc.h gtype-desc.h statistics.h
GGC_INTERNAL_H = ggc-internal.h $(GGC_H)
TIMEVAR_H = timevar.h timevar.def
INSN_ATTR_H = insn-attr.h insn-attr-common.h $(INSN_ADDR_H)
-INSN_ADDR_H = $(srcdir)/insn-addr.h vecprim.h
+INSN_ADDR_H = $(srcdir)/insn-addr.h
C_COMMON_H = c-family/c-common.h c-family/c-common.def $(TREE_H) \
$(SPLAY_TREE_H) $(CPPLIB_H) $(GGC_H) $(DIAGNOSTIC_CORE_H)
C_PRAGMA_H = c-family/c-pragma.h $(CPPLIB_H)
@@ -938,7 +938,7 @@ TREE_FLOW_H = tree-flow.h tree-flow-inline.h tree-ssa-operands.h \
$(BITMAP_H) sbitmap.h $(BASIC_BLOCK_H) $(GIMPLE_H) \
$(HASHTAB_H) $(CGRAPH_H) $(IPA_REFERENCE_H) \
tree-ssa-alias.h
-TREE_SSA_LIVE_H = tree-ssa-live.h $(PARTITION_H) vecprim.h
+TREE_SSA_LIVE_H = tree-ssa-live.h $(PARTITION_H)
SSAEXPAND_H = ssaexpand.h $(TREE_SSA_LIVE_H)
PRETTY_PRINT_H = pretty-print.h $(INPUT_H) $(OBSTACK_H)
TREE_PRETTY_PRINT_H = tree-pretty-print.h $(PRETTY_PRINT_H)
@@ -951,13 +951,13 @@ C_PRETTY_PRINT_H = c-family/c-pretty-print.h $(PRETTY_PRINT_H) \
SCEV_H = tree-scalar-evolution.h $(GGC_H) tree-chrec.h $(PARAMS_H)
OMEGA_H = omega.h $(PARAMS_H)
TREE_DATA_REF_H = tree-data-ref.h $(OMEGA_H) graphds.h $(SCEV_H)
-TREE_INLINE_H = tree-inline.h vecir.h
+TREE_INLINE_H = tree-inline.h
REAL_H = real.h $(MACHMODE_H)
IRA_INT_H = ira.h ira-int.h $(CFGLOOP_H) alloc-pool.h
LRA_INT_H = lra.h $(BITMAP_H) $(RECOG_H) $(INSN_ATTR_H) insn-codes.h lra-int.h
DBGCNT_H = dbgcnt.h dbgcnt.def
LTO_STREAMER_H = lto-streamer.h $(LINKER_PLUGIN_API_H) $(TARGET_H) \
- $(CGRAPH_H) $(VEC_H) vecprim.h $(TREE_H) $(GIMPLE_H) \
+ $(CGRAPH_H) $(VEC_H) $(TREE_H) $(GIMPLE_H) \
$(GCOV_IO_H) $(DIAGNOSTIC_H) alloc-pool.h
DATA_STREAMER_H = data-streamer.h $(VEC_H) $(LTO_STREAMER_H)
GIMPLE_STREAMER_H = gimple-streamer.h $(LTO_STREAMER_H) $(BASIC_BLOCK_H) \
@@ -2258,8 +2258,7 @@ tree-into-ssa.o : tree-into-ssa.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
$(FUNCTION_H) $(TM_H) coretypes.h \
langhooks.h domwalk.h $(TREE_PASS_H) $(PARAMS_H) $(BASIC_BLOCK_H) \
$(BITMAP_H) $(CFGLOOP_H) $(FLAGS_H) $(HASHTAB_H) \
- $(GIMPLE_H) $(TREE_INLINE_H) vecprim.h \
- $(GIMPLE_PRETTY_PRINT_H)
+ $(GIMPLE_H) $(TREE_INLINE_H) $(GIMPLE_PRETTY_PRINT_H)
tree-ssa-ter.o : tree-ssa-ter.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
$(TREE_H) $(DIAGNOSTIC_H) $(TM_H) coretypes.h dumpfile.h \
$(TREE_SSA_LIVE_H) $(BITMAP_H) $(FLAGS_H) \
@@ -2488,7 +2487,7 @@ tree-ssa-alias.o : tree-ssa-alias.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
langhooks.h \
$(PARAMS_H) $(BASIC_BLOCK_H) $(DIAGNOSTIC_H) \
$(GIMPLE_H) $(VEC_H) $(TARGET_H) \
- vecprim.h pointer-set.h alloc-pool.h \
+ pointer-set.h alloc-pool.h \
$(TREE_PRETTY_PRINT_H)
tree-ssa-reassoc.o : tree-ssa-reassoc.c $(TREE_FLOW_H) $(CONFIG_H) \
$(SYSTEM_H) $(TREE_H) $(DIAGNOSTIC_H) \
@@ -2740,7 +2739,7 @@ function.o : function.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_ERROR_
$(OPTABS_H) $(LIBFUNCS_H) $(REGS_H) hard-reg-set.h insn-config.h $(RECOG_H) \
output.h $(EXCEPT_H) $(HASHTAB_H) $(GGC_H) $(TM_P_H) langhooks.h \
gt-function.h $(TARGET_H) $(BASIC_BLOCK_H) $(PREDICT_H) \
- $(TREE_PASS_H) $(DF_H) vecprim.h $(PARAMS_H) bb-reorder.h \
+ $(TREE_PASS_H) $(DF_H) $(PARAMS_H) bb-reorder.h \
$(COMMON_TARGET_H)
statistics.o : statistics.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
$(TREE_PASS_H) $(TREE_DUMP_H) $(HASHTAB_H) statistics.h $(FUNCTION_H)
@@ -2763,12 +2762,12 @@ expr.o : expr.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
typeclass.h hard-reg-set.h toplev.h $(DIAGNOSTIC_CORE_H) hard-reg-set.h $(EXCEPT_H) \
reload.h langhooks.h intl.h $(TM_P_H) $(TARGET_H) \
tree-iterator.h gt-expr.h $(MACHMODE_H) $(TIMEVAR_H) $(TREE_FLOW_H) \
- $(TREE_PASS_H) $(DF_H) $(DIAGNOSTIC_H) vecprim.h $(SSAEXPAND_H) \
+ $(TREE_PASS_H) $(DF_H) $(DIAGNOSTIC_H) $(SSAEXPAND_H) \
$(PARAMS_H) $(COMMON_TARGET_H) target-globals.h
dojump.o : dojump.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(TM_P_H) \
$(RTL_H) $(TREE_H) \
$(FLAGS_H) $(FUNCTION_H) $(EXPR_H) $(OPTABS_H) $(INSN_ATTR_H) insn-config.h \
- langhooks.h $(GGC_H) gt-dojump.h vecprim.h $(BASIC_BLOCK_H)
+ langhooks.h $(GGC_H) gt-dojump.h $(BASIC_BLOCK_H)
builtins.o : builtins.c builtins.h $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_H) $(TREE_H) $(GIMPLE_H) $(FLAGS_H) $(TARGET_H) $(FUNCTION_H) $(REGS_H) \
$(EXPR_H) $(OPTABS_H) insn-config.h $(RECOG_H) output.h typeclass.h \
@@ -2826,7 +2825,7 @@ xcoffout.o : xcoffout.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
godump.o : godump.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(DIAGNOSTIC_CORE_H) \
$(TREE_H) $(GGC_H) pointer-set.h $(OBSTACK_H) debug.h gt-godump.h
emit-rtl.o : emit-rtl.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
- $(TREE_H) $(FLAGS_H) $(FUNCTION_H) $(REGS_H) insn-config.h $(RECOG_H) vecprim.h \
+ $(TREE_H) $(FLAGS_H) $(FUNCTION_H) $(REGS_H) insn-config.h $(RECOG_H) \
$(GGC_H) $(EXPR_H) hard-reg-set.h $(BITMAP_H) $(DIAGNOSTIC_CORE_H) $(BASIC_BLOCK_H) \
$(HASHTAB_H) $(TM_P_H) debug.h langhooks.h $(TREE_PASS_H) gt-emit-rtl.h \
$(DF_H) $(PARAMS_H) $(TARGET_H)
@@ -3045,7 +3044,7 @@ df-core.o : df-core.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
df-problems.o : df-problems.c $(CONFIG_H) $(SYSTEM_H) coretypes.h dumpfile.h $(TM_H) \
$(RTL_H) insn-config.h $(RECOG_H) $(FUNCTION_H) $(REGS_H) alloc-pool.h \
hard-reg-set.h $(BASIC_BLOCK_H) $(DF_H) $(BITMAP_H) sbitmap.h $(TIMEVAR_H) \
- $(TM_P_H) $(TARGET_H) $(FLAGS_H) $(EXCEPT_H) dce.h vecprim.h $(VALTRACK_H)
+ $(TM_P_H) $(TARGET_H) $(FLAGS_H) $(EXCEPT_H) dce.h $(VALTRACK_H)
df-scan.o : df-scan.c $(CONFIG_H) $(SYSTEM_H) coretypes.h dumpfile.h $(TM_H) $(RTL_H) \
insn-config.h $(RECOG_H) $(FUNCTION_H) $(REGS_H) alloc-pool.h \
hard-reg-set.h $(BASIC_BLOCK_H) $(DF_H) $(BITMAP_H) sbitmap.h \
@@ -3105,7 +3104,7 @@ cfgrtl.o : cfgrtl.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_ERROR_H) \
$(CFGLOOP_H) $(OBSTACK_H) $(TARGET_H) $(TREE_H) \
$(TREE_PASS_H) $(DF_H) $(GGC_H) $(COMMON_TARGET_H) gt-cfgrtl.h
cfganal.o : cfganal.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(BASIC_BLOCK_H) \
- $(TIMEVAR_H) vecprim.h sbitmap.h $(BITMAP_H)
+ $(TIMEVAR_H) sbitmap.h $(BITMAP_H)
cfgbuild.o : cfgbuild.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(FLAGS_H) $(BASIC_BLOCK_H) $(REGS_H) hard-reg-set.h $(DIAGNOSTIC_CORE_H) \
$(FUNCTION_H) $(EXCEPT_H) $(TIMEVAR_H) $(TREE_H) $(EXPR_H) sbitmap.h
@@ -3122,7 +3121,7 @@ cfgloopanal.o : cfgloopanal.c coretypes.h dumpfile.h $(CONFIG_H) $(SYSTEM_H) $(R
$(BASIC_BLOCK_H) hard-reg-set.h $(CFGLOOP_H) $(EXPR_H) $(TM_H) \
$(OBSTACK_H) graphds.h $(PARAMS_H)
graphds.o : graphds.c graphds.h $(CONFIG_H) $(SYSTEM_H) $(BITMAP_H) $(OBSTACK_H) \
- coretypes.h $(VEC_H) vecprim.h
+ coretypes.h $(VEC_H)
loop-iv.o : loop-iv.c $(CONFIG_H) $(SYSTEM_H) coretypes.h dumpfile.h \
$(RTL_H) $(BASIC_BLOCK_H) \
hard-reg-set.h $(CFGLOOP_H) $(EXPR_H) $(TM_H) $(OBSTACK_H) \
@@ -3147,7 +3146,7 @@ loop-unroll.o: loop-unroll.c $(CONFIG_H) $(SYSTEM_H) coretypes.h dumpfile.h \
$(OBSTACK_H)
dominance.o : dominance.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
hard-reg-set.h $(BASIC_BLOCK_H) et-forest.h $(OBSTACK_H) $(DIAGNOSTIC_CORE_H) \
- $(TIMEVAR_H) graphds.h vecprim.h pointer-set.h $(BITMAP_H)
+ $(TIMEVAR_H) graphds.h pointer-set.h $(BITMAP_H)
et-forest.o : et-forest.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
et-forest.h alloc-pool.h $(BASIC_BLOCK_H)
combine.o : combine.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
@@ -3156,7 +3155,7 @@ combine.o : combine.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(DIAGNOSTIC_CORE_H) $(TM_P_H) $(TREE_H) $(TARGET_H) \
output.h $(PARAMS_H) $(OPTABS_H) \
insn-codes.h $(TREE_PASS_H) $(DF_H) $(VALTRACK_H) \
- vecprim.h $(CGRAPH_H) $(OBSTACK_H)
+ $(CGRAPH_H) $(OBSTACK_H)
reginfo.o : reginfo.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
hard-reg-set.h $(FLAGS_H) $(BASIC_BLOCK_H) addresses.h $(REGS_H) \
insn-config.h $(RECOG_H) reload.h $(DIAGNOSTIC_CORE_H) \
@@ -3198,7 +3197,7 @@ caller-save.o : caller-save.c $(CONFIG_H) $(SYSTEM_H) coretypes.h dumpfile.h \
bt-load.o : bt-load.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(EXCEPT_H) \
$(RTL_H) hard-reg-set.h $(REGS_H) $(TM_P_H) $(FIBHEAP_H) $(EXPR_H) \
$(TARGET_H) $(FLAGS_H) $(INSN_ATTR_H) $(FUNCTION_H) $(TREE_PASS_H) \
- $(DIAGNOSTIC_CORE_H) $(DF_H) vecprim.h $(RECOG_H) $(CFGLOOP_H)
+ $(DIAGNOSTIC_CORE_H) $(DF_H) $(RECOG_H) $(CFGLOOP_H)
reorg.o : reorg.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
conditions.h hard-reg-set.h $(BASIC_BLOCK_H) $(REGS_H) insn-config.h \
$(INSN_ATTR_H) $(EXCEPT_H) $(RECOG_H) $(FUNCTION_H) $(FLAGS_H) output.h \
@@ -3350,7 +3349,7 @@ final.o : final.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_ERROR_H) \
$(EXCEPT_H) debug.h xcoffout.h toplev.h $(DIAGNOSTIC_CORE_H) reload.h $(DWARF2OUT_H) \
$(TREE_PASS_H) $(BASIC_BLOCK_H) $(TM_P_H) $(TARGET_H) $(EXPR_H) \
dbxout.h $(CGRAPH_H) $(COVERAGE_H) \
- $(DF_H) vecprim.h $(GGC_H) $(CFGLOOP_H) $(PARAMS_H) $(TREE_FLOW_H) \
+ $(DF_H) $(GGC_H) $(CFGLOOP_H) $(PARAMS_H) $(TREE_FLOW_H) \
$(TARGET_DEF_H) $(TREE_PRETTY_PRINT_H)
recog.o : recog.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_ERROR_H) \
$(FUNCTION_H) $(BASIC_BLOCK_H) $(REGS_H) $(RECOG_H) $(EXPR_H) \
@@ -3361,7 +3360,7 @@ reg-stack.o : reg-stack.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
$(RTL_ERROR_H) $(TREE_H) $(RECOG_H) $(REGS_H) hard-reg-set.h $(FLAGS_H) \
insn-config.h reload.h $(FUNCTION_H) $(TM_P_H) $(GGC_H) \
$(BASIC_BLOCK_H) \
- $(TREE_PASS_H) $(TARGET_H) vecprim.h $(DF_H) $(EMIT_RTL_H)
+ $(TREE_PASS_H) $(TARGET_H) $(DF_H) $(EMIT_RTL_H)
sreal.o: sreal.c $(CONFIG_H) $(SYSTEM_H) coretypes.h sreal.h
predict.o: predict.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) $(RTL_H) \
$(TREE_H) $(FLAGS_H) insn-config.h $(BASIC_BLOCK_H) $(REGS_H) \
@@ -3689,7 +3688,6 @@ s-tm-texi: build/genhooks$(build_exeext) $(srcdir)/doc/tm.texi.in
fi
GTFILES = $(CPP_ID_DATA_H) $(srcdir)/input.h $(srcdir)/coretypes.h \
- $(srcdir)/vecprim.h $(srcdir)/vecir.h \
$(host_xm_file_list) \
$(tm_file_list) $(HASHTAB_H) $(SPLAY_TREE_H) $(srcdir)/bitmap.h \
$(srcdir)/alias.h $(srcdir)/coverage.c $(srcdir)/rtl.h \
@@ -3851,7 +3849,7 @@ build/genattr-common.o : genattr-common.c $(RTL_BASE_H) $(BCONFIG_H) \
$(SYSTEM_H) coretypes.h $(GTM_H) errors.h $(READ_MD_H) gensupport.h
build/genattrtab.o : genattrtab.c $(RTL_BASE_H) $(OBSTACK_H) \
$(BCONFIG_H) $(SYSTEM_H) coretypes.h $(GTM_H) errors.h $(GGC_H) \
- $(READ_MD_H) gensupport.h vecprim.h $(FNMATCH_H)
+ $(READ_MD_H) gensupport.h $(FNMATCH_H)
build/genautomata.o : genautomata.c $(RTL_BASE_H) $(OBSTACK_H) \
$(BCONFIG_H) $(SYSTEM_H) coretypes.h $(GTM_H) errors.h $(VEC_H) \
$(HASHTAB_H) gensupport.h $(FNMATCH_H)
@@ -3873,8 +3871,7 @@ build/genemit.o : genemit.c $(RTL_BASE_H) $(BCONFIG_H) $(SYSTEM_H) \
build/genenums.o : genenums.c $(BCONFIG_H) $(SYSTEM_H) \
coretypes.h errors.h $(READ_MD_H)
build/genextract.o : genextract.c $(RTL_BASE_H) $(BCONFIG_H) \
- $(SYSTEM_H) coretypes.h $(GTM_H) errors.h $(READ_MD_H) gensupport.h \
- vecprim.h
+ $(SYSTEM_H) coretypes.h $(GTM_H) errors.h $(READ_MD_H) gensupport.h
build/genflags.o : genflags.c $(RTL_BASE_H) $(OBSTACK_H) $(BCONFIG_H) \
$(SYSTEM_H) coretypes.h $(GTM_H) errors.h $(READ_MD_H) gensupport.h
build/gengenrtl.o : gengenrtl.c $(BCONFIG_H) $(SYSTEM_H) rtl.def
diff --git a/gcc/ada/ChangeLog b/gcc/ada/ChangeLog
index 433b6b22f15..d01835486ba 100644
--- a/gcc/ada/ChangeLog
+++ b/gcc/ada/ChangeLog
@@ -1,3 +1,13 @@
+2012-11-16 Diego Novillo <dnovillo@google.com>
+
+ Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
+
+ * gcc-interface/decl.c: Use new vec API in vec.h.
+ * gcc-interface/gigi.h: Likewise.
+ * gcc-interface/trans.c: Likewise.
+ * gcc-interface/utils.c: Likewise.
+ * gcc-interface/utils2.c: Likewise.
+
2012-11-09 Eric Botcazou <ebotcazou@adacore.com>
PR other/52438
diff --git a/gcc/ada/gcc-interface/decl.c b/gcc/ada/gcc-interface/decl.c
index dfefff2e7a7..b3cf22c51c7 100644
--- a/gcc/ada/gcc-interface/decl.c
+++ b/gcc/ada/gcc-interface/decl.c
@@ -106,8 +106,6 @@ typedef struct subst_pair_d {
tree replacement;
} subst_pair;
-DEF_VEC_O(subst_pair);
-DEF_VEC_ALLOC_O(subst_pair,heap);
typedef struct variant_desc_d {
/* The type of the variant. */
@@ -123,8 +121,6 @@ typedef struct variant_desc_d {
tree new_type;
} variant_desc;
-DEF_VEC_O(variant_desc);
-DEF_VEC_ALLOC_O(variant_desc,heap);
/* A hash table used to cache the result of annotate_value. */
static GTY ((if_marked ("tree_int_map_marked_p"),
@@ -153,21 +149,21 @@ static void components_to_record (tree, Node_Id, tree, int, bool, bool, bool,
static Uint annotate_value (tree);
static void annotate_rep (Entity_Id, tree);
static tree build_position_list (tree, bool, tree, tree, unsigned int, tree);
-static VEC(subst_pair,heap) *build_subst_list (Entity_Id, Entity_Id, bool);
-static VEC(variant_desc,heap) *build_variant_list (tree,
- VEC(subst_pair,heap) *,
- VEC(variant_desc,heap) *);
+static vec<subst_pair> build_subst_list (Entity_Id, Entity_Id, bool);
+static vec<variant_desc> build_variant_list (tree,
+ vec<subst_pair> ,
+ vec<variant_desc> );
static tree validate_size (Uint, tree, Entity_Id, enum tree_code, bool, bool);
static void set_rm_size (Uint, tree, Entity_Id);
static unsigned int validate_alignment (Uint, Entity_Id, unsigned int);
static void check_ok_for_atomic (tree, Entity_Id, bool);
static tree create_field_decl_from (tree, tree, tree, tree, tree,
- VEC(subst_pair,heap) *);
+ vec<subst_pair> );
static tree create_rep_part (tree, tree, tree);
static tree get_rep_part (tree);
-static tree create_variant_part_from (tree, VEC(variant_desc,heap) *, tree,
- tree, VEC(subst_pair,heap) *);
-static void copy_and_substitute_in_size (tree, tree, VEC(subst_pair,heap) *);
+static tree create_variant_part_from (tree, vec<variant_desc> , tree,
+ tree, vec<subst_pair> );
+static void copy_and_substitute_in_size (tree, tree, vec<subst_pair> );
/* The relevant constituents of a subprogram binding to a GCC builtin. Used
to pass around calls performing profile compatibility checks. */
@@ -1157,7 +1153,8 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
= TYPE_PADDING_P (gnu_type)
? TYPE_FIELDS (TREE_TYPE (TYPE_FIELDS (gnu_type)))
: TYPE_FIELDS (gnu_type);
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, 1);
tree t = build_template (TREE_TYPE (template_field),
TREE_TYPE (DECL_CHAIN (template_field)),
NULL_TREE);
@@ -1329,8 +1326,7 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_alloc_type)));
if (TREE_CODE (gnu_expr) == CONSTRUCTOR
- && 1 == VEC_length (constructor_elt,
- CONSTRUCTOR_ELTS (gnu_expr)))
+ && 1 == vec_safe_length (CONSTRUCTOR_ELTS (gnu_expr)))
gnu_expr = 0;
else
gnu_expr
@@ -3293,13 +3289,13 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
&& Present (Discriminant_Constraint (gnat_entity))
&& Stored_Constraint (gnat_entity) != No_Elist)
{
- VEC(subst_pair,heap) *gnu_subst_list
+ vec<subst_pair> gnu_subst_list
= build_subst_list (gnat_entity, gnat_base_type, definition);
tree gnu_unpad_base_type, gnu_rep_part, gnu_variant_part, t;
tree gnu_pos_list, gnu_field_list = NULL_TREE;
bool selected_variant = false;
Entity_Id gnat_field;
- VEC(variant_desc,heap) *gnu_variant_list;
+ vec<variant_desc> gnu_variant_list;
gnu_type = make_node (RECORD_TYPE);
TYPE_NAME (gnu_type) = gnu_entity_name;
@@ -3330,12 +3326,13 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
gnu_variant_list
= build_variant_list (TREE_TYPE (gnu_variant_part),
- gnu_subst_list, NULL);
+ gnu_subst_list,
+ vec<variant_desc>());
/* If all the qualifiers are unconditionally true, the
innermost variant is statically selected. */
selected_variant = true;
- FOR_EACH_VEC_ELT (variant_desc, gnu_variant_list, i, v)
+ FOR_EACH_VEC_ELT (gnu_variant_list, i, v)
if (!integer_onep (v->qual))
{
selected_variant = false;
@@ -3344,7 +3341,7 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
/* Otherwise, create the new variants. */
if (!selected_variant)
- FOR_EACH_VEC_ELT (variant_desc, gnu_variant_list, i, v)
+ FOR_EACH_VEC_ELT (gnu_variant_list, i, v)
{
tree old_variant = v->type;
tree new_variant = make_node (RECORD_TYPE);
@@ -3362,13 +3359,14 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
}
else
{
- gnu_variant_list = NULL;
+ gnu_variant_list.create (0);
selected_variant = false;
}
gnu_pos_list
= build_position_list (gnu_unpad_base_type,
- gnu_variant_list && !selected_variant,
+ gnu_variant_list.exists ()
+ && !selected_variant,
size_zero_node, bitsize_zero_node,
BIGGEST_ALIGNMENT, NULL_TREE);
@@ -3449,7 +3447,7 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
unsigned int i;
t = NULL_TREE;
- FOR_EACH_VEC_ELT (variant_desc, gnu_variant_list, i, v)
+ FOR_EACH_VEC_ELT (gnu_variant_list, i, v)
if (gnu_context == v->type
|| ((gnu_rep_part = get_rep_part (v->type))
&& gnu_context == TREE_TYPE (gnu_rep_part)))
@@ -3515,7 +3513,7 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
/* If there is a variant list and no selected variant, we need
to create the nest of variant parts from the old nest. */
- if (gnu_variant_list && !selected_variant)
+ if (gnu_variant_list.exists () && !selected_variant)
{
tree new_variant_part
= create_variant_part_from (gnu_variant_part,
@@ -3587,8 +3585,8 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
gnat_entity);
}
- VEC_free (variant_desc, heap, gnu_variant_list);
- VEC_free (subst_pair, heap, gnu_subst_list);
+ gnu_variant_list.release ();
+ gnu_subst_list.release ();
/* Now we can finalize it. */
rest_of_record_type_compilation (gnu_type);
@@ -7551,10 +7549,10 @@ build_position_list (tree gnu_type, bool do_not_flatten_variant, tree gnu_pos,
of operands to SUBSTITUTE_IN_EXPR. DEFINITION is true if this is for
a definition of GNAT_SUBTYPE. */
-static VEC(subst_pair,heap) *
+static vec<subst_pair>
build_subst_list (Entity_Id gnat_subtype, Entity_Id gnat_type, bool definition)
{
- VEC(subst_pair,heap) *gnu_list = NULL;
+ vec<subst_pair> gnu_list = vec<subst_pair>();
Entity_Id gnat_discrim;
Node_Id gnat_value;
@@ -7573,7 +7571,7 @@ build_subst_list (Entity_Id gnat_subtype, Entity_Id gnat_type, bool definition)
get_entity_name (gnat_discrim),
definition, true, false));
subst_pair s = {gnu_field, replacement};
- VEC_safe_push (subst_pair, heap, gnu_list, s);
+ gnu_list.safe_push (s);
}
return gnu_list;
@@ -7584,9 +7582,9 @@ build_subst_list (Entity_Id gnat_subtype, Entity_Id gnat_type, bool definition)
the substitutions described in SUBST_LIST. GNU_LIST is a pre-existing
list to be prepended to the newly created entries. */
-static VEC(variant_desc,heap) *
-build_variant_list (tree qual_union_type, VEC(subst_pair,heap) *subst_list,
- VEC(variant_desc,heap) *gnu_list)
+static vec<variant_desc>
+build_variant_list (tree qual_union_type, vec<subst_pair> subst_list,
+ vec<variant_desc> gnu_list)
{
tree gnu_field;
@@ -7598,7 +7596,7 @@ build_variant_list (tree qual_union_type, VEC(subst_pair,heap) *subst_list,
unsigned int i;
subst_pair *s;
- FOR_EACH_VEC_ELT (subst_pair, subst_list, i, s)
+ FOR_EACH_VEC_ELT (subst_list, i, s)
qual = SUBSTITUTE_IN_EXPR (qual, s->discriminant, s->replacement);
/* If the new qualifier is not unconditionally false, its variant may
@@ -7608,7 +7606,7 @@ build_variant_list (tree qual_union_type, VEC(subst_pair,heap) *subst_list,
tree variant_type = TREE_TYPE (gnu_field), variant_subpart;
variant_desc v = {variant_type, gnu_field, qual, NULL_TREE};
- VEC_safe_push (variant_desc, heap, gnu_list, v);
+ gnu_list.safe_push (v);
/* Recurse on the variant subpart of the variant, if any. */
variant_subpart = get_variant_part (variant_type);
@@ -8170,7 +8168,7 @@ intrin_profiles_compatible_p (intrin_binding_t * inb)
static tree
create_field_decl_from (tree old_field, tree field_type, tree record_type,
tree size, tree pos_list,
- VEC(subst_pair,heap) *subst_list)
+ vec<subst_pair> subst_list)
{
tree t = TREE_VALUE (purpose_member (old_field, pos_list));
tree pos = TREE_VEC_ELT (t, 0), bitpos = TREE_VEC_ELT (t, 2);
@@ -8180,7 +8178,7 @@ create_field_decl_from (tree old_field, tree field_type, tree record_type,
subst_pair *s;
if (CONTAINS_PLACEHOLDER_P (pos))
- FOR_EACH_VEC_ELT (subst_pair, subst_list, i, s)
+ FOR_EACH_VEC_ELT (subst_list, i, s)
pos = SUBSTITUTE_IN_EXPR (pos, s->discriminant, s->replacement);
/* If the position is now a constant, we can set it as the position of the
@@ -8276,9 +8274,9 @@ get_variant_part (tree record_type)
static tree
create_variant_part_from (tree old_variant_part,
- VEC(variant_desc,heap) *variant_list,
+ vec<variant_desc> variant_list,
tree record_type, tree pos_list,
- VEC(subst_pair,heap) *subst_list)
+ vec<subst_pair> subst_list)
{
tree offset = DECL_FIELD_OFFSET (old_variant_part);
tree old_union_type = TREE_TYPE (old_variant_part);
@@ -8315,7 +8313,7 @@ create_variant_part_from (tree old_variant_part,
copy_and_substitute_in_size (new_union_type, old_union_type, subst_list);
/* Now finish up the new variants and populate the union type. */
- FOR_EACH_VEC_ELT_REVERSE (variant_desc, variant_list, i, v)
+ FOR_EACH_VEC_ELT_REVERSE (variant_list, i, v)
{
tree old_field = v->field, new_field;
tree old_variant, old_variant_subpart, new_variant, field_list;
@@ -8397,7 +8395,7 @@ create_variant_part_from (tree old_variant_part,
static void
copy_and_substitute_in_size (tree new_type, tree old_type,
- VEC(subst_pair,heap) *subst_list)
+ vec<subst_pair> subst_list)
{
unsigned int i;
subst_pair *s;
@@ -8409,19 +8407,19 @@ copy_and_substitute_in_size (tree new_type, tree old_type,
relate_alias_sets (new_type, old_type, ALIAS_SET_COPY);
if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (new_type)))
- FOR_EACH_VEC_ELT (subst_pair, subst_list, i, s)
+ FOR_EACH_VEC_ELT (subst_list, i, s)
TYPE_SIZE (new_type)
= SUBSTITUTE_IN_EXPR (TYPE_SIZE (new_type),
s->discriminant, s->replacement);
if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (new_type)))
- FOR_EACH_VEC_ELT (subst_pair, subst_list, i, s)
+ FOR_EACH_VEC_ELT (subst_list, i, s)
TYPE_SIZE_UNIT (new_type)
= SUBSTITUTE_IN_EXPR (TYPE_SIZE_UNIT (new_type),
s->discriminant, s->replacement);
if (CONTAINS_PLACEHOLDER_P (TYPE_ADA_SIZE (new_type)))
- FOR_EACH_VEC_ELT (subst_pair, subst_list, i, s)
+ FOR_EACH_VEC_ELT (subst_list, i, s)
SET_TYPE_ADA_SIZE
(new_type, SUBSTITUTE_IN_EXPR (TYPE_ADA_SIZE (new_type),
s->discriminant, s->replacement));
diff --git a/gcc/ada/gcc-interface/gigi.h b/gcc/ada/gcc-interface/gigi.h
index d4a81762f82..1d0d2fb2167 100644
--- a/gcc/ada/gcc-interface/gigi.h
+++ b/gcc/ada/gcc-interface/gigi.h
@@ -884,7 +884,7 @@ extern tree build_call_raise_column (int msg, Node_Id gnat_node);
/* Return a CONSTRUCTOR of TYPE whose elements are V. This is not the
same as build_constructor in the language-independent tree.c. */
-extern tree gnat_build_constructor (tree type, VEC(constructor_elt,gc) *v);
+extern tree gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v);
/* Return a COMPONENT_REF to access a field that is given by COMPONENT,
an IDENTIFIER_NODE giving the name of the field, FIELD, a FIELD_DECL,
diff --git a/gcc/ada/gcc-interface/trans.c b/gcc/ada/gcc-interface/trans.c
index dbc4689a1d5..2b23627681b 100644
--- a/gcc/ada/gcc-interface/trans.c
+++ b/gcc/ada/gcc-interface/trans.c
@@ -110,11 +110,9 @@ bool type_annotate_only;
/* Current filename without path. */
const char *ref_filename;
-DEF_VEC_I(Node_Id);
-DEF_VEC_ALLOC_I(Node_Id,heap);
/* List of N_Validate_Unchecked_Conversion nodes in the unit. */
-static VEC(Node_Id,heap) *gnat_validate_uc_list;
+static vec<Node_Id> gnat_validate_uc_list;
/* When not optimizing, we cache the 'First, 'Last and 'Length attributes
of unconstrained array IN parameters to avoid emitting a great deal of
@@ -129,13 +127,11 @@ struct GTY (()) parm_attr_d {
typedef struct parm_attr_d *parm_attr;
-DEF_VEC_P(parm_attr);
-DEF_VEC_ALLOC_P(parm_attr,gc);
struct GTY(()) language_function {
- VEC(parm_attr,gc) *parm_attr_cache;
+ vec<parm_attr, va_gc> *parm_attr_cache;
bitmap named_ret_val;
- VEC(tree,gc) *other_ret_val;
+ vec<tree, va_gc> *other_ret_val;
int gnat_ret;
};
@@ -184,21 +180,21 @@ static GTY(()) struct elab_info *elab_info_list;
/* Stack of exception pointer variables. Each entry is the VAR_DECL
that stores the address of the raised exception. Nonzero means we
are in an exception handler. Not used in the zero-cost case. */
-static GTY(()) VEC(tree,gc) *gnu_except_ptr_stack;
+static GTY(()) vec<tree, va_gc> *gnu_except_ptr_stack;
/* In ZCX case, current exception pointer. Used to re-raise it. */
static GTY(()) tree gnu_incoming_exc_ptr;
/* Stack for storing the current elaboration procedure decl. */
-static GTY(()) VEC(tree,gc) *gnu_elab_proc_stack;
+static GTY(()) vec<tree, va_gc> *gnu_elab_proc_stack;
/* Stack of labels to be used as a goto target instead of a return in
some functions. See processing for N_Subprogram_Body. */
-static GTY(()) VEC(tree,gc) *gnu_return_label_stack;
+static GTY(()) vec<tree, va_gc> *gnu_return_label_stack;
/* Stack of variable for the return value of a function with copy-in/copy-out
parameters. See processing for N_Subprogram_Body. */
-static GTY(()) VEC(tree,gc) *gnu_return_var_stack;
+static GTY(()) vec<tree, va_gc> *gnu_return_var_stack;
/* Structure used to record information for a range check. */
struct GTY(()) range_check_info_d {
@@ -210,28 +206,24 @@ struct GTY(()) range_check_info_d {
typedef struct range_check_info_d *range_check_info;
-DEF_VEC_P(range_check_info);
-DEF_VEC_ALLOC_P(range_check_info,gc);
/* Structure used to record information for a loop. */
struct GTY(()) loop_info_d {
tree label;
tree loop_var;
- VEC(range_check_info,gc) *checks;
+ vec<range_check_info, va_gc> *checks;
};
typedef struct loop_info_d *loop_info;
-DEF_VEC_P(loop_info);
-DEF_VEC_ALLOC_P(loop_info,gc);
/* Stack of loop_info structures associated with LOOP_STMT nodes. */
-static GTY(()) VEC(loop_info,gc) *gnu_loop_stack;
+static GTY(()) vec<loop_info, va_gc> *gnu_loop_stack;
/* The stacks for N_{Push,Pop}_*_Label. */
-static GTY(()) VEC(tree,gc) *gnu_constraint_error_label_stack;
-static GTY(()) VEC(tree,gc) *gnu_storage_error_label_stack;
-static GTY(()) VEC(tree,gc) *gnu_program_error_label_stack;
+static GTY(()) vec<tree, va_gc> *gnu_constraint_error_label_stack;
+static GTY(()) vec<tree, va_gc> *gnu_storage_error_label_stack;
+static GTY(()) vec<tree, va_gc> *gnu_program_error_label_stack;
/* Map GNAT tree codes to GCC tree codes for simple expressions. */
static enum tree_code gnu_codes[Number_Node_Kinds];
@@ -242,7 +234,7 @@ static void record_code_position (Node_Id);
static void insert_code_for (Node_Id);
static void add_cleanup (tree, Node_Id);
static void add_stmt_list (List_Id);
-static void push_exception_label_stack (VEC(tree,gc) **, Entity_Id);
+static void push_exception_label_stack (vec<tree, va_gc> **, Entity_Id);
static tree build_stmt_group (List_Id, bool);
static inline bool stmt_group_may_fallthru (void);
static enum gimplify_status gnat_gimplify_stmt (tree *);
@@ -588,14 +580,12 @@ gigi (Node_Id gnat_root, int max_gnat_node, int number_name ATTRIBUTE_UNUSED,
tree null_node = fold_convert (ptr_void_ftype, null_pointer_node);
tree field_list = NULL_TREE;
int j;
- VEC(constructor_elt,gc) *null_vec = NULL;
+ vec<constructor_elt, va_gc> *null_vec = NULL;
constructor_elt *elt;
fdesc_type_node = make_node (RECORD_TYPE);
- VEC_safe_grow (constructor_elt, gc, null_vec,
- TARGET_VTABLE_USES_DESCRIPTORS);
- elt = (VEC_address (constructor_elt,null_vec)
- + TARGET_VTABLE_USES_DESCRIPTORS - 1);
+ vec_safe_grow (null_vec, TARGET_VTABLE_USES_DESCRIPTORS);
+ elt = (null_vec->address () + TARGET_VTABLE_USES_DESCRIPTORS - 1);
for (j = 0; j < TARGET_VTABLE_USES_DESCRIPTORS; j++)
{
@@ -651,10 +641,10 @@ gigi (Node_Id gnat_root, int max_gnat_node, int number_name ATTRIBUTE_UNUSED,
user available facilities for Intrinsic imports. */
gnat_install_builtins ();
- VEC_safe_push (tree, gc, gnu_except_ptr_stack, NULL_TREE);
- VEC_safe_push (tree, gc, gnu_constraint_error_label_stack, NULL_TREE);
- VEC_safe_push (tree, gc, gnu_storage_error_label_stack, NULL_TREE);
- VEC_safe_push (tree, gc, gnu_program_error_label_stack, NULL_TREE);
+ vec_safe_push (gnu_except_ptr_stack, NULL_TREE);
+ vec_safe_push (gnu_constraint_error_label_stack, NULL_TREE);
+ vec_safe_push (gnu_storage_error_label_stack, NULL_TREE);
+ vec_safe_push (gnu_program_error_label_stack, NULL_TREE);
/* Process any Pragma Ident for the main unit. */
if (Present (Ident_String (Main_Unit)))
@@ -671,9 +661,9 @@ gigi (Node_Id gnat_root, int max_gnat_node, int number_name ATTRIBUTE_UNUSED,
/* Then process the N_Validate_Unchecked_Conversion nodes. We do this at
the very end to avoid having to second-guess the front-end when we run
into dummy nodes during the regular processing. */
- for (i = 0; VEC_iterate (Node_Id, gnat_validate_uc_list, i, gnat_iter); i++)
+ for (i = 0; gnat_validate_uc_list.iterate (i, &gnat_iter); i++)
validate_unchecked_conversion (gnat_iter);
- VEC_free (Node_Id, heap, gnat_validate_uc_list);
+ gnat_validate_uc_list.release ();
/* Finally see if we have any elaboration procedures to deal with. */
for (info = elab_info_list; info; info = info->next)
@@ -1367,7 +1357,7 @@ Attribute_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, int attribute)
/* Descriptors can only be built here for top-level functions. */
bool build_descriptor = (global_bindings_p () != 0);
int i;
- VEC(constructor_elt,gc) *gnu_vec = NULL;
+ vec<constructor_elt, va_gc> *gnu_vec = NULL;
constructor_elt *elt;
gnu_result_type = get_unpadded_type (Etype (gnat_node));
@@ -1383,10 +1373,8 @@ Attribute_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, int attribute)
gnu_result = build1 (INDIRECT_REF, gnu_result_type, gnu_result);
}
- VEC_safe_grow (constructor_elt, gc, gnu_vec,
- TARGET_VTABLE_USES_DESCRIPTORS);
- elt = (VEC_address (constructor_elt, gnu_vec)
- + TARGET_VTABLE_USES_DESCRIPTORS - 1);
+ vec_safe_grow (gnu_vec, TARGET_VTABLE_USES_DESCRIPTORS);
+ elt = (gnu_vec->address () + TARGET_VTABLE_USES_DESCRIPTORS - 1);
for (gnu_field = TYPE_FIELDS (gnu_result_type), i = 0;
i < TARGET_VTABLE_USES_DESCRIPTORS;
gnu_field = DECL_CHAIN (gnu_field), i++)
@@ -1739,7 +1727,7 @@ Attribute_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, int attribute)
and the dimension in the cache and create a new one on failure. */
if (!optimize && Present (gnat_param))
{
- FOR_EACH_VEC_ELT (parm_attr, f_parm_attr_cache, i, pa)
+ FOR_EACH_VEC_SAFE_ELT (f_parm_attr_cache, i, pa)
if (pa->id == gnat_param && pa->dim == Dimension)
break;
@@ -1748,7 +1736,7 @@ Attribute_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, int attribute)
pa = ggc_alloc_cleared_parm_attr_d ();
pa->id = gnat_param;
pa->dim = Dimension;
- VEC_safe_push (parm_attr, gc, f_parm_attr_cache, pa);
+ vec_safe_push (f_parm_attr_cache, pa);
}
}
@@ -2210,7 +2198,7 @@ push_range_check_info (tree var)
struct loop_info_d *iter = NULL;
unsigned int i;
- if (VEC_empty (loop_info, gnu_loop_stack))
+ if (vec_safe_is_empty (gnu_loop_stack))
return NULL;
var = remove_conversions (var, false);
@@ -2221,8 +2209,8 @@ push_range_check_info (tree var)
if (decl_function_context (var) != current_function_decl)
return NULL;
- for (i = VEC_length (loop_info, gnu_loop_stack) - 1;
- VEC_iterate (loop_info, gnu_loop_stack, i, iter);
+ for (i = vec_safe_length (gnu_loop_stack) - 1;
+ vec_safe_iterate (gnu_loop_stack, i, &iter);
i--)
if (var == iter->loop_var)
break;
@@ -2230,7 +2218,7 @@ push_range_check_info (tree var)
if (iter)
{
struct range_check_info_d *rci = ggc_alloc_range_check_info_d ();
- VEC_safe_push (range_check_info, gc, iter->checks, rci);
+ vec_safe_push (iter->checks, rci);
return rci;
}
@@ -2312,7 +2300,7 @@ Loop_Statement_to_gnu (Node_Id gnat_node)
tree gnu_result;
/* Push the loop_info structure associated with the LOOP_STMT. */
- VEC_safe_push (loop_info, gc, gnu_loop_stack, gnu_loop_info);
+ vec_safe_push (gnu_loop_stack, gnu_loop_info);
/* Set location information for statement and end label. */
set_expr_location_from_node (gnu_loop_stmt, gnat_node);
@@ -2576,7 +2564,7 @@ Loop_Statement_to_gnu (Node_Id gnat_node)
if (Present (gnat_iter_scheme) && No (Condition (gnat_iter_scheme)))
{
struct range_check_info_d *rci;
- unsigned n_checks = VEC_length (range_check_info, gnu_loop_info->checks);
+ unsigned n_checks = vec_safe_length (gnu_loop_info->checks);
unsigned int i;
/* First, if we have computed a small number of invariant conditions for
@@ -2593,7 +2581,7 @@ Loop_Statement_to_gnu (Node_Id gnat_node)
that can be entirely optimized away in the end. */
if (1 <= n_checks && n_checks <= 4)
for (i = 0;
- VEC_iterate (range_check_info, gnu_loop_info->checks, i, rci);
+ vec_safe_iterate (gnu_loop_info->checks, i, &rci);
i++)
{
tree low_ok
@@ -2636,7 +2624,7 @@ Loop_Statement_to_gnu (Node_Id gnat_node)
else
gnu_result = gnu_loop_stmt;
- VEC_pop (loop_info, gnu_loop_stack);
+ gnu_loop_stack->pop ();
return gnu_result;
}
@@ -2928,10 +2916,8 @@ finalize_nrv_unc_r (tree *tp, int *walk_subtrees, void *data)
{
if (TYPE_IS_FAT_POINTER_P (TREE_TYPE (ret_val)))
ret_val
- = VEC_index (constructor_elt,
- CONSTRUCTOR_ELTS
- (TREE_OPERAND (TREE_OPERAND (ret_val, 0), 1)),
- 1).value;
+ = (*CONSTRUCTOR_ELTS (TREE_OPERAND (TREE_OPERAND (ret_val, 0),
+ 1)))[1].value;
else
ret_val = TREE_OPERAND (TREE_OPERAND (ret_val, 0), 1);
}
@@ -2960,7 +2946,8 @@ finalize_nrv_unc_r (tree *tp, int *walk_subtrees, void *data)
tree saved_current_function_decl = current_function_decl;
tree var = DECL_EXPR_DECL (t);
tree alloc, p_array, new_var, new_ret;
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, 2);
/* Create an artificial context to build the allocation. */
current_function_decl = decl_function_context (var);
@@ -2988,19 +2975,15 @@ finalize_nrv_unc_r (tree *tp, int *walk_subtrees, void *data)
DECL_INITIAL (new_var)
= build2 (COMPOUND_EXPR, TREE_TYPE (new_var),
TREE_OPERAND (alloc, 0),
- VEC_index (constructor_elt,
- CONSTRUCTOR_ELTS (TREE_OPERAND (alloc, 1)),
- 0).value);
+ (*CONSTRUCTOR_ELTS (TREE_OPERAND (alloc, 1)))[0].value);
/* Build a modified CONSTRUCTOR that references NEW_VAR. */
p_array = TYPE_FIELDS (TREE_TYPE (alloc));
CONSTRUCTOR_APPEND_ELT (v, p_array,
fold_convert (TREE_TYPE (p_array), new_var));
CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (p_array),
- VEC_index (constructor_elt,
- CONSTRUCTOR_ELTS
- (TREE_OPERAND (alloc, 1)),
- 1).value);
+ (*CONSTRUCTOR_ELTS (
+ TREE_OPERAND (alloc, 1)))[1].value);
new_ret = build_constructor (TREE_TYPE (alloc), v);
}
else
@@ -3048,7 +3031,7 @@ finalize_nrv_unc_r (tree *tp, int *walk_subtrees, void *data)
the other return values. GNAT_RET is a representative return node. */
static void
-finalize_nrv (tree fndecl, bitmap nrv, VEC(tree,gc) *other, Node_Id gnat_ret)
+finalize_nrv (tree fndecl, bitmap nrv, vec<tree, va_gc> *other, Node_Id gnat_ret)
{
struct cgraph_node *node;
struct nrv_data data;
@@ -3064,7 +3047,7 @@ finalize_nrv (tree fndecl, bitmap nrv, VEC(tree,gc) *other, Node_Id gnat_ret)
data.nrv = nrv;
data.result = NULL_TREE;
data.visited = NULL;
- for (i = 0; VEC_iterate(tree, other, i, iter); i++)
+ for (i = 0; vec_safe_iterate (other, i, &iter); i++)
walk_tree_without_duplicates (&iter, prune_nrv_r, &data);
if (bitmap_empty_p (nrv))
return;
@@ -3186,7 +3169,7 @@ build_return_expr (tree ret_obj, tree ret_val)
totally transparent given the read-compose-write semantics of
assignments from CONSTRUCTORs. */
else if (EXPR_P (ret_val))
- VEC_safe_push (tree, gc, f_other_ret_val, ret_val);
+ vec_safe_push (f_other_ret_val, ret_val);
}
}
else
@@ -3204,7 +3187,7 @@ build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
tree gnu_subprog_param, gnu_stub_param, gnu_param;
tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
- VEC(tree,gc) *gnu_param_vec = NULL;
+ vec<tree, va_gc> *gnu_param_vec = NULL;
gnu_subprog_type = TREE_TYPE (gnu_subprog);
@@ -3238,7 +3221,7 @@ build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
else
gnu_param = gnu_stub_param;
- VEC_safe_push (tree, gc, gnu_param_vec, gnu_param);
+ vec_safe_push (gnu_param_vec, gnu_param);
}
/* Invoke the internal subprogram. */
@@ -3286,7 +3269,7 @@ Subprogram_Body_to_gnu (Node_Id gnat_node)
tree gnu_return_var_elmt = NULL_TREE;
tree gnu_result;
struct language_function *gnu_subprog_language;
- VEC(parm_attr,gc) *cache;
+ vec<parm_attr, va_gc> *cache;
/* If this is a generic object or if it has been eliminated,
ignore it. */
@@ -3340,7 +3323,7 @@ Subprogram_Body_to_gnu (Node_Id gnat_node)
{
tree gnu_return_var = NULL_TREE;
- VEC_safe_push (tree, gc, gnu_return_label_stack,
+ vec_safe_push (gnu_return_label_stack,
create_artificial_label (input_location));
start_stmt_group ();
@@ -3366,7 +3349,7 @@ Subprogram_Body_to_gnu (Node_Id gnat_node)
TREE_VALUE (gnu_return_var_elmt) = gnu_return_var;
}
- VEC_safe_push (tree, gc, gnu_return_var_stack, gnu_return_var);
+ vec_safe_push (gnu_return_var_stack, gnu_return_var);
/* See whether there are parameters for which we don't have a GCC tree
yet. These must be Out parameters. Make a VAR_DECL for them and
@@ -3392,7 +3375,7 @@ Subprogram_Body_to_gnu (Node_Id gnat_node)
}
}
else
- VEC_safe_push (tree, gc, gnu_return_label_stack, NULL_TREE);
+ vec_safe_push (gnu_return_label_stack, NULL_TREE);
/* Get a tree corresponding to the code for the subprogram. */
start_stmt_group ();
@@ -3433,7 +3416,7 @@ Subprogram_Body_to_gnu (Node_Id gnat_node)
start_stmt_group ();
- FOR_EACH_VEC_ELT (parm_attr, cache, i, pa)
+ FOR_EACH_VEC_ELT (*cache, i, pa)
{
if (pa->first)
add_stmt_with_node_force (pa->first, gnat_node);
@@ -3467,7 +3450,7 @@ Subprogram_Body_to_gnu (Node_Id gnat_node)
add_stmt (gnu_result);
add_stmt (build1 (LABEL_EXPR, void_type_node,
- VEC_last (tree, gnu_return_label_stack)));
+ gnu_return_label_stack->last ()));
if (list_length (gnu_cico_list) == 1)
gnu_retval = TREE_VALUE (gnu_cico_list);
@@ -3481,7 +3464,7 @@ Subprogram_Body_to_gnu (Node_Id gnat_node)
gnu_result = end_stmt_group ();
}
- VEC_pop (tree, gnu_return_label_stack);
+ gnu_return_label_stack->pop ();
/* Attempt setting the end_locus of our GCC body tree, typically a
BIND_EXPR or STATEMENT_LIST, then the end_locus of our GCC subprogram
@@ -3639,7 +3622,7 @@ Call_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, tree gnu_target,
/* The return type of the FUNCTION_TYPE. */
tree gnu_result_type = TREE_TYPE (gnu_subprog_type);
tree gnu_subprog_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_subprog);
- VEC(tree,gc) *gnu_actual_vec = NULL;
+ vec<tree, va_gc> *gnu_actual_vec = NULL;
tree gnu_name_list = NULL_TREE;
tree gnu_stmt_list = NULL_TREE;
tree gnu_after_list = NULL_TREE;
@@ -4042,7 +4025,7 @@ Call_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, tree gnu_target,
gnu_actual = convert (DECL_ARG_TYPE (gnu_formal), gnu_actual);
}
- VEC_safe_push (tree, gc, gnu_actual_vec, gnu_actual);
+ vec_safe_push (gnu_actual_vec, gnu_actual);
}
gnu_call
@@ -4402,7 +4385,7 @@ Handled_Sequence_Of_Statements_to_gnu (Node_Id gnat_node)
start_stmt_group ();
gnat_pushlevel ();
- VEC_safe_push (tree, gc, gnu_except_ptr_stack,
+ vec_safe_push (gnu_except_ptr_stack,
create_var_decl (get_identifier ("EXCEPT_PTR"), NULL_TREE,
build_pointer_type (except_type_node),
build_call_n_expr (get_excptr_decl, 0),
@@ -4431,7 +4414,7 @@ Handled_Sequence_Of_Statements_to_gnu (Node_Id gnat_node)
/* If none of the exception handlers did anything, re-raise but do not
defer abortion. */
gnu_expr = build_call_n_expr (raise_nodefer_decl, 1,
- VEC_last (tree, gnu_except_ptr_stack));
+ gnu_except_ptr_stack->last ());
set_expr_location_from_node
(gnu_expr,
Present (End_Label (gnat_node)) ? End_Label (gnat_node) : gnat_node);
@@ -4443,7 +4426,7 @@ Handled_Sequence_Of_Statements_to_gnu (Node_Id gnat_node)
/* End the binding level dedicated to the exception handlers and get the
whole statement group. */
- VEC_pop (tree, gnu_except_ptr_stack);
+ gnu_except_ptr_stack->pop ();
gnat_poplevel ();
gnu_handler = end_stmt_group ();
@@ -4536,7 +4519,7 @@ Exception_Handler_to_gnu_sjlj (Node_Id gnat_node)
build_component_ref
(build_unary_op
(INDIRECT_REF, NULL_TREE,
- VEC_last (tree, gnu_except_ptr_stack)),
+ gnu_except_ptr_stack->last ()),
get_identifier ("not_handled_by_others"), NULL_TREE,
false)),
integer_zero_node);
@@ -4558,8 +4541,8 @@ Exception_Handler_to_gnu_sjlj (Node_Id gnat_node)
this_choice
= build_binary_op
(EQ_EXPR, boolean_type_node,
- VEC_last (tree, gnu_except_ptr_stack),
- convert (TREE_TYPE (VEC_last (tree, gnu_except_ptr_stack)),
+ gnu_except_ptr_stack->last (),
+ convert (TREE_TYPE (gnu_except_ptr_stack->last ()),
build_unary_op (ADDR_EXPR, NULL_TREE, gnu_expr)));
/* If this is the distinguished exception "Non_Ada_Error" (and we are
@@ -4570,7 +4553,7 @@ Exception_Handler_to_gnu_sjlj (Node_Id gnat_node)
tree gnu_comp
= build_component_ref
(build_unary_op (INDIRECT_REF, NULL_TREE,
- VEC_last (tree, gnu_except_ptr_stack)),
+ gnu_except_ptr_stack->last ()),
get_identifier ("lang"), NULL_TREE, false);
this_choice
@@ -4711,7 +4694,7 @@ Compilation_Unit_to_gnu (Node_Id gnat_node)
gnat_unit);
struct elab_info *info;
- VEC_safe_push (tree, gc, gnu_elab_proc_stack, gnu_elab_proc_decl);
+ vec_safe_push (gnu_elab_proc_stack, gnu_elab_proc_decl);
DECL_ELABORATION_PROC_P (gnu_elab_proc_decl) = 1;
/* Initialize the information structure for the function. */
@@ -4792,7 +4775,7 @@ Compilation_Unit_to_gnu (Node_Id gnat_node)
/* Generate elaboration code for this unit, if necessary, and say whether
we did or not. */
- VEC_pop (tree, gnu_elab_proc_stack);
+ gnu_elab_proc_stack->pop ();
/* Invalidate the global renaming pointers. This is necessary because
stabilization of the renamed entities may create SAVE_EXPRs which
@@ -5235,8 +5218,8 @@ gnat_to_gnu (Node_Id gnat_node)
int length = String_Length (gnat_string);
int i;
tree gnu_idx = TYPE_MIN_VALUE (TYPE_DOMAIN (gnu_result_type));
- VEC(constructor_elt,gc) *gnu_vec
- = VEC_alloc (constructor_elt, gc, length);
+ vec<constructor_elt, va_gc> *gnu_vec;
+ vec_alloc (gnu_vec, length);
for (i = 0; i < length; i++)
{
@@ -5658,7 +5641,8 @@ gnat_to_gnu (Node_Id gnat_node)
gnu_aggr_type = TYPE_REPRESENTATIVE_ARRAY (gnu_result_type);
if (Null_Record_Present (gnat_node))
- gnu_result = gnat_build_constructor (gnu_aggr_type, NULL);
+ gnu_result = gnat_build_constructor (gnu_aggr_type,
+ NULL);
else if (TREE_CODE (gnu_aggr_type) == RECORD_TYPE
|| TREE_CODE (gnu_aggr_type) == UNION_TYPE)
@@ -6231,7 +6215,7 @@ gnat_to_gnu (Node_Id gnat_node)
? gnat_to_gnu (Condition (gnat_node)) : NULL_TREE),
(Present (Name (gnat_node))
? get_gnu_tree (Entity (Name (gnat_node)))
- : VEC_last (loop_info, gnu_loop_stack)->label));
+ : gnu_loop_stack->last ()->label));
break;
case N_Simple_Return_Statement:
@@ -6246,7 +6230,7 @@ gnat_to_gnu (Node_Id gnat_node)
/* If this function has copy-in/copy-out parameters, get the real
object for the return. See Subprogram_to_gnu. */
if (TYPE_CI_CO_LIST (gnu_subprog_type))
- gnu_ret_obj = VEC_last (tree, gnu_return_var_stack);
+ gnu_ret_obj = gnu_return_var_stack->last ();
else
gnu_ret_obj = DECL_RESULT (current_function_decl);
@@ -6331,18 +6315,18 @@ gnat_to_gnu (Node_Id gnat_node)
/* If we have a return label defined, convert this into a branch to
that label. The return proper will be handled elsewhere. */
- if (VEC_last (tree, gnu_return_label_stack))
+ if (gnu_return_label_stack->last ())
{
if (gnu_ret_obj)
add_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, gnu_ret_obj,
gnu_ret_val));
gnu_result = build1 (GOTO_EXPR, void_type_node,
- VEC_last (tree, gnu_return_label_stack));
+ gnu_return_label_stack->last ());
/* When not optimizing, make sure the return is preserved. */
if (!optimize && Comes_From_Source (gnat_node))
- DECL_ARTIFICIAL (VEC_last (tree, gnu_return_label_stack)) = 0;
+ DECL_ARTIFICIAL (gnu_return_label_stack->last ()) = 0;
}
/* Otherwise, build a regular return. */
@@ -6569,15 +6553,15 @@ gnat_to_gnu (Node_Id gnat_node)
break;
case N_Pop_Constraint_Error_Label:
- VEC_pop (tree, gnu_constraint_error_label_stack);
+ gnu_constraint_error_label_stack->pop ();
break;
case N_Pop_Storage_Error_Label:
- VEC_pop (tree, gnu_storage_error_label_stack);
+ gnu_storage_error_label_stack->pop ();
break;
case N_Pop_Program_Error_Label:
- VEC_pop (tree, gnu_program_error_label_stack);
+ gnu_program_error_label_stack->pop ();
break;
/******************************/
@@ -6857,7 +6841,7 @@ gnat_to_gnu (Node_Id gnat_node)
/* The only validation we currently do on an unchecked conversion is
that of aliasing assumptions. */
if (flag_strict_aliasing)
- VEC_safe_push (Node_Id, heap, gnat_validate_uc_list, gnat_node);
+ gnat_validate_uc_list.safe_push (gnat_node);
gnu_result = alloc_stmt_list ();
break;
@@ -7032,13 +7016,13 @@ gnat_to_gnu (Node_Id gnat_node)
label to push onto the stack. */
static void
-push_exception_label_stack (VEC(tree,gc) **gnu_stack, Entity_Id gnat_label)
+push_exception_label_stack (vec<tree, va_gc> **gnu_stack, Entity_Id gnat_label)
{
tree gnu_label = (Present (gnat_label)
? gnat_to_gnu_entity (gnat_label, NULL_TREE, 0)
: NULL_TREE);
- VEC_safe_push (tree, gc, *gnu_stack, gnu_label);
+ vec_safe_push (*gnu_stack, gnu_label);
}
/* Record the current code position in GNAT_NODE. */
@@ -8678,7 +8662,7 @@ pos_to_constructor (Node_Id gnat_expr, tree gnu_array_type,
{
tree gnu_index = TYPE_MIN_VALUE (TYPE_DOMAIN (gnu_array_type));
tree gnu_expr;
- VEC(constructor_elt,gc) *gnu_expr_vec = NULL;
+ vec<constructor_elt, va_gc> *gnu_expr_vec = NULL;
for ( ; Present (gnat_expr); gnat_expr = Next (gnat_expr))
{
@@ -8719,7 +8703,7 @@ static tree
extract_values (tree values, tree record_type)
{
tree field, tem;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
{
@@ -8737,7 +8721,7 @@ extract_values (tree values, tree record_type)
{
value = extract_values (values, TREE_TYPE (field));
if (TREE_CODE (value) == CONSTRUCTOR
- && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (value)))
+ && vec_safe_is_empty (CONSTRUCTOR_ELTS (value)))
value = 0;
}
else
@@ -9126,11 +9110,11 @@ tree
get_exception_label (char kind)
{
if (kind == N_Raise_Constraint_Error)
- return VEC_last (tree, gnu_constraint_error_label_stack);
+ return gnu_constraint_error_label_stack->last ();
else if (kind == N_Raise_Storage_Error)
- return VEC_last (tree, gnu_storage_error_label_stack);
+ return gnu_storage_error_label_stack->last ();
else if (kind == N_Raise_Program_Error)
- return VEC_last (tree, gnu_program_error_label_stack);
+ return gnu_program_error_label_stack->last ();
else
return NULL_TREE;
}
@@ -9140,7 +9124,7 @@ get_exception_label (char kind)
tree
get_elaboration_procedure (void)
{
- return VEC_last (tree, gnu_elab_proc_stack);
+ return gnu_elab_proc_stack->last ();
}
#include "gt-ada-trans.h"
diff --git a/gcc/ada/gcc-interface/utils.c b/gcc/ada/gcc-interface/utils.c
index 43a835647d2..6aa465b8de8 100644
--- a/gcc/ada/gcc-interface/utils.c
+++ b/gcc/ada/gcc-interface/utils.c
@@ -201,13 +201,13 @@ static GTY((deletable)) struct gnat_binding_level *free_binding_level;
static GTY(()) tree global_context;
/* An array of global declarations. */
-static GTY(()) VEC(tree,gc) *global_decls;
+static GTY(()) vec<tree, va_gc> *global_decls;
/* An array of builtin function declarations. */
-static GTY(()) VEC(tree,gc) *builtin_decls;
+static GTY(()) vec<tree, va_gc> *builtin_decls;
/* An array of global renaming pointers. */
-static GTY(()) VEC(tree,gc) *global_renaming_pointers;
+static GTY(()) vec<tree, va_gc> *global_renaming_pointers;
/* A chain of unused BLOCK nodes. */
static GTY((deletable)) tree free_block_chain;
@@ -576,10 +576,10 @@ gnat_pushdecl (tree decl, Node_Id gnat_node)
if (DECL_EXTERNAL (decl))
{
if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
- VEC_safe_push (tree, gc, builtin_decls, decl);
+ vec_safe_push (builtin_decls, decl);
}
else if (global_bindings_p ())
- VEC_safe_push (tree, gc, global_decls, decl);
+ vec_safe_push (global_decls, decl);
else
{
DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
@@ -1953,11 +1953,11 @@ create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
/* A list of the data type nodes of the subprogram formal parameters.
This list is generated by traversing the input list of PARM_DECL
nodes. */
- VEC(tree,gc) *param_type_list = NULL;
+ vec<tree, va_gc> *param_type_list = NULL;
tree t, type;
for (t = param_decl_list; t; t = DECL_CHAIN (t))
- VEC_safe_push (tree, gc, param_type_list, TREE_TYPE (t));
+ vec_safe_push (param_type_list, TREE_TYPE (t));
type = build_function_type_vec (return_type, param_type_list);
@@ -2517,7 +2517,7 @@ void
record_global_renaming_pointer (tree decl)
{
gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
- VEC_safe_push (tree, gc, global_renaming_pointers, decl);
+ vec_safe_push (global_renaming_pointers, decl);
}
/* Invalidate the global renaming pointers. */
@@ -2528,10 +2528,13 @@ invalidate_global_renaming_pointers (void)
unsigned int i;
tree iter;
- FOR_EACH_VEC_ELT (tree, global_renaming_pointers, i, iter)
+ if (global_renaming_pointers == NULL)
+ return;
+
+ FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
- VEC_free (tree, gc, global_renaming_pointers);
+ vec_free (global_renaming_pointers);
}
/* Return true if VALUE is a known to be a multiple of FACTOR, which must be
@@ -3091,7 +3094,7 @@ max_size (tree exp, bool max_p)
tree
build_template (tree template_type, tree array_type, tree expr)
{
- VEC(constructor_elt,gc) *template_elts = NULL;
+ vec<constructor_elt, va_gc> *template_elts = NULL;
tree bound_list = NULL_TREE;
tree field;
@@ -3755,7 +3758,7 @@ build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
tree
fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
{
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
tree field;
gnu_expr = maybe_unconstrained_array (gnu_expr);
@@ -3813,7 +3816,7 @@ convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
/* See the head comment of build_vms_descriptor. */
int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
tree lfield, ufield;
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *v;
/* Convert POINTER to the pointer-to-array type. */
gnu_expr64 = convert (p_array_type, gnu_expr64);
@@ -3823,7 +3826,7 @@ convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
case 1: /* Class S */
case 15: /* Class SB */
/* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
- v = VEC_alloc (constructor_elt, gc, 2);
+ vec_alloc (v, 2);
t = DECL_CHAIN (DECL_CHAIN (klass));
t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
CONSTRUCTOR_APPEND_ELT (v, min_field,
@@ -3855,7 +3858,7 @@ convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
(TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
/* Build the template in the form of a constructor. */
- v = VEC_alloc (constructor_elt, gc, 2);
+ vec_alloc (v, 2);
CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
ufield);
@@ -3903,7 +3906,7 @@ convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
(TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);
/* Build the template in the form of a constructor. */
- v = VEC_alloc (constructor_elt, gc, 2);
+ vec_alloc (v, 2);
CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
ufield);
@@ -3924,7 +3927,7 @@ convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
}
/* Build the fat pointer in the form of a constructor. */
- v = VEC_alloc (constructor_elt, gc, 2);
+ vec_alloc (v, 2);
CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
template_addr);
@@ -3966,7 +3969,7 @@ convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
tree template_tree, template_addr, aflags, dimct, t, u;
/* See the head comment of build_vms_descriptor. */
int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *v;
/* Convert POINTER to the pointer-to-array type. */
gnu_expr32 = convert (p_array_type, gnu_expr32);
@@ -3976,7 +3979,7 @@ convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
case 1: /* Class S */
case 15: /* Class SB */
/* Build {1, LENGTH} template; LENGTH is the 1st field. */
- v = VEC_alloc (constructor_elt, gc, 2);
+ vec_alloc (v, 2);
t = TYPE_FIELDS (desc_type);
t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
CONSTRUCTOR_APPEND_ELT (v, min_field,
@@ -4048,7 +4051,7 @@ convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
}
/* Build the fat pointer in the form of a constructor. */
- v = VEC_alloc (constructor_elt, gc, 2);
+ vec_alloc (v, 2);
CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
template_addr);
@@ -4306,7 +4309,8 @@ convert_to_fat_pointer (tree type, tree expr)
tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
tree etype = TREE_TYPE (expr);
tree template_tree;
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, 2);
/* If EXPR is null, make a fat pointer that contains a null pointer to the
array (compare_fat_pointers ensures that this is the full discriminant)
@@ -4323,7 +4327,8 @@ convert_to_fat_pointer (tree type, tree expr)
{
/* The template type can still be dummy at this point so we build an
empty constructor. The middle-end will fill it in with zeros. */
- t = build_constructor (template_type, NULL);
+ t = build_constructor (template_type,
+ NULL);
TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
@@ -4425,7 +4430,7 @@ convert (tree type, tree expr)
constructor to build the record, unless a variable size is involved. */
else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
{
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *v;
/* If we previously converted from another type and our type is
of variable size, remove the conversion to avoid the need for
@@ -4478,7 +4483,7 @@ convert (tree type, tree expr)
expr),
false);
- v = VEC_alloc (constructor_elt, gc, 1);
+ vec_alloc (v, 1);
CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
return gnat_build_constructor (type, v);
@@ -4495,11 +4500,9 @@ convert (tree type, tree expr)
/* If we have just converted to this padded type, just get the
inner expression. */
if (TREE_CODE (expr) == CONSTRUCTOR
- && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
- && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0).index
- == TYPE_FIELDS (etype))
- unpadded
- = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0).value;
+ && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
+ && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
+ unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
/* Otherwise, build an explicit component reference. */
else
@@ -4533,7 +4536,8 @@ convert (tree type, tree expr)
if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
{
tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, 2);
/* If the source already has a template, get a reference to the
associated array only, as we are going to rebuild a template
@@ -4592,8 +4596,7 @@ convert (tree type, tree expr)
{
expr = copy_node (expr);
TREE_TYPE (expr) = type;
- CONSTRUCTOR_ELTS (expr)
- = VEC_copy (constructor_elt, gc, CONSTRUCTOR_ELTS (expr));
+ CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
return expr;
}
@@ -4606,9 +4609,10 @@ convert (tree type, tree expr)
|| tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
{
- VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
- unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
+ vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
+ unsigned HOST_WIDE_INT len = vec_safe_length (e);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, len);
tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
unsigned HOST_WIDE_INT idx;
tree index, value;
@@ -4626,7 +4630,7 @@ convert (tree type, tree expr)
if (!SAME_FIELD_P (efield, field))
break;
constructor_elt elt = {field, convert (TREE_TYPE (field), value)};
- VEC_quick_push (constructor_elt, v, elt);
+ v->quick_push (elt);
/* If packing has made this field a bitfield and the input
value couldn't be emitted statically any more, we need to
@@ -4663,9 +4667,9 @@ convert (tree type, tree expr)
&& gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
etype))
{
- VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
- unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
+ unsigned HOST_WIDE_INT len = vec_safe_length (e);
+ vec<constructor_elt, va_gc> *v;
unsigned HOST_WIDE_INT ix;
tree value;
@@ -4689,11 +4693,11 @@ convert (tree type, tree expr)
}
/* Otherwise, build a regular vector constructor. */
- v = VEC_alloc (constructor_elt, gc, len);
+ vec_alloc (v, len);
FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
{
constructor_elt elt = {NULL_TREE, value};
- VEC_quick_push (constructor_elt, v, elt);
+ v->quick_push (elt);
}
expr = copy_node (expr);
TREE_TYPE (expr) = type;
@@ -4880,7 +4884,8 @@ convert (tree type, tree expr)
case RECORD_TYPE:
if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
{
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, 1);
CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
convert (TREE_TYPE (TYPE_FIELDS (type)),
@@ -5048,9 +5053,7 @@ remove_conversions (tree exp, bool true_address)
&& TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
return
- remove_conversions (VEC_index (constructor_elt,
- CONSTRUCTOR_ELTS (exp), 0).value,
- true);
+ remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
break;
case COMPONENT_REF:
@@ -5292,7 +5295,8 @@ unchecked_convert (tree type, tree expr, bool notrunc_p)
{
tree rec_type = make_node (RECORD_TYPE);
unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, 1);
tree field_type, field;
if (TYPE_UNSIGNED (etype))
@@ -5575,7 +5579,7 @@ gnat_write_global_declarations (void)
/* If we have declared types as used at the global level, insert them in
the global hash table. We use a dummy variable for this purpose. */
- if (!VEC_empty (tree, types_used_by_cur_var_decl))
+ if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
{
struct varpool_node *node;
char *label;
@@ -5589,9 +5593,9 @@ gnat_write_global_declarations (void)
node = varpool_node_for_decl (dummy_global);
node->symbol.force_output = 1;
- while (!VEC_empty (tree, types_used_by_cur_var_decl))
+ while (!types_used_by_cur_var_decl->is_empty ())
{
- tree t = VEC_pop (tree, types_used_by_cur_var_decl);
+ tree t = types_used_by_cur_var_decl->pop ();
types_used_by_var_decl_insert (t, dummy_global);
}
}
@@ -5600,7 +5604,7 @@ gnat_write_global_declarations (void)
ensures that global types whose compilation hasn't been finalized yet,
for example pointers to Taft amendment types, have their compilation
finalized in the right context. */
- FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
+ FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
if (TREE_CODE (iter) == TYPE_DECL)
debug_hooks->global_decl (iter);
@@ -5612,7 +5616,7 @@ gnat_write_global_declarations (void)
if (!seen_error ())
{
timevar_push (TV_SYMOUT);
- FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
+ FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
if (TREE_CODE (iter) != TYPE_DECL)
debug_hooks->global_decl (iter);
timevar_pop (TV_SYMOUT);
@@ -5641,7 +5645,7 @@ builtin_decl_for (tree name)
unsigned i;
tree decl;
- FOR_EACH_VEC_ELT (tree, builtin_decls, i, decl)
+ FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
if (DECL_NAME (decl) == name)
return decl;
diff --git a/gcc/ada/gcc-interface/utils2.c b/gcc/ada/gcc-interface/utils2.c
index 4578114f4a7..4bb16eca37d 100644
--- a/gcc/ada/gcc-interface/utils2.c
+++ b/gcc/ada/gcc-interface/utils2.c
@@ -441,7 +441,7 @@ compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
/* The constant folder doesn't fold fat pointer types so we do it here. */
if (TREE_CODE (p1) == CONSTRUCTOR)
- p1_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 0).value;
+ p1_array = (*CONSTRUCTOR_ELTS (p1))[0].value;
else
p1_array = build_component_ref (p1, NULL_TREE,
TYPE_FIELDS (TREE_TYPE (p1)), true);
@@ -452,7 +452,7 @@ compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
null_pointer_node));
if (TREE_CODE (p2) == CONSTRUCTOR)
- p2_array = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 0).value;
+ p2_array = (*CONSTRUCTOR_ELTS (p2))[0].value;
else
p2_array = build_component_ref (p2, NULL_TREE,
TYPE_FIELDS (TREE_TYPE (p2)), true);
@@ -473,14 +473,14 @@ compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
= fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
if (TREE_CODE (p1) == CONSTRUCTOR)
- p1_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p1), 1).value;
+ p1_bounds = (*CONSTRUCTOR_ELTS (p1))[1].value;
else
p1_bounds
= build_component_ref (p1, NULL_TREE,
DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);
if (TREE_CODE (p2) == CONSTRUCTOR)
- p2_bounds = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (p2), 1).value;
+ p2_bounds = (*CONSTRUCTOR_ELTS (p2))[1].value;
else
p2_bounds
= build_component_ref (p2, NULL_TREE,
@@ -1334,9 +1334,7 @@ build_unary_op (enum tree_code op_code, tree result_type, tree operand)
a pointer to our type. */
if (TYPE_IS_PADDING_P (type))
{
- result = VEC_index (constructor_elt,
- CONSTRUCTOR_ELTS (operand),
- 0).value;
+ result = (*CONSTRUCTOR_ELTS (operand))[0].value;
result = convert (build_pointer_type (TREE_TYPE (operand)),
build_unary_op (ADDR_EXPR, NULL_TREE, result));
break;
@@ -1831,7 +1829,7 @@ compare_elmt_bitpos (const PTR rt1, const PTR rt2)
/* Return a CONSTRUCTOR of TYPE whose elements are V. */
tree
-gnat_build_constructor (tree type, VEC(constructor_elt,gc) *v)
+gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
{
bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
bool side_effects = false;
@@ -1859,7 +1857,7 @@ gnat_build_constructor (tree type, VEC(constructor_elt,gc) *v)
by increasing bit position. This is necessary to ensure the
constructor can be output as static data. */
if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
- VEC_qsort (constructor_elt, v, compare_elmt_bitpos);
+ v->qsort (compare_elmt_bitpos);
result = build_constructor (type, v);
TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
@@ -1989,7 +1987,7 @@ build_simple_component_ref (tree record_variable, tree component,
if (TREE_CODE (record_variable) == CONSTRUCTOR
&& TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record_variable)))
{
- VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (record_variable);
+ vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record_variable);
unsigned HOST_WIDE_INT idx;
tree index, value;
FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
@@ -2302,7 +2300,8 @@ build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
If there is no initializing expression, just set the bounds. */
if (init)
{
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, 2);
CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
build_template (template_type, type, init));
@@ -2673,12 +2672,10 @@ gnat_stabilize_reference (tree ref, bool force, bool *success)
/* Constructors with 1 element are used extensively to formally
convert objects to special wrapping types. */
if (TREE_CODE (type) == RECORD_TYPE
- && VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ref)) == 1)
+ && vec_safe_length (CONSTRUCTOR_ELTS (ref)) == 1)
{
- tree index
- = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0).index;
- tree value
- = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ref), 0).value;
+ tree index = (*CONSTRUCTOR_ELTS (ref))[0].index;
+ tree value = (*CONSTRUCTOR_ELTS (ref))[0].value;
result
= build_constructor_single (type, index,
gnat_stabilize_reference_1 (value,
diff --git a/gcc/alias.c b/gcc/alias.c
index b42f9d71db4..2ef13cc755b 100644
--- a/gcc/alias.c
+++ b/gcc/alias.c
@@ -206,7 +206,7 @@ static void memory_modified_1 (rtx, const_rtx, void *);
The ADDRESS in group (1) _may_ alias globals; it has VOIDmode to
indicate this. */
-static GTY(()) VEC(rtx,gc) *reg_base_value;
+static GTY(()) vec<rtx, va_gc> *reg_base_value;
static rtx *new_reg_base_value;
/* The single VOIDmode ADDRESS that represents all argument bases.
@@ -219,7 +219,7 @@ static int unique_id;
/* We preserve the copy of old array around to avoid amount of garbage
produced. About 8% of garbage produced were attributed to this
array. */
-static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;
+static GTY((deletable)) vec<rtx, va_gc> *old_reg_base_value;
/* Values of XINT (address, 0) of Pmode ADDRESS rtxes for special
registers. */
@@ -231,14 +231,14 @@ static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;
#define static_reg_base_value \
(this_target_rtl->x_static_reg_base_value)
-#define REG_BASE_VALUE(X) \
- (REGNO (X) < VEC_length (rtx, reg_base_value) \
- ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)
+#define REG_BASE_VALUE(X) \
+ (REGNO (X) < vec_safe_length (reg_base_value) \
+ ? (*reg_base_value)[REGNO (X)] : 0)
/* Vector indexed by N giving the initial (unchanging) value known for
pseudo-register N. This vector is initialized in init_alias_analysis,
and does not change until end_alias_analysis is called. */
-static GTY(()) VEC(rtx,gc) *reg_known_value;
+static GTY(()) vec<rtx, va_gc> *reg_known_value;
/* Vector recording for each reg_known_value whether it is due to a
REG_EQUIV note. Future passes (viz., reload) may replace the
@@ -258,11 +258,9 @@ static sbitmap reg_known_equiv_p;
NOTE_INSN_FUNCTION_BEG note. */
static bool copying_arguments;
-DEF_VEC_P(alias_set_entry);
-DEF_VEC_ALLOC_P(alias_set_entry,gc);
/* The splay-tree used to store the various alias set entries. */
-static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
+static GTY (()) vec<alias_set_entry, va_gc> *alias_sets;
/* Build a decomposed reference object for querying the alias-oracle
from the MEM rtx and store it in *REF.
@@ -391,7 +389,7 @@ rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
static inline alias_set_entry
get_alias_set_entry (alias_set_type alias_set)
{
- return VEC_index (alias_set_entry, alias_sets, alias_set);
+ return (*alias_sets)[alias_set];
}
/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
@@ -854,9 +852,9 @@ new_alias_set (void)
if (flag_strict_aliasing)
{
if (alias_sets == 0)
- VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
- VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
- return VEC_length (alias_set_entry, alias_sets) - 1;
+ vec_safe_push (alias_sets, (alias_set_entry) 0);
+ vec_safe_push (alias_sets, (alias_set_entry) 0);
+ return alias_sets->length () - 1;
}
else
return 0;
@@ -900,7 +898,7 @@ record_alias_subset (alias_set_type superset, alias_set_type subset)
ggc_alloc_splay_tree_scalar_scalar_splay_tree_s,
ggc_alloc_splay_tree_scalar_scalar_splay_tree_node_s);
superset_entry->has_zero_child = 0;
- VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
+ (*alias_sets)[superset] = superset_entry;
}
if (subset == 0)
@@ -1079,7 +1077,7 @@ find_base_value (rtx src)
The test above is not sufficient because the scheduler may move
a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
- && regno < VEC_length (rtx, reg_base_value))
+ && regno < vec_safe_length (reg_base_value))
{
/* If we're inside init_alias_analysis, use new_reg_base_value
to reduce the number of relaxation iterations. */
@@ -1087,8 +1085,8 @@ find_base_value (rtx src)
&& DF_REG_DEF_COUNT (regno) == 1)
return new_reg_base_value[regno];
- if (VEC_index (rtx, reg_base_value, regno))
- return VEC_index (rtx, reg_base_value, regno);
+ if ((*reg_base_value)[regno])
+ return (*reg_base_value)[regno];
}
return 0;
@@ -1233,7 +1231,7 @@ record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
regno = REGNO (dest);
- gcc_checking_assert (regno < VEC_length (rtx, reg_base_value));
+ gcc_checking_assert (regno < reg_base_value->length ());
/* If this spans multiple hard registers, then we must indicate that every
register has an unusable value. */
@@ -1338,7 +1336,7 @@ record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
rtx
get_reg_base_value (unsigned int regno)
{
- return VEC_index (rtx, reg_base_value, regno);
+ return (*reg_base_value)[regno];
}
/* If a value is known for REGNO, return it. */
@@ -1349,8 +1347,8 @@ get_reg_known_value (unsigned int regno)
if (regno >= FIRST_PSEUDO_REGISTER)
{
regno -= FIRST_PSEUDO_REGISTER;
- if (regno < VEC_length (rtx, reg_known_value))
- return VEC_index (rtx, reg_known_value, regno);
+ if (regno < vec_safe_length (reg_known_value))
+ return (*reg_known_value)[regno];
}
return NULL;
}
@@ -1363,8 +1361,8 @@ set_reg_known_value (unsigned int regno, rtx val)
if (regno >= FIRST_PSEUDO_REGISTER)
{
regno -= FIRST_PSEUDO_REGISTER;
- if (regno < VEC_length (rtx, reg_known_value))
- VEC_replace (rtx, reg_known_value, regno, val);
+ if (regno < vec_safe_length (reg_known_value))
+ (*reg_known_value)[regno] = val;
}
}
@@ -1376,7 +1374,7 @@ get_reg_known_equiv_p (unsigned int regno)
if (regno >= FIRST_PSEUDO_REGISTER)
{
regno -= FIRST_PSEUDO_REGISTER;
- if (regno < VEC_length (rtx, reg_known_value))
+ if (regno < vec_safe_length (reg_known_value))
return bitmap_bit_p (reg_known_equiv_p, regno);
}
return false;
@@ -1388,7 +1386,7 @@ set_reg_known_equiv_p (unsigned int regno, bool val)
if (regno >= FIRST_PSEUDO_REGISTER)
{
regno -= FIRST_PSEUDO_REGISTER;
- if (regno < VEC_length (rtx, reg_known_value))
+ if (regno < vec_safe_length (reg_known_value))
{
if (val)
bitmap_set_bit (reg_known_equiv_p, regno);
@@ -2811,7 +2809,7 @@ init_alias_analysis (void)
timevar_push (TV_ALIAS_ANALYSIS);
- reg_known_value = VEC_alloc (rtx, gc, maxreg - FIRST_PSEUDO_REGISTER);
+ vec_alloc (reg_known_value, maxreg - FIRST_PSEUDO_REGISTER);
reg_known_equiv_p = sbitmap_alloc (maxreg - FIRST_PSEUDO_REGISTER);
/* If we have memory allocated from the previous run, use it. */
@@ -2819,9 +2817,9 @@ init_alias_analysis (void)
reg_base_value = old_reg_base_value;
if (reg_base_value)
- VEC_truncate (rtx, reg_base_value, 0);
+ reg_base_value->truncate (0);
- VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg);
+ vec_safe_grow_cleared (reg_base_value, maxreg);
new_reg_base_value = XNEWVEC (rtx, maxreg);
reg_seen = sbitmap_alloc (maxreg);
@@ -2969,11 +2967,10 @@ init_alias_analysis (void)
for (ui = 0; ui < maxreg; ui++)
{
if (new_reg_base_value[ui]
- && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui)
- && ! rtx_equal_p (new_reg_base_value[ui],
- VEC_index (rtx, reg_base_value, ui)))
+ && new_reg_base_value[ui] != (*reg_base_value)[ui]
+ && ! rtx_equal_p (new_reg_base_value[ui], (*reg_base_value)[ui]))
{
- VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]);
+ (*reg_base_value)[ui] = new_reg_base_value[ui];
changed = 1;
}
}
@@ -2982,7 +2979,7 @@ init_alias_analysis (void)
XDELETEVEC (rpo);
/* Fill in the remaining entries. */
- FOR_EACH_VEC_ELT (rtx, reg_known_value, i, val)
+ FOR_EACH_VEC_ELT (*reg_known_value, i, val)
{
int regno = i + FIRST_PSEUDO_REGISTER;
if (! val)
@@ -3003,14 +3000,14 @@ init_alias_analysis (void)
void
vt_equate_reg_base_value (const_rtx reg1, const_rtx reg2)
{
- VEC_replace (rtx, reg_base_value, REGNO (reg1), REG_BASE_VALUE (reg2));
+ (*reg_base_value)[REGNO (reg1)] = REG_BASE_VALUE (reg2);
}
void
end_alias_analysis (void)
{
old_reg_base_value = reg_base_value;
- VEC_free (rtx, gc, reg_known_value);
+ vec_free (reg_known_value);
sbitmap_free (reg_known_equiv_p);
}
diff --git a/gcc/asan.c b/gcc/asan.c
index 03519d11369..bd90e0a54d5 100644
--- a/gcc/asan.c
+++ b/gcc/asan.c
@@ -1436,12 +1436,12 @@ asan_global_struct (void)
TYPE is __asan_global struct type as returned by asan_global_struct. */
static void
-asan_add_global (tree decl, tree type, VEC(constructor_elt, gc) *v)
+asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
unsigned HOST_WIDE_INT size;
tree str_cst, refdecl = decl;
- VEC(constructor_elt, gc) *vinner = NULL;
+ vec<constructor_elt, va_gc> *vinner = NULL;
if (!asan_pp_initialized)
asan_pp_initialize ();
@@ -1460,8 +1460,7 @@ asan_add_global (tree decl, tree type, VEC(constructor_elt, gc) *v)
if (asan_needs_local_alias (decl))
{
char buf[20];
- ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN",
- VEC_length (constructor_elt, v) + 1);
+ ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
@@ -1515,7 +1514,7 @@ asan_finish_file (void)
tree type = asan_global_struct (), var, ctor, decl;
tree uptr = build_nonstandard_integer_type (POINTER_SIZE, 1);
tree dtor_statements = NULL_TREE;
- VEC(constructor_elt, gc) *v;
+ vec<constructor_elt, va_gc> *v;
char buf[20];
type = build_array_type_nelts (type, gcount);
@@ -1526,7 +1525,7 @@ asan_finish_file (void)
TREE_PUBLIC (var) = 0;
DECL_ARTIFICIAL (var) = 1;
DECL_IGNORED_P (var) = 1;
- v = VEC_alloc (constructor_elt, gc, gcount);
+ vec_alloc (v, gcount);
FOR_EACH_DEFINED_VARIABLE (vnode)
if (asan_protect_global (vnode->symbol.decl))
asan_add_global (vnode->symbol.decl, TREE_TYPE (type), v);
diff --git a/gcc/attribs.c b/gcc/attribs.c
index 0425de9f5b1..1c55ca3b8ac 100644
--- a/gcc/attribs.c
+++ b/gcc/attribs.c
@@ -46,23 +46,17 @@ struct substring
int length;
};
-DEF_VEC_O (attribute_spec);
-DEF_VEC_ALLOC_O (attribute_spec, heap);
-
/* Scoped attribute name representation. */
struct scoped_attributes
{
const char *ns;
- VEC (attribute_spec, heap) *attributes;
+ vec<attribute_spec> attributes;
htab_t attribute_hash;
};
-DEF_VEC_O (scoped_attributes);
-DEF_VEC_ALLOC_O (scoped_attributes, heap);
-
/* The table of scope attributes. */
-static VEC(scoped_attributes, heap) *attributes_table;
+static vec<scoped_attributes> attributes_table;
static scoped_attributes* find_attribute_namespace (const char*);
static void register_scoped_attribute (const struct attribute_spec *,
@@ -140,21 +134,20 @@ register_scoped_attributes (const struct attribute_spec * attributes,
/* We don't have any namespace NS yet. Create one. */
scoped_attributes sa;
- if (attributes_table == NULL)
- attributes_table = VEC_alloc (scoped_attributes, heap, 64);
+ if (attributes_table.is_empty ())
+ attributes_table.create (64);
memset (&sa, 0, sizeof (sa));
sa.ns = ns;
- sa.attributes = VEC_alloc (attribute_spec, heap, 64);
- result = VEC_safe_push (scoped_attributes, heap, attributes_table, sa);
+ sa.attributes.create (64);
+ result = attributes_table.safe_push (sa);
result->attribute_hash = htab_create (200, hash_attr, eq_attr, NULL);
}
/* Really add the attributes to their namespace now. */
for (unsigned i = 0; attributes[i].name != NULL; ++i)
{
- VEC_safe_push (attribute_spec, heap,
- result->attributes, attributes[i]);
+ result->attributes.safe_push (attributes[i]);
register_scoped_attribute (&attributes[i], result);
}
@@ -171,7 +164,7 @@ find_attribute_namespace (const char* ns)
unsigned ix;
scoped_attributes *iter;
- FOR_EACH_VEC_ELT (scoped_attributes, attributes_table, ix, iter)
+ FOR_EACH_VEC_ELT (attributes_table, ix, iter)
if (ns == iter->ns
|| (iter->ns != NULL
&& ns != NULL
diff --git a/gcc/basic-block.h b/gcc/basic-block.h
index 7d17af5dcfa..03ba0cea9e5 100644
--- a/gcc/basic-block.h
+++ b/gcc/basic-block.h
@@ -61,9 +61,6 @@ struct GTY((user)) edge_def {
in profile.c */
};
-DEF_VEC_P(edge);
-DEF_VEC_ALLOC_P(edge,gc);
-DEF_VEC_ALLOC_P(edge,heap);
/* Garbage collection and PCH support for edge_def. */
extern void gt_ggc_mx (edge_def *e);
@@ -182,8 +179,8 @@ struct GTY(()) gimple_bb_info {
/* Basic block information indexed by block number. */
struct GTY((chain_next ("%h.next_bb"), chain_prev ("%h.prev_bb"))) basic_block_def {
/* The edges into and out of the block. */
- VEC(edge,gc) *preds;
- VEC(edge,gc) *succs;
+ vec<edge, va_gc> *preds;
+ vec<edge, va_gc> *succs;
/* Auxiliary info specific to a pass. */
PTR GTY ((skip (""))) aux;
@@ -231,9 +228,6 @@ typedef int __assert_gimple_bb_smaller_rtl_bb
[(int)sizeof(struct rtl_bb_info)
- (int)sizeof (struct gimple_bb_info)];
-DEF_VEC_P(basic_block);
-DEF_VEC_ALLOC_P(basic_block,gc);
-DEF_VEC_ALLOC_P(basic_block,heap);
#define BB_FREQ_MAX 10000
@@ -299,7 +293,7 @@ struct GTY(()) control_flow_graph {
basic_block x_exit_block_ptr;
/* Index by basic block number, get basic block struct info. */
- VEC(basic_block,gc) *x_basic_block_info;
+ vec<basic_block, va_gc> *x_basic_block_info;
/* Number of basic blocks in this flow graph. */
int x_n_basic_blocks;
@@ -315,7 +309,7 @@ struct GTY(()) control_flow_graph {
/* Mapping of labels to their associated blocks. At present
only used for the gimple CFG. */
- VEC(basic_block,gc) *x_label_to_block_map;
+ vec<basic_block, va_gc> *x_label_to_block_map;
enum profile_status_d x_profile_status;
@@ -341,9 +335,9 @@ struct GTY(()) control_flow_graph {
#define profile_status_for_function(FN) ((FN)->cfg->x_profile_status)
#define BASIC_BLOCK_FOR_FUNCTION(FN,N) \
- (VEC_index (basic_block, basic_block_info_for_function(FN), (N)))
+ ((*basic_block_info_for_function(FN))[(N)])
#define SET_BASIC_BLOCK_FOR_FUNCTION(FN,N,BB) \
- (VEC_replace (basic_block, basic_block_info_for_function(FN), (N), (BB)))
+ ((*basic_block_info_for_function(FN))[(N)] = (BB))
/* Defines for textual backward source compatibility. */
#define ENTRY_BLOCK_PTR (cfun->cfg->x_entry_block_ptr)
@@ -355,8 +349,8 @@ struct GTY(()) control_flow_graph {
#define label_to_block_map (cfun->cfg->x_label_to_block_map)
#define profile_status (cfun->cfg->x_profile_status)
-#define BASIC_BLOCK(N) (VEC_index (basic_block, basic_block_info, (N)))
-#define SET_BASIC_BLOCK(N,BB) (VEC_replace (basic_block, basic_block_info, (N), (BB)))
+#define BASIC_BLOCK(N) ((*basic_block_info)[(N)])
+#define SET_BASIC_BLOCK(N,BB) ((*basic_block_info)[(N)] = (BB))
/* For iterating over basic blocks. */
#define FOR_BB_BETWEEN(BB, FROM, TO, DIR) \
@@ -473,7 +467,7 @@ typedef struct ce_if_block
} ce_if_block_t;
/* This structure maintains an edge list vector. */
-/* FIXME: Make this a VEC(edge). */
+/* FIXME: Make this a vec<edge>. */
struct edge_list
{
int num_edges;
@@ -518,10 +512,10 @@ struct edge_list
#define EDGE_CRITICAL_P(e) (EDGE_COUNT ((e)->src->succs) >= 2 \
&& EDGE_COUNT ((e)->dest->preds) >= 2)
-#define EDGE_COUNT(ev) VEC_length (edge, (ev))
-#define EDGE_I(ev,i) VEC_index (edge, (ev), (i))
-#define EDGE_PRED(bb,i) VEC_index (edge, (bb)->preds, (i))
-#define EDGE_SUCC(bb,i) VEC_index (edge, (bb)->succs, (i))
+#define EDGE_COUNT(ev) vec_safe_length (ev)
+#define EDGE_I(ev,i) (*ev)[(i)]
+#define EDGE_PRED(bb,i) (*(bb)->preds)[(i)]
+#define EDGE_SUCC(bb,i) (*(bb)->succs)[(i)]
/* Returns true if BB has precisely one successor. */
@@ -581,10 +575,10 @@ single_pred (const_basic_block bb)
typedef struct {
unsigned index;
- VEC(edge,gc) **container;
+ vec<edge, va_gc> **container;
} edge_iterator;
-static inline VEC(edge,gc) *
+static inline vec<edge, va_gc> *
ei_container (edge_iterator i)
{
gcc_checking_assert (i.container);
@@ -596,7 +590,7 @@ ei_container (edge_iterator i)
/* Return an iterator pointing to the start of an edge vector. */
static inline edge_iterator
-ei_start_1 (VEC(edge,gc) **ev)
+ei_start_1 (vec<edge, va_gc> **ev)
{
edge_iterator i;
@@ -609,7 +603,7 @@ ei_start_1 (VEC(edge,gc) **ev)
/* Return an iterator pointing to the last element of an edge
vector. */
static inline edge_iterator
-ei_last_1 (VEC(edge,gc) **ev)
+ei_last_1 (vec<edge, va_gc> **ev)
{
edge_iterator i;
@@ -848,13 +842,13 @@ extern void set_immediate_dominator (enum cdi_direction, basic_block,
basic_block);
extern basic_block get_immediate_dominator (enum cdi_direction, basic_block);
extern bool dominated_by_p (enum cdi_direction, const_basic_block, const_basic_block);
-extern VEC (basic_block, heap) *get_dominated_by (enum cdi_direction, basic_block);
-extern VEC (basic_block, heap) *get_dominated_by_region (enum cdi_direction,
+extern vec<basic_block> get_dominated_by (enum cdi_direction, basic_block);
+extern vec<basic_block> get_dominated_by_region (enum cdi_direction,
basic_block *,
unsigned);
-extern VEC (basic_block, heap) *get_dominated_to_depth (enum cdi_direction,
+extern vec<basic_block> get_dominated_to_depth (enum cdi_direction,
basic_block, int);
-extern VEC (basic_block, heap) *get_all_dominated_blocks (enum cdi_direction,
+extern vec<basic_block> get_all_dominated_blocks (enum cdi_direction,
basic_block);
extern void add_to_dominance_info (enum cdi_direction, basic_block);
extern void delete_from_dominance_info (enum cdi_direction, basic_block);
@@ -862,7 +856,7 @@ basic_block recompute_dominator (enum cdi_direction, basic_block);
extern void redirect_immediate_dominators (enum cdi_direction, basic_block,
basic_block);
extern void iterate_fix_dominators (enum cdi_direction,
- VEC (basic_block, heap) *, bool);
+ vec<basic_block>, bool);
extern void verify_dominators (enum cdi_direction);
extern basic_block first_dom_son (enum cdi_direction, basic_block);
extern basic_block next_dom_son (enum cdi_direction, basic_block);
@@ -918,7 +912,7 @@ bb_has_abnormal_pred (basic_block bb)
/* Return the fallthru edge in EDGES if it exists, NULL otherwise. */
static inline edge
-find_fallthru_edge (VEC(edge,gc) *edges)
+find_fallthru_edge (vec<edge, va_gc> *edges)
{
edge e;
edge_iterator ei;
diff --git a/gcc/bb-reorder.c b/gcc/bb-reorder.c
index fd353f675c8..433b1a7ae3b 100644
--- a/gcc/bb-reorder.c
+++ b/gcc/bb-reorder.c
@@ -1458,10 +1458,10 @@ fix_up_crossing_landing_pad (eh_landing_pad old_lp, basic_block old_bb)
a separate section of the .o file (to cut down on paging and improve
cache locality). Return a vector of all edges that cross. */
-static VEC(edge, heap) *
+static vec<edge>
find_rarely_executed_basic_blocks_and_crossing_edges (void)
{
- VEC(edge, heap) *crossing_edges = NULL;
+ vec<edge> crossing_edges = vec<edge>();
basic_block bb;
edge e;
edge_iterator ei;
@@ -1483,7 +1483,7 @@ find_rarely_executed_basic_blocks_and_crossing_edges (void)
unsigned i;
eh_landing_pad lp;
- FOR_EACH_VEC_ELT (eh_landing_pad, cfun->eh->lp_array, i, lp)
+ FOR_EACH_VEC_ELT (*cfun->eh->lp_array, i, lp)
{
bool all_same, all_diff;
@@ -1530,7 +1530,7 @@ find_rarely_executed_basic_blocks_and_crossing_edges (void)
&& e->dest != EXIT_BLOCK_PTR
&& BB_PARTITION (e->src) != BB_PARTITION (e->dest))
{
- VEC_safe_push (edge, heap, crossing_edges, e);
+ crossing_edges.safe_push (e);
flags |= EDGE_CROSSING;
}
@@ -1583,12 +1583,12 @@ set_edge_can_fallthru_flag (void)
Convert any easy fall-through crossing edges to unconditional jumps. */
static void
-add_labels_and_missing_jumps (VEC(edge, heap) *crossing_edges)
+add_labels_and_missing_jumps (vec<edge> crossing_edges)
{
size_t i;
edge e;
- FOR_EACH_VEC_ELT (edge, crossing_edges, i, e)
+ FOR_EACH_VEC_ELT (crossing_edges, i, e)
{
basic_block src = e->src;
basic_block dest = e->dest;
@@ -2500,7 +2500,7 @@ gate_handle_partition_blocks (void)
static unsigned
partition_hot_cold_basic_blocks (void)
{
- VEC(edge, heap) *crossing_edges;
+ vec<edge> crossing_edges;
if (n_basic_blocks <= NUM_FIXED_BLOCKS + 1)
return 0;
@@ -2508,7 +2508,7 @@ partition_hot_cold_basic_blocks (void)
df_set_flags (DF_DEFER_INSN_RESCAN);
crossing_edges = find_rarely_executed_basic_blocks_and_crossing_edges ();
- if (crossing_edges == NULL)
+ if (!crossing_edges.exists ())
return 0;
/* Make sure the source of any crossing edge ends in a jump and the
@@ -2539,7 +2539,7 @@ partition_hot_cold_basic_blocks (void)
/* Clear bb->aux fields that the above routines were using. */
clear_aux_for_blocks ();
- VEC_free (edge, heap, crossing_edges);
+ crossing_edges.release ();
/* ??? FIXME: DF generates the bb info for a block immediately.
And by immediately, I mean *during* creation of the block.
diff --git a/gcc/builtins.c b/gcc/builtins.c
index c309566919e..fbeac7517c6 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -6581,7 +6581,7 @@ expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
{
unsigned int nargs, z;
- VEC(tree,gc) *vec;
+ vec<tree, va_gc> *vec;
mode =
get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
@@ -6592,12 +6592,12 @@ expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
/* If this is turned into an external library call, the weak parameter
must be dropped to match the expected parameter list. */
nargs = call_expr_nargs (exp);
- vec = VEC_alloc (tree, gc, nargs - 1);
+ vec_alloc (vec, nargs - 1);
for (z = 0; z < 3; z++)
- VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
+ vec->quick_push (CALL_EXPR_ARG (exp, z));
/* Skip the boolean weak parameter. */
for (z = 4; z < 6; z++)
- VEC_quick_push (tree, vec, CALL_EXPR_ARG (exp, z));
+ vec->quick_push (CALL_EXPR_ARG (exp, z));
exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
break;
}
@@ -11206,10 +11206,10 @@ build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
VEC. */
tree
-build_call_expr_loc_vec (location_t loc, tree fndecl, VEC(tree,gc) *vec)
+build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
{
- return build_call_expr_loc_array (loc, fndecl, VEC_length (tree, vec),
- VEC_address (tree, vec));
+ return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
+ vec_safe_address (vec));
}
diff --git a/gcc/c-family/c-common.c b/gcc/c-family/c-common.c
index 7828d210507..fac71901e0f 100644
--- a/gcc/c-family/c-common.c
+++ b/gcc/c-family/c-common.c
@@ -1839,7 +1839,7 @@ strict_aliasing_warning (tree otype, tree type, tree expr)
void
sizeof_pointer_memaccess_warning (location_t *sizeof_arg_loc, tree callee,
- VEC(tree, gc) *params, tree *sizeof_arg,
+ vec<tree, va_gc> *params, tree *sizeof_arg,
bool (*comp_types) (tree, tree))
{
tree type, dest = NULL_TREE, src = NULL_TREE, tem;
@@ -1849,7 +1849,7 @@ sizeof_pointer_memaccess_warning (location_t *sizeof_arg_loc, tree callee,
if (TREE_CODE (callee) != FUNCTION_DECL
|| DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL
- || VEC_length (tree, params) <= 1)
+ || vec_safe_length (params) <= 1)
return;
switch (DECL_FUNCTION_CODE (callee))
@@ -1870,55 +1870,55 @@ sizeof_pointer_memaccess_warning (location_t *sizeof_arg_loc, tree callee,
case BUILT_IN_MEMCPY_CHK:
case BUILT_IN_MEMMOVE:
case BUILT_IN_MEMMOVE_CHK:
- if (VEC_length (tree, params) < 3)
+ if (params->length () < 3)
return;
- src = VEC_index (tree, params, 1);
- dest = VEC_index (tree, params, 0);
+ src = (*params)[1];
+ dest = (*params)[0];
idx = 2;
break;
case BUILT_IN_BCOPY:
- if (VEC_length (tree, params) < 3)
+ if (params->length () < 3)
return;
- src = VEC_index (tree, params, 0);
- dest = VEC_index (tree, params, 1);
+ src = (*params)[0];
+ dest = (*params)[1];
idx = 2;
break;
case BUILT_IN_MEMCMP:
case BUILT_IN_BCMP:
- if (VEC_length (tree, params) < 3)
+ if (params->length () < 3)
return;
- src = VEC_index (tree, params, 1);
- dest = VEC_index (tree, params, 0);
+ src = (*params)[1];
+ dest = (*params)[0];
idx = 2;
cmp = true;
break;
case BUILT_IN_MEMSET:
case BUILT_IN_MEMSET_CHK:
- if (VEC_length (tree, params) < 3)
+ if (params->length () < 3)
return;
- dest = VEC_index (tree, params, 0);
+ dest = (*params)[0];
idx = 2;
break;
case BUILT_IN_BZERO:
- dest = VEC_index (tree, params, 0);
+ dest = (*params)[0];
idx = 1;
break;
case BUILT_IN_STRNDUP:
- src = VEC_index (tree, params, 0);
+ src = (*params)[0];
strop = true;
idx = 1;
break;
case BUILT_IN_MEMCHR:
- if (VEC_length (tree, params) < 3)
+ if (params->length () < 3)
return;
- src = VEC_index (tree, params, 0);
+ src = (*params)[0];
idx = 2;
break;
case BUILT_IN_SNPRINTF:
case BUILT_IN_SNPRINTF_CHK:
case BUILT_IN_VSNPRINTF:
case BUILT_IN_VSNPRINTF_CHK:
- dest = VEC_index (tree, params, 0);
+ dest = (*params)[0];
idx = 1;
strop = true;
break;
@@ -8729,9 +8729,7 @@ handle_target_attribute (tree *node, tree name, tree args, int flags,
/* Arguments being collected for optimization. */
typedef const char *const_char_p; /* For DEF_VEC_P. */
-DEF_VEC_P(const_char_p);
-DEF_VEC_ALLOC_P(const_char_p, gc);
-static GTY(()) VEC(const_char_p, gc) *optimize_args;
+static GTY(()) vec<const_char_p, va_gc> *optimize_args;
/* Inner function to convert a TREE_LIST to argv string to parse the optimize
@@ -8752,8 +8750,8 @@ parse_optimize_options (tree args, bool attr_p)
/* Build up argv vector. Just in case the string is stored away, use garbage
collected strings. */
- VEC_truncate (const_char_p, optimize_args, 0);
- VEC_safe_push (const_char_p, gc, optimize_args, NULL);
+ vec_safe_truncate (optimize_args, 0);
+ vec_safe_push (optimize_args, (const char *) NULL);
for (ap = args; ap != NULL_TREE; ap = TREE_CHAIN (ap))
{
@@ -8763,7 +8761,7 @@ parse_optimize_options (tree args, bool attr_p)
{
char buffer[20];
sprintf (buffer, "-O%ld", (long) TREE_INT_CST_LOW (value));
- VEC_safe_push (const_char_p, gc, optimize_args, ggc_strdup (buffer));
+ vec_safe_push (optimize_args, ggc_strdup (buffer));
}
else if (TREE_CODE (value) == STRING_CST)
@@ -8825,17 +8823,17 @@ parse_optimize_options (tree args, bool attr_p)
memcpy (r, p, len2);
r[len2] = '\0';
- VEC_safe_push (const_char_p, gc, optimize_args, q);
+ vec_safe_push (optimize_args, (const char *) q);
}
}
}
- opt_argc = VEC_length (const_char_p, optimize_args);
+ opt_argc = optimize_args->length ();
opt_argv = (const char **) alloca (sizeof (char *) * (opt_argc + 1));
for (i = 1; i < opt_argc; i++)
- opt_argv[i] = VEC_index (const_char_p, optimize_args, i);
+ opt_argv[i] = (*optimize_args)[i];
saved_flag_strict_aliasing = flag_strict_aliasing;
@@ -8852,7 +8850,7 @@ parse_optimize_options (tree args, bool attr_p)
/* Don't allow changing -fstrict-aliasing. */
flag_strict_aliasing = saved_flag_strict_aliasing;
- VEC_truncate (const_char_p, optimize_args, 0);
+ optimize_args->truncate (0);
return ret;
}
@@ -9736,9 +9734,9 @@ complete_array_type (tree *ptype, tree initial_value, bool do_default)
}
else if (TREE_CODE (initial_value) == CONSTRUCTOR)
{
- VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (initial_value);
+ vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (initial_value);
- if (VEC_empty (constructor_elt, v))
+ if (vec_safe_is_empty (v))
{
if (pedantic)
failure = 3;
@@ -9751,15 +9749,12 @@ complete_array_type (tree *ptype, tree initial_value, bool do_default)
constructor_elt *ce;
bool fold_p = false;
- if (VEC_index (constructor_elt, v, 0).index)
+ if ((*v)[0].index)
maxindex = fold_convert_loc (input_location, sizetype,
- VEC_index (constructor_elt,
- v, 0).index);
+ (*v)[0].index);
curindex = maxindex;
- for (cnt = 1;
- VEC_iterate (constructor_elt, v, cnt, ce);
- cnt++)
+ for (cnt = 1; vec_safe_iterate (v, cnt, &ce); cnt++)
{
bool curfold_p = false;
if (ce->index)
@@ -9879,18 +9874,18 @@ builtin_type_for_size (int size, bool unsignedp)
Returns 0 if an error is encountered. */
static int
-sync_resolve_size (tree function, VEC(tree,gc) *params)
+sync_resolve_size (tree function, vec<tree, va_gc> *params)
{
tree type;
int size;
- if (VEC_empty (tree, params))
+ if (vec_safe_is_empty (params))
{
error ("too few arguments to function %qE", function);
return 0;
}
- type = TREE_TYPE (VEC_index (tree, params, 0));
+ type = TREE_TYPE ((*params)[0]);
if (TREE_CODE (type) != POINTER_TYPE)
goto incompatible;
@@ -9914,7 +9909,7 @@ sync_resolve_size (tree function, VEC(tree,gc) *params)
static bool
sync_resolve_params (location_t loc, tree orig_function, tree function,
- VEC(tree, gc) *params, bool orig_format)
+ vec<tree, va_gc> *params, bool orig_format)
{
function_args_iterator iter;
tree ptype;
@@ -9925,7 +9920,7 @@ sync_resolve_params (location_t loc, tree orig_function, tree function,
as the pointer parameter, so we shouldn't get any complaints from the
call to check_function_arguments what ever type the user used. */
function_args_iter_next (&iter);
- ptype = TREE_TYPE (TREE_TYPE (VEC_index (tree, params, 0)));
+ ptype = TREE_TYPE (TREE_TYPE ((*params)[0]));
/* For the rest of the values, we need to cast these to FTYPE, so that we
don't get warnings for passing pointer types, etc. */
@@ -9940,7 +9935,7 @@ sync_resolve_params (location_t loc, tree orig_function, tree function,
break;
++parmnum;
- if (VEC_length (tree, params) <= parmnum)
+ if (params->length () <= parmnum)
{
error_at (loc, "too few arguments to function %qE", orig_function);
return false;
@@ -9956,17 +9951,17 @@ sync_resolve_params (location_t loc, tree orig_function, tree function,
/* Ideally for the first conversion we'd use convert_for_assignment
so that we get warnings for anything that doesn't match the pointer
type. This isn't portable across the C and C++ front ends atm. */
- val = VEC_index (tree, params, parmnum);
+ val = (*params)[parmnum];
val = convert (ptype, val);
val = convert (arg_type, val);
- VEC_replace (tree, params, parmnum, val);
+ (*params)[parmnum] = val;
}
function_args_iter_next (&iter);
}
/* __atomic routines are not variadic. */
- if (!orig_format && VEC_length (tree, params) != parmnum + 1)
+ if (!orig_format && params->length () != parmnum + 1)
{
error_at (loc, "too many arguments to function %qE", orig_function);
return false;
@@ -9976,7 +9971,7 @@ sync_resolve_params (location_t loc, tree orig_function, tree function,
being "an optional list of variables protected by the memory barrier".
No clue what that's supposed to mean, precisely, but we consider all
call-clobbered variables to be protected so we're safe. */
- VEC_truncate (tree, params, parmnum + 1);
+ params->truncate (parmnum + 1);
return true;
}
@@ -10004,7 +9999,8 @@ sync_resolve_return (tree first_param, tree result, bool orig_format)
0 is returned if the parameters are invalid. */
static int
-get_atomic_generic_size (location_t loc, tree function, VEC(tree,gc) *params)
+get_atomic_generic_size (location_t loc, tree function,
+ vec<tree, va_gc> *params)
{
unsigned int n_param;
unsigned int n_model;
@@ -10032,14 +10028,14 @@ get_atomic_generic_size (location_t loc, tree function, VEC(tree,gc) *params)
gcc_unreachable ();
}
- if (VEC_length (tree, params) != n_param)
+ if (vec_safe_length (params) != n_param)
{
error_at (loc, "incorrect number of arguments to function %qE", function);
return 0;
}
/* Get type of first parameter, and determine its size. */
- type_0 = TREE_TYPE (VEC_index (tree, params, 0));
+ type_0 = TREE_TYPE ((*params)[0]);
if (TREE_CODE (type_0) != POINTER_TYPE || VOID_TYPE_P (TREE_TYPE (type_0)))
{
error_at (loc, "argument 1 of %qE must be a non-void pointer type",
@@ -10071,7 +10067,7 @@ get_atomic_generic_size (location_t loc, tree function, VEC(tree,gc) *params)
for (x = 0; x < n_param - n_model; x++)
{
int size;
- tree type = TREE_TYPE (VEC_index (tree, params, x));
+ tree type = TREE_TYPE ((*params)[x]);
/* __atomic_compare_exchange has a bool in the 4th postion, skip it. */
if (n_param == 6 && x == 3)
continue;
@@ -10093,7 +10089,7 @@ get_atomic_generic_size (location_t loc, tree function, VEC(tree,gc) *params)
/* Check memory model parameters for validity. */
for (x = n_param - n_model ; x < n_param; x++)
{
- tree p = VEC_index (tree, params, x);
+ tree p = (*params)[x];
if (TREE_CODE (p) == INTEGER_CST)
{
int i = tree_low_cst (p, 1);
@@ -10126,30 +10122,30 @@ get_atomic_generic_size (location_t loc, tree function, VEC(tree,gc) *params)
static tree
add_atomic_size_parameter (unsigned n, location_t loc, tree function,
- VEC(tree,gc) *params)
+ vec<tree, va_gc> *params)
{
tree size_node;
/* Insert a SIZE_T parameter as the first param. If there isn't
enough space, allocate a new vector and recursively re-build with that. */
- if (!VEC_space (tree, params, 1))
+ if (!params->space (1))
{
unsigned int z, len;
- VEC(tree,gc) *vec;
+ vec<tree, va_gc> *v;
tree f;
- len = VEC_length (tree, params);
- vec = VEC_alloc (tree, gc, len + 1);
+ len = params->length ();
+ vec_alloc (v, len + 1);
for (z = 0; z < len; z++)
- VEC_quick_push (tree, vec, VEC_index (tree, params, z));
- f = build_function_call_vec (loc, function, vec, NULL);
- VEC_free (tree, gc, vec);
+ v->quick_push ((*params)[z]);
+ f = build_function_call_vec (loc, function, v, NULL);
+ vec_free (v);
return f;
}
/* Add the size parameter and leave as a function call for processing. */
size_node = build_int_cst (size_type_node, n);
- VEC_quick_insert (tree, params, 0, size_node);
+ params->quick_insert (0, size_node);
return NULL_TREE;
}
@@ -10165,7 +10161,7 @@ add_atomic_size_parameter (unsigned n, location_t loc, tree function,
NEW_RETURN is set to the the return value the result is copied into. */
static bool
resolve_overloaded_atomic_exchange (location_t loc, tree function,
- VEC(tree,gc) *params, tree *new_return)
+ vec<tree, va_gc> *params, tree *new_return)
{
tree p0, p1, p2, p3;
tree I_type, I_type_ptr;
@@ -10190,10 +10186,10 @@ resolve_overloaded_atomic_exchange (location_t loc, tree function,
into
*return = (T) (fn (In* mem, (In) *desired, model)) */
- p0 = VEC_index (tree, params, 0);
- p1 = VEC_index (tree, params, 1);
- p2 = VEC_index (tree, params, 2);
- p3 = VEC_index (tree, params, 3);
+ p0 = (*params)[0];
+ p1 = (*params)[1];
+ p2 = (*params)[2];
+ p3 = (*params)[3];
/* Create pointer to appropriate size. */
I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1);
@@ -10201,15 +10197,15 @@ resolve_overloaded_atomic_exchange (location_t loc, tree function,
/* Convert object pointer to required type. */
p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0);
- VEC_replace (tree, params, 0, p0);
+ (*params)[0] = p0;
/* Convert new value to required type, and dereference it. */
p1 = build_indirect_ref (loc, p1, RO_UNARY_STAR);
p1 = build1 (VIEW_CONVERT_EXPR, I_type, p1);
- VEC_replace (tree, params, 1, p1);
+ (*params)[1] = p1;
/* Move memory model to the 3rd position, and end param list. */
- VEC_replace (tree, params, 2, p3);
- VEC_truncate (tree, params, 3);
+ (*params)[2] = p3;
+ params->truncate (3);
/* Convert return pointer and dereference it for later assignment. */
*new_return = build_indirect_ref (loc, p2, RO_UNARY_STAR);
@@ -10229,7 +10225,7 @@ resolve_overloaded_atomic_exchange (location_t loc, tree function,
static bool
resolve_overloaded_atomic_compare_exchange (location_t loc, tree function,
- VEC(tree,gc) *params,
+ vec<tree, va_gc> *params,
tree *new_return)
{
tree p0, p1, p2;
@@ -10253,9 +10249,9 @@ resolve_overloaded_atomic_compare_exchange (location_t loc, tree function,
there is no danger this will be done twice. */
if (n > 0)
{
- VEC_replace (tree, params, 3, VEC_index (tree, params, 4));
- VEC_replace (tree, params, 4, VEC_index (tree, params, 5));
- VEC_truncate (tree, params, 5);
+ (*params)[3] = (*params)[4];
+ (*params)[4] = (*params)[5];
+ params->truncate (5);
}
*new_return = add_atomic_size_parameter (n, loc, function, params);
return true;
@@ -10266,9 +10262,9 @@ resolve_overloaded_atomic_compare_exchange (location_t loc, tree function,
into
bool fn ((In *)mem, (In *)expected, (In) *desired, weak, succ, fail) */
- p0 = VEC_index (tree, params, 0);
- p1 = VEC_index (tree, params, 1);
- p2 = VEC_index (tree, params, 2);
+ p0 = (*params)[0];
+ p1 = (*params)[1];
+ p2 = (*params)[2];
/* Create pointer to appropriate size. */
I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1);
@@ -10276,16 +10272,16 @@ resolve_overloaded_atomic_compare_exchange (location_t loc, tree function,
/* Convert object pointer to required type. */
p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0);
- VEC_replace (tree, params, 0, p0);
+ (*params)[0] = p0;
/* Convert expected pointer to required type. */
p1 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p1);
- VEC_replace (tree, params, 1, p1);
+ (*params)[1] = p1;
/* Convert desired value to required type, and dereference it. */
p2 = build_indirect_ref (loc, p2, RO_UNARY_STAR);
p2 = build1 (VIEW_CONVERT_EXPR, I_type, p2);
- VEC_replace (tree, params, 2, p2);
+ (*params)[2] = p2;
/* The rest of the parameters are fine. NULL means no special return value
processing.*/
@@ -10306,7 +10302,7 @@ resolve_overloaded_atomic_compare_exchange (location_t loc, tree function,
static bool
resolve_overloaded_atomic_load (location_t loc, tree function,
- VEC(tree,gc) *params, tree *new_return)
+ vec<tree, va_gc> *params, tree *new_return)
{
tree p0, p1, p2;
tree I_type, I_type_ptr;
@@ -10331,9 +10327,9 @@ resolve_overloaded_atomic_load (location_t loc, tree function,
into
*return = (T) (fn ((In *) mem, model)) */
- p0 = VEC_index (tree, params, 0);
- p1 = VEC_index (tree, params, 1);
- p2 = VEC_index (tree, params, 2);
+ p0 = (*params)[0];
+ p1 = (*params)[1];
+ p2 = (*params)[2];
/* Create pointer to appropriate size. */
I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1);
@@ -10341,11 +10337,11 @@ resolve_overloaded_atomic_load (location_t loc, tree function,
/* Convert object pointer to required type. */
p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0);
- VEC_replace (tree, params, 0, p0);
+ (*params)[0] = p0;
/* Move memory model to the 2nd position, and end param list. */
- VEC_replace (tree, params, 1, p2);
- VEC_truncate (tree, params, 2);
+ (*params)[1] = p2;
+ params->truncate (2);
/* Convert return pointer and dereference it for later assignment. */
*new_return = build_indirect_ref (loc, p1, RO_UNARY_STAR);
@@ -10366,7 +10362,7 @@ resolve_overloaded_atomic_load (location_t loc, tree function,
static bool
resolve_overloaded_atomic_store (location_t loc, tree function,
- VEC(tree,gc) *params, tree *new_return)
+ vec<tree, va_gc> *params, tree *new_return)
{
tree p0, p1;
tree I_type, I_type_ptr;
@@ -10391,8 +10387,8 @@ resolve_overloaded_atomic_store (location_t loc, tree function,
into
fn ((In *) mem, (In) *value, model) */
- p0 = VEC_index (tree, params, 0);
- p1 = VEC_index (tree, params, 1);
+ p0 = (*params)[0];
+ p1 = (*params)[1];
/* Create pointer to appropriate size. */
I_type = builtin_type_for_size (BITS_PER_UNIT * n, 1);
@@ -10400,12 +10396,12 @@ resolve_overloaded_atomic_store (location_t loc, tree function,
/* Convert object pointer to required type. */
p0 = build1 (VIEW_CONVERT_EXPR, I_type_ptr, p0);
- VEC_replace (tree, params, 0, p0);
+ (*params)[0] = p0;
/* Convert new value to required type, and dereference it. */
p1 = build_indirect_ref (loc, p1, RO_UNARY_STAR);
p1 = build1 (VIEW_CONVERT_EXPR, I_type, p1);
- VEC_replace (tree, params, 1, p1);
+ (*params)[1] = p1;
/* The memory model is in the right spot already. Return is void. */
*new_return = NULL_TREE;
@@ -10426,7 +10422,8 @@ resolve_overloaded_atomic_store (location_t loc, tree function,
continue. */
tree
-resolve_overloaded_builtin (location_t loc, tree function, VEC(tree,gc) *params)
+resolve_overloaded_builtin (location_t loc, tree function,
+ vec<tree, va_gc> *params)
{
enum built_in_function orig_code = DECL_FUNCTION_CODE (function);
bool orig_format = true;
@@ -10550,7 +10547,7 @@ resolve_overloaded_builtin (location_t loc, tree function, VEC(tree,gc) *params)
orig_format))
return error_mark_node;
- first_param = VEC_index (tree, params, 0);
+ first_param = (*params)[0];
result = build_function_call_vec (loc, new_function, params, NULL);
if (result == error_mark_node)
return result;
@@ -11097,9 +11094,9 @@ record_types_used_by_current_var_decl (tree decl)
{
gcc_assert (decl && DECL_P (decl) && TREE_STATIC (decl));
- while (!VEC_empty (tree, types_used_by_cur_var_decl))
+ while (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
{
- tree type = VEC_pop (tree, types_used_by_cur_var_decl);
+ tree type = types_used_by_cur_var_decl->pop ();
types_used_by_var_decl_insert (type, decl);
}
}
@@ -11121,7 +11118,7 @@ record_locally_defined_typedef (tree decl)
return;
l = (struct c_language_function *) cfun->language;
- VEC_safe_push (tree, gc, l->local_typedefs, decl);
+ vec_safe_push (l->local_typedefs, decl);
}
/* If T is a TYPE_DECL declared locally, mark it as used. */
@@ -11159,7 +11156,7 @@ maybe_warn_unused_local_typedefs (void)
if (warn_unused_local_typedefs
&& errorcount == unused_local_typedefs_warn_count)
{
- FOR_EACH_VEC_ELT (tree, l->local_typedefs, i, decl)
+ FOR_EACH_VEC_SAFE_ELT (l->local_typedefs, i, decl)
if (!TREE_USED (decl))
warning_at (DECL_SOURCE_LOCATION (decl),
OPT_Wunused_local_typedefs,
@@ -11167,86 +11164,82 @@ maybe_warn_unused_local_typedefs (void)
unused_local_typedefs_warn_count = errorcount;
}
- if (l->local_typedefs)
- {
- VEC_free (tree, gc, l->local_typedefs);
- l->local_typedefs = NULL;
- }
+ vec_free (l->local_typedefs);
}
/* The C and C++ parsers both use vectors to hold function arguments.
For efficiency, we keep a cache of unused vectors. This is the
cache. */
-typedef VEC(tree,gc)* tree_gc_vec;
-DEF_VEC_P(tree_gc_vec);
-DEF_VEC_ALLOC_P(tree_gc_vec,gc);
-static GTY((deletable)) VEC(tree_gc_vec,gc) *tree_vector_cache;
+typedef vec<tree, va_gc> *tree_gc_vec;
+static GTY((deletable)) vec<tree_gc_vec, va_gc> *tree_vector_cache;
/* Return a new vector from the cache. If the cache is empty,
allocate a new vector. These vectors are GC'ed, so it is OK if the
pointer is not released.. */
-VEC(tree,gc) *
+vec<tree, va_gc> *
make_tree_vector (void)
{
- if (!VEC_empty (tree_gc_vec, tree_vector_cache))
- return VEC_pop (tree_gc_vec, tree_vector_cache);
+ if (tree_vector_cache && !tree_vector_cache->is_empty ())
+ return tree_vector_cache->pop ();
else
{
- /* Passing 0 to VEC_alloc returns NULL, and our callers require
+ /* Passing 0 to vec::alloc returns NULL, and our callers require
that we always return a non-NULL value. The vector code uses
4 when growing a NULL vector, so we do too. */
- return VEC_alloc (tree, gc, 4);
+ vec<tree, va_gc> *v;
+ vec_alloc (v, 4);
+ return v;
}
}
/* Release a vector of trees back to the cache. */
void
-release_tree_vector (VEC(tree,gc) *vec)
+release_tree_vector (vec<tree, va_gc> *vec)
{
if (vec != NULL)
{
- VEC_truncate (tree, vec, 0);
- VEC_safe_push (tree_gc_vec, gc, tree_vector_cache, vec);
+ vec->truncate (0);
+ vec_safe_push (tree_vector_cache, vec);
}
}
/* Get a new tree vector holding a single tree. */
-VEC(tree,gc) *
+vec<tree, va_gc> *
make_tree_vector_single (tree t)
{
- VEC(tree,gc) *ret = make_tree_vector ();
- VEC_quick_push (tree, ret, t);
+ vec<tree, va_gc> *ret = make_tree_vector ();
+ ret->quick_push (t);
return ret;
}
/* Get a new tree vector of the TREE_VALUEs of a TREE_LIST chain. */
-VEC(tree,gc) *
+vec<tree, va_gc> *
make_tree_vector_from_list (tree list)
{
- VEC(tree,gc) *ret = make_tree_vector ();
+ vec<tree, va_gc> *ret = make_tree_vector ();
for (; list; list = TREE_CHAIN (list))
- VEC_safe_push (tree, gc, ret, TREE_VALUE (list));
+ vec_safe_push (ret, TREE_VALUE (list));
return ret;
}
/* Get a new tree vector which is a copy of an existing one. */
-VEC(tree,gc) *
-make_tree_vector_copy (const VEC(tree,gc) *orig)
+vec<tree, va_gc> *
+make_tree_vector_copy (const vec<tree, va_gc> *orig)
{
- VEC(tree,gc) *ret;
+ vec<tree, va_gc> *ret;
unsigned int ix;
tree t;
ret = make_tree_vector ();
- VEC_reserve (tree, gc, ret, VEC_length (tree, orig));
- FOR_EACH_VEC_ELT (tree, orig, ix, t)
- VEC_quick_push (tree, ret, t);
+ vec_safe_reserve (ret, vec_safe_length (orig));
+ FOR_EACH_VEC_SAFE_ELT (orig, ix, t)
+ ret->quick_push (t);
return ret;
}
diff --git a/gcc/c-family/c-common.h b/gcc/c-family/c-common.h
index 5c545f2950f..afd8f07e72f 100644
--- a/gcc/c-family/c-common.h
+++ b/gcc/c-family/c-common.h
@@ -486,7 +486,7 @@ typedef enum ref_operator {
struct GTY(()) stmt_tree_s {
/* A stack of statement lists being collected. */
- VEC(tree,gc) *x_cur_stmt_list;
+ vec<tree, va_gc> *x_cur_stmt_list;
/* In C++, Nonzero if we should treat statements as full
expressions. In particular, this variable is non-zero if at the
@@ -512,20 +512,16 @@ struct GTY(()) c_language_function {
/* Vector of locally defined typedefs, for
-Wunused-local-typedefs. */
- VEC(tree,gc) *local_typedefs;
+ vec<tree, va_gc> *local_typedefs;
};
#define stmt_list_stack (current_stmt_tree ()->x_cur_stmt_list)
/* When building a statement-tree, this is the current statement list
- being collected. We define it in this convoluted way, rather than
- using VEC_last, because it must be an lvalue. */
+ being collected. */
+#define cur_stmt_list (stmt_list_stack->last ())
-#define cur_stmt_list \
- (*(VEC_address (tree, stmt_list_stack) \
- + VEC_length (tree, stmt_list_stack) - 1))
-
-#define building_stmt_list_p() (!VEC_empty (tree, stmt_list_stack))
+#define building_stmt_list_p() (stmt_list_stack && !stmt_list_stack->is_empty())
/* Language-specific hooks. */
@@ -759,7 +755,7 @@ extern void constant_expression_warning (tree);
extern void constant_expression_error (tree);
extern bool strict_aliasing_warning (tree, tree, tree);
extern void sizeof_pointer_memaccess_warning (location_t *, tree,
- VEC(tree, gc) *, tree *,
+ vec<tree, va_gc> *, tree *,
bool (*) (tree, tree));
extern void warnings_for_convert_and_check (tree, tree, tree);
extern tree convert_and_check (tree, tree);
@@ -899,10 +895,10 @@ extern void c_do_switch_warnings (splay_tree, location_t, tree, tree);
extern tree build_function_call (location_t, tree, tree);
-extern tree build_function_call_vec (location_t, tree,
- VEC(tree,gc) *, VEC(tree,gc) *);
+extern tree build_function_call_vec (location_t, tree, vec<tree, va_gc> *,
+ vec<tree, va_gc> *);
-extern tree resolve_overloaded_builtin (location_t, tree, VEC(tree,gc) *);
+extern tree resolve_overloaded_builtin (location_t, tree, vec<tree, va_gc> *);
extern tree finish_label_address_expr (tree, location_t);
@@ -997,11 +993,11 @@ extern void set_underlying_type (tree);
extern void record_locally_defined_typedef (tree);
extern void maybe_record_typedef_use (tree);
extern void maybe_warn_unused_local_typedefs (void);
-extern VEC(tree,gc) *make_tree_vector (void);
-extern void release_tree_vector (VEC(tree,gc) *);
-extern VEC(tree,gc) *make_tree_vector_single (tree);
-extern VEC(tree,gc) *make_tree_vector_from_list (tree);
-extern VEC(tree,gc) *make_tree_vector_copy (const VEC(tree,gc) *);
+extern vec<tree, va_gc> *make_tree_vector (void);
+extern void release_tree_vector (vec<tree, va_gc> *);
+extern vec<tree, va_gc> *make_tree_vector_single (tree);
+extern vec<tree, va_gc> *make_tree_vector_from_list (tree);
+extern vec<tree, va_gc> *make_tree_vector_copy (const vec<tree, va_gc> *);
/* In c-gimplify.c */
extern void c_genericize (tree);
diff --git a/gcc/c-family/c-gimplify.c b/gcc/c-family/c-gimplify.c
index 821c5d5d1ff..27814e1a1fb 100644
--- a/gcc/c-family/c-gimplify.c
+++ b/gcc/c-family/c-gimplify.c
@@ -108,9 +108,9 @@ add_block_to_enclosing (tree block)
unsigned i;
tree enclosing;
gimple bind;
- VEC(gimple, heap) *stack = gimple_bind_expr_stack ();
+ vec<gimple> stack = gimple_bind_expr_stack ();
- FOR_EACH_VEC_ELT (gimple, stack, i, bind)
+ FOR_EACH_VEC_ELT (stack, i, bind)
if (gimple_bind_block (bind))
break;
diff --git a/gcc/c-family/c-pragma.c b/gcc/c-family/c-pragma.c
index 70d8748ece9..f04cc6f13f9 100644
--- a/gcc/c-family/c-pragma.c
+++ b/gcc/c-family/c-pragma.c
@@ -33,7 +33,6 @@ along with GCC; see the file COPYING3. If not see
#include "tm_p.h" /* For REGISTER_TARGET_PRAGMAS (why is
this not a target hook?). */
#include "vec.h"
-#include "vecprim.h"
#include "target.h"
#include "diagnostic.h"
#include "opts.h"
@@ -241,10 +240,8 @@ typedef struct GTY(()) pending_weak_d
tree value;
} pending_weak;
-DEF_VEC_O(pending_weak);
-DEF_VEC_ALLOC_O(pending_weak,gc);
-static GTY(()) VEC(pending_weak,gc) *pending_weaks;
+static GTY(()) vec<pending_weak, va_gc> *pending_weaks;
static void apply_pragma_weak (tree, tree);
static void handle_pragma_weak (cpp_reader *);
@@ -294,11 +291,11 @@ maybe_apply_pragma_weak (tree decl)
id = DECL_ASSEMBLER_NAME (decl);
- FOR_EACH_VEC_ELT (pending_weak, pending_weaks, i, pe)
+ FOR_EACH_VEC_ELT (*pending_weaks, i, pe)
if (id == pe->name)
{
apply_pragma_weak (decl, pe->value);
- VEC_unordered_remove (pending_weak, pending_weaks, i);
+ pending_weaks->unordered_remove (i);
break;
}
}
@@ -313,7 +310,10 @@ maybe_apply_pending_pragma_weaks (void)
pending_weak *pe;
symtab_node target;
- FOR_EACH_VEC_ELT (pending_weak, pending_weaks, i, pe)
+ if (!pending_weaks)
+ return;
+
+ FOR_EACH_VEC_ELT (*pending_weaks, i, pe)
{
alias_id = pe->name;
id = pe->value;
@@ -373,7 +373,7 @@ handle_pragma_weak (cpp_reader * ARG_UNUSED (dummy))
else
{
pending_weak pe = {name, value};
- VEC_safe_push (pending_weak, gc, pending_weaks, pe);
+ vec_safe_push (pending_weaks, pe);
}
}
@@ -414,10 +414,8 @@ typedef struct GTY(()) pending_redefinition_d {
tree newname;
} pending_redefinition;
-DEF_VEC_O(pending_redefinition);
-DEF_VEC_ALLOC_O(pending_redefinition,gc);
-static GTY(()) VEC(pending_redefinition,gc) *pending_redefine_extname;
+static GTY(()) vec<pending_redefinition, va_gc> *pending_redefine_extname;
static void handle_pragma_redefine_extname (cpp_reader *);
@@ -488,7 +486,7 @@ add_to_renaming_pragma_list (tree oldname, tree newname)
unsigned ix;
pending_redefinition *p;
- FOR_EACH_VEC_ELT (pending_redefinition, pending_redefine_extname, ix, p)
+ FOR_EACH_VEC_SAFE_ELT (pending_redefine_extname, ix, p)
if (oldname == p->oldname)
{
if (p->newname != newname)
@@ -498,7 +496,7 @@ add_to_renaming_pragma_list (tree oldname, tree newname)
}
pending_redefinition e = {oldname, newname};
- VEC_safe_push (pending_redefinition, gc, pending_redefine_extname, e);
+ vec_safe_push (pending_redefine_extname, e);
}
/* The current prefix set by #pragma extern_prefix. */
@@ -532,7 +530,7 @@ maybe_apply_renaming_pragma (tree decl, tree asmname)
"conflict with previous rename");
/* Take any pending redefine_extname off the list. */
- FOR_EACH_VEC_ELT (pending_redefinition, pending_redefine_extname, ix, p)
+ FOR_EACH_VEC_SAFE_ELT (pending_redefine_extname, ix, p)
if (DECL_NAME (decl) == p->oldname)
{
/* Only warn if there is a conflict. */
@@ -540,20 +538,18 @@ maybe_apply_renaming_pragma (tree decl, tree asmname)
warning (OPT_Wpragmas, "#pragma redefine_extname ignored due to "
"conflict with previous rename");
- VEC_unordered_remove (pending_redefinition,
- pending_redefine_extname, ix);
+ pending_redefine_extname->unordered_remove (ix);
break;
}
return 0;
}
/* Find out if we have a pending #pragma redefine_extname. */
- FOR_EACH_VEC_ELT (pending_redefinition, pending_redefine_extname, ix, p)
+ FOR_EACH_VEC_SAFE_ELT (pending_redefine_extname, ix, p)
if (DECL_NAME (decl) == p->oldname)
{
tree newname = p->newname;
- VEC_unordered_remove (pending_redefinition,
- pending_redefine_extname, ix);
+ pending_redefine_extname->unordered_remove (ix);
/* If we already have an asmname, #pragma redefine_extname is
ignored (with a warning if it conflicts). */
@@ -600,7 +596,7 @@ maybe_apply_renaming_pragma (tree decl, tree asmname)
static void handle_pragma_visibility (cpp_reader *);
-static VEC (int, heap) *visstack;
+static vec<int> visstack;
/* Push the visibility indicated by STR onto the top of the #pragma
visibility stack. KIND is 0 for #pragma GCC visibility, 1 for
@@ -612,8 +608,7 @@ static VEC (int, heap) *visstack;
void
push_visibility (const char *str, int kind)
{
- VEC_safe_push (int, heap, visstack,
- ((int) default_visibility) | (kind << 8));
+ visstack.safe_push (((int) default_visibility) | (kind << 8));
if (!strcmp (str, "default"))
default_visibility = VISIBILITY_DEFAULT;
else if (!strcmp (str, "internal"))
@@ -633,14 +628,14 @@ push_visibility (const char *str, int kind)
bool
pop_visibility (int kind)
{
- if (!VEC_length (int, visstack))
+ if (!visstack.length ())
return false;
- if ((VEC_last (int, visstack) >> 8) != kind)
+ if ((visstack.last () >> 8) != kind)
return false;
default_visibility
- = (enum symbol_visibility) (VEC_pop (int, visstack) & 0xff);
+ = (enum symbol_visibility) (visstack.pop () & 0xff);
visibility_options.inpragma
- = VEC_length (int, visstack) != 0;
+ = visstack.length () != 0;
return true;
}
@@ -1152,10 +1147,8 @@ handle_pragma_float_const_decimal64 (cpp_reader *ARG_UNUSED (dummy))
}
/* A vector of registered pragma callbacks, which is never freed. */
-DEF_VEC_O (internal_pragma_handler);
-DEF_VEC_ALLOC_O (internal_pragma_handler, heap);
-static VEC(internal_pragma_handler, heap) *registered_pragmas;
+static vec<internal_pragma_handler> registered_pragmas;
typedef struct
{
@@ -1163,10 +1156,8 @@ typedef struct
const char *name;
} pragma_ns_name;
-DEF_VEC_O (pragma_ns_name);
-DEF_VEC_ALLOC_O (pragma_ns_name, heap);
-static VEC(pragma_ns_name, heap) *registered_pp_pragmas;
+static vec<pragma_ns_name> registered_pp_pragmas;
struct omp_pragma_def { const char *name; unsigned int id; };
static const struct omp_pragma_def omp_pragmas[] = {
@@ -1202,13 +1193,10 @@ c_pp_lookup_pragma (unsigned int id, const char **space, const char **name)
}
if (id >= PRAGMA_FIRST_EXTERNAL
- && (id < PRAGMA_FIRST_EXTERNAL
- + VEC_length (pragma_ns_name, registered_pp_pragmas)))
+ && (id < PRAGMA_FIRST_EXTERNAL + registered_pp_pragmas.length ()))
{
- *space = VEC_index (pragma_ns_name, registered_pp_pragmas,
- id - PRAGMA_FIRST_EXTERNAL).space;
- *name = VEC_index (pragma_ns_name, registered_pp_pragmas,
- id - PRAGMA_FIRST_EXTERNAL).name;
+ *space = registered_pp_pragmas[id - PRAGMA_FIRST_EXTERNAL].space;
+ *name = registered_pp_pragmas[id - PRAGMA_FIRST_EXTERNAL].name;
return;
}
@@ -1233,15 +1221,14 @@ c_register_pragma_1 (const char *space, const char *name,
ns_name.space = space;
ns_name.name = name;
- VEC_safe_push (pragma_ns_name, heap, registered_pp_pragmas, ns_name);
- id = VEC_length (pragma_ns_name, registered_pp_pragmas);
+ registered_pp_pragmas.safe_push (ns_name);
+ id = registered_pp_pragmas.length ();
id += PRAGMA_FIRST_EXTERNAL - 1;
}
else
{
- VEC_safe_push (internal_pragma_handler, heap, registered_pragmas,
- ihandler);
- id = VEC_length (internal_pragma_handler, registered_pragmas);
+ registered_pragmas.safe_push (ihandler);
+ id = registered_pragmas.length ();
id += PRAGMA_FIRST_EXTERNAL - 1;
/* The C++ front end allocates 6 bits in cp_token; the C front end
@@ -1331,7 +1318,7 @@ c_invoke_pragma_handler (unsigned int id)
pragma_handler_2arg handler_2arg;
id -= PRAGMA_FIRST_EXTERNAL;
- ihandler = &VEC_index (internal_pragma_handler, registered_pragmas, id);
+ ihandler = &registered_pragmas[id];
if (ihandler->extra_data)
{
handler_2arg = ihandler->handler.handler_2arg;
diff --git a/gcc/c-family/c-pretty-print.c b/gcc/c-family/c-pretty-print.c
index edeccce7a12..c8df1acf29f 100644
--- a/gcc/c-family/c-pretty-print.c
+++ b/gcc/c-family/c-pretty-print.c
@@ -1662,7 +1662,7 @@ pp_c_expression_list (c_pretty_printer *pp, tree e)
/* Print out V, which contains the elements of a constructor. */
void
-pp_c_constructor_elts (c_pretty_printer *pp, VEC(constructor_elt,gc) *v)
+pp_c_constructor_elts (c_pretty_printer *pp, vec<constructor_elt, va_gc> *v)
{
unsigned HOST_WIDE_INT ix;
tree value;
@@ -1670,7 +1670,7 @@ pp_c_constructor_elts (c_pretty_printer *pp, VEC(constructor_elt,gc) *v)
FOR_EACH_CONSTRUCTOR_VALUE (v, ix, value)
{
pp_expression (pp, value);
- if (ix != VEC_length (constructor_elt, v) - 1)
+ if (ix != vec_safe_length (v) - 1)
pp_separate_with (pp, ',');
}
}
diff --git a/gcc/c-family/c-pretty-print.h b/gcc/c-family/c-pretty-print.h
index 2f9f94af183..801663c3fc5 100644
--- a/gcc/c-family/c-pretty-print.h
+++ b/gcc/c-family/c-pretty-print.h
@@ -198,7 +198,7 @@ void pp_c_statement (c_pretty_printer *, tree);
void pp_c_expression (c_pretty_printer *, tree);
void pp_c_logical_or_expression (c_pretty_printer *, tree);
void pp_c_expression_list (c_pretty_printer *, tree);
-void pp_c_constructor_elts (c_pretty_printer *, VEC(constructor_elt,gc) *);
+void pp_c_constructor_elts (c_pretty_printer *, vec<constructor_elt, va_gc> *);
void pp_c_call_argument_list (c_pretty_printer *, tree);
void pp_c_unary_expression (c_pretty_printer *, tree);
void pp_c_cast_expression (c_pretty_printer *, tree);
diff --git a/gcc/c-family/c-semantics.c b/gcc/c-family/c-semantics.c
index 1a21ec17f83..dca7ec93add 100644
--- a/gcc/c-family/c-semantics.c
+++ b/gcc/c-family/c-semantics.c
@@ -37,7 +37,7 @@ push_stmt_list (void)
{
tree t;
t = alloc_stmt_list ();
- VEC_safe_push (tree, gc, stmt_list_stack, t);
+ vec_safe_push (stmt_list_stack, t);
return t;
}
@@ -52,10 +52,10 @@ pop_stmt_list (tree t)
nestings will be due to outstanding cleanups. */
while (1)
{
- u = VEC_pop (tree, stmt_list_stack);
- if (!VEC_empty (tree, stmt_list_stack))
+ u = stmt_list_stack->pop ();
+ if (!stmt_list_stack->is_empty ())
{
- tree x = VEC_last (tree, stmt_list_stack);
+ tree x = stmt_list_stack->last ();
STATEMENT_LIST_HAS_LABEL (x) |= STATEMENT_LIST_HAS_LABEL (u);
}
if (t == u)
diff --git a/gcc/c/ChangeLog b/gcc/c/ChangeLog
index cd3a457d953..e6ef408a697 100644
--- a/gcc/c/ChangeLog
+++ b/gcc/c/ChangeLog
@@ -1,3 +1,19 @@
+2012-11-16 Diego Novillo <dnovillo@google.com>
+
+ Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
+
+ * c-common.c: Use new vec API in vec.h.
+ * c-common.h: Likewise.
+ * c-gimplify.c: Likewise.
+ * c-pragma.c: Likewise.
+ * c-pretty-print.c: Likewise.
+ * c-pretty-print.h: Likewise.
+ * c-semantics.c: Likewise.
+ * c-decl.c: Likewise.
+ * c-parser.c: Likewise.
+ * c-tree.h: Likewise.
+ * c-typeck.c: Likewise.
+
2012-10-29 Jonathan Wakely <jwakely.gcc@gmail.com>
PR c++/54930
diff --git a/gcc/c/c-decl.c b/gcc/c/c-decl.c
index f03146690e1..b1c88bdd96e 100644
--- a/gcc/c/c-decl.c
+++ b/gcc/c/c-decl.c
@@ -286,8 +286,6 @@ struct GTY(()) c_goto_bindings {
};
typedef struct c_goto_bindings *c_goto_bindings_p;
-DEF_VEC_P(c_goto_bindings_p);
-DEF_VEC_ALLOC_P(c_goto_bindings_p,gc);
/* The additional information we keep track of for a label binding.
These fields are updated as scopes are popped. */
@@ -302,11 +300,11 @@ struct GTY(()) c_label_vars {
warn if a goto branches to this label from later in the function.
Decls are added to this list as scopes are popped. We only add
the decls that matter. */
- VEC(tree,gc) *decls_in_scope;
+ vec<tree, va_gc> *decls_in_scope;
/* A list of goto statements to this label. This is only used for
goto statements seen before the label was defined, so that we can
issue appropriate warnings for them. */
- VEC(c_goto_bindings_p,gc) *gotos;
+ vec<c_goto_bindings_p, va_gc> *gotos;
};
/* Each c_scope structure describes the complete contents of one
@@ -496,11 +494,9 @@ static bool keep_next_level_flag;
static bool next_is_function_body;
-/* A VEC of pointers to c_binding structures. */
+/* A vector of pointers to c_binding structures. */
typedef struct c_binding *c_binding_ptr;
-DEF_VEC_P(c_binding_ptr);
-DEF_VEC_ALLOC_P(c_binding_ptr,heap);
/* Information that we keep for a struct or union while it is being
parsed. */
@@ -509,15 +505,15 @@ struct c_struct_parse_info
{
/* If warn_cxx_compat, a list of types defined within this
struct. */
- VEC(tree,heap) *struct_types;
+ vec<tree> struct_types;
/* If warn_cxx_compat, a list of field names which have bindings,
and which are defined in this struct, but which are not defined
in any enclosing struct. This is used to clear the in_struct
field of the c_bindings structure. */
- VEC(c_binding_ptr,heap) *fields;
+ vec<c_binding_ptr> fields;
/* If warn_cxx_compat, a list of typedef names used when defining
fields in this struct. */
- VEC(tree,heap) *typedefs_seen;
+ vec<tree> typedefs_seen;
};
/* Information for the struct or union currently being parsed, or
@@ -1019,16 +1015,14 @@ update_label_decls (struct c_scope *scope)
of B1, if any. Save it to issue a
warning if needed. */
if (decl_jump_unsafe (b1->decl))
- VEC_safe_push (tree, gc,
- label_vars->decls_in_scope,
- b1->decl);
+ vec_safe_push(label_vars->decls_in_scope, b1->decl);
}
}
}
/* Update the bindings of any goto statements associated
with this label. */
- FOR_EACH_VEC_ELT (c_goto_bindings_p, label_vars->gotos, ix, g)
+ FOR_EACH_VEC_SAFE_ELT (label_vars->gotos, ix, g)
update_spot_bindings (scope, &g->goto_bindings);
}
}
@@ -1375,7 +1369,7 @@ c_bindings_start_stmt_expr (struct c_spot_bindings* switch_bindings)
continue;
label_vars = b->u.label;
++label_vars->label_bindings.stmt_exprs;
- FOR_EACH_VEC_ELT (c_goto_bindings_p, label_vars->gotos, ix, g)
+ FOR_EACH_VEC_SAFE_ELT (label_vars->gotos, ix, g)
++g->goto_bindings.stmt_exprs;
}
}
@@ -1413,7 +1407,7 @@ c_bindings_end_stmt_expr (struct c_spot_bindings *switch_bindings)
label_vars->label_bindings.left_stmt_expr = true;
label_vars->label_bindings.stmt_exprs = 0;
}
- FOR_EACH_VEC_ELT (c_goto_bindings_p, label_vars->gotos, ix, g)
+ FOR_EACH_VEC_SAFE_ELT (label_vars->gotos, ix, g)
{
--g->goto_bindings.stmt_exprs;
if (g->goto_bindings.stmt_exprs < 0)
@@ -3056,7 +3050,7 @@ make_label (location_t location, tree name, bool defining,
label_vars->shadowed = NULL;
set_spot_bindings (&label_vars->label_bindings, defining);
label_vars->decls_in_scope = make_tree_vector ();
- label_vars->gotos = VEC_alloc (c_goto_bindings_p, gc, 0);
+ label_vars->gotos = NULL;
*p_label_vars = label_vars;
return label;
@@ -3153,7 +3147,7 @@ lookup_label_for_goto (location_t loc, tree name)
g = ggc_alloc_c_goto_bindings ();
g->loc = loc;
set_spot_bindings (&g->goto_bindings, true);
- VEC_safe_push (c_goto_bindings_p, gc, label_vars->gotos, g);
+ vec_safe_push (label_vars->gotos, g);
return label;
}
@@ -3165,7 +3159,7 @@ lookup_label_for_goto (location_t loc, tree name)
...
goto lab;
Issue a warning or error. */
- FOR_EACH_VEC_ELT (tree, label_vars->decls_in_scope, ix, decl)
+ FOR_EACH_VEC_SAFE_ELT (label_vars->decls_in_scope, ix, decl)
warn_about_goto (loc, label, decl);
if (label_vars->label_bindings.left_stmt_expr)
@@ -3217,7 +3211,7 @@ check_earlier_gotos (tree label, struct c_label_vars* label_vars)
unsigned int ix;
struct c_goto_bindings *g;
- FOR_EACH_VEC_ELT (c_goto_bindings_p, label_vars->gotos, ix, g)
+ FOR_EACH_VEC_SAFE_ELT (label_vars->gotos, ix, g)
{
struct c_binding *b;
struct c_scope *scope;
@@ -3267,7 +3261,7 @@ check_earlier_gotos (tree label, struct c_label_vars* label_vars)
/* Now that the label is defined, we will issue warnings about
subsequent gotos to this label when we see them. */
- VEC_truncate (c_goto_bindings_p, label_vars->gotos, 0);
+ vec_safe_truncate (label_vars->gotos, 0);
label_vars->gotos = NULL;
}
@@ -3939,10 +3933,10 @@ add_flexible_array_elts_to_size (tree decl, tree init)
{
tree elt, type;
- if (VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (init)))
+ if (vec_safe_is_empty (CONSTRUCTOR_ELTS (init)))
return;
- elt = VEC_last (constructor_elt, CONSTRUCTOR_ELTS (init)).value;
+ elt = CONSTRUCTOR_ELTS (init)->last ().value;
type = TREE_TYPE (elt);
if (TREE_CODE (type) == ARRAY_TYPE
&& TYPE_SIZE (type) == NULL_TREE
@@ -4509,15 +4503,15 @@ finish_decl (tree decl, location_t init_loc, tree init,
tree cleanup_id = TREE_VALUE (TREE_VALUE (attr));
tree cleanup_decl = lookup_name (cleanup_id);
tree cleanup;
- VEC(tree,gc) *vec;
+ vec<tree, va_gc> *v;
/* Build "cleanup(&decl)" for the destructor. */
cleanup = build_unary_op (input_location, ADDR_EXPR, decl, 0);
- vec = VEC_alloc (tree, gc, 1);
- VEC_quick_push (tree, vec, cleanup);
+ vec_alloc (v, 1);
+ v->quick_push (cleanup);
cleanup = build_function_call_vec (DECL_SOURCE_LOCATION (decl),
- cleanup_decl, vec, NULL);
- VEC_free (tree, gc, vec);
+ cleanup_decl, v, NULL);
+ vec_free (v);
/* Don't warn about decl unused; the cleanup uses it. */
TREE_USED (decl) = 1;
@@ -5656,7 +5650,7 @@ grokdeclarator (const struct c_declarator *declarator,
c_arg_tag *tag;
unsigned ix;
- FOR_EACH_VEC_ELT_REVERSE (c_arg_tag, arg_info->tags, ix, tag)
+ FOR_EACH_VEC_SAFE_ELT_REVERSE (arg_info->tags, ix, tag)
TYPE_CONTEXT (tag->type) = type;
}
break;
@@ -6398,7 +6392,7 @@ get_parm_info (bool ellipsis, tree expr)
struct c_arg_info *arg_info = build_arg_info ();
tree parms = 0;
- VEC(c_arg_tag,gc) *tags = NULL;
+ vec<c_arg_tag, va_gc> *tags = NULL;
tree types = 0;
tree others = 0;
@@ -6523,7 +6517,7 @@ get_parm_info (bool ellipsis, tree expr)
tag.id = b->id;
tag.type = decl;
- VEC_safe_push (c_arg_tag, gc, tags, tag);
+ vec_safe_push (tags, tag);
break;
case CONST_DECL:
@@ -6735,9 +6729,9 @@ start_struct (location_t loc, enum tree_code code, tree name,
*enclosing_struct_parse_info = struct_parse_info;
struct_parse_info = XNEW (struct c_struct_parse_info);
- struct_parse_info->struct_types = VEC_alloc (tree, heap, 0);
- struct_parse_info->fields = VEC_alloc (c_binding_ptr, heap, 0);
- struct_parse_info->typedefs_seen = VEC_alloc (tree, heap, 0);
+ struct_parse_info->struct_types.create (0);
+ struct_parse_info->fields.create (0);
+ struct_parse_info->typedefs_seen.create (0);
/* FIXME: This will issue a warning for a use of a type defined
within a statement expr used within sizeof, et. al. This is not
@@ -6847,8 +6841,7 @@ grokfield (location_t loc,
to be cleared when this struct is finished. */
if (!b->in_struct)
{
- VEC_safe_push (c_binding_ptr, heap,
- struct_parse_info->fields, b);
+ struct_parse_info->fields.safe_push (b);
b->in_struct = 1;
}
}
@@ -7028,7 +7021,7 @@ warn_cxx_compat_finish_struct (tree fieldlist)
because the flag is used to issue visibility warnings, and we
only want to issue those warnings if the type is referenced
outside of the struct declaration. */
- FOR_EACH_VEC_ELT (tree, struct_parse_info->struct_types, ix, x)
+ FOR_EACH_VEC_ELT (struct_parse_info->struct_types, ix, x)
C_TYPE_DEFINED_IN_STRUCT (x) = 1;
/* The TYPEDEFS_SEEN field of STRUCT_PARSE_INFO is a list of
@@ -7037,14 +7030,14 @@ warn_cxx_compat_finish_struct (tree fieldlist)
not parse in C++, because the C++ lookup rules say that the
typedef name would be looked up in the context of the struct, and
would thus be the field rather than the typedef. */
- if (!VEC_empty (tree, struct_parse_info->typedefs_seen)
+ if (!struct_parse_info->typedefs_seen.is_empty ()
&& fieldlist != NULL_TREE)
{
/* Use a pointer_set using the name of the typedef. We can use
a pointer_set because identifiers are interned. */
struct pointer_set_t *tset = pointer_set_create ();
- FOR_EACH_VEC_ELT (tree, struct_parse_info->typedefs_seen, ix, x)
+ FOR_EACH_VEC_ELT (struct_parse_info->typedefs_seen, ix, x)
pointer_set_insert (tset, DECL_NAME (x));
for (x = fieldlist; x != NULL_TREE; x = DECL_CHAIN (x))
@@ -7066,7 +7059,7 @@ warn_cxx_compat_finish_struct (tree fieldlist)
/* For each field which has a binding and which was not defined in
an enclosing struct, clear the in_struct field. */
- FOR_EACH_VEC_ELT (c_binding_ptr, struct_parse_info->fields, ix, b)
+ FOR_EACH_VEC_ELT (struct_parse_info->fields, ix, b)
b->in_struct = 0;
}
@@ -7353,9 +7346,9 @@ finish_struct (location_t loc, tree t, tree fieldlist, tree attributes,
if (warn_cxx_compat)
warn_cxx_compat_finish_struct (fieldlist);
- VEC_free (tree, heap, struct_parse_info->struct_types);
- VEC_free (c_binding_ptr, heap, struct_parse_info->fields);
- VEC_free (tree, heap, struct_parse_info->typedefs_seen);
+ struct_parse_info->struct_types.release ();
+ struct_parse_info->fields.release ();
+ struct_parse_info->typedefs_seen.release ();
XDELETE (struct_parse_info);
struct_parse_info = enclosing_struct_parse_info;
@@ -7365,7 +7358,7 @@ finish_struct (location_t loc, tree t, tree fieldlist, tree attributes,
if (warn_cxx_compat
&& struct_parse_info != NULL
&& !in_sizeof && !in_typeof && !in_alignof)
- VEC_safe_push (tree, heap, struct_parse_info->struct_types, t);
+ struct_parse_info->struct_types.safe_push (t);
return t;
}
@@ -7583,7 +7576,7 @@ finish_enum (tree enumtype, tree values, tree attributes)
if (warn_cxx_compat
&& struct_parse_info != NULL
&& !in_sizeof && !in_typeof && !in_alignof)
- VEC_safe_push (tree, heap, struct_parse_info->struct_types, enumtype);
+ struct_parse_info->struct_types.safe_push (enumtype);
return enumtype;
}
@@ -7981,7 +7974,7 @@ store_parm_decls_newstyle (tree fndecl, const struct c_arg_info *arg_info)
}
/* And all the tag declarations. */
- FOR_EACH_VEC_ELT_REVERSE (c_arg_tag, arg_info->tags, ix, tag)
+ FOR_EACH_VEC_SAFE_ELT_REVERSE (arg_info->tags, ix, tag)
if (tag->id)
bind (tag->id, tag->type, current_scope,
/*invisible=*/false, /*nested=*/false, UNKNOWN_LOCATION);
@@ -8598,8 +8591,7 @@ c_push_function_context (void)
cfun->language = p = ggc_alloc_cleared_language_function ();
p->base.x_stmt_tree = c_stmt_tree;
- c_stmt_tree.x_cur_stmt_list
- = VEC_copy (tree, gc, c_stmt_tree.x_cur_stmt_list);
+ c_stmt_tree.x_cur_stmt_list = vec_safe_copy (c_stmt_tree.x_cur_stmt_list);
p->x_break_label = c_break_label;
p->x_cont_label = c_cont_label;
p->x_switch_stack = c_switch_stack;
@@ -9538,7 +9530,7 @@ declspecs_add_type (location_t loc, struct c_declspecs *specs,
/* If we are parsing a struct, record that a struct field
used a typedef. */
if (warn_cxx_compat && struct_parse_info != NULL)
- VEC_safe_push (tree, heap, struct_parse_info->typedefs_seen, type);
+ struct_parse_info->typedefs_seen.safe_push (type);
}
}
else if (TREE_CODE (type) == IDENTIFIER_NODE)
@@ -10040,7 +10032,7 @@ collect_all_refs (const char *source_file)
tree t;
unsigned i;
- FOR_EACH_VEC_ELT (tree, all_translation_units, i, t)
+ FOR_EACH_VEC_ELT (*all_translation_units, i, t)
collect_ada_nodes (BLOCK_VARS (DECL_INITIAL (t)), source_file);
collect_ada_nodes (BLOCK_VARS (ext_block), source_file);
@@ -10056,7 +10048,7 @@ for_each_global_decl (void (*callback) (tree decl))
tree decl;
unsigned i;
- FOR_EACH_VEC_ELT (tree, all_translation_units, i, t)
+ FOR_EACH_VEC_ELT (*all_translation_units, i, t)
{
decls = DECL_INITIAL (t);
for (decl = BLOCK_VARS (decls); decl; decl = TREE_CHAIN (decl))
@@ -10116,7 +10108,7 @@ c_write_global_declarations (void)
/* Process all file scopes in this compilation, and the external_scope,
through wrapup_global_declarations and check_global_declarations. */
- FOR_EACH_VEC_ELT (tree, all_translation_units, i, t)
+ FOR_EACH_VEC_ELT (*all_translation_units, i, t)
c_write_global_declarations_1 (BLOCK_VARS (DECL_INITIAL (t)));
c_write_global_declarations_1 (BLOCK_VARS (ext_block));
@@ -10135,7 +10127,7 @@ c_write_global_declarations (void)
if (!seen_error ())
{
timevar_push (TV_SYMOUT);
- FOR_EACH_VEC_ELT (tree, all_translation_units, i, t)
+ FOR_EACH_VEC_ELT (*all_translation_units, i, t)
c_write_global_declarations_2 (BLOCK_VARS (DECL_INITIAL (t)));
c_write_global_declarations_2 (BLOCK_VARS (ext_block));
timevar_pop (TV_SYMOUT);
diff --git a/gcc/c/c-parser.c b/gcc/c/c-parser.c
index bfa98afef01..d85bff0a2d9 100644
--- a/gcc/c/c-parser.c
+++ b/gcc/c/c-parser.c
@@ -1178,9 +1178,9 @@ static struct c_expr c_parser_transaction_expression (c_parser *, enum rid);
static tree c_parser_transaction_cancel (c_parser *);
static struct c_expr c_parser_expression (c_parser *);
static struct c_expr c_parser_expression_conv (c_parser *);
-static VEC(tree,gc) *c_parser_expr_list (c_parser *, bool, bool,
- VEC(tree,gc) **, location_t *,
- tree *);
+static vec<tree, va_gc> *c_parser_expr_list (c_parser *, bool, bool,
+ vec<tree, va_gc> **, location_t *,
+ tree *);
static void c_parser_omp_construct (c_parser *);
static void c_parser_omp_threadprivate (c_parser *);
static void c_parser_omp_barrier (c_parser *);
@@ -3540,7 +3540,7 @@ c_parser_attributes (c_parser *parser)
|| c_parser_next_token_is (parser, CPP_KEYWORD))
{
tree attr, attr_name, attr_args;
- VEC(tree,gc) *expr_list;
+ vec<tree, va_gc> *expr_list;
if (c_parser_next_token_is (parser, CPP_COMMA))
{
c_parser_consume_token (parser);
@@ -6123,10 +6123,10 @@ c_parser_alignof_expression (c_parser *parser)
stores the arguments in CEXPR_LIST. */
static bool
c_parser_get_builtin_args (c_parser *parser, const char *bname,
- VEC(c_expr_t,gc) **ret_cexpr_list)
+ vec<c_expr_t, va_gc> **ret_cexpr_list)
{
location_t loc = c_parser_peek_token (parser)->location;
- VEC (c_expr_t,gc) *cexpr_list;
+ vec<c_expr_t, va_gc> *cexpr_list;
c_expr_t expr;
*ret_cexpr_list = NULL;
@@ -6145,7 +6145,7 @@ c_parser_get_builtin_args (c_parser *parser, const char *bname,
}
expr = c_parser_expr_no_commas (parser, NULL);
- cexpr_list = VEC_alloc (c_expr_t, gc, 1);
+ vec_alloc (cexpr_list, 1);
C_EXPR_APPEND (cexpr_list, expr);
while (c_parser_next_token_is (parser, CPP_COMMA))
{
@@ -6511,7 +6511,7 @@ c_parser_postfix_expression (c_parser *parser)
break;
case RID_CHOOSE_EXPR:
{
- VEC (c_expr_t, gc) *cexpr_list;
+ vec<c_expr_t, va_gc> *cexpr_list;
c_expr_t *e1_p, *e2_p, *e3_p;
tree c;
@@ -6524,7 +6524,7 @@ c_parser_postfix_expression (c_parser *parser)
break;
}
- if (VEC_length (c_expr_t, cexpr_list) != 3)
+ if (vec_safe_length (cexpr_list) != 3)
{
error_at (loc, "wrong number of arguments to "
"%<__builtin_choose_expr%>");
@@ -6532,9 +6532,9 @@ c_parser_postfix_expression (c_parser *parser)
break;
}
- e1_p = &VEC_index (c_expr_t, cexpr_list, 0);
- e2_p = &VEC_index (c_expr_t, cexpr_list, 1);
- e3_p = &VEC_index (c_expr_t, cexpr_list, 2);
+ e1_p = &(*cexpr_list)[0];
+ e2_p = &(*cexpr_list)[1];
+ e3_p = &(*cexpr_list)[2];
c = e1_p->value;
mark_exp_read (e2_p->value);
@@ -6594,7 +6594,7 @@ c_parser_postfix_expression (c_parser *parser)
break;
case RID_BUILTIN_COMPLEX:
{
- VEC(c_expr_t, gc) *cexpr_list;
+ vec<c_expr_t, va_gc> *cexpr_list;
c_expr_t *e1_p, *e2_p;
c_parser_consume_token (parser);
@@ -6606,7 +6606,7 @@ c_parser_postfix_expression (c_parser *parser)
break;
}
- if (VEC_length (c_expr_t, cexpr_list) != 2)
+ if (vec_safe_length (cexpr_list) != 2)
{
error_at (loc, "wrong number of arguments to "
"%<__builtin_complex%>");
@@ -6614,8 +6614,8 @@ c_parser_postfix_expression (c_parser *parser)
break;
}
- e1_p = &VEC_index (c_expr_t, cexpr_list, 0);
- e2_p = &VEC_index (c_expr_t, cexpr_list, 1);
+ e1_p = &(*cexpr_list)[0];
+ e2_p = &(*cexpr_list)[1];
mark_exp_read (e1_p->value);
if (TREE_CODE (e1_p->value) == EXCESS_PRECISION_EXPR)
@@ -6655,7 +6655,7 @@ c_parser_postfix_expression (c_parser *parser)
}
case RID_BUILTIN_SHUFFLE:
{
- VEC(c_expr_t,gc) *cexpr_list;
+ vec<c_expr_t, va_gc> *cexpr_list;
unsigned int i;
c_expr_t *p;
@@ -6668,21 +6668,21 @@ c_parser_postfix_expression (c_parser *parser)
break;
}
- FOR_EACH_VEC_ELT (c_expr_t, cexpr_list, i, p)
+ FOR_EACH_VEC_SAFE_ELT (cexpr_list, i, p)
mark_exp_read (p->value);
- if (VEC_length (c_expr_t, cexpr_list) == 2)
+ if (vec_safe_length (cexpr_list) == 2)
expr.value =
c_build_vec_perm_expr
- (loc, VEC_index (c_expr_t, cexpr_list, 0).value,
- NULL_TREE, VEC_index (c_expr_t, cexpr_list, 1).value);
+ (loc, (*cexpr_list)[0].value,
+ NULL_TREE, (*cexpr_list)[1].value);
- else if (VEC_length (c_expr_t, cexpr_list) == 3)
+ else if (vec_safe_length (cexpr_list) == 3)
expr.value =
c_build_vec_perm_expr
- (loc, VEC_index (c_expr_t, cexpr_list, 0).value,
- VEC_index (c_expr_t, cexpr_list, 1).value,
- VEC_index (c_expr_t, cexpr_list, 2).value);
+ (loc, (*cexpr_list)[0].value,
+ (*cexpr_list)[1].value,
+ (*cexpr_list)[2].value);
else
{
error_at (loc, "wrong number of arguments to "
@@ -6872,8 +6872,8 @@ c_parser_postfix_expression_after_primary (c_parser *parser,
location_t sizeof_arg_loc[3];
tree sizeof_arg[3];
unsigned int i;
- VEC(tree,gc) *exprlist;
- VEC(tree,gc) *origtypes;
+ vec<tree, va_gc> *exprlist;
+ vec<tree, va_gc> *origtypes;
while (true)
{
location_t op_loc = c_parser_peek_token (parser)->location;
@@ -6922,7 +6922,7 @@ c_parser_postfix_expression_after_primary (c_parser *parser,
&& DECL_FUNCTION_CODE (orig_expr.value) == BUILT_IN_CONSTANT_P)
expr.original_code = C_MAYBE_CONST_EXPR;
expr.original_type = NULL;
- if (exprlist != NULL)
+ if (exprlist)
{
release_tree_vector (exprlist);
release_tree_vector (origtypes);
@@ -7069,13 +7069,13 @@ c_parser_expression_conv (c_parser *parser)
nonempty-expr-list , assignment-expression
*/
-static VEC(tree,gc) *
+static vec<tree, va_gc> *
c_parser_expr_list (c_parser *parser, bool convert_p, bool fold_p,
- VEC(tree,gc) **p_orig_types, location_t *sizeof_arg_loc,
- tree *sizeof_arg)
+ vec<tree, va_gc> **p_orig_types,
+ location_t *sizeof_arg_loc, tree *sizeof_arg)
{
- VEC(tree,gc) *ret;
- VEC(tree,gc) *orig_types;
+ vec<tree, va_gc> *ret;
+ vec<tree, va_gc> *orig_types;
struct c_expr expr;
location_t loc = c_parser_peek_token (parser)->location;
location_t cur_sizeof_arg_loc = UNKNOWN_LOCATION;
@@ -7095,9 +7095,9 @@ c_parser_expr_list (c_parser *parser, bool convert_p, bool fold_p,
expr = default_function_array_read_conversion (loc, expr);
if (fold_p)
expr.value = c_fully_fold (expr.value, false, NULL);
- VEC_quick_push (tree, ret, expr.value);
- if (orig_types != NULL)
- VEC_quick_push (tree, orig_types, expr.original_type);
+ ret->quick_push (expr.value);
+ if (orig_types)
+ orig_types->quick_push (expr.original_type);
if (sizeof_arg != NULL
&& cur_sizeof_arg_loc != UNKNOWN_LOCATION
&& expr.original_code == SIZEOF_EXPR)
@@ -7119,9 +7119,9 @@ c_parser_expr_list (c_parser *parser, bool convert_p, bool fold_p,
expr = default_function_array_read_conversion (loc, expr);
if (fold_p)
expr.value = c_fully_fold (expr.value, false, NULL);
- VEC_safe_push (tree, gc, ret, expr.value);
- if (orig_types != NULL)
- VEC_safe_push (tree, gc, orig_types, expr.original_type);
+ vec_safe_push (ret, expr.value);
+ if (orig_types)
+ vec_safe_push (orig_types, expr.original_type);
if (++idx < 3
&& sizeof_arg != NULL
&& cur_sizeof_arg_loc != UNKNOWN_LOCATION
@@ -7131,7 +7131,7 @@ c_parser_expr_list (c_parser *parser, bool convert_p, bool fold_p,
sizeof_arg_loc[idx] = cur_sizeof_arg_loc;
}
}
- if (orig_types != NULL)
+ if (orig_types)
*p_orig_types = orig_types;
return ret;
}
@@ -8209,13 +8209,13 @@ static tree
c_parser_objc_keywordexpr (c_parser *parser)
{
tree ret;
- VEC(tree,gc) *expr_list = c_parser_expr_list (parser, true, true,
+ vec<tree, va_gc> *expr_list = c_parser_expr_list (parser, true, true,
NULL, NULL, NULL);
- if (VEC_length (tree, expr_list) == 1)
+ if (vec_safe_length (expr_list) == 1)
{
/* Just return the expression, remove a level of
indirection. */
- ret = VEC_index (tree, expr_list, 0);
+ ret = (*expr_list)[0];
}
else
{
@@ -9923,7 +9923,7 @@ c_parser_omp_for_loop (location_t loc,
bool fail = false, open_brace_parsed = false;
int i, collapse = 1, nbraces = 0;
location_t for_loc;
- VEC(tree,gc) *for_block = make_tree_vector ();
+ vec<tree, va_gc> *for_block = make_tree_vector ();
for (cl = clauses; cl; cl = OMP_CLAUSE_CHAIN (cl))
if (OMP_CLAUSE_CODE (cl) == OMP_CLAUSE_COLLAPSE)
@@ -9955,7 +9955,7 @@ c_parser_omp_for_loop (location_t loc,
if (c_parser_next_tokens_start_declaration (parser))
{
if (i > 0)
- VEC_safe_push (tree, gc, for_block, c_begin_compound_stmt (true));
+ vec_safe_push (for_block, c_begin_compound_stmt (true));
c_parser_declaration_or_fndef (parser, true, true, true, true, true, NULL);
decl = check_for_loop_decls (for_loc, flag_isoc99);
if (decl == NULL)
@@ -10186,12 +10186,12 @@ c_parser_omp_for_loop (location_t loc,
ret = stmt;
}
pop_scopes:
- while (!VEC_empty (tree, for_block))
+ while (!for_block->is_empty ())
{
/* FIXME diagnostics: LOC below should be the actual location of
this particular for block. We need to build a list of
locations to go along with FOR_BLOCK. */
- stmt = c_end_compound_stmt (loc, VEC_pop (tree, for_block), true);
+ stmt = c_end_compound_stmt (loc, for_block->pop (), true);
add_stmt (stmt);
}
release_tree_vector (for_block);
diff --git a/gcc/c/c-tree.h b/gcc/c/c-tree.h
index 17fc719b49d..531cc7c6d45 100644
--- a/gcc/c/c-tree.h
+++ b/gcc/c/c-tree.h
@@ -135,15 +135,12 @@ struct c_expr
typedef struct c_expr c_expr_t;
/* A varray of c_expr_t. */
-DEF_VEC_O (c_expr_t);
-DEF_VEC_ALLOC_O (c_expr_t, gc);
-DEF_VEC_ALLOC_O (c_expr_t, heap);
/* Append a new c_expr_t element to V. */
#define C_EXPR_APPEND(V, ELEM) \
do { \
c_expr_t __elem = (ELEM); \
- VEC_safe_push (c_expr_t, gc, V, __elem); \
+ vec_safe_push (V, __elem); \
} while (0)
/* A kind of type specifier. Note that this information is currently
@@ -363,15 +360,13 @@ typedef struct c_arg_tag_d {
tree type;
} c_arg_tag;
-DEF_VEC_O(c_arg_tag);
-DEF_VEC_ALLOC_O(c_arg_tag,gc);
/* Information about the parameters in a function declarator. */
struct c_arg_info {
/* A list of parameter decls. */
tree parms;
/* A list of structure, union and enum tags defined. */
- VEC(c_arg_tag,gc) *tags;
+ vec<c_arg_tag, va_gc> *tags;
/* A list of argument types to go in the FUNCTION_TYPE. */
tree types;
/* A list of non-parameter decls (notably enumeration constants)
diff --git a/gcc/c/c-typeck.c b/gcc/c/c-typeck.c
index cf63355f614..2032f66f363 100644
--- a/gcc/c/c-typeck.c
+++ b/gcc/c/c-typeck.c
@@ -80,8 +80,8 @@ static int function_types_compatible_p (const_tree, const_tree, bool *,
bool *);
static int type_lists_compatible_p (const_tree, const_tree, bool *, bool *);
static tree lookup_field (tree, tree);
-static int convert_arguments (tree, VEC(tree,gc) *, VEC(tree,gc) *, tree,
- tree);
+static int convert_arguments (tree, vec<tree, va_gc> *, vec<tree, va_gc> *,
+ tree, tree);
static tree pointer_diff (location_t, tree, tree);
static tree convert_for_assignment (location_t, tree, tree, tree,
enum impl_conv, bool, tree, tree, int);
@@ -2662,14 +2662,14 @@ c_expr_sizeof_type (location_t loc, struct c_type_name *t)
tree
build_function_call (location_t loc, tree function, tree params)
{
- VEC(tree,gc) *vec;
+ vec<tree, va_gc> *v;
tree ret;
- vec = VEC_alloc (tree, gc, list_length (params));
+ vec_alloc (v, list_length (params));
for (; params; params = TREE_CHAIN (params))
- VEC_quick_push (tree, vec, TREE_VALUE (params));
- ret = build_function_call_vec (loc, function, vec, NULL);
- VEC_free (tree, gc, vec);
+ v->quick_push (TREE_VALUE (params));
+ ret = build_function_call_vec (loc, function, v, NULL);
+ vec_free (v);
return ret;
}
@@ -2690,8 +2690,9 @@ static void inform_declaration (tree decl)
PARAMS. */
tree
-build_function_call_vec (location_t loc, tree function, VEC(tree,gc) *params,
- VEC(tree,gc) *origtypes)
+build_function_call_vec (location_t loc, tree function,
+ vec<tree, va_gc> *params,
+ vec<tree, va_gc> *origtypes)
{
tree fntype, fundecl = 0;
tree name = NULL_TREE, result;
@@ -2729,9 +2730,8 @@ build_function_call_vec (location_t loc, tree function, VEC(tree,gc) *params,
/* For Objective-C, convert any calls via a cast to OBJC_TYPE_REF
expressions, like those used for ObjC messenger dispatches. */
- if (!VEC_empty (tree, params))
- function = objc_rewrite_function_call (function,
- VEC_index (tree, params, 0));
+ if (params && !params->is_empty ())
+ function = objc_rewrite_function_call (function, (*params)[0]);
function = c_fully_fold (function, false, NULL);
@@ -2800,8 +2800,7 @@ build_function_call_vec (location_t loc, tree function, VEC(tree,gc) *params,
/* Before the abort, allow the function arguments to exit or
call longjmp. */
for (i = 0; i < nargs; i++)
- trap = build2 (COMPOUND_EXPR, void_type_node,
- VEC_index (tree, params, i), trap);
+ trap = build2 (COMPOUND_EXPR, void_type_node, (*params)[i], trap);
if (VOID_TYPE_P (return_type))
{
@@ -2816,7 +2815,8 @@ build_function_call_vec (location_t loc, tree function, VEC(tree,gc) *params,
if (AGGREGATE_TYPE_P (return_type))
rhs = build_compound_literal (loc, return_type,
- build_constructor (return_type, 0),
+ build_constructor (return_type,
+ NULL),
false);
else
rhs = build_zero_cst (return_type);
@@ -2826,7 +2826,7 @@ build_function_call_vec (location_t loc, tree function, VEC(tree,gc) *params,
}
}
- argarray = VEC_address (tree, params);
+ argarray = vec_safe_address (params);
/* Check that arguments to builtin functions match the expectations. */
if (fundecl
@@ -2886,8 +2886,8 @@ build_function_call_vec (location_t loc, tree function, VEC(tree,gc) *params,
failure. */
static int
-convert_arguments (tree typelist, VEC(tree,gc) *values,
- VEC(tree,gc) *origtypes, tree function, tree fundecl)
+convert_arguments (tree typelist, vec<tree, va_gc> *values,
+ vec<tree, va_gc> *origtypes, tree function, tree fundecl)
{
tree typetail, val;
unsigned int parmnum;
@@ -2934,7 +2934,7 @@ convert_arguments (tree typelist, VEC(tree,gc) *values,
converted arguments. */
for (typetail = typelist, parmnum = 0;
- VEC_iterate (tree, values, parmnum, val);
+ values && values->iterate (parmnum, &val);
++parmnum)
{
tree type = typetail ? TREE_VALUE (typetail) : 0;
@@ -3127,9 +3127,7 @@ convert_arguments (tree typelist, VEC(tree,gc) *values,
sake of better warnings from convert_and_check. */
if (excess_precision)
val = build1 (EXCESS_PRECISION_EXPR, valtype, val);
- origtype = (origtypes == NULL
- ? NULL_TREE
- : VEC_index (tree, origtypes, parmnum));
+ origtype = (!origtypes) ? NULL_TREE : (*origtypes)[parmnum];
parmval = convert_for_assignment (input_location, type, val,
origtype, ic_argpass, npc,
fundecl, function,
@@ -3173,7 +3171,7 @@ convert_arguments (tree typelist, VEC(tree,gc) *values,
/* Convert `short' and `char' to full-size `int'. */
parmval = default_conversion (val);
- VEC_replace (tree, values, parmnum, parmval);
+ (*values)[parmnum] = parmval;
if (parmval == error_mark_node)
error_args = true;
@@ -3181,7 +3179,7 @@ convert_arguments (tree typelist, VEC(tree,gc) *values,
typetail = TREE_CHAIN (typetail);
}
- gcc_assert (parmnum == VEC_length (tree, values));
+ gcc_assert (parmnum == vec_safe_length (values));
if (typetail != 0 && TREE_VALUE (typetail) != void_type_node)
{
@@ -6341,7 +6339,7 @@ static tree constructor_bit_index;
/* If we are saving up the elements rather than allocating them,
this is the list of elements so far (in reverse order,
most recent first). */
-static VEC(constructor_elt,gc) *constructor_elements;
+static vec<constructor_elt, va_gc> *constructor_elements;
/* 1 if constructor should be incrementally stored into a constructor chain,
0 if all the elements should be kept in AVL tree. */
@@ -6417,7 +6415,7 @@ struct constructor_stack
tree unfilled_index;
tree unfilled_fields;
tree bit_index;
- VEC(constructor_elt,gc) *elements;
+ vec<constructor_elt, va_gc> *elements;
struct init_node *pending_elts;
int offset;
int depth;
@@ -6462,7 +6460,7 @@ struct initializer_stack
tree decl;
struct constructor_stack *constructor_stack;
struct constructor_range_stack *constructor_range_stack;
- VEC(constructor_elt,gc) *elements;
+ vec<constructor_elt, va_gc> *elements;
struct spelling *spelling;
struct spelling *spelling_base;
int spelling_size;
@@ -6611,7 +6609,7 @@ really_start_incremental_init (tree type)
constructor_simple = 1;
constructor_nonconst = 0;
constructor_depth = SPELLING_DEPTH ();
- constructor_elements = 0;
+ constructor_elements = NULL;
constructor_pending_elts = 0;
constructor_type = type;
constructor_incremental = 1;
@@ -6757,7 +6755,7 @@ push_init_level (int implicit, struct obstack * braced_init_obstack)
constructor_simple = 1;
constructor_nonconst = 0;
constructor_depth = SPELLING_DEPTH ();
- constructor_elements = 0;
+ constructor_elements = NULL;
constructor_incremental = 1;
constructor_designated = 0;
constructor_pending_elts = 0;
@@ -6807,7 +6805,7 @@ push_init_level (int implicit, struct obstack * braced_init_obstack)
constructor_simple = TREE_STATIC (value);
constructor_nonconst = CONSTRUCTOR_NON_CONST (value);
constructor_elements = CONSTRUCTOR_ELTS (value);
- if (!VEC_empty (constructor_elt, constructor_elements)
+ if (!vec_safe_is_empty (constructor_elements)
&& (TREE_CODE (constructor_type) == RECORD_TYPE
|| TREE_CODE (constructor_type) == ARRAY_TYPE))
set_nonincremental_init (braced_init_obstack);
@@ -6957,9 +6955,8 @@ pop_init_level (int implicit, struct obstack * braced_init_obstack)
&& constructor_unfilled_fields)
{
bool constructor_zeroinit =
- (VEC_length (constructor_elt, constructor_elements) == 1
- && integer_zerop
- (VEC_index (constructor_elt, constructor_elements, 0).value));
+ (vec_safe_length (constructor_elements) == 1
+ && integer_zerop ((*constructor_elements)[0].value));
/* Do not warn for flexible array members or zero-length arrays. */
while (constructor_unfilled_fields
@@ -6997,19 +6994,19 @@ pop_init_level (int implicit, struct obstack * braced_init_obstack)
{
/* A nonincremental scalar initializer--just return
the element, after verifying there is just one. */
- if (VEC_empty (constructor_elt,constructor_elements))
+ if (vec_safe_is_empty (constructor_elements))
{
if (!constructor_erroneous)
error_init ("empty scalar initializer");
ret.value = error_mark_node;
}
- else if (VEC_length (constructor_elt,constructor_elements) != 1)
+ else if (vec_safe_length (constructor_elements) != 1)
{
error_init ("extra elements in scalar initializer");
- ret.value = VEC_index (constructor_elt,constructor_elements,0).value;
+ ret.value = (*constructor_elements)[0].value;
}
else
- ret.value = VEC_index (constructor_elt,constructor_elements,0).value;
+ ret.value = (*constructor_elements)[0].value;
}
else
{
@@ -7534,7 +7531,7 @@ set_nonincremental_init (struct obstack * braced_init_obstack)
add_pending_init (index, value, NULL_TREE, true,
braced_init_obstack);
}
- constructor_elements = 0;
+ constructor_elements = NULL;
if (TREE_CODE (constructor_type) == RECORD_TYPE)
{
constructor_unfilled_fields = TYPE_FIELDS (constructor_type);
@@ -7679,10 +7676,9 @@ find_init_member (tree field, struct obstack * braced_init_obstack)
}
else if (TREE_CODE (constructor_type) == UNION_TYPE)
{
- if (!VEC_empty (constructor_elt, constructor_elements)
- && (VEC_last (constructor_elt, constructor_elements).index
- == field))
- return VEC_last (constructor_elt, constructor_elements).value;
+ if (!vec_safe_is_empty (constructor_elements)
+ && (constructor_elements->last ().index == field))
+ return constructor_elements->last ().value;
}
return 0;
}
@@ -7859,12 +7855,11 @@ output_init_element (tree value, tree origtype, bool strict_string, tree type,
return;
}
else if (TREE_CODE (constructor_type) == UNION_TYPE
- && !VEC_empty (constructor_elt, constructor_elements))
+ && !vec_safe_is_empty (constructor_elements))
{
if (!implicit)
{
- if (TREE_SIDE_EFFECTS (VEC_last (constructor_elt,
- constructor_elements).value))
+ if (TREE_SIDE_EFFECTS (constructor_elements->last ().value))
warning_init (0,
"initialized field with side-effects overwritten");
else if (warn_override_init)
@@ -7872,14 +7867,14 @@ output_init_element (tree value, tree origtype, bool strict_string, tree type,
}
/* We can have just one union field set. */
- constructor_elements = 0;
+ constructor_elements = NULL;
}
/* Otherwise, output this element either to
constructor_elements or to the assembler file. */
constructor_elt celt = {field, value};
- VEC_safe_push (constructor_elt, gc, constructor_elements, celt);
+ vec_safe_push (constructor_elements, celt);
/* Advance the variable that indicates sequential elements output. */
if (TREE_CODE (constructor_type) == ARRAY_TYPE)
diff --git a/gcc/calls.c b/gcc/calls.c
index 18b52487512..f4bed747e84 100644
--- a/gcc/calls.c
+++ b/gcc/calls.c
@@ -1696,7 +1696,7 @@ static struct
based on crtl->args.internal_arg_pointer. The element is NULL_RTX if the
pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
with fixed offset, or PC if this is with variable or unknown offset. */
- VEC(rtx, heap) *cache;
+ vec<rtx> cache;
} internal_arg_pointer_exp_state;
static rtx internal_arg_pointer_based_exp (rtx, bool);
@@ -1725,21 +1725,17 @@ internal_arg_pointer_based_exp_scan (void)
rtx val = NULL_RTX;
unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
/* Punt on pseudos set multiple times. */
- if (idx < VEC_length (rtx, internal_arg_pointer_exp_state.cache)
- && (VEC_index (rtx, internal_arg_pointer_exp_state.cache, idx)
+ if (idx < internal_arg_pointer_exp_state.cache.length ()
+ && (internal_arg_pointer_exp_state.cache[idx]
!= NULL_RTX))
val = pc_rtx;
else
val = internal_arg_pointer_based_exp (SET_SRC (set), false);
if (val != NULL_RTX)
{
- if (idx
- >= VEC_length (rtx, internal_arg_pointer_exp_state.cache))
- VEC_safe_grow_cleared (rtx, heap,
- internal_arg_pointer_exp_state.cache,
- idx + 1);
- VEC_replace (rtx, internal_arg_pointer_exp_state.cache,
- idx, val);
+ if (idx >= internal_arg_pointer_exp_state.cache.length ())
+ internal_arg_pointer_exp_state.cache.safe_grow_cleared (idx + 1);
+ internal_arg_pointer_exp_state.cache[idx] = val;
}
}
if (NEXT_INSN (insn) == NULL_RTX)
@@ -1799,8 +1795,8 @@ internal_arg_pointer_based_exp (rtx rtl, bool toplevel)
if (REG_P (rtl))
{
unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
- if (idx < VEC_length (rtx, internal_arg_pointer_exp_state.cache))
- return VEC_index (rtx, internal_arg_pointer_exp_state.cache, idx);
+ if (idx < internal_arg_pointer_exp_state.cache.length ())
+ return internal_arg_pointer_exp_state.cache[idx];
return NULL_RTX;
}
@@ -3443,7 +3439,7 @@ expand_call (tree exp, rtx target, int ignore)
sbitmap_free (stored_args_map);
internal_arg_pointer_exp_state.scan_start = NULL_RTX;
- VEC_free (rtx, heap, internal_arg_pointer_exp_state.cache);
+ internal_arg_pointer_exp_state.cache.release ();
}
else
{
diff --git a/gcc/cfg.c b/gcc/cfg.c
index ac448d38c27..4ad53502e37 100644
--- a/gcc/cfg.c
+++ b/gcc/cfg.c
@@ -107,14 +107,14 @@ clear_edges (void)
{
FOR_EACH_EDGE (e, ei, bb->succs)
free_edge (e);
- VEC_truncate (edge, bb->succs, 0);
- VEC_truncate (edge, bb->preds, 0);
+ vec_safe_truncate (bb->succs, 0);
+ vec_safe_truncate (bb->preds, 0);
}
FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
free_edge (e);
- VEC_truncate (edge, EXIT_BLOCK_PTR->preds, 0);
- VEC_truncate (edge, ENTRY_BLOCK_PTR->succs, 0);
+ vec_safe_truncate (EXIT_BLOCK_PTR->preds, 0);
+ vec_safe_truncate (ENTRY_BLOCK_PTR->succs, 0);
gcc_assert (!n_edges);
}
@@ -199,7 +199,7 @@ expunge_block (basic_block b)
static inline void
connect_src (edge e)
{
- VEC_safe_push (edge, gc, e->src->succs, e);
+ vec_safe_push (e->src->succs, e);
df_mark_solutions_dirty ();
}
@@ -209,7 +209,7 @@ static inline void
connect_dest (edge e)
{
basic_block dest = e->dest;
- VEC_safe_push (edge, gc, dest->preds, e);
+ vec_safe_push (dest->preds, e);
e->dest_idx = EDGE_COUNT (dest->preds) - 1;
df_mark_solutions_dirty ();
}
@@ -227,7 +227,7 @@ disconnect_src (edge e)
{
if (tmp == e)
{
- VEC_unordered_remove (edge, src->succs, ei.index);
+ src->succs->unordered_remove (ei.index);
df_mark_solutions_dirty ();
return;
}
@@ -246,7 +246,7 @@ disconnect_dest (edge e)
basic_block dest = e->dest;
unsigned int dest_idx = e->dest_idx;
- VEC_unordered_remove (edge, dest->preds, dest_idx);
+ dest->preds->unordered_remove (dest_idx);
/* If we removed an edge in the middle of the edge vector, we need
to update dest_idx of the edge that moved into the "hole". */
diff --git a/gcc/cfganal.c b/gcc/cfganal.c
index 7a76c60e433..89cc6a329d1 100644
--- a/gcc/cfganal.c
+++ b/gcc/cfganal.c
@@ -24,7 +24,6 @@ along with GCC; see the file COPYING3. If not see
#include "coretypes.h"
#include "basic-block.h"
#include "vec.h"
-#include "vecprim.h"
#include "bitmap.h"
#include "sbitmap.h"
#include "timevar.h"
@@ -1139,27 +1138,27 @@ compute_idf (bitmap def_blocks, bitmap_head *dfs)
{
bitmap_iterator bi;
unsigned bb_index, i;
- VEC(int,heap) *work_stack;
+ vec<int> work_stack;
bitmap phi_insertion_points;
- work_stack = VEC_alloc (int, heap, n_basic_blocks);
+ work_stack.create (n_basic_blocks);
phi_insertion_points = BITMAP_ALLOC (NULL);
/* Seed the work list with all the blocks in DEF_BLOCKS. We use
- VEC_quick_push here for speed. This is safe because we know that
+ vec::quick_push here for speed. This is safe because we know that
the number of definition blocks is no greater than the number of
basic blocks, which is the initial capacity of WORK_STACK. */
EXECUTE_IF_SET_IN_BITMAP (def_blocks, 0, bb_index, bi)
- VEC_quick_push (int, work_stack, bb_index);
+ work_stack.quick_push (bb_index);
/* Pop a block off the worklist, add every block that appears in
the original block's DF that we have not already processed to
the worklist. Iterate until the worklist is empty. Blocks
which are added to the worklist are potential sites for
PHI nodes. */
- while (VEC_length (int, work_stack) > 0)
+ while (work_stack.length () > 0)
{
- bb_index = VEC_pop (int, work_stack);
+ bb_index = work_stack.pop ();
/* Since the registration of NEW -> OLD name mappings is done
separately from the call to update_ssa, when updating the SSA
@@ -1174,12 +1173,12 @@ compute_idf (bitmap def_blocks, bitmap_head *dfs)
/* Use a safe push because if there is a definition of VAR
in every basic block, then WORK_STACK may eventually have
more than N_BASIC_BLOCK entries. */
- VEC_safe_push (int, heap, work_stack, i);
+ work_stack.safe_push (i);
bitmap_set_bit (phi_insertion_points, i);
}
}
- VEC_free (int, heap, work_stack);
+ work_stack.release ();
return phi_insertion_points;
}
diff --git a/gcc/cfgcleanup.c b/gcc/cfgcleanup.c
index ab882d269f0..eccbb411f8f 100644
--- a/gcc/cfgcleanup.c
+++ b/gcc/cfgcleanup.c
@@ -2816,12 +2816,12 @@ delete_unreachable_blocks (void)
delete_basic_block (b);
else
{
- VEC (basic_block, heap) *h
+ vec<basic_block> h
= get_all_dominated_blocks (CDI_DOMINATORS, b);
- while (VEC_length (basic_block, h))
+ while (h.length ())
{
- b = VEC_pop (basic_block, h);
+ b = h.pop ();
prev_bb = b->prev_bb;
@@ -2830,7 +2830,7 @@ delete_unreachable_blocks (void)
delete_basic_block (b);
}
- VEC_free (basic_block, heap, h);
+ h.release ();
}
changed = true;
diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index 2c17ec127a4..856baa94d94 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -848,18 +848,15 @@ expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
set_rtl (decl, x);
}
-DEF_VEC_I(HOST_WIDE_INT);
-DEF_VEC_ALLOC_I(HOST_WIDE_INT,heap);
-
struct stack_vars_data
{
/* Vector of offset pairs, always end of some padding followed
by start of the padding that needs Address Sanitizer protection.
The vector is in reversed, highest offset pairs come first. */
- VEC(HOST_WIDE_INT, heap) *asan_vec;
+ vec<HOST_WIDE_INT> asan_vec;
/* Vector of partition representative decls in between the paddings. */
- VEC(tree, heap) *asan_decl_vec;
+ vec<tree> asan_decl_vec;
};
/* A subroutine of expand_used_vars. Give each partition representative
@@ -953,10 +950,8 @@ expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
= alloc_stack_frame_space (stack_vars[i].size
+ ASAN_RED_ZONE_SIZE,
MAX (alignb, ASAN_RED_ZONE_SIZE));
- VEC_safe_push (HOST_WIDE_INT, heap, data->asan_vec,
- prev_offset);
- VEC_safe_push (HOST_WIDE_INT, heap, data->asan_vec,
- offset + stack_vars[i].size);
+ data->asan_vec.safe_push (prev_offset);
+ data->asan_vec.safe_push (offset + stack_vars[i].size);
/* Find best representative of the partition.
Prefer those with DECL_NAME, even better
satisfying asan_protect_stack_decl predicate. */
@@ -973,7 +968,7 @@ expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
repr_decl = stack_vars[j].decl;
if (repr_decl == NULL_TREE)
repr_decl = stack_vars[i].decl;
- VEC_safe_push (tree, heap, data->asan_decl_vec, repr_decl);
+ data->asan_decl_vec.safe_push (repr_decl);
}
else
offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
@@ -1526,7 +1521,7 @@ static rtx
expand_used_vars (void)
{
tree var, outer_block = DECL_INITIAL (current_function_decl);
- VEC(tree,heap) *maybe_local_decls = NULL;
+ vec<tree> maybe_local_decls = vec<tree>();
rtx var_end_seq = NULL_RTX;
struct pointer_map_t *ssa_name_decls;
unsigned i;
@@ -1585,7 +1580,7 @@ expand_used_vars (void)
/* At this point all variables on the local_decls with TREE_USED
set are not associated with any block scope. Lay them out. */
- len = VEC_length (tree, cfun->local_decls);
+ len = vec_safe_length (cfun->local_decls);
FOR_EACH_LOCAL_DECL (cfun, i, var)
{
bool expand_now = false;
@@ -1630,7 +1625,7 @@ expand_used_vars (void)
/* If rtl isn't set yet, which can happen e.g. with
-fstack-protector, retry before returning from this
function. */
- VEC_safe_push (tree, heap, maybe_local_decls, var);
+ maybe_local_decls.safe_push (var);
}
}
@@ -1645,8 +1640,8 @@ expand_used_vars (void)
We just want the duplicates, as those are the artificial
non-ignored vars that we want to keep until instantiate_decls.
Move them down and truncate the array. */
- if (!VEC_empty (tree, cfun->local_decls))
- VEC_block_remove (tree, cfun->local_decls, 0, len);
+ if (!vec_safe_is_empty (cfun->local_decls))
+ cfun->local_decls->block_remove (0, len);
/* At this point, all variables within the block tree with TREE_USED
set are actually used by the optimized function. Lay them out. */
@@ -1680,8 +1675,8 @@ expand_used_vars (void)
{
struct stack_vars_data data;
- data.asan_vec = NULL;
- data.asan_decl_vec = NULL;
+ data.asan_vec = vec<HOST_WIDE_INT>();
+ data.asan_decl_vec = vec<tree>();
/* Reorder decls to be protected by iterating over the variables
array multiple times, and allocating out of each phase in turn. */
@@ -1703,29 +1698,26 @@ expand_used_vars (void)
in addition to phase 1 and 2. */
expand_stack_vars (asan_decl_phase_3, &data);
- if (!VEC_empty (HOST_WIDE_INT, data.asan_vec))
+ if (!data.asan_vec.is_empty ())
{
HOST_WIDE_INT prev_offset = frame_offset;
HOST_WIDE_INT offset
= alloc_stack_frame_space (ASAN_RED_ZONE_SIZE,
ASAN_RED_ZONE_SIZE);
- VEC_safe_push (HOST_WIDE_INT, heap, data.asan_vec, prev_offset);
- VEC_safe_push (HOST_WIDE_INT, heap, data.asan_vec, offset);
+ data.asan_vec.safe_push (prev_offset);
+ data.asan_vec.safe_push (offset);
var_end_seq
= asan_emit_stack_protection (virtual_stack_vars_rtx,
- VEC_address (HOST_WIDE_INT,
- data.asan_vec),
- VEC_address (tree,
- data.asan_decl_vec),
- VEC_length (HOST_WIDE_INT,
- data.asan_vec));
+ data.asan_vec.address (),
+ data.asan_decl_vec.address (),
+ data.asan_vec.length ());
}
expand_stack_vars (NULL, &data);
- VEC_free (HOST_WIDE_INT, heap, data.asan_vec);
- VEC_free (tree, heap, data.asan_decl_vec);
+ data.asan_vec.release ();
+ data.asan_decl_vec.release ();
}
fini_vars_expansion ();
@@ -1733,7 +1725,7 @@ expand_used_vars (void)
/* If there were any artificial non-ignored vars without rtl
found earlier, see if deferred stack allocation hasn't assigned
rtl to them. */
- FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
+ FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
{
rtx rtl = DECL_RTL_IF_SET (var);
@@ -1742,7 +1734,7 @@ expand_used_vars (void)
if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
add_local_decl (cfun, var);
}
- VEC_free (tree, heap, maybe_local_decls);
+ maybe_local_decls.release ();
/* If the target requires that FRAME_OFFSET be aligned, do it. */
if (STACK_ALIGNMENT_NEEDED)
@@ -2104,12 +2096,12 @@ expand_call_stmt (gimple stmt)
/* Ensure RTL is created for debug args. */
if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
{
- VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
+ vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
unsigned int ix;
tree dtemp;
if (debug_args)
- for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
+ for (ix = 1; (*debug_args)->iterate (ix, &dtemp); ix += 2)
{
gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
expand_debug_expr (dtemp);
@@ -3657,13 +3649,13 @@ expand_debug_source_expr (tree exp)
if (DECL_CONTEXT (aexp)
== DECL_ABSTRACT_ORIGIN (current_function_decl))
{
- VEC(tree, gc) **debug_args;
+ vec<tree, va_gc> **debug_args;
unsigned int ix;
tree ddecl;
debug_args = decl_debug_args_lookup (current_function_decl);
if (debug_args != NULL)
{
- for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
+ for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
ix += 2)
if (ddecl == aexp)
return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
diff --git a/gcc/cfghooks.c b/gcc/cfghooks.c
index 39f2f93d9be..dfeff083123 100644
--- a/gcc/cfghooks.c
+++ b/gcc/cfghooks.c
@@ -814,11 +814,12 @@ make_forwarder_block (basic_block bb, bool (*redirect_edge_p) (edge),
if (dom_info_available_p (CDI_DOMINATORS))
{
- VEC (basic_block, heap) *doms_to_fix = VEC_alloc (basic_block, heap, 2);
- VEC_quick_push (basic_block, doms_to_fix, dummy);
- VEC_quick_push (basic_block, doms_to_fix, bb);
+ vec<basic_block> doms_to_fix;
+ doms_to_fix.create (2);
+ doms_to_fix.quick_push (dummy);
+ doms_to_fix.quick_push (bb);
iterate_fix_dominators (CDI_DOMINATORS, doms_to_fix, false);
- VEC_free (basic_block, heap, doms_to_fix);
+ doms_to_fix.release ();
}
if (current_loops != NULL)
@@ -1144,7 +1145,7 @@ bool
cfg_hook_duplicate_loop_to_header_edge (struct loop *loop, edge e,
unsigned int ndupl,
sbitmap wont_exit, edge orig,
- VEC (edge, heap) **to_remove,
+ vec<edge> *to_remove,
int flags)
{
gcc_assert (cfg_hooks->cfg_hook_duplicate_loop_to_header_edge);
diff --git a/gcc/cfghooks.h b/gcc/cfghooks.h
index 2806c4f27c5..cb81f5a0e2e 100644
--- a/gcc/cfghooks.h
+++ b/gcc/cfghooks.h
@@ -119,7 +119,7 @@ struct cfg_hooks
in loop versioning. */
bool (*cfg_hook_duplicate_loop_to_header_edge) (struct loop *, edge,
unsigned, sbitmap,
- edge, VEC (edge, heap) **,
+ edge, vec<edge> *,
int);
/* Add condition to new basic block and update CFG used in loop
@@ -186,7 +186,7 @@ extern bool cfg_hook_duplicate_loop_to_header_edge (struct loop *loop, edge,
unsigned int ndupl,
sbitmap wont_exit,
edge orig,
- VEC (edge, heap) **to_remove,
+ vec<edge> *to_remove,
int flags);
extern void lv_flush_pending_stmts (edge);
diff --git a/gcc/cfgloop.c b/gcc/cfgloop.c
index 5ab7c201688..8c5b45ad2ae 100644
--- a/gcc/cfgloop.c
+++ b/gcc/cfgloop.c
@@ -66,7 +66,7 @@ flow_loop_nested_p (const struct loop *outer, const struct loop *loop)
unsigned odepth = loop_depth (outer);
return (loop_depth (loop) > odepth
- && VEC_index (loop_p, loop->superloops, odepth) == outer);
+ && (*loop->superloops)[odepth] == outer);
}
/* Returns the loop such that LOOP is nested DEPTH (indexed from zero)
@@ -82,22 +82,22 @@ superloop_at_depth (struct loop *loop, unsigned depth)
if (depth == ldepth)
return loop;
- return VEC_index (loop_p, loop->superloops, depth);
+ return (*loop->superloops)[depth];
}
/* Returns the list of the latch edges of LOOP. */
-static VEC (edge, heap) *
+static vec<edge>
get_loop_latch_edges (const struct loop *loop)
{
edge_iterator ei;
edge e;
- VEC (edge, heap) *ret = NULL;
+ vec<edge> ret = vec<edge>();
FOR_EACH_EDGE (e, ei, loop->header->preds)
{
if (dominated_by_p (CDI_DOMINATORS, e->src, loop->header))
- VEC_safe_push (edge, heap, ret, e);
+ ret.safe_push (e);
}
return ret;
@@ -113,7 +113,7 @@ flow_loop_dump (const struct loop *loop, FILE *file,
{
basic_block *bbs;
unsigned i;
- VEC (edge, heap) *latches;
+ vec<edge> latches;
edge e;
if (! loop || ! loop->header)
@@ -128,9 +128,9 @@ flow_loop_dump (const struct loop *loop, FILE *file,
{
fprintf (file, "multiple latches:");
latches = get_loop_latch_edges (loop);
- FOR_EACH_VEC_ELT (edge, latches, i, e)
+ FOR_EACH_VEC_ELT (latches, i, e)
fprintf (file, " %d", e->src->index);
- VEC_free (edge, heap, latches);
+ latches.release ();
fprintf (file, "\n");
}
@@ -179,7 +179,7 @@ flow_loop_free (struct loop *loop)
{
struct loop_exit *exit, *next;
- VEC_free (loop_p, gc, loop->superloops);
+ vec_free (loop->superloops);
/* Break the list of the loop exit records. They will be freed when the
corresponding edge is rescanned or removed, and this avoids
@@ -207,7 +207,7 @@ flow_loops_free (struct loops *loops)
loop_p loop;
/* Free the loop descriptors. */
- FOR_EACH_VEC_ELT (loop_p, loops->larray, i, loop)
+ FOR_EACH_VEC_SAFE_ELT (loops->larray, i, loop)
{
if (!loop)
continue;
@@ -215,7 +215,7 @@ flow_loops_free (struct loops *loops)
flow_loop_free (loop);
}
- VEC_free (loop_p, gc, loops->larray);
+ vec_free (loops->larray);
}
}
@@ -225,7 +225,7 @@ flow_loops_free (struct loops *loops)
int
flow_loop_nodes_find (basic_block header, struct loop *loop)
{
- VEC (basic_block, heap) *stack = NULL;
+ vec<basic_block> stack = vec<basic_block>();
int num_nodes = 1;
edge latch;
edge_iterator latch_ei;
@@ -239,16 +239,16 @@ flow_loop_nodes_find (basic_block header, struct loop *loop)
continue;
num_nodes++;
- VEC_safe_push (basic_block, heap, stack, latch->src);
+ stack.safe_push (latch->src);
latch->src->loop_father = loop;
- while (!VEC_empty (basic_block, stack))
+ while (!stack.is_empty ())
{
basic_block node;
edge e;
edge_iterator ei;
- node = VEC_pop (basic_block, stack);
+ node = stack.pop ();
FOR_EACH_EDGE (e, ei, node->preds)
{
@@ -258,12 +258,12 @@ flow_loop_nodes_find (basic_block header, struct loop *loop)
{
ancestor->loop_father = loop;
num_nodes++;
- VEC_safe_push (basic_block, heap, stack, ancestor);
+ stack.safe_push (ancestor);
}
}
}
}
- VEC_free (basic_block, heap, stack);
+ stack.release ();
return num_nodes;
}
@@ -278,11 +278,11 @@ establish_preds (struct loop *loop, struct loop *father)
unsigned depth = loop_depth (father) + 1;
unsigned i;
- VEC_truncate (loop_p, loop->superloops, 0);
- VEC_reserve (loop_p, gc, loop->superloops, depth);
- FOR_EACH_VEC_ELT (loop_p, father->superloops, i, ploop)
- VEC_quick_push (loop_p, loop->superloops, ploop);
- VEC_quick_push (loop_p, loop->superloops, father);
+ loop->superloops = 0;
+ vec_alloc (loop->superloops, depth);
+ FOR_EACH_VEC_SAFE_ELT (father->superloops, i, ploop)
+ loop->superloops->quick_push (ploop);
+ loop->superloops->quick_push (father);
for (ploop = loop->inner; ploop; ploop = ploop->next)
establish_preds (ploop, loop);
@@ -320,7 +320,7 @@ flow_loop_tree_node_remove (struct loop *loop)
prev->next = loop->next;
}
- VEC_truncate (loop_p, loop->superloops, 0);
+ loop->superloops = NULL;
}
/* Allocates and returns new loop structure. */
@@ -346,7 +346,7 @@ init_loops_structure (struct loops *loops, unsigned num_loops)
struct loop *root;
memset (loops, 0, sizeof *loops);
- loops->larray = VEC_alloc (loop_p, gc, num_loops);
+ vec_alloc (loops->larray, num_loops);
/* Dummy loop containing whole function. */
root = alloc_loop ();
@@ -356,7 +356,7 @@ init_loops_structure (struct loops *loops, unsigned num_loops)
ENTRY_BLOCK_PTR->loop_father = root;
EXIT_BLOCK_PTR->loop_father = root;
- VEC_quick_push (loop_p, loops->larray, root);
+ loops->larray->quick_push (root);
loops->tree_root = root;
}
@@ -457,7 +457,7 @@ flow_loops_find (struct loops *loops)
header = BASIC_BLOCK (rc_order[b]);
loop = alloc_loop ();
- VEC_quick_push (loop_p, loops->larray, loop);
+ loops->larray->quick_push (loop);
loop->header = header;
loop->num = num_loops;
@@ -492,7 +492,7 @@ flow_loops_find (struct loops *loops)
sbitmap_free (headers);
loops->exits = NULL;
- return VEC_length (loop_p, loops->larray);
+ return loops->larray->length ();
}
/* Ratio of frequencies of edges so that one of more latch edges is
@@ -513,13 +513,13 @@ flow_loops_find (struct loops *loops)
derive the loop structure from it). */
static edge
-find_subloop_latch_edge_by_profile (VEC (edge, heap) *latches)
+find_subloop_latch_edge_by_profile (vec<edge> latches)
{
unsigned i;
edge e, me = NULL;
gcov_type mcount = 0, tcount = 0;
- FOR_EACH_VEC_ELT (edge, latches, i, e)
+ FOR_EACH_VEC_ELT (latches, i, e)
{
if (e->count > mcount)
{
@@ -553,9 +553,9 @@ find_subloop_latch_edge_by_profile (VEC (edge, heap) *latches)
another edge. */
static edge
-find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, VEC (edge, heap) *latches)
+find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, vec<edge> latches)
{
- edge e, latch = VEC_index (edge, latches, 0);
+ edge e, latch = latches[0];
unsigned i;
gimple phi;
gimple_stmt_iterator psi;
@@ -563,12 +563,12 @@ find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, VEC (edge, h
basic_block bb;
/* Find the candidate for the latch edge. */
- for (i = 1; VEC_iterate (edge, latches, i, e); i++)
+ for (i = 1; latches.iterate (i, &e); i++)
if (dominated_by_p (CDI_DOMINATORS, latch->src, e->src))
latch = e;
/* Verify that it dominates all the latch edges. */
- FOR_EACH_VEC_ELT (edge, latches, i, e)
+ FOR_EACH_VEC_ELT (latches, i, e)
if (!dominated_by_p (CDI_DOMINATORS, e->src, latch->src))
return NULL;
@@ -587,7 +587,7 @@ find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, VEC (edge, h
if (!bb || !flow_bb_inside_loop_p (loop, bb))
continue;
- FOR_EACH_VEC_ELT (edge, latches, i, e)
+ FOR_EACH_VEC_ELT (latches, i, e)
if (e != latch
&& PHI_ARG_DEF_FROM_EDGE (phi, e) == lop)
return NULL;
@@ -607,10 +607,10 @@ find_subloop_latch_edge_by_ivs (struct loop *loop ATTRIBUTE_UNUSED, VEC (edge, h
static edge
find_subloop_latch_edge (struct loop *loop)
{
- VEC (edge, heap) *latches = get_loop_latch_edges (loop);
+ vec<edge> latches = get_loop_latch_edges (loop);
edge latch = NULL;
- if (VEC_length (edge, latches) > 1)
+ if (latches.length () > 1)
{
latch = find_subloop_latch_edge_by_profile (latches);
@@ -622,7 +622,7 @@ find_subloop_latch_edge (struct loop *loop)
latch = find_subloop_latch_edge_by_ivs (loop, latches);
}
- VEC_free (edge, heap, latches);
+ latches.release ();
return latch;
}
@@ -671,21 +671,21 @@ form_subloop (struct loop *loop, edge latch)
static void
merge_latch_edges (struct loop *loop)
{
- VEC (edge, heap) *latches = get_loop_latch_edges (loop);
+ vec<edge> latches = get_loop_latch_edges (loop);
edge latch, e;
unsigned i;
- gcc_assert (VEC_length (edge, latches) > 0);
+ gcc_assert (latches.length () > 0);
- if (VEC_length (edge, latches) == 1)
- loop->latch = VEC_index (edge, latches, 0)->src;
+ if (latches.length () == 1)
+ loop->latch = latches[0]->src;
else
{
if (dump_file)
fprintf (dump_file, "Merged latch edges of loop %d\n", loop->num);
mfb_reis_set = pointer_set_create ();
- FOR_EACH_VEC_ELT (edge, latches, i, e)
+ FOR_EACH_VEC_ELT (latches, i, e)
pointer_set_insert (mfb_reis_set, e);
latch = make_forwarder_block (loop->header, mfb_redirect_edges_in_set,
NULL);
@@ -695,7 +695,7 @@ merge_latch_edges (struct loop *loop)
loop->latch = latch->src;
}
- VEC_free (edge, heap, latches);
+ latches.release ();
}
/* LOOP may have several latch edges. Transform it into (possibly several)
@@ -1114,10 +1114,10 @@ release_recorded_exits (void)
/* Returns the list of the exit edges of a LOOP. */
-VEC (edge, heap) *
+vec<edge>
get_loop_exit_edges (const struct loop *loop)
{
- VEC (edge, heap) *edges = NULL;
+ vec<edge> edges = vec<edge>();
edge e;
unsigned i;
basic_block *body;
@@ -1131,7 +1131,7 @@ get_loop_exit_edges (const struct loop *loop)
if (loops_state_satisfies_p (LOOPS_HAVE_RECORDED_EXITS))
{
for (exit = loop->exits->next; exit->e; exit = exit->next)
- VEC_safe_push (edge, heap, edges, exit->e);
+ edges.safe_push (exit->e);
}
else
{
@@ -1140,7 +1140,7 @@ get_loop_exit_edges (const struct loop *loop)
FOR_EACH_EDGE (e, ei, body[i]->succs)
{
if (!flow_bb_inside_loop_p (loop, e->dest))
- VEC_safe_push (edge, heap, edges, e);
+ edges.safe_push (e);
}
free (body);
}
@@ -1180,7 +1180,7 @@ add_bb_to_loop (basic_block bb, struct loop *loop)
gcc_assert (bb->loop_father == NULL);
bb->loop_father = loop;
loop->num_nodes++;
- FOR_EACH_VEC_ELT (loop_p, loop->superloops, i, ploop)
+ FOR_EACH_VEC_SAFE_ELT (loop->superloops, i, ploop)
ploop->num_nodes++;
FOR_EACH_EDGE (e, ei, bb->succs)
@@ -1197,7 +1197,7 @@ add_bb_to_loop (basic_block bb, struct loop *loop)
void
remove_bb_from_loops (basic_block bb)
{
- int i;
+ unsigned i;
struct loop *loop = bb->loop_father;
loop_p ploop;
edge_iterator ei;
@@ -1205,7 +1205,7 @@ remove_bb_from_loops (basic_block bb)
gcc_assert (loop != NULL);
loop->num_nodes--;
- FOR_EACH_VEC_ELT (loop_p, loop->superloops, i, ploop)
+ FOR_EACH_VEC_SAFE_ELT (loop->superloops, i, ploop)
ploop->num_nodes--;
bb->loop_father = NULL;
@@ -1232,9 +1232,9 @@ find_common_loop (struct loop *loop_s, struct loop *loop_d)
ddepth = loop_depth (loop_d);
if (sdepth < ddepth)
- loop_d = VEC_index (loop_p, loop_d->superloops, sdepth);
+ loop_d = (*loop_d->superloops)[sdepth];
else if (sdepth > ddepth)
- loop_s = VEC_index (loop_p, loop_s->superloops, ddepth);
+ loop_s = (*loop_s->superloops)[ddepth];
while (loop_s != loop_d)
{
@@ -1253,7 +1253,7 @@ delete_loop (struct loop *loop)
flow_loop_tree_node_remove (loop);
/* Remove loop from loops array. */
- VEC_replace (loop_p, current_loops->larray, loop->num, NULL);
+ (*current_loops->larray)[loop->num] = NULL;
/* Free loop data. */
flow_loop_free (loop);
diff --git a/gcc/cfgloop.h b/gcc/cfgloop.h
index 5cd62b3d640..9e2e02de93d 100644
--- a/gcc/cfgloop.h
+++ b/gcc/cfgloop.h
@@ -22,7 +22,6 @@ along with GCC; see the file COPYING3. If not see
#define GCC_CFGLOOP_H
#include "basic-block.h"
-#include "vecprim.h"
#include "double-int.h"
#include "bitmap.h"
@@ -91,9 +90,6 @@ struct GTY (()) loop_exit {
};
typedef struct loop *loop_p;
-DEF_VEC_P (loop_p);
-DEF_VEC_ALLOC_P (loop_p, heap);
-DEF_VEC_ALLOC_P (loop_p, gc);
/* An integer estimation of the number of iterations. Estimate_state
describes what is the state of the estimation. */
@@ -129,7 +125,7 @@ struct GTY ((chain_next ("%h.next"))) loop {
unsigned num_nodes;
/* Superloops of the loop, starting with the outermost loop. */
- VEC (loop_p, gc) *superloops;
+ vec<loop_p, va_gc> *superloops;
/* The first inner (child) loop or NULL if innermost loop. */
struct loop *inner;
@@ -198,7 +194,7 @@ struct GTY (()) loops {
int state;
/* Array of the loops. */
- VEC (loop_p, gc) *larray;
+ vec<loop_p, va_gc> *larray;
/* Maps edges to the list of their descriptions as loop exits. Edges
whose sources or destinations have loop_father == NULL (which may
@@ -253,7 +249,7 @@ extern basic_block *get_loop_body_in_bfs_order (const struct loop *);
extern basic_block *get_loop_body_in_custom_order (const struct loop *,
int (*) (const void *, const void *));
-extern VEC (edge, heap) *get_loop_exit_edges (const struct loop *);
+extern vec<edge> get_loop_exit_edges (const struct loop *);
extern edge single_exit (const struct loop *);
extern edge single_likely_exit (struct loop *loop);
extern unsigned num_loop_branches (const struct loop *);
@@ -314,7 +310,7 @@ extern void copy_loop_info (struct loop *loop, struct loop *target);
extern void duplicate_subloops (struct loop *, struct loop *);
extern bool duplicate_loop_to_header_edge (struct loop *, edge,
unsigned, sbitmap, edge,
- VEC (edge, heap) **, int);
+ vec<edge> *, int);
extern struct loop *loopify (edge, edge,
basic_block, edge, edge, bool,
unsigned, unsigned);
@@ -434,7 +430,7 @@ simple_loop_desc (struct loop *loop)
static inline struct loop *
get_loop (unsigned num)
{
- return VEC_index (loop_p, current_loops->larray, num);
+ return (*current_loops->larray)[num];
}
/* Returns the number of superloops of LOOP. */
@@ -442,7 +438,7 @@ get_loop (unsigned num)
static inline unsigned
loop_depth (const struct loop *loop)
{
- return VEC_length (loop_p, loop->superloops);
+ return vec_safe_length (loop->superloops);
}
/* Returns the loop depth of the loop BB belongs to. */
@@ -459,12 +455,12 @@ bb_loop_depth (const_basic_block bb)
static inline struct loop *
loop_outer (const struct loop *loop)
{
- unsigned n = VEC_length (loop_p, loop->superloops);
+ unsigned n = vec_safe_length (loop->superloops);
if (n == 0)
return NULL;
- return VEC_index (loop_p, loop->superloops, n - 1);
+ return (*loop->superloops)[n - 1];
}
/* Returns true if LOOP has at least one exit edge. */
@@ -477,7 +473,7 @@ loop_has_exit_edges (const struct loop *loop)
/* Returns the list of loops in current_loops. */
-static inline VEC (loop_p, gc) *
+static inline vec<loop_p, va_gc> *
get_loops (void)
{
if (!current_loops)
@@ -495,7 +491,7 @@ number_of_loops (void)
if (!current_loops)
return 0;
- return VEC_length (loop_p, current_loops->larray);
+ return vec_safe_length (current_loops->larray);
}
/* Returns true if state of the loops satisfies all properties
@@ -542,7 +538,7 @@ enum li_flags
typedef struct
{
/* The list of loops to visit. */
- VEC(int,heap) *to_visit;
+ vec<int> to_visit;
/* The index of the actual loop. */
unsigned idx;
@@ -553,7 +549,7 @@ fel_next (loop_iterator *li, loop_p *loop)
{
int anum;
- while (VEC_iterate (int, li->to_visit, li->idx, anum))
+ while (li->to_visit.iterate (li->idx, &anum))
{
li->idx++;
*loop = get_loop (anum);
@@ -561,7 +557,7 @@ fel_next (loop_iterator *li, loop_p *loop)
return;
}
- VEC_free (int, heap, li->to_visit);
+ li->to_visit.release ();
*loop = NULL;
}
@@ -575,21 +571,21 @@ fel_init (loop_iterator *li, loop_p *loop, unsigned flags)
li->idx = 0;
if (!current_loops)
{
- li->to_visit = NULL;
+ li->to_visit.create (0);
*loop = NULL;
return;
}
- li->to_visit = VEC_alloc (int, heap, number_of_loops ());
+ li->to_visit.create (number_of_loops ());
mn = (flags & LI_INCLUDE_ROOT) ? 0 : 1;
if (flags & LI_ONLY_INNERMOST)
{
- for (i = 0; VEC_iterate (loop_p, current_loops->larray, i, aloop); i++)
+ for (i = 0; vec_safe_iterate (current_loops->larray, i, &aloop); i++)
if (aloop != NULL
&& aloop->inner == NULL
&& aloop->num >= mn)
- VEC_quick_push (int, li->to_visit, aloop->num);
+ li->to_visit.quick_push (aloop->num);
}
else if (flags & LI_FROM_INNERMOST)
{
@@ -602,7 +598,7 @@ fel_init (loop_iterator *li, loop_p *loop, unsigned flags)
while (1)
{
if (aloop->num >= mn)
- VEC_quick_push (int, li->to_visit, aloop->num);
+ li->to_visit.quick_push (aloop->num);
if (aloop->next)
{
@@ -624,7 +620,7 @@ fel_init (loop_iterator *li, loop_p *loop, unsigned flags)
while (1)
{
if (aloop->num >= mn)
- VEC_quick_push (int, li->to_visit, aloop->num);
+ li->to_visit.quick_push (aloop->num);
if (aloop->inner != NULL)
aloop = aloop->inner;
@@ -649,7 +645,7 @@ fel_init (loop_iterator *li, loop_p *loop, unsigned flags)
#define FOR_EACH_LOOP_BREAK(LI) \
{ \
- VEC_free (int, heap, (LI).to_visit); \
+ (LI).to_visit.release (); \
break; \
}
@@ -714,19 +710,18 @@ extern void doloop_optimize_loops (void);
extern void move_loop_invariants (void);
extern bool finite_loop_p (struct loop *);
extern void scale_loop_profile (struct loop *loop, int scale, int iteration_bound);
-extern VEC (basic_block, heap) * get_loop_hot_path (const struct loop *loop);
+extern vec<basic_block> get_loop_hot_path (const struct loop *loop);
/* Returns the outermost loop of the loop nest that contains LOOP.*/
static inline struct loop *
loop_outermost (struct loop *loop)
{
-
- unsigned n = VEC_length (loop_p, loop->superloops);
+ unsigned n = vec_safe_length (loop->superloops);
if (n <= 1)
return loop;
- return VEC_index (loop_p, loop->superloops, 1);
+ return (*loop->superloops)[1];
}
diff --git a/gcc/cfgloopanal.c b/gcc/cfgloopanal.c
index ba7c2626635..9b102849bd2 100644
--- a/gcc/cfgloopanal.c
+++ b/gcc/cfgloopanal.c
@@ -129,7 +129,7 @@ mark_irreducible_loops (void)
if (depth == loop_depth (act->loop_father))
cloop = act->loop_father;
else
- cloop = VEC_index (loop_p, act->loop_father->superloops, depth);
+ cloop = (*act->loop_father->superloops)[depth];
src = LOOP_REPR (cloop);
}
@@ -454,14 +454,14 @@ edge
single_likely_exit (struct loop *loop)
{
edge found = single_exit (loop);
- VEC (edge, heap) *exits;
+ vec<edge> exits;
unsigned i;
edge ex;
if (found)
return found;
exits = get_loop_exit_edges (loop);
- FOR_EACH_VEC_ELT (edge, exits, i, ex)
+ FOR_EACH_VEC_ELT (exits, i, ex)
{
if (ex->flags & (EDGE_EH | EDGE_ABNORMAL_CALL))
continue;
@@ -476,11 +476,11 @@ single_likely_exit (struct loop *loop)
found = ex;
else
{
- VEC_free (edge, heap, exits);
+ exits.release ();
return NULL;
}
}
- VEC_free (edge, heap, exits);
+ exits.release ();
return found;
}
@@ -489,11 +489,11 @@ single_likely_exit (struct loop *loop)
order against direction of edges from latch. Specially, if
header != latch, latch is the 1-st block. */
-VEC (basic_block, heap) *
+vec<basic_block>
get_loop_hot_path (const struct loop *loop)
{
basic_block bb = loop->header;
- VEC (basic_block, heap) *path = NULL;
+ vec<basic_block> path = vec<basic_block>();
bitmap visited = BITMAP_ALLOC (NULL);
while (true)
@@ -502,7 +502,7 @@ get_loop_hot_path (const struct loop *loop)
edge e;
edge best = NULL;
- VEC_safe_push (basic_block, heap, path, bb);
+ path.safe_push (bb);
bitmap_set_bit (visited, bb->index);
FOR_EACH_EDGE (e, ei, bb->succs)
if ((!best || e->probability > best->probability)
diff --git a/gcc/cfgloopmanip.c b/gcc/cfgloopmanip.c
index e6c6dbf5105..a1972ed84d0 100644
--- a/gcc/cfgloopmanip.c
+++ b/gcc/cfgloopmanip.c
@@ -119,11 +119,11 @@ fix_loop_placement (struct loop *loop)
{
unsigned i;
edge e;
- VEC (edge, heap) *exits = get_loop_exit_edges (loop);
+ vec<edge> exits = get_loop_exit_edges (loop);
struct loop *father = current_loops->tree_root, *act;
bool ret = false;
- FOR_EACH_VEC_ELT (edge, exits, i, e)
+ FOR_EACH_VEC_ELT (exits, i, e)
{
act = find_common_loop (loop, e->dest->loop_father);
if (flow_loop_nested_p (father, act))
@@ -139,13 +139,13 @@ fix_loop_placement (struct loop *loop)
/* The exit edges of LOOP no longer exits its original immediate
superloops; remove them from the appropriate exit lists. */
- FOR_EACH_VEC_ELT (edge, exits, i, e)
+ FOR_EACH_VEC_ELT (exits, i, e)
rescan_loop_exit (e, false, false);
ret = true;
}
- VEC_free (edge, heap, exits);
+ exits.release ();
return ret;
}
@@ -285,7 +285,7 @@ remove_path (edge e)
{
edge ae;
basic_block *rem_bbs, *bord_bbs, from, bb;
- VEC (basic_block, heap) *dom_bbs;
+ vec<basic_block> dom_bbs;
int i, nrem, n_bord_bbs;
sbitmap seen;
bool irred_invalidated = false;
@@ -354,7 +354,7 @@ remove_path (edge e)
/* Remove the path. */
from = e->src;
remove_branch (e);
- dom_bbs = NULL;
+ dom_bbs.create (0);
/* Cancel loops contained in the path. */
for (i = 0; i < nrem; i++)
@@ -379,14 +379,14 @@ remove_path (edge e)
ldom;
ldom = next_dom_son (CDI_DOMINATORS, ldom))
if (!dominated_by_p (CDI_DOMINATORS, from, ldom))
- VEC_safe_push (basic_block, heap, dom_bbs, ldom);
+ dom_bbs.safe_push (ldom);
}
free (seen);
/* Recount dominators. */
iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, true);
- VEC_free (basic_block, heap, dom_bbs);
+ dom_bbs.release ();
free (bord_bbs);
/* Fix placements of basic blocks inside loops and the placement of
@@ -407,7 +407,7 @@ static void
place_new_loop (struct loop *loop)
{
loop->num = number_of_loops ();
- VEC_safe_push (loop_p, gc, current_loops->larray, loop);
+ vec_safe_push (current_loops->larray, loop);
}
/* Given LOOP structure with filled header and latch, find the body of the
@@ -588,7 +588,7 @@ scale_loop_profile (struct loop *loop, int scale, int iteration_bound)
static void
update_dominators_in_loop (struct loop *loop)
{
- VEC (basic_block, heap) *dom_bbs = NULL;
+ vec<basic_block> dom_bbs = vec<basic_block>();
sbitmap seen;
basic_block *body;
unsigned i;
@@ -610,14 +610,14 @@ update_dominators_in_loop (struct loop *loop)
if (!bitmap_bit_p (seen, ldom->index))
{
bitmap_set_bit (seen, ldom->index);
- VEC_safe_push (basic_block, heap, dom_bbs, ldom);
+ dom_bbs.safe_push (ldom);
}
}
iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, false);
free (body);
free (seen);
- VEC_free (basic_block, heap, dom_bbs);
+ dom_bbs.release ();
}
/* Creates an if region as shown above. CONDITION is used to create
@@ -1115,7 +1115,7 @@ set_zero_probability (edge e)
bool
duplicate_loop_to_header_edge (struct loop *loop, edge e,
unsigned int ndupl, sbitmap wont_exit,
- edge orig, VEC (edge, heap) **to_remove,
+ edge orig, vec<edge> *to_remove,
int flags)
{
struct loop *target, *aloop;
@@ -1345,7 +1345,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e,
if (orig && bitmap_bit_p (wont_exit, j + 1))
{
if (to_remove)
- VEC_safe_push (edge, heap, *to_remove, new_spec_edges[SE_ORIG]);
+ to_remove->safe_push (new_spec_edges[SE_ORIG]);
set_zero_probability (new_spec_edges[SE_ORIG]);
/* Scale the frequencies of the blocks dominated by the exit. */
@@ -1381,7 +1381,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e,
if (orig && bitmap_bit_p (wont_exit, 0))
{
if (to_remove)
- VEC_safe_push (edge, heap, *to_remove, orig);
+ to_remove->safe_push (orig);
set_zero_probability (orig);
/* Scale the frequencies of the blocks dominated by the exit. */
@@ -1408,14 +1408,14 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e,
for (i = 0; i < n; i++)
{
basic_block dominated, dom_bb;
- VEC (basic_block, heap) *dom_bbs;
+ vec<basic_block> dom_bbs;
unsigned j;
bb = bbs[i];
bb->aux = 0;
dom_bbs = get_dominated_by (CDI_DOMINATORS, bb);
- FOR_EACH_VEC_ELT (basic_block, dom_bbs, j, dominated)
+ FOR_EACH_VEC_ELT (dom_bbs, j, dominated)
{
if (flow_bb_inside_loop_p (loop, dominated))
continue;
@@ -1423,7 +1423,7 @@ duplicate_loop_to_header_edge (struct loop *loop, edge e,
CDI_DOMINATORS, first_active[i], first_active_latch);
set_immediate_dominator (CDI_DOMINATORS, dominated, dom_bb);
}
- VEC_free (basic_block, heap, dom_bbs);
+ dom_bbs.release ();
}
free (first_active);
diff --git a/gcc/cfgrtl.c b/gcc/cfgrtl.c
index 8855bdf9861..a5bb97462a6 100644
--- a/gcc/cfgrtl.c
+++ b/gcc/cfgrtl.c
@@ -350,10 +350,10 @@ rtl_create_basic_block (void *headp, void *endp, basic_block after)
basic_block bb;
/* Grow the basic block array if needed. */
- if ((size_t) last_basic_block >= VEC_length (basic_block, basic_block_info))
+ if ((size_t) last_basic_block >= basic_block_info->length ())
{
size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
- VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
+ vec_safe_grow_cleared (basic_block_info, new_size);
}
n_basic_blocks++;
@@ -1401,7 +1401,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
{
if (tmp == e)
{
- VEC_unordered_remove (edge, ENTRY_BLOCK_PTR->succs, ei.index);
+ ENTRY_BLOCK_PTR->succs->unordered_remove (ei.index);
found = true;
break;
}
@@ -1411,7 +1411,7 @@ force_nonfallthru_and_redirect (edge e, basic_block target, rtx jump_label)
gcc_assert (found);
- VEC_safe_push (edge, gc, bb->succs, e);
+ vec_safe_push (bb->succs, e);
make_single_succ_edge (ENTRY_BLOCK_PTR, bb, EDGE_FALLTHRU);
}
}
diff --git a/gcc/cgraph.c b/gcc/cgraph.c
index 1969576d00f..c53e51cebdf 100644
--- a/gcc/cgraph.c
+++ b/gcc/cgraph.c
@@ -1291,8 +1291,7 @@ cgraph_release_function_body (struct cgraph_node *node)
free_histograms ();
pop_cfun();
gimple_set_body (node->symbol.decl, NULL);
- VEC_free (ipa_opt_pass, heap,
- node->ipa_transforms_to_apply);
+ node->ipa_transforms_to_apply.release ();
/* Struct function hangs a lot of data that would leak if we didn't
removed all pointers to it. */
ggc_free (DECL_STRUCT_FUNCTION (node->symbol.decl));
@@ -1317,8 +1316,7 @@ cgraph_remove_node (struct cgraph_node *node)
cgraph_call_node_removal_hooks (node);
cgraph_node_remove_callers (node);
cgraph_node_remove_callees (node);
- VEC_free (ipa_opt_pass, heap,
- node->ipa_transforms_to_apply);
+ node->ipa_transforms_to_apply.release ();
/* Incremental inlining access removed nodes stored in the postorder list.
*/
@@ -2168,7 +2166,7 @@ cgraph_only_called_directly_p (struct cgraph_node *node)
static bool
collect_callers_of_node_1 (struct cgraph_node *node, void *data)
{
- VEC (cgraph_edge_p, heap) ** redirect_callers = (VEC (cgraph_edge_p, heap) **)data;
+ vec<cgraph_edge_p> *redirect_callers = (vec<cgraph_edge_p> *)data;
struct cgraph_edge *cs;
enum availability avail;
cgraph_function_or_thunk_node (node, &avail);
@@ -2176,17 +2174,17 @@ collect_callers_of_node_1 (struct cgraph_node *node, void *data)
if (avail > AVAIL_OVERWRITABLE)
for (cs = node->callers; cs != NULL; cs = cs->next_caller)
if (!cs->indirect_inlining_edge)
- VEC_safe_push (cgraph_edge_p, heap, *redirect_callers, cs);
+ redirect_callers->safe_push (cs);
return false;
}
/* Collect all callers of NODE and its aliases that are known to lead to NODE
(i.e. are not overwritable). */
-VEC (cgraph_edge_p, heap) *
+vec<cgraph_edge_p>
collect_callers_of_node (struct cgraph_node *node)
{
- VEC (cgraph_edge_p, heap) * redirect_callers = NULL;
+ vec<cgraph_edge_p> redirect_callers = vec<cgraph_edge_p>();
cgraph_for_node_and_aliases (node, collect_callers_of_node_1,
&redirect_callers, false);
return redirect_callers;
@@ -2229,9 +2227,8 @@ verify_edge_count_and_frequency (struct cgraph_edge *e)
/* FIXME: Inline-analysis sets frequency to 0 when edge is optimized out.
Remove this once edges are actually removed from the function at that time. */
&& (e->frequency
- || (inline_edge_summary_vec
- && ((VEC_length(inline_edge_summary_t, inline_edge_summary_vec)
- <= (unsigned) e->uid)
+ || (inline_edge_summary_vec.exists ()
+ && ((inline_edge_summary_vec.length () <= (unsigned) e->uid)
|| !inline_edge_summary (e)->predicate)))
&& (e->frequency
!= compute_call_stmt_bb_frequency (e->caller->symbol.decl,
diff --git a/gcc/cgraph.h b/gcc/cgraph.h
index 25c1f33eb57..28c3497c566 100644
--- a/gcc/cgraph.h
+++ b/gcc/cgraph.h
@@ -191,12 +191,10 @@ struct GTY(()) ipa_replace_map
bool ref_p;
};
typedef struct ipa_replace_map *ipa_replace_map_p;
-DEF_VEC_P(ipa_replace_map_p);
-DEF_VEC_ALLOC_P(ipa_replace_map_p,gc);
struct GTY(()) cgraph_clone_info
{
- VEC(ipa_replace_map_p,gc)* tree_map;
+ vec<ipa_replace_map_p, va_gc> *tree_map;
bitmap args_to_skip;
bitmap combined_args_to_skip;
};
@@ -238,7 +236,7 @@ struct GTY(()) cgraph_node {
/* Interprocedural passes scheduled to have their transform functions
applied next time we execute local pass on them. We maintain it
per-function in order to allow IPA passes to introduce new functions. */
- VEC(ipa_opt_pass,heap) * GTY((skip)) ipa_transforms_to_apply;
+ vec<ipa_opt_pass> GTY((skip)) ipa_transforms_to_apply;
struct cgraph_local_info local;
struct cgraph_global_info global;
@@ -284,15 +282,9 @@ struct GTY(()) cgraph_node {
unsigned dispatcher_function : 1;
};
-DEF_VEC_P(symtab_node);
-DEF_VEC_ALLOC_P(symtab_node,heap);
-DEF_VEC_ALLOC_P(symtab_node,gc);
typedef struct cgraph_node *cgraph_node_ptr;
-DEF_VEC_P(cgraph_node_ptr);
-DEF_VEC_ALLOC_P(cgraph_node_ptr,heap);
-DEF_VEC_ALLOC_P(cgraph_node_ptr,gc);
/* Function Multiversioning info. */
struct GTY(()) cgraph_function_version_info {
@@ -340,34 +332,25 @@ void delete_function_version (tree decl);
struct cgraph_node_set_def
{
struct pointer_map_t *map;
- VEC(cgraph_node_ptr, heap) *nodes;
+ vec<cgraph_node_ptr> nodes;
};
typedef struct varpool_node *varpool_node_ptr;
-DEF_VEC_P(varpool_node_ptr);
-DEF_VEC_ALLOC_P(varpool_node_ptr,heap);
-DEF_VEC_ALLOC_P(varpool_node_ptr,gc);
/* A varpool node set is a collection of varpool nodes. A varpool node
can appear in multiple sets. */
struct varpool_node_set_def
{
struct pointer_map_t * map;
- VEC(varpool_node_ptr, heap) *nodes;
+ vec<varpool_node_ptr> nodes;
};
typedef struct cgraph_node_set_def *cgraph_node_set;
-DEF_VEC_P(cgraph_node_set);
-DEF_VEC_ALLOC_P(cgraph_node_set,gc);
-DEF_VEC_ALLOC_P(cgraph_node_set,heap);
typedef struct varpool_node_set_def *varpool_node_set;
-DEF_VEC_P(varpool_node_set);
-DEF_VEC_ALLOC_P(varpool_node_set,gc);
-DEF_VEC_ALLOC_P(varpool_node_set,heap);
/* Iterator structure for cgraph node sets. */
typedef struct
@@ -462,8 +445,6 @@ struct GTY((chain_next ("%h.next_caller"), chain_prev ("%h.prev_caller"))) cgrap
typedef struct cgraph_edge *cgraph_edge_p;
-DEF_VEC_P(cgraph_edge_p);
-DEF_VEC_ALLOC_P(cgraph_edge_p,heap);
/* The varpool data structure.
Each static variable decl has assigned varpool_node. */
@@ -640,7 +621,7 @@ bool cgraph_for_node_thunks_and_aliases (struct cgraph_node *,
bool cgraph_for_node_and_aliases (struct cgraph_node *,
bool (*) (struct cgraph_node *, void *),
void *, bool);
-VEC (cgraph_edge_p, heap) * collect_callers_of_node (struct cgraph_node *node);
+vec<cgraph_edge_p> collect_callers_of_node (struct cgraph_node *node);
void verify_cgraph (void);
void verify_cgraph_node (struct cgraph_node *);
void cgraph_mark_address_taken_node (struct cgraph_node *);
@@ -691,12 +672,12 @@ struct cgraph_edge * cgraph_clone_edge (struct cgraph_edge *,
struct cgraph_node *, gimple,
unsigned, gcov_type, int, bool);
struct cgraph_node * cgraph_clone_node (struct cgraph_node *, tree, gcov_type,
- int, bool, VEC(cgraph_edge_p,heap) *,
+ int, bool, vec<cgraph_edge_p>,
bool);
tree clone_function_name (tree decl, const char *);
struct cgraph_node * cgraph_create_virtual_clone (struct cgraph_node *old_node,
- VEC(cgraph_edge_p,heap)*,
- VEC(ipa_replace_map_p,gc)* tree_map,
+ vec<cgraph_edge_p>,
+ vec<ipa_replace_map_p, va_gc> *tree_map,
bitmap args_to_skip,
const char *clone_name);
struct cgraph_node *cgraph_find_replacement_node (struct cgraph_node *);
@@ -708,13 +689,13 @@ void cgraph_create_edge_including_clones (struct cgraph_node *,
cgraph_inline_failed_t);
void cgraph_materialize_all_clones (void);
struct cgraph_node * cgraph_copy_node_for_versioning (struct cgraph_node *,
- tree, VEC(cgraph_edge_p,heap)*, bitmap);
+ tree, vec<cgraph_edge_p>, bitmap);
struct cgraph_node *cgraph_function_versioning (struct cgraph_node *,
- VEC(cgraph_edge_p,heap)*,
- VEC(ipa_replace_map_p,gc)*,
+ vec<cgraph_edge_p>,
+ vec<ipa_replace_map_p, va_gc> *,
bitmap, bool, bitmap,
basic_block, const char *);
-void tree_function_versioning (tree, tree, VEC (ipa_replace_map_p,gc)*,
+void tree_function_versioning (tree, tree, vec<ipa_replace_map_p, va_gc> *,
bool, bitmap, bool, bitmap, basic_block);
/* In cgraphbuild.c */
@@ -1051,7 +1032,7 @@ tree add_new_static_var (tree type);
static inline bool
csi_end_p (cgraph_node_set_iterator csi)
{
- return csi.index >= VEC_length (cgraph_node_ptr, csi.set->nodes);
+ return csi.index >= csi.set->nodes.length ();
}
/* Advance iterator CSI. */
@@ -1065,7 +1046,7 @@ csi_next (cgraph_node_set_iterator *csi)
static inline struct cgraph_node *
csi_node (cgraph_node_set_iterator csi)
{
- return VEC_index (cgraph_node_ptr, csi.set->nodes, csi.index);
+ return csi.set->nodes[csi.index];
}
/* Return an iterator to the first node in SET. */
@@ -1092,14 +1073,14 @@ cgraph_node_in_set_p (struct cgraph_node *node, cgraph_node_set set)
static inline size_t
cgraph_node_set_size (cgraph_node_set set)
{
- return VEC_length (cgraph_node_ptr, set->nodes);
+ return set->nodes.length ();
}
/* Return true if iterator VSI points to nothing. */
static inline bool
vsi_end_p (varpool_node_set_iterator vsi)
{
- return vsi.index >= VEC_length (varpool_node_ptr, vsi.set->nodes);
+ return vsi.index >= vsi.set->nodes.length ();
}
/* Advance iterator VSI. */
@@ -1113,7 +1094,7 @@ vsi_next (varpool_node_set_iterator *vsi)
static inline struct varpool_node *
vsi_node (varpool_node_set_iterator vsi)
{
- return VEC_index (varpool_node_ptr, vsi.set->nodes, vsi.index);
+ return vsi.set->nodes[vsi.index];
}
/* Return an iterator to the first node in SET. */
@@ -1140,7 +1121,7 @@ varpool_node_in_set_p (struct varpool_node *node, varpool_node_set set)
static inline size_t
varpool_node_set_size (varpool_node_set set)
{
- return VEC_length (varpool_node_ptr, set->nodes);
+ return set->nodes.length ();
}
/* Uniquize all constants that appear in memory.
@@ -1164,14 +1145,14 @@ struct GTY(()) constant_descriptor_tree {
static inline bool
cgraph_node_set_nonempty_p (cgraph_node_set set)
{
- return !VEC_empty (cgraph_node_ptr, set->nodes);
+ return !set->nodes.is_empty ();
}
/* Return true if set is nonempty. */
static inline bool
varpool_node_set_nonempty_p (varpool_node_set set)
{
- return !VEC_empty (varpool_node_ptr, set->nodes);
+ return !set->nodes.is_empty ();
}
/* Return true when function NODE is only called directly or it has alias.
diff --git a/gcc/cgraphclones.c b/gcc/cgraphclones.c
index 730e70b08e0..d5bc04e0b09 100644
--- a/gcc/cgraphclones.c
+++ b/gcc/cgraphclones.c
@@ -173,7 +173,7 @@ cgraph_clone_edge (struct cgraph_edge *e, struct cgraph_node *n,
struct cgraph_node *
cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
bool update_original,
- VEC(cgraph_edge_p,heap) *redirect_callers,
+ vec<cgraph_edge_p> redirect_callers,
bool call_duplication_hook)
{
struct cgraph_node *new_node = cgraph_create_empty_node ();
@@ -198,7 +198,7 @@ cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
new_node->count = count;
new_node->frequency = n->frequency;
new_node->clone = n->clone;
- new_node->clone.tree_map = 0;
+ new_node->clone.tree_map = NULL;
if (n->count)
{
if (new_node->count > n->count)
@@ -215,7 +215,7 @@ cgraph_clone_node (struct cgraph_node *n, tree decl, gcov_type count, int freq,
n->count = 0;
}
- FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
+ FOR_EACH_VEC_ELT (redirect_callers, i, e)
{
/* Redirect calls to the old version node to point to its new
version. */
@@ -276,8 +276,8 @@ clone_function_name (tree decl, const char *suffix)
*/
struct cgraph_node *
cgraph_create_virtual_clone (struct cgraph_node *old_node,
- VEC(cgraph_edge_p,heap) *redirect_callers,
- VEC(ipa_replace_map_p,gc) *tree_map,
+ vec<cgraph_edge_p> redirect_callers,
+ vec<ipa_replace_map_p, va_gc> *tree_map,
bitmap args_to_skip,
const char * suffix)
{
@@ -323,7 +323,7 @@ cgraph_create_virtual_clone (struct cgraph_node *old_node,
DECL_STATIC_DESTRUCTOR (new_node->symbol.decl) = 0;
new_node->clone.tree_map = tree_map;
new_node->clone.args_to_skip = args_to_skip;
- FOR_EACH_VEC_ELT (ipa_replace_map_p, tree_map, i, map)
+ FOR_EACH_VEC_SAFE_ELT (tree_map, i, map)
{
tree var = map->new_tree;
symtab_node ref_node;
@@ -615,7 +615,7 @@ update_call_expr (struct cgraph_node *new_version)
struct cgraph_node *
cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
tree new_decl,
- VEC(cgraph_edge_p,heap) *redirect_callers,
+ vec<cgraph_edge_p> redirect_callers,
bitmap bbs_to_copy)
{
struct cgraph_node *new_version;
@@ -648,7 +648,7 @@ cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
e->lto_stmt_uid, REG_BR_PROB_BASE,
CGRAPH_FREQ_BASE,
true);
- FOR_EACH_VEC_ELT (cgraph_edge_p, redirect_callers, i, e)
+ FOR_EACH_VEC_ELT (redirect_callers, i, e)
{
/* Redirect calls to the old version node to point to its new
version. */
@@ -682,8 +682,8 @@ cgraph_copy_node_for_versioning (struct cgraph_node *old_version,
struct cgraph_node *
cgraph_function_versioning (struct cgraph_node *old_version_node,
- VEC(cgraph_edge_p,heap) *redirect_callers,
- VEC (ipa_replace_map_p,gc)* tree_map,
+ vec<cgraph_edge_p> redirect_callers,
+ vec<ipa_replace_map_p, va_gc> *tree_map,
bitmap args_to_skip,
bool skip_return,
bitmap bbs_to_copy,
@@ -822,14 +822,12 @@ cgraph_materialize_all_clones (void)
{
unsigned int i;
fprintf (cgraph_dump_file, " replace map: ");
- for (i = 0; i < VEC_length (ipa_replace_map_p,
- node->clone.tree_map);
- i++)
+ for (i = 0;
+ i < vec_safe_length (node->clone.tree_map);
+ i++)
{
struct ipa_replace_map *replace_info;
- replace_info = VEC_index (ipa_replace_map_p,
- node->clone.tree_map,
- i);
+ replace_info = (*node->clone.tree_map)[i];
print_generic_expr (cgraph_dump_file, replace_info->old_tree, 0);
fprintf (cgraph_dump_file, " -> ");
print_generic_expr (cgraph_dump_file, replace_info->new_tree, 0);
diff --git a/gcc/cgraphunit.c b/gcc/cgraphunit.c
index e14e52ed668..4d508cb6b84 100644
--- a/gcc/cgraphunit.c
+++ b/gcc/cgraphunit.c
@@ -610,7 +610,7 @@ cgraph_analyze_function (struct cgraph_node *node)
input_location = saved_loc;
return;
}
- if (!VEC_length (ipa_ref_t, node->symbol.ref_list.references))
+ if (!vec_safe_length (node->symbol.ref_list.references))
ipa_record_reference ((symtab_node)node, (symtab_node)tgt,
IPA_REF_ALIAS, NULL);
if (node->same_body_alias)
@@ -695,7 +695,7 @@ cgraph_process_same_body_aliases (void)
struct cgraph_node *node;
FOR_EACH_FUNCTION (node)
if (node->same_body_alias
- && !VEC_length (ipa_ref_t, node->symbol.ref_list.references))
+ && !vec_safe_length (node->symbol.ref_list.references))
{
struct cgraph_node *tgt = cgraph_get_node (node->thunk.alias);
ipa_record_reference ((symtab_node)node, (symtab_node)tgt,
@@ -1060,7 +1060,7 @@ handle_alias_pairs (void)
alias_pair *p;
unsigned i;
- for (i = 0; VEC_iterate (alias_pair, alias_pairs, i, p);)
+ for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
{
symtab_node target_node = symtab_node_for_asm (p->target);
@@ -1074,13 +1074,13 @@ handle_alias_pairs (void)
else
varpool_get_node (p->decl)->alias = true;
DECL_EXTERNAL (p->decl) = 1;
- VEC_unordered_remove (alias_pair, alias_pairs, i);
+ alias_pairs->unordered_remove (i);
continue;
}
else if (!target_node)
{
error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
- VEC_unordered_remove (alias_pair, alias_pairs, i);
+ alias_pairs->unordered_remove (i);
continue;
}
@@ -1113,13 +1113,13 @@ handle_alias_pairs (void)
if (src_node && src_node->local.finalized)
cgraph_reset_node (src_node);
cgraph_create_function_alias (p->decl, target_node->symbol.decl);
- VEC_unordered_remove (alias_pair, alias_pairs, i);
+ alias_pairs->unordered_remove (i);
}
else if (TREE_CODE (p->decl) == VAR_DECL
&& target_node && is_a <varpool_node> (target_node))
{
varpool_create_variable_alias (p->decl, target_node->symbol.decl);
- VEC_unordered_remove (alias_pair, alias_pairs, i);
+ alias_pairs->unordered_remove (i);
}
else
{
@@ -1127,10 +1127,10 @@ handle_alias_pairs (void)
p->decl);
warning (0, "%q+D aliased declaration",
target_node->symbol.decl);
- VEC_unordered_remove (alias_pair, alias_pairs, i);
+ alias_pairs->unordered_remove (i);
}
}
- VEC_free (alias_pair, gc, alias_pairs);
+ vec_free (alias_pairs);
}
@@ -1440,7 +1440,7 @@ assemble_thunk (struct cgraph_node *node)
int i;
tree resdecl;
tree restmp = NULL;
- VEC(tree, heap) *vargs;
+ vec<tree> vargs;
gimple call;
gimple ret;
@@ -1482,18 +1482,16 @@ assemble_thunk (struct cgraph_node *node)
for (arg = a; arg; arg = DECL_CHAIN (arg))
nargs++;
- vargs = VEC_alloc (tree, heap, nargs);
+ vargs.create (nargs);
if (this_adjusting)
- VEC_quick_push (tree, vargs,
- thunk_adjust (&bsi,
- a, 1, fixed_offset,
- virtual_offset));
+ vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
+ virtual_offset));
else
- VEC_quick_push (tree, vargs, a);
+ vargs.quick_push (a);
for (i = 1, arg = DECL_CHAIN (a); i < nargs; i++, arg = DECL_CHAIN (arg))
- VEC_quick_push (tree, vargs, arg);
+ vargs.quick_push (arg);
call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
- VEC_free (tree, heap, vargs);
+ vargs.release ();
gimple_call_set_from_thunk (call, true);
if (restmp)
gimple_call_set_lhs (call, restmp);
diff --git a/gcc/combine.c b/gcc/combine.c
index 4875c967f4f..5bf08ff2fb6 100644
--- a/gcc/combine.c
+++ b/gcc/combine.c
@@ -252,10 +252,8 @@ typedef struct reg_stat_struct {
ENUM_BITFIELD(machine_mode) truncated_to_mode : 8;
} reg_stat_type;
-DEF_VEC_O(reg_stat_type);
-DEF_VEC_ALLOC_O(reg_stat_type,heap);
-static VEC(reg_stat_type,heap) *reg_stat;
+static vec<reg_stat_type> reg_stat;
/* Record the luid of the last insn that invalidated memory
(anything that writes memory, and subroutine calls, but not pushes). */
@@ -510,8 +508,8 @@ combine_split_insns (rtx pattern, rtx insn)
ret = split_insns (pattern, insn);
nregs = max_reg_num ();
- if (nregs > VEC_length (reg_stat_type, reg_stat))
- VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);
+ if (nregs > reg_stat.length ())
+ reg_stat.safe_grow_cleared (nregs);
return ret;
}
@@ -1121,7 +1119,7 @@ combine_instructions (rtx f, unsigned int nregs)
rtl_hooks = combine_rtl_hooks;
- VEC_safe_grow_cleared (reg_stat_type, heap, reg_stat, nregs);
+ reg_stat.safe_grow_cleared (nregs);
init_recog_no_volatile ();
@@ -1446,7 +1444,7 @@ combine_instructions (rtx f, unsigned int nregs)
obstack_free (&insn_link_obstack, NULL);
free (uid_log_links);
free (uid_insn_cost);
- VEC_free (reg_stat_type, heap, reg_stat);
+ reg_stat.release ();
{
struct undo *undo, *next;
@@ -1480,7 +1478,7 @@ init_reg_last (void)
unsigned int i;
reg_stat_type *p;
- FOR_EACH_VEC_ELT (reg_stat_type, reg_stat, i, p)
+ FOR_EACH_VEC_ELT (reg_stat, i, p)
memset (p, 0, offsetof (reg_stat_type, sign_bit_copies));
}
@@ -1587,7 +1585,7 @@ set_nonzero_bits_and_sign_copies (rtx x, const_rtx set, void *data)
(DF_LR_IN (ENTRY_BLOCK_PTR->next_bb), REGNO (x))
&& HWI_COMPUTABLE_MODE_P (GET_MODE (x)))
{
- reg_stat_type *rsp = &VEC_index (reg_stat_type, reg_stat, REGNO (x));
+ reg_stat_type *rsp = &reg_stat[REGNO (x)];
if (set == 0 || GET_CODE (set) == CLOBBER)
{
@@ -3637,22 +3635,18 @@ try_combine (rtx i3, rtx i2, rtx i1, rtx i0, int *new_direct_jump_p,
&& GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
&& ! (temp = SET_DEST (XVECEXP (newpat, 0, 1)),
(REG_P (temp)
- && VEC_index (reg_stat_type, reg_stat,
- REGNO (temp)).nonzero_bits != 0
+ && reg_stat[REGNO (temp)].nonzero_bits != 0
&& GET_MODE_PRECISION (GET_MODE (temp)) < BITS_PER_WORD
&& GET_MODE_PRECISION (GET_MODE (temp)) < HOST_BITS_PER_INT
- && (VEC_index (reg_stat_type, reg_stat,
- REGNO (temp)).nonzero_bits
+ && (reg_stat[REGNO (temp)].nonzero_bits
!= GET_MODE_MASK (word_mode))))
&& ! (GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == SUBREG
&& (temp = SUBREG_REG (SET_DEST (XVECEXP (newpat, 0, 1))),
(REG_P (temp)
- && VEC_index (reg_stat_type, reg_stat,
- REGNO (temp)).nonzero_bits != 0
+ && reg_stat[REGNO (temp)].nonzero_bits != 0
&& GET_MODE_PRECISION (GET_MODE (temp)) < BITS_PER_WORD
&& GET_MODE_PRECISION (GET_MODE (temp)) < HOST_BITS_PER_INT
- && (VEC_index (reg_stat_type, reg_stat,
- REGNO (temp)).nonzero_bits
+ && (reg_stat[REGNO (temp)].nonzero_bits
!= GET_MODE_MASK (word_mode)))))
&& ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
SET_SRC (XVECEXP (newpat, 0, 1)))
@@ -9413,7 +9407,7 @@ reg_nonzero_bits_for_combine (const_rtx x, enum machine_mode mode,
value. Otherwise, use the previously-computed global nonzero bits
for this register. */
- rsp = &VEC_index (reg_stat_type, reg_stat, REGNO (x));
+ rsp = &reg_stat[REGNO (x)];
if (rsp->last_set_value != 0
&& (rsp->last_set_mode == mode
|| (GET_MODE_CLASS (rsp->last_set_mode) == MODE_INT
@@ -9482,7 +9476,7 @@ reg_num_sign_bit_copies_for_combine (const_rtx x, enum machine_mode mode,
rtx tem;
reg_stat_type *rsp;
- rsp = &VEC_index (reg_stat_type, reg_stat, REGNO (x));
+ rsp = &reg_stat[REGNO (x)];
if (rsp->last_set_value != 0
&& rsp->last_set_mode == mode
&& ((rsp->last_set_label >= label_tick_ebb_start
@@ -12046,7 +12040,7 @@ update_table_tick (rtx x)
for (r = regno; r < endregno; r++)
{
- reg_stat_type *rsp = &VEC_index (reg_stat_type, reg_stat, r);
+ reg_stat_type *rsp = &reg_stat[r];
rsp->last_set_table_tick = label_tick;
}
@@ -12148,7 +12142,7 @@ record_value_for_reg (rtx reg, rtx insn, rtx value)
register. */
for (i = regno; i < endregno; i++)
{
- rsp = &VEC_index (reg_stat_type, reg_stat, i);
+ rsp = &reg_stat[i];
if (insn)
rsp->last_set = insn;
@@ -12174,7 +12168,7 @@ record_value_for_reg (rtx reg, rtx insn, rtx value)
for (i = regno; i < endregno; i++)
{
- rsp = &VEC_index (reg_stat_type, reg_stat, i);
+ rsp = &reg_stat[i];
rsp->last_set_label = label_tick;
if (!insn
|| (value && rsp->last_set_table_tick >= label_tick_ebb_start))
@@ -12186,7 +12180,7 @@ record_value_for_reg (rtx reg, rtx insn, rtx value)
/* The value being assigned might refer to X (like in "x++;"). In that
case, we must replace it with (clobber (const_int 0)) to prevent
infinite loops. */
- rsp = &VEC_index (reg_stat_type, reg_stat, regno);
+ rsp = &reg_stat[regno];
if (value && !get_last_value_validate (&value, insn, label_tick, 0))
{
value = copy_rtx (value);
@@ -12284,7 +12278,7 @@ record_dead_and_set_regs (rtx insn)
{
reg_stat_type *rsp;
- rsp = &VEC_index (reg_stat_type, reg_stat, i);
+ rsp = &reg_stat[i];
rsp->last_death = insn;
}
}
@@ -12299,7 +12293,7 @@ record_dead_and_set_regs (rtx insn)
{
reg_stat_type *rsp;
- rsp = &VEC_index (reg_stat_type, reg_stat, i);
+ rsp = &reg_stat[i];
rsp->last_set_invalid = 1;
rsp->last_set = insn;
rsp->last_set_value = 0;
@@ -12357,7 +12351,7 @@ record_promoted_value (rtx insn, rtx subreg)
continue;
}
- rsp = &VEC_index (reg_stat_type, reg_stat, regno);
+ rsp = &reg_stat[regno];
if (rsp->last_set == insn)
{
if (SUBREG_PROMOTED_UNSIGNED_P (subreg) > 0)
@@ -12382,7 +12376,7 @@ record_promoted_value (rtx insn, rtx subreg)
static bool
reg_truncated_to_mode (enum machine_mode mode, const_rtx x)
{
- reg_stat_type *rsp = &VEC_index (reg_stat_type, reg_stat, REGNO (x));
+ reg_stat_type *rsp = &reg_stat[REGNO (x)];
enum machine_mode truncated = rsp->truncated_to_mode;
if (truncated == 0
@@ -12427,7 +12421,7 @@ record_truncated_value (rtx *p, void *data ATTRIBUTE_UNUSED)
else
return 0;
- rsp = &VEC_index (reg_stat_type, reg_stat, REGNO (x));
+ rsp = &reg_stat[REGNO (x)];
if (rsp->truncated_to_mode == 0
|| rsp->truncation_label < label_tick_ebb_start
|| (GET_MODE_SIZE (truncated_mode)
@@ -12506,7 +12500,7 @@ get_last_value_validate (rtx *loc, rtx insn, int tick, int replace)
for (j = regno; j < endregno; j++)
{
- reg_stat_type *rsp = &VEC_index (reg_stat_type, reg_stat, j);
+ reg_stat_type *rsp = &reg_stat[j];
if (rsp->last_set_invalid
/* If this is a pseudo-register that was only set once and not
live at the beginning of the function, it is always valid. */
@@ -12610,7 +12604,7 @@ get_last_value (const_rtx x)
return 0;
regno = REGNO (x);
- rsp = &VEC_index (reg_stat_type, reg_stat, regno);
+ rsp = &reg_stat[regno];
value = rsp->last_set_value;
/* If we don't have a value, or if it isn't for this basic block and
@@ -12674,7 +12668,7 @@ use_crosses_set_p (const_rtx x, int from_luid)
#endif
for (; regno < endreg; regno++)
{
- reg_stat_type *rsp = &VEC_index (reg_stat_type, reg_stat, regno);
+ reg_stat_type *rsp = &reg_stat[regno];
if (rsp->last_set
&& rsp->last_set_label == label_tick
&& DF_INSN_LUID (rsp->last_set) > from_luid)
@@ -12920,7 +12914,7 @@ move_deaths (rtx x, rtx maybe_kill_insn, int from_luid, rtx to_insn,
if (code == REG)
{
unsigned int regno = REGNO (x);
- rtx where_dead = VEC_index (reg_stat_type, reg_stat, regno).last_death;
+ rtx where_dead = reg_stat[regno].last_death;
/* Don't move the register if it gets killed in between from and to. */
if (maybe_kill_insn && reg_set_p (x, maybe_kill_insn)
@@ -13535,7 +13529,7 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
if (place && REG_NOTE_KIND (note) == REG_DEAD)
{
unsigned int regno = REGNO (XEXP (note, 0));
- reg_stat_type *rsp = &VEC_index (reg_stat_type, reg_stat, regno);
+ reg_stat_type *rsp = &reg_stat[regno];
if (dead_or_set_p (place, XEXP (note, 0))
|| reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
diff --git a/gcc/compare-elim.c b/gcc/compare-elim.c
index 1761d0d2aff..cc1502d8162 100644
--- a/gcc/compare-elim.c
+++ b/gcc/compare-elim.c
@@ -120,10 +120,8 @@ struct comparison
};
typedef struct comparison *comparison_struct_p;
-DEF_VEC_P(comparison_struct_p);
-DEF_VEC_ALLOC_P(comparison_struct_p, heap);
-static VEC(comparison_struct_p, heap) *all_compares;
+static vec<comparison_struct_p> all_compares;
/* Look for a "conforming" comparison, as defined above. If valid, return
the rtx for the COMPARE itself. */
@@ -337,7 +335,7 @@ find_comparisons_in_bb (struct dom_walk_data *data ATTRIBUTE_UNUSED,
last_cmp->in_a = XEXP (src, 0);
last_cmp->in_b = XEXP (src, 1);
last_cmp->orig_mode = src_mode;
- VEC_safe_push (comparison_struct_p, heap, all_compares, last_cmp);
+ all_compares.safe_push (last_cmp);
/* It's unusual, but be prepared for comparison patterns that
also clobber an input, or perhaps a scratch. */
@@ -623,24 +621,23 @@ execute_compare_elim_after_reload (void)
{
df_analyze ();
- gcc_checking_assert (all_compares == NULL);
+ gcc_checking_assert (!all_compares.exists ());
/* Locate all comparisons and their uses, and eliminate duplicates. */
find_comparisons ();
- if (all_compares)
+ if (all_compares.exists ())
{
struct comparison *cmp;
size_t i;
/* Eliminate comparisons that are redundant with flags computation. */
- FOR_EACH_VEC_ELT (comparison_struct_p, all_compares, i, cmp)
+ FOR_EACH_VEC_ELT (all_compares, i, cmp)
{
try_eliminate_compare (cmp);
XDELETE (cmp);
}
- VEC_free (comparison_struct_p, heap, all_compares);
- all_compares = NULL;
+ all_compares.release ();
}
return 0;
diff --git a/gcc/config/bfin/bfin.c b/gcc/config/bfin/bfin.c
index 2c01cf7a092..f2d8473946e 100644
--- a/gcc/config/bfin/bfin.c
+++ b/gcc/config/bfin/bfin.c
@@ -3480,8 +3480,8 @@ hwloop_optimize (hwloop_info loop)
insn = BB_END (loop->incoming_src);
/* If we have to insert the LSETUP before a jump, count that jump in the
length. */
- if (VEC_length (edge, loop->incoming) > 1
- || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
+ if (vec_safe_length (loop->incoming) > 1
+ || !(loop->incoming->last ()->flags & EDGE_FALLTHRU))
{
gcc_assert (JUMP_P (insn));
insn = PREV_INSN (insn);
@@ -3749,8 +3749,8 @@ hwloop_optimize (hwloop_info loop)
if (loop->incoming_src)
{
rtx prev = BB_END (loop->incoming_src);
- if (VEC_length (edge, loop->incoming) > 1
- || !(VEC_last (edge, loop->incoming)->flags & EDGE_FALLTHRU))
+ if (vec_safe_length (loop->incoming) > 1
+ || !(loop->incoming->last ()->flags & EDGE_FALLTHRU))
{
gcc_assert (JUMP_P (prev));
prev = PREV_INSN (prev);
diff --git a/gcc/config/c6x/c6x.c b/gcc/config/c6x/c6x.c
index 32807d8eaca..175e119b688 100644
--- a/gcc/config/c6x/c6x.c
+++ b/gcc/config/c6x/c6x.c
@@ -119,14 +119,12 @@ typedef struct
unsigned int unit_mask;
} c6x_sched_insn_info;
-DEF_VEC_O(c6x_sched_insn_info);
-DEF_VEC_ALLOC_O(c6x_sched_insn_info, heap);
/* Record a c6x_sched_insn_info structure for every insn in the function. */
-static VEC(c6x_sched_insn_info, heap) *insn_info;
+static vec<c6x_sched_insn_info> insn_info;
-#define INSN_INFO_LENGTH (VEC_length (c6x_sched_insn_info, insn_info))
-#define INSN_INFO_ENTRY(N) (VEC_index (c6x_sched_insn_info, insn_info, (N)))
+#define INSN_INFO_LENGTH (insn_info).length ()
+#define INSN_INFO_ENTRY(N) (insn_info[(N)])
static bool done_cfi_sections;
@@ -1971,7 +1969,7 @@ c6x_get_unit_specifier (rtx insn)
{
enum attr_units units;
- if (insn_info)
+ if (insn_info.exists ())
{
int unit = INSN_INFO_ENTRY (INSN_UID (insn)).reservation;
return c6x_unit_names[unit][0];
@@ -2023,7 +2021,7 @@ c6x_print_unit_specifier_field (FILE *file, rtx insn)
return;
}
- if (insn_info)
+ if (insn_info.exists ())
{
int unit = INSN_INFO_ENTRY (INSN_UID (insn)).reservation;
fputs (".", file);
@@ -3422,7 +3420,7 @@ try_rename_operands (rtx head, rtx tail, unit_req_table reqs, rtx insn,
int i;
unsigned tmp_mask;
int best_reg, old_reg;
- VEC (du_head_p, heap) *involved_chains = NULL;
+ vec<du_head_p> involved_chains = vec<du_head_p>();
unit_req_table new_reqs;
for (i = 0, tmp_mask = op_mask; tmp_mask; i++)
@@ -3433,14 +3431,14 @@ try_rename_operands (rtx head, rtx tail, unit_req_table reqs, rtx insn,
if (info->op_info[i].n_chains != 1)
goto out_fail;
op_chain = regrename_chain_from_id (info->op_info[i].heads[0]->id);
- VEC_safe_push (du_head_p, heap, involved_chains, op_chain);
+ involved_chains.safe_push (op_chain);
tmp_mask &= ~(1 << i);
}
- if (VEC_length (du_head_p, involved_chains) > 1)
+ if (involved_chains.length () > 1)
goto out_fail;
- this_head = VEC_index (du_head_p, involved_chains, 0);
+ this_head = involved_chains[0];
if (this_head->cannot_rename)
goto out_fail;
@@ -3448,8 +3446,7 @@ try_rename_operands (rtx head, rtx tail, unit_req_table reqs, rtx insn,
{
unsigned int mask1, mask2, mask_changed;
int count, side1, side2, req1, req2;
- insn_rr_info *this_rr = &VEC_index (insn_rr_info, insn_rr,
- INSN_UID (chain->insn));
+ insn_rr_info *this_rr = &insn_rr[INSN_UID (chain->insn)];
count = get_unit_reqs (chain->insn, &req1, &side1, &req2, &side2);
@@ -3508,7 +3505,7 @@ try_rename_operands (rtx head, rtx tail, unit_req_table reqs, rtx insn,
memcpy (reqs, new_reqs, sizeof (unit_req_table));
out_fail:
- VEC_free (du_head_p, heap, involved_chains);
+ involved_chains.release ();
}
/* Find insns in LOOP which would, if shifted to the other side
@@ -3555,7 +3552,7 @@ reshuffle_units (basic_block loop)
if (!get_unit_operand_masks (insn, &mask1, &mask2))
continue;
- info = &VEC_index (insn_rr_info, insn_rr, INSN_UID (insn));
+ info = &insn_rr[INSN_UID (insn)];
if (info->op_info == NULL)
continue;
@@ -3707,7 +3704,7 @@ insn_set_clock (rtx insn, int cycle)
unsigned uid = INSN_UID (insn);
if (uid >= INSN_INFO_LENGTH)
- VEC_safe_grow (c6x_sched_insn_info, heap, insn_info, uid * 5 / 4 + 10);
+ insn_info.safe_grow (uid * 5 / 4 + 10);
INSN_INFO_ENTRY (uid).clock = cycle;
INSN_INFO_ENTRY (uid).new_cond = NULL;
@@ -4361,7 +4358,7 @@ c6x_variable_issue (FILE *dump ATTRIBUTE_UNUSED,
ss.last_scheduled_iter0 = insn;
if (GET_CODE (PATTERN (insn)) != USE && GET_CODE (PATTERN (insn)) != CLOBBER)
ss.issued_this_cycle++;
- if (insn_info)
+ if (insn_info.exists ())
{
state_t st_after = alloca (dfa_state_size);
int curr_clock = ss.curr_sched_clock;
@@ -5537,7 +5534,7 @@ hwloop_optimize (hwloop_info loop)
gcc_assert (loop->incoming_dest == loop->head);
entry_edge = NULL;
- FOR_EACH_VEC_ELT (edge, loop->incoming, i, entry_edge)
+ FOR_EACH_VEC_SAFE_ELT (loop->incoming, i, entry_edge)
if (entry_edge->flags & EDGE_FALLTHRU)
break;
if (entry_edge == NULL)
@@ -5777,7 +5774,7 @@ hwloop_optimize (hwloop_info loop)
seq = get_insns ();
- if (!single_succ_p (entry_bb) || VEC_length (edge, loop->incoming) > 1)
+ if (!single_succ_p (entry_bb) || vec_safe_length (loop->incoming) > 1)
{
basic_block new_bb;
edge e;
@@ -5809,7 +5806,7 @@ hwloop_optimize (hwloop_info loop)
end_sequence ();
/* Make sure we don't try to schedule this loop again. */
- for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
+ for (ix = 0; loop->blocks.iterate (ix, &bb); ix++)
bb->flags |= BB_DISABLE_SCHEDULE;
return true;
@@ -5928,7 +5925,7 @@ c6x_reorg (void)
{
int sz = get_max_uid () * 3 / 2 + 1;
- insn_info = VEC_alloc (c6x_sched_insn_info, heap, sz);
+ insn_info.create (sz);
}
/* Make sure the real-jump insns we create are not deleted. When modulo-
@@ -5993,9 +5990,7 @@ c6x_function_end (FILE *file, const char *fname)
{
c6x_output_fn_unwind (file);
- if (insn_info)
- VEC_free (c6x_sched_insn_info, heap, insn_info);
- insn_info = NULL;
+ insn_info.release ();
if (!flag_inhibit_size_directive)
ASM_OUTPUT_MEASURED_SIZE (file, fname);
diff --git a/gcc/config/darwin.c b/gcc/config/darwin.c
index e56205b5113..6645270a380 100644
--- a/gcc/config/darwin.c
+++ b/gcc/config/darwin.c
@@ -1791,10 +1791,8 @@ static unsigned int lto_section_num = 0;
typedef struct GTY (()) darwin_lto_section_e {
const char *sectname;
} darwin_lto_section_e ;
-DEF_VEC_O(darwin_lto_section_e);
-DEF_VEC_ALLOC_O(darwin_lto_section_e, gc);
-static GTY (()) VEC (darwin_lto_section_e, gc) * lto_section_names;
+static GTY (()) vec<darwin_lto_section_e, va_gc> *lto_section_names;
/* Segment for LTO data. */
#define LTO_SEGMENT_NAME "__GNU_LTO"
@@ -1877,8 +1875,8 @@ darwin_asm_named_section (const char *name,
TODO: check that we do not revisit sections, that would break
the assumption of how this is done. */
if (lto_section_names == NULL)
- lto_section_names = VEC_alloc (darwin_lto_section_e, gc, 16);
- VEC_safe_push (darwin_lto_section_e, gc, lto_section_names, e);
+ vec_alloc (lto_section_names, 16);
+ vec_safe_push (lto_section_names, e);
}
else if (strncmp (name, "__DWARF,", 8) == 0)
darwin_asm_dwarf_section (name, flags, decl);
@@ -2635,7 +2633,7 @@ darwin_assemble_visibility (tree decl, int vis)
"not supported in this configuration; ignored");
}
-/* VEC Used by darwin_asm_dwarf_section.
+/* vec used by darwin_asm_dwarf_section.
Maybe a hash tab would be better here - but the intention is that this is
a very short list (fewer than 16 items) and each entry should (ideally,
eventually) only be presented once.
@@ -2648,11 +2646,9 @@ typedef struct GTY(()) dwarf_sect_used_entry {
}
dwarf_sect_used_entry;
-DEF_VEC_O(dwarf_sect_used_entry);
-DEF_VEC_ALLOC_O(dwarf_sect_used_entry, gc);
/* A list of used __DWARF sections. */
-static GTY (()) VEC (dwarf_sect_used_entry, gc) * dwarf_sect_names_table;
+static GTY (()) vec<dwarf_sect_used_entry, va_gc> *dwarf_sect_names_table;
/* This is called when we are asked to assemble a named section and the
name begins with __DWARF,. We keep a list of the section names (without
@@ -2675,10 +2671,10 @@ darwin_asm_dwarf_section (const char *name, unsigned int flags,
namelen = strchr (sname, ',') - sname;
gcc_assert (namelen);
if (dwarf_sect_names_table == NULL)
- dwarf_sect_names_table = VEC_alloc (dwarf_sect_used_entry, gc, 16);
+ vec_alloc (dwarf_sect_names_table, 16);
else
for (i = 0;
- VEC_iterate (dwarf_sect_used_entry, dwarf_sect_names_table, i, ref);
+ dwarf_sect_names_table->iterate (i, &ref);
i++)
{
if (!ref)
@@ -2698,7 +2694,7 @@ darwin_asm_dwarf_section (const char *name, unsigned int flags,
fprintf (asm_out_file, "Lsection%.*s:\n", namelen, sname);
e.count = 1;
e.name = xstrdup (sname);
- VEC_safe_push (dwarf_sect_used_entry, gc, dwarf_sect_names_table, e);
+ vec_safe_push (dwarf_sect_names_table, e);
}
}
@@ -2813,7 +2809,7 @@ darwin_file_end (void)
}
/* Output the names and indices. */
- if (lto_section_names && VEC_length (darwin_lto_section_e, lto_section_names))
+ if (lto_section_names && lto_section_names->length ())
{
int count;
darwin_lto_section_e *ref;
@@ -2824,7 +2820,7 @@ darwin_file_end (void)
/* Emit the names. */
fprintf (asm_out_file, "\t.section %s,%s,regular,debug\n",
LTO_SEGMENT_NAME, LTO_NAMES_SECTION);
- FOR_EACH_VEC_ELT (darwin_lto_section_e, lto_section_names, count, ref)
+ FOR_EACH_VEC_ELT (*lto_section_names, count, ref)
{
fprintf (asm_out_file, "L_GNU_LTO_NAME%d:\n", count);
/* We have to jump through hoops to get the values of the intra-section
@@ -2847,7 +2843,7 @@ darwin_file_end (void)
fputs ("\t.align\t2\n", asm_out_file);
fputs ("# Section offset, Section length, Name offset, Name length\n",
asm_out_file);
- FOR_EACH_VEC_ELT (darwin_lto_section_e, lto_section_names, count, ref)
+ FOR_EACH_VEC_ELT (*lto_section_names, count, ref)
{
fprintf (asm_out_file, "%s L$gnu$lto$offs%d\t;# %s\n",
op, count, ref->sectname);
@@ -3338,7 +3334,7 @@ darwin_build_constant_cfstring (tree str)
if (!desc)
{
tree var, constructor, field;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
int length = TREE_STRING_LENGTH (str) - 1;
if (darwin_warn_nonportable_cfstrings)
diff --git a/gcc/config/epiphany/resolve-sw-modes.c b/gcc/config/epiphany/resolve-sw-modes.c
index ec0dfcc00ca..97045537497 100644
--- a/gcc/config/epiphany/resolve-sw-modes.c
+++ b/gcc/config/epiphany/resolve-sw-modes.c
@@ -56,12 +56,12 @@ resolve_sw_modes (void)
{
basic_block bb;
rtx insn, src;
- VEC (basic_block, heap) *todo;
+ vec<basic_block> todo;
sbitmap pushed;
bool need_commit = false;
bool finalize_fp_sets = (MACHINE_FUNCTION (cfun)->unknown_mode_sets == 0);
- todo = VEC_alloc (basic_block, heap, last_basic_block);
+ todo.create (last_basic_block);
pushed = sbitmap_alloc (last_basic_block);
bitmap_clear (pushed);
if (!finalize_fp_sets)
@@ -98,7 +98,7 @@ resolve_sw_modes (void)
checking the total frequency of the affected edges. */
selected_mode = (enum attr_fp_mode) epiphany_normal_fp_rounding;
- VEC_quick_push (basic_block, todo, bb);
+ todo.quick_push (bb);
bitmap_set_bit (pushed, bb->index);
}
XVECEXP (XEXP (src, 0), 0, 0) = GEN_INT (selected_mode);
@@ -106,9 +106,9 @@ resolve_sw_modes (void)
SET_SRC (XVECEXP (PATTERN (insn), 0, 2)) = copy_rtx (src);
df_insn_rescan (insn);
}
- while (VEC_length (basic_block, todo))
+ while (todo.length ())
{
- basic_block bb = VEC_pop (basic_block, todo);
+ basic_block bb = todo.pop ();
int selected_reg, jilted_reg;
enum attr_fp_mode jilted_mode;
edge e;
@@ -141,7 +141,7 @@ resolve_sw_modes (void)
{
if (bitmap_bit_p (pushed, succ->index))
continue;
- VEC_quick_push (basic_block, todo, succ);
+ todo.quick_push (succ);
bitmap_set_bit (pushed, bb->index);
continue;
}
@@ -154,7 +154,7 @@ resolve_sw_modes (void)
insert_insn_on_edge (seq, e);
}
}
- VEC_free (basic_block, heap, todo);
+ todo.release ();
sbitmap_free (pushed);
if (need_commit)
commit_edge_insertions ();
diff --git a/gcc/config/i386/i386.c b/gcc/config/i386/i386.c
index 03c6675ba14..4bc105dc856 100644
--- a/gcc/config/i386/i386.c
+++ b/gcc/config/i386/i386.c
@@ -28737,7 +28737,7 @@ dispatch_function_versions (tree dispatch_decl,
gimple_seq gseq;
int ix;
tree ele;
- VEC (tree, heap) *fndecls;
+ vec<tree> *fndecls;
unsigned int num_versions = 0;
unsigned int actual_versions = 0;
unsigned int i;
@@ -28754,17 +28754,17 @@ dispatch_function_versions (tree dispatch_decl,
&& empty_bb != NULL);
/*fndecls_p is actually a vector. */
- fndecls = (VEC (tree, heap) *)fndecls_p;
+ fndecls = static_cast<vec<tree> *> (fndecls_p);
/* At least one more version other than the default. */
- num_versions = VEC_length (tree, fndecls);
+ num_versions = fndecls->length ();
gcc_assert (num_versions >= 2);
function_version_info = (struct _function_version_info *)
XNEWVEC (struct _function_version_info, (num_versions - 1));
/* The first version in the vector is the default decl. */
- default_decl = VEC_index (tree, fndecls, 0);
+ default_decl = (*fndecls)[0];
push_cfun (DECL_STRUCT_FUNCTION (dispatch_decl));
@@ -28772,7 +28772,7 @@ dispatch_function_versions (tree dispatch_decl,
/* Function version dispatch is via IFUNC. IFUNC resolvers fire before
constructors, so explicity call __builtin_cpu_init here. */
ifunc_cpu_init_stmt = gimple_build_call_vec (
- ix86_builtins [(int) IX86_BUILTIN_CPU_INIT], NULL);
+ ix86_builtins [(int) IX86_BUILTIN_CPU_INIT], vec<tree>());
gimple_seq_add_stmt (&gseq, ifunc_cpu_init_stmt);
gimple_set_bb (ifunc_cpu_init_stmt, *empty_bb);
set_bb_seq (*empty_bb, gseq);
@@ -28780,7 +28780,7 @@ dispatch_function_versions (tree dispatch_decl,
pop_cfun ();
- for (ix = 1; VEC_iterate (tree, fndecls, ix, ele); ++ix)
+ for (ix = 1; fndecls->iterate (ix, &ele); ++ix)
{
tree version_decl = ele;
tree predicate_chain = NULL_TREE;
@@ -29276,7 +29276,7 @@ ix86_generate_version_dispatcher_body (void *node_p)
{
tree resolver_decl;
basic_block empty_bb;
- VEC (tree, heap) *fn_ver_vec = NULL;
+ vec<tree> fn_ver_vec = vec<tree>();
tree default_ver_decl;
struct cgraph_node *versn;
struct cgraph_node *node;
@@ -29306,7 +29306,7 @@ ix86_generate_version_dispatcher_body (void *node_p)
push_cfun (DECL_STRUCT_FUNCTION (resolver_decl));
- fn_ver_vec = VEC_alloc (tree, heap, 2);
+ fn_ver_vec.create (2);
for (versn_info = node_version_info->next; versn_info;
versn_info = versn_info->next)
@@ -29320,10 +29320,10 @@ ix86_generate_version_dispatcher_body (void *node_p)
if (DECL_VINDEX (versn->symbol.decl))
error_at (DECL_SOURCE_LOCATION (versn->symbol.decl),
"Virtual function multiversioning not supported");
- VEC_safe_push (tree, heap, fn_ver_vec, versn->symbol.decl);
+ fn_ver_vec.safe_push (versn->symbol.decl);
}
- dispatch_function_versions (resolver_decl, fn_ver_vec, &empty_bb);
+ dispatch_function_versions (resolver_decl, &fn_ver_vec, &empty_bb);
rebuild_cgraph_edges ();
pop_cfun ();
diff --git a/gcc/config/ia64/ia64.c b/gcc/config/ia64/ia64.c
index 66f85dede86..9a7528f346a 100644
--- a/gcc/config/ia64/ia64.c
+++ b/gcc/config/ia64/ia64.c
@@ -5937,21 +5937,22 @@ ia64_option_override (void)
{
unsigned int i;
cl_deferred_option *opt;
- VEC(cl_deferred_option,heap) *vec
- = (VEC(cl_deferred_option,heap) *) ia64_deferred_options;
+ vec<cl_deferred_option> *v
+ = (vec<cl_deferred_option> *) ia64_deferred_options;
- FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
- {
- switch (opt->opt_index)
- {
- case OPT_mfixed_range_:
- fix_range (opt->arg);
- break;
+ if (v)
+ FOR_EACH_VEC_ELT (*v, i, opt)
+ {
+ switch (opt->opt_index)
+ {
+ case OPT_mfixed_range_:
+ fix_range (opt->arg);
+ break;
- default:
- gcc_unreachable ();
- }
- }
+ default:
+ gcc_unreachable ();
+ }
+ }
if (TARGET_AUTO_PIC)
target_flags |= MASK_CONST_GP;
diff --git a/gcc/config/mep/mep.c b/gcc/config/mep/mep.c
index 79611a8fdb3..a44e521594e 100644
--- a/gcc/config/mep/mep.c
+++ b/gcc/config/mep/mep.c
@@ -300,54 +300,54 @@ mep_option_override (void)
unsigned int i;
int j;
cl_deferred_option *opt;
- VEC(cl_deferred_option,heap) *vec
- = (VEC(cl_deferred_option,heap) *) mep_deferred_options;
+ vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) mep_deferred_options;
- FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
- {
- switch (opt->opt_index)
- {
- case OPT_mivc2:
- for (j = 0; j < 32; j++)
- fixed_regs[j + 48] = 0;
- for (j = 0; j < 32; j++)
- call_used_regs[j + 48] = 1;
- for (j = 6; j < 8; j++)
- call_used_regs[j + 48] = 0;
+ if (v)
+ FOR_EACH_VEC_ELT (*v, i, opt)
+ {
+ switch (opt->opt_index)
+ {
+ case OPT_mivc2:
+ for (j = 0; j < 32; j++)
+ fixed_regs[j + 48] = 0;
+ for (j = 0; j < 32; j++)
+ call_used_regs[j + 48] = 1;
+ for (j = 6; j < 8; j++)
+ call_used_regs[j + 48] = 0;
#define RN(n,s) reg_names[FIRST_CCR_REGNO + n] = s
- RN (0, "$csar0");
- RN (1, "$cc");
- RN (4, "$cofr0");
- RN (5, "$cofr1");
- RN (6, "$cofa0");
- RN (7, "$cofa1");
- RN (15, "$csar1");
-
- RN (16, "$acc0_0");
- RN (17, "$acc0_1");
- RN (18, "$acc0_2");
- RN (19, "$acc0_3");
- RN (20, "$acc0_4");
- RN (21, "$acc0_5");
- RN (22, "$acc0_6");
- RN (23, "$acc0_7");
-
- RN (24, "$acc1_0");
- RN (25, "$acc1_1");
- RN (26, "$acc1_2");
- RN (27, "$acc1_3");
- RN (28, "$acc1_4");
- RN (29, "$acc1_5");
- RN (30, "$acc1_6");
- RN (31, "$acc1_7");
+ RN (0, "$csar0");
+ RN (1, "$cc");
+ RN (4, "$cofr0");
+ RN (5, "$cofr1");
+ RN (6, "$cofa0");
+ RN (7, "$cofa1");
+ RN (15, "$csar1");
+
+ RN (16, "$acc0_0");
+ RN (17, "$acc0_1");
+ RN (18, "$acc0_2");
+ RN (19, "$acc0_3");
+ RN (20, "$acc0_4");
+ RN (21, "$acc0_5");
+ RN (22, "$acc0_6");
+ RN (23, "$acc0_7");
+
+ RN (24, "$acc1_0");
+ RN (25, "$acc1_1");
+ RN (26, "$acc1_2");
+ RN (27, "$acc1_3");
+ RN (28, "$acc1_4");
+ RN (29, "$acc1_5");
+ RN (30, "$acc1_6");
+ RN (31, "$acc1_7");
#undef RN
- break;
+ break;
- default:
- gcc_unreachable ();
- }
- }
+ default:
+ gcc_unreachable ();
+ }
+ }
if (flag_pic == 1)
warning (OPT_fpic, "-fpic is not supported");
diff --git a/gcc/config/mips/mips.c b/gcc/config/mips/mips.c
index 01f6b6176e1..78c5a884ac7 100644
--- a/gcc/config/mips/mips.c
+++ b/gcc/config/mips/mips.c
@@ -4012,12 +4012,8 @@ struct mips_multi_member {
};
typedef struct mips_multi_member mips_multi_member;
-/* Vector definitions for the above. */
-DEF_VEC_O(mips_multi_member);
-DEF_VEC_ALLOC_O(mips_multi_member, heap);
-
/* The instructions that make up the current multi-insn sequence. */
-static VEC (mips_multi_member, heap) *mips_multi_members;
+static vec<mips_multi_member> mips_multi_members;
/* How many instructions (as opposed to labels) are in the current
multi-insn sequence. */
@@ -4028,7 +4024,7 @@ static unsigned int mips_multi_num_insns;
static void
mips_multi_start (void)
{
- VEC_truncate (mips_multi_member, mips_multi_members, 0);
+ mips_multi_members.truncate (0);
mips_multi_num_insns = 0;
}
@@ -4038,7 +4034,7 @@ static struct mips_multi_member *
mips_multi_add (void)
{
mips_multi_member empty;
- return VEC_safe_push (mips_multi_member, heap, mips_multi_members, empty);
+ return mips_multi_members.safe_push (empty);
}
/* Add a normal insn with the given asm format to the current multi-insn
@@ -4081,7 +4077,7 @@ mips_multi_add_label (const char *label)
static unsigned int
mips_multi_last_index (void)
{
- return VEC_length (mips_multi_member, mips_multi_members) - 1;
+ return mips_multi_members.length () - 1;
}
/* Add a copy of an existing instruction to the current multi-insn
@@ -4093,8 +4089,7 @@ mips_multi_copy_insn (unsigned int i)
struct mips_multi_member *member;
member = mips_multi_add ();
- memcpy (member, &VEC_index (mips_multi_member, mips_multi_members, i),
- sizeof (*member));
+ memcpy (member, &mips_multi_members[i], sizeof (*member));
gcc_assert (!member->is_label_p);
}
@@ -4105,7 +4100,7 @@ mips_multi_copy_insn (unsigned int i)
static void
mips_multi_set_operand (unsigned int i, unsigned int op, rtx x)
{
- VEC_index (mips_multi_member, mips_multi_members, i).operands[op] = x;
+ mips_multi_members[i].operands[op] = x;
}
/* Write out the asm code for the current multi-insn sequence. */
@@ -4116,7 +4111,7 @@ mips_multi_write (void)
struct mips_multi_member *member;
unsigned int i;
- FOR_EACH_VEC_ELT (mips_multi_member, mips_multi_members, i, member)
+ FOR_EACH_VEC_ELT (mips_multi_members, i, member)
if (member->is_label_p)
fprintf (asm_out_file, "%s\n", member->format);
else
diff --git a/gcc/config/pa/pa.c b/gcc/config/pa/pa.c
index afcfd1a8d22..8b93aa9e5ad 100644
--- a/gcc/config/pa/pa.c
+++ b/gcc/config/pa/pa.c
@@ -469,21 +469,22 @@ pa_option_override (void)
{
unsigned int i;
cl_deferred_option *opt;
- VEC(cl_deferred_option,heap) *vec
- = (VEC(cl_deferred_option,heap) *) pa_deferred_options;
+ vec<cl_deferred_option> *v
+ = (vec<cl_deferred_option> *) pa_deferred_options;
- FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
- {
- switch (opt->opt_index)
- {
- case OPT_mfixed_range_:
- fix_range (opt->arg);
- break;
+ if (v)
+ FOR_EACH_VEC_ELT (*v, i, opt)
+ {
+ switch (opt->opt_index)
+ {
+ case OPT_mfixed_range_:
+ fix_range (opt->arg);
+ break;
- default:
- gcc_unreachable ();
- }
- }
+ default:
+ gcc_unreachable ();
+ }
+ }
/* Unconditional branches in the delay slot are not compatible with dwarf2
call frame information. There is no benefit in using this optimization
@@ -4401,7 +4402,7 @@ hppa_pic_save_rtx (void)
/* Vector of funcdef numbers. */
-static VEC(int,heap) *funcdef_nos;
+static vec<int> funcdef_nos;
/* Output deferred profile counters. */
static void
@@ -4410,20 +4411,20 @@ output_deferred_profile_counters (void)
unsigned int i;
int align, n;
- if (VEC_empty (int, funcdef_nos))
+ if (funcdef_nos.is_empty ())
return;
switch_to_section (data_section);
align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
- for (i = 0; VEC_iterate (int, funcdef_nos, i, n); i++)
+ for (i = 0; funcdef_nos.iterate (i, &n); i++)
{
targetm.asm_out.internal_label (asm_out_file, "LP", n);
assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
}
- VEC_free (int, heap, funcdef_nos);
+ funcdef_nos.release ();
}
void
@@ -4465,7 +4466,7 @@ hppa_profile_hook (int label_no)
rtx count_label_rtx, addr, r24;
char count_label_name[16];
- VEC_safe_push (int, heap, funcdef_nos, label_no);
+ funcdef_nos.safe_push (label_no);
ASM_GENERATE_INTERNAL_LABEL (count_label_name, "LP", label_no);
count_label_rtx = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (count_label_name));
@@ -9948,11 +9949,9 @@ typedef struct GTY(()) extern_symbol
} extern_symbol;
/* Define gc'd vector type for extern_symbol. */
-DEF_VEC_O(extern_symbol);
-DEF_VEC_ALLOC_O(extern_symbol,gc);
/* Vector of extern_symbol pointers. */
-static GTY(()) VEC(extern_symbol,gc) *extern_symbols;
+static GTY(()) vec<extern_symbol, va_gc> *extern_symbols;
#ifdef ASM_OUTPUT_EXTERNAL_REAL
/* Mark DECL (name NAME) as an external reference (assembler output
@@ -9964,7 +9963,7 @@ pa_hpux_asm_output_external (FILE *file, tree decl, const char *name)
{
gcc_assert (file == asm_out_file);
extern_symbol p = {decl, name};
- VEC_safe_push (extern_symbol, gc, extern_symbols, p);
+ vec_safe_push (extern_symbols, p);
}
/* Output text required at the end of an assembler file.
@@ -9982,7 +9981,7 @@ pa_hpux_file_end (void)
output_deferred_plabels ();
- for (i = 0; VEC_iterate (extern_symbol, extern_symbols, i, p); i++)
+ for (i = 0; vec_safe_iterate (extern_symbols, i, &p); i++)
{
tree decl = p->decl;
@@ -9991,7 +9990,7 @@ pa_hpux_file_end (void)
ASM_OUTPUT_EXTERNAL_REAL (asm_out_file, decl, p->name);
}
- VEC_free (extern_symbol, gc, extern_symbols);
+ vec_free (extern_symbols);
}
#endif
diff --git a/gcc/config/rs6000/rs6000-c.c b/gcc/config/rs6000/rs6000-c.c
index 295015f6fd0..79da7a62db3 100644
--- a/gcc/config/rs6000/rs6000-c.c
+++ b/gcc/config/rs6000/rs6000-c.c
@@ -3505,8 +3505,8 @@ tree
altivec_resolve_overloaded_builtin (location_t loc, tree fndecl,
void *passed_arglist)
{
- VEC(tree,gc) *arglist = (VEC(tree,gc) *) passed_arglist;
- unsigned int nargs = VEC_length (tree, arglist);
+ vec<tree, va_gc> *arglist = static_cast<vec<tree, va_gc> *> (passed_arglist);
+ unsigned int nargs = vec_safe_length (arglist);
enum rs6000_builtins fcode
= (enum rs6000_builtins)DECL_FUNCTION_CODE (fndecl);
tree fnargs = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
@@ -3529,7 +3529,7 @@ altivec_resolve_overloaded_builtin (location_t loc, tree fndecl,
int size;
int i;
bool unsigned_p;
- VEC(constructor_elt,gc) *vec;
+ vec<constructor_elt, va_gc> *vec;
const char *name = fcode == ALTIVEC_BUILTIN_VEC_SPLATS ? "vec_splats": "vec_promote";
if (nargs == 0)
@@ -3549,10 +3549,10 @@ altivec_resolve_overloaded_builtin (location_t loc, tree fndecl,
}
/* Ignore promote's element argument. */
if (fcode == ALTIVEC_BUILTIN_VEC_PROMOTE
- && !INTEGRAL_TYPE_P (TREE_TYPE (VEC_index (tree, arglist, 1))))
+ && !INTEGRAL_TYPE_P (TREE_TYPE ((*arglist)[1])))
goto bad;
- arg = VEC_index (tree, arglist, 0);
+ arg = (*arglist)[0];
type = TREE_TYPE (arg);
if (!SCALAR_FLOAT_TYPE_P (type)
&& !INTEGRAL_TYPE_P (type))
@@ -3582,11 +3582,11 @@ altivec_resolve_overloaded_builtin (location_t loc, tree fndecl,
goto bad;
}
arg = save_expr (fold_convert (TREE_TYPE (type), arg));
- vec = VEC_alloc (constructor_elt, gc, size);
+ vec_alloc (vec, size);
for(i = 0; i < size; i++)
{
constructor_elt elt = {NULL_TREE, arg};
- VEC_quick_push (constructor_elt, vec, elt);
+ vec->quick_push (elt);
}
return build_constructor (type, vec);
}
@@ -3610,8 +3610,8 @@ altivec_resolve_overloaded_builtin (location_t loc, tree fndecl,
return error_mark_node;
}
- arg2 = VEC_index (tree, arglist, 1);
- arg1 = VEC_index (tree, arglist, 0);
+ arg2 = (*arglist)[1];
+ arg1 = (*arglist)[0];
arg1_type = TREE_TYPE (arg1);
if (TREE_CODE (arg1_type) != VECTOR_TYPE)
@@ -3686,10 +3686,10 @@ altivec_resolve_overloaded_builtin (location_t loc, tree fndecl,
return error_mark_node;
}
- arg0 = VEC_index (tree, arglist, 0);
- arg1 = VEC_index (tree, arglist, 1);
+ arg0 = (*arglist)[0];
+ arg1 = (*arglist)[1];
arg1_type = TREE_TYPE (arg1);
- arg2 = VEC_index (tree, arglist, 2);
+ arg2 = (*arglist)[2];
if (TREE_CODE (arg1_type) != VECTOR_TYPE)
goto bad;
@@ -3752,7 +3752,7 @@ altivec_resolve_overloaded_builtin (location_t loc, tree fndecl,
fnargs = TREE_CHAIN (fnargs), n++)
{
tree decl_type = TREE_VALUE (fnargs);
- tree arg = VEC_index (tree, arglist, n);
+ tree arg = (*arglist)[n];
tree type;
if (arg == error_mark_node)
diff --git a/gcc/config/rs6000/rs6000.c b/gcc/config/rs6000/rs6000.c
index b37bca935a8..4bde3201a54 100644
--- a/gcc/config/rs6000/rs6000.c
+++ b/gcc/config/rs6000/rs6000.c
@@ -24928,10 +24928,8 @@ typedef struct branch_island_d {
int line_number;
} branch_island;
-DEF_VEC_O(branch_island);
-DEF_VEC_ALLOC_O(branch_island,gc);
-static VEC(branch_island,gc) *branch_islands;
+static vec<branch_island, va_gc> *branch_islands;
/* Remember to generate a branch island for far calls to the given
function. */
@@ -24941,7 +24939,7 @@ add_compiler_branch_island (tree label_name, tree function_name,
int line_number)
{
branch_island bi = {function_name, label_name, line_number};
- VEC_safe_push (branch_island, gc, branch_islands, bi);
+ vec_safe_push (branch_islands, bi);
}
/* Generate far-jump branch islands for everything recorded in
@@ -24955,9 +24953,9 @@ macho_branch_islands (void)
{
char tmp_buf[512];
- while (!VEC_empty (branch_island, branch_islands))
+ while (!vec_safe_is_empty (branch_islands))
{
- branch_island *bi = &VEC_last (branch_island, branch_islands);
+ branch_island *bi = &branch_islands->last ();
const char *label = IDENTIFIER_POINTER (bi->label_name);
const char *name = IDENTIFIER_POINTER (bi->function_name);
char name_buf[512];
@@ -25025,7 +25023,7 @@ macho_branch_islands (void)
if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
dbxout_stabd (N_SLINE, bi->line_number);
#endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
- VEC_pop (branch_island, branch_islands);
+ branch_islands->pop ();
}
}
@@ -25038,7 +25036,7 @@ no_previous_def (tree function_name)
branch_island *bi;
unsigned ix;
- FOR_EACH_VEC_ELT (branch_island, branch_islands, ix, bi)
+ FOR_EACH_VEC_SAFE_ELT (branch_islands, ix, bi)
if (function_name == bi->function_name)
return 0;
return 1;
@@ -25053,7 +25051,7 @@ get_prev_label (tree function_name)
branch_island *bi;
unsigned ix;
- FOR_EACH_VEC_ELT (branch_island, branch_islands, ix, bi)
+ FOR_EACH_VEC_SAFE_ELT (branch_islands, ix, bi)
if (function_name == bi->function_name)
return bi->label_name;
return NULL_TREE;
diff --git a/gcc/config/rx/rx.c b/gcc/config/rx/rx.c
index 5d31eac9738..8cd9253e4d5 100644
--- a/gcc/config/rx/rx.c
+++ b/gcc/config/rx/rx.c
@@ -2614,43 +2614,43 @@ rx_option_override (void)
{
unsigned int i;
cl_deferred_option *opt;
- VEC(cl_deferred_option,heap) *vec
- = (VEC(cl_deferred_option,heap) *) rx_deferred_options;
+ vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
- FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
- {
- switch (opt->opt_index)
- {
- case OPT_mint_register_:
- switch (opt->value)
- {
- case 4:
- fixed_regs[10] = call_used_regs [10] = 1;
- /* Fall through. */
- case 3:
- fixed_regs[11] = call_used_regs [11] = 1;
- /* Fall through. */
- case 2:
- fixed_regs[12] = call_used_regs [12] = 1;
- /* Fall through. */
- case 1:
- fixed_regs[13] = call_used_regs [13] = 1;
- /* Fall through. */
- case 0:
- rx_num_interrupt_regs = opt->value;
- break;
- default:
- rx_num_interrupt_regs = 0;
- /* Error message already given because rx_handle_option
- returned false. */
- break;
- }
- break;
+ if (v)
+ FOR_EACH_VEC_ELT (*v, i, opt)
+ {
+ switch (opt->opt_index)
+ {
+ case OPT_mint_register_:
+ switch (opt->value)
+ {
+ case 4:
+ fixed_regs[10] = call_used_regs [10] = 1;
+ /* Fall through. */
+ case 3:
+ fixed_regs[11] = call_used_regs [11] = 1;
+ /* Fall through. */
+ case 2:
+ fixed_regs[12] = call_used_regs [12] = 1;
+ /* Fall through. */
+ case 1:
+ fixed_regs[13] = call_used_regs [13] = 1;
+ /* Fall through. */
+ case 0:
+ rx_num_interrupt_regs = opt->value;
+ break;
+ default:
+ rx_num_interrupt_regs = 0;
+ /* Error message already given because rx_handle_option
+ returned false. */
+ break;
+ }
+ break;
- default:
- gcc_unreachable ();
- }
- }
+ default:
+ gcc_unreachable ();
+ }
+ }
/* This target defaults to strict volatile bitfields. */
if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
diff --git a/gcc/config/spu/spu-c.c b/gcc/config/spu/spu-c.c
index 905030d5e4f..6fa8d17e467 100644
--- a/gcc/config/spu/spu-c.c
+++ b/gcc/config/spu/spu-c.c
@@ -93,8 +93,8 @@ spu_resolve_overloaded_builtin (location_t loc, tree fndecl, void *passed_args)
#define SCALAR_TYPE_P(t) (INTEGRAL_TYPE_P (t) \
|| SCALAR_FLOAT_TYPE_P (t) \
|| POINTER_TYPE_P (t))
- VEC(tree,gc) *fnargs = (VEC(tree,gc) *) passed_args;
- unsigned int nargs = VEC_length (tree, fnargs);
+ vec<tree, va_gc> *fnargs = static_cast <vec<tree, va_gc> *> (passed_args);
+ unsigned int nargs = vec_safe_length (fnargs);
int new_fcode, fcode = DECL_FUNCTION_CODE (fndecl);
struct spu_builtin_description *desc;
tree match = NULL_TREE;
@@ -137,7 +137,7 @@ spu_resolve_overloaded_builtin (location_t loc, tree fndecl, void *passed_args)
return error_mark_node;
}
- var = VEC_index (tree, fnargs, p);
+ var = (*fnargs)[p];
if (TREE_CODE (var) == NON_LVALUE_EXPR)
var = TREE_OPERAND (var, 0);
diff --git a/gcc/config/vms/vms.c b/gcc/config/vms/vms.c
index d23e8a8456a..65bf42acc5f 100644
--- a/gcc/config/vms/vms.c
+++ b/gcc/config/vms/vms.c
@@ -101,7 +101,7 @@ static const struct vms_crtl_name vms_crtl_names[] =
/* List of aliased identifiers. They must be persistent across gc. */
-static GTY(()) VEC(tree,gc) *aliases_id;
+static GTY(()) vec<tree, va_gc> *aliases_id;
/* Add a CRTL translation. This simply use the transparent alias
mechanism, which is platform independent and works with the
@@ -120,7 +120,7 @@ vms_add_crtl_xlat (const char *name, size_t nlen,
IDENTIFIER_TRANSPARENT_ALIAS (targ) = 1;
TREE_CHAIN (targ) = get_identifier_with_length (id_str, id_len);
- VEC_safe_push (tree, gc, aliases_id, targ);
+ vec_safe_push (aliases_id, targ);
}
/* Do VMS specific stuff on builtins: disable the ones that are not
diff --git a/gcc/config/vxworks.c b/gcc/config/vxworks.c
index 9eeefc0096f..e3059044d8d 100644
--- a/gcc/config/vxworks.c
+++ b/gcc/config/vxworks.c
@@ -96,23 +96,24 @@ vxworks_emutls_var_fields (tree type, tree *name)
static tree
vxworks_emutls_var_init (tree var, tree decl, tree tmpl_addr)
{
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 3);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, 3);
tree type = TREE_TYPE (var);
tree field = TYPE_FIELDS (type);
constructor_elt elt = {field, fold_convert (TREE_TYPE (field), tmpl_addr)};
- VEC_quick_push (constructor_elt, v, elt);
+ v->quick_push (elt);
field = DECL_CHAIN (field);
elt.index = field;
elt.value = build_int_cst (TREE_TYPE (field), 0);
- VEC_quick_push (constructor_elt, v, elt);
+ v->quick_push (elt);
field = DECL_CHAIN (field);
elt.index = field;
elt.value = fold_convert (TREE_TYPE (field), DECL_SIZE_UNIT (decl));
- VEC_quick_push (constructor_elt, v, elt);
+ v->quick_push (elt);
return build_constructor (type, v);
}
diff --git a/gcc/coretypes.h b/gcc/coretypes.h
index a2ca9c8327f..3bc2f404559 100644
--- a/gcc/coretypes.h
+++ b/gcc/coretypes.h
@@ -200,5 +200,8 @@ enum memmodel
is a pointer to a pointer, the second a cookie. */
typedef void (*gt_pointer_operator) (void *, void *);
-#endif /* coretypes.h */
+#if !defined (HAVE_UCHAR)
+typedef unsigned char uchar;
+#endif
+#endif /* coretypes.h */
diff --git a/gcc/coverage.c b/gcc/coverage.c
index b634c821396..7581a97f35f 100644
--- a/gcc/coverage.c
+++ b/gcc/coverage.c
@@ -128,9 +128,9 @@ static void build_info_type (tree, tree);
static tree build_fn_info (const struct coverage_data *, tree, tree);
static tree build_info (tree, tree);
static bool coverage_obj_init (void);
-static VEC(constructor_elt,gc) *coverage_obj_fn
-(VEC(constructor_elt,gc) *, tree, struct coverage_data const *);
-static void coverage_obj_finish (VEC(constructor_elt,gc) *);
+static vec<constructor_elt, va_gc> *coverage_obj_fn
+(vec<constructor_elt, va_gc> *, tree, struct coverage_data const *);
+static void coverage_obj_finish (vec<constructor_elt, va_gc> *);
/* Return the type node for gcov_type. */
@@ -764,8 +764,8 @@ build_fn_info (const struct coverage_data *data, tree type, tree key)
tree fields = TYPE_FIELDS (type);
tree ctr_type;
unsigned ix;
- VEC(constructor_elt,gc) *v1 = NULL;
- VEC(constructor_elt,gc) *v2 = NULL;
+ vec<constructor_elt, va_gc> *v1 = NULL;
+ vec<constructor_elt, va_gc> *v2 = NULL;
/* key */
CONSTRUCTOR_APPEND_ELT (v1, fields,
@@ -795,7 +795,7 @@ build_fn_info (const struct coverage_data *data, tree type, tree key)
for (ix = 0; ix != GCOV_COUNTERS; ix++)
if (prg_ctr_mask & (1 << ix))
{
- VEC(constructor_elt,gc) *ctr = NULL;
+ vec<constructor_elt, va_gc> *ctr = NULL;
tree var = data->ctr_vars[ix];
unsigned count = 0;
@@ -898,8 +898,8 @@ build_info (tree info_type, tree fn_ary)
unsigned ix;
tree filename_string;
int da_file_name_len;
- VEC(constructor_elt,gc) *v1 = NULL;
- VEC(constructor_elt,gc) *v2 = NULL;
+ vec<constructor_elt, va_gc> *v1 = NULL;
+ vec<constructor_elt, va_gc> *v2 = NULL;
/* Version ident */
CONSTRUCTOR_APPEND_ELT (v1, info_fields,
@@ -1043,8 +1043,8 @@ coverage_obj_init (void)
/* Generate the coverage function info for FN and DATA. Append a
pointer to that object to CTOR and return the appended CTOR. */
-static VEC(constructor_elt,gc) *
-coverage_obj_fn (VEC(constructor_elt,gc) *ctor, tree fn,
+static vec<constructor_elt, va_gc> *
+coverage_obj_fn (vec<constructor_elt, va_gc> *ctor, tree fn,
struct coverage_data const *data)
{
tree init = build_fn_info (data, gcov_fn_info_type, gcov_info_var);
@@ -1062,9 +1062,9 @@ coverage_obj_fn (VEC(constructor_elt,gc) *ctor, tree fn,
function objects from CTOR. Generate the gcov_info initializer. */
static void
-coverage_obj_finish (VEC(constructor_elt,gc) *ctor)
+coverage_obj_finish (vec<constructor_elt, va_gc> *ctor)
{
- unsigned n_functions = VEC_length(constructor_elt, ctor);
+ unsigned n_functions = vec_safe_length (ctor);
tree fn_info_ary_type = build_array_type
(build_qualified_type (gcov_fn_info_ptr_type, TYPE_QUAL_CONST),
build_index_type (size_int (n_functions - 1)));
@@ -1153,7 +1153,7 @@ coverage_finish (void)
if (coverage_obj_init ())
{
- VEC(constructor_elt,gc) *fn_ctor = NULL;
+ vec<constructor_elt, va_gc> *fn_ctor = NULL;
struct coverage_data *fn;
for (fn = functions_head; fn; fn = fn->next)
diff --git a/gcc/cp/ChangeLog b/gcc/cp/ChangeLog
index 1a71dc969fe..110cdc43688 100644
--- a/gcc/cp/ChangeLog
+++ b/gcc/cp/ChangeLog
@@ -1,3 +1,33 @@
+2012-11-17 Diego Novillo <dnovillo@google.com>
+
+ Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
+
+ * Make-lang.in: Remove dependencies on vecir.h and vecprim.h everywhere.
+ * call.c: Use new vec API in vec.h.
+ * class.c: Likewise.
+ * cp-gimplify.c: Likewise.
+ * cp-tree.h: Likewise.
+ * cvt.c: Likewise.
+ * decl.c: Likewise.
+ * decl2.c: Likewise.
+ * error.c: Likewise.
+ * except.c: Likewise.
+ * init.c: Likewise.
+ * mangle.c: Likewise.
+ * method.c: Likewise.
+ * name-lookup.c: Likewise.
+ * name-lookup.h: Likewise.
+ * parser.c: Likewise.
+ * parser.h: Likewise.
+ * pt.c: Likewise.
+ * repo.c: Likewise.
+ * rtti.c: Likewise.
+ * search.c: Likewise.
+ * semantics.c: Likewise.
+ * tree.c: Likewise.
+ * typeck.c: Likewise.
+ * typeck2.c: Likewise.
+
2012-11-17 Gabriel Dos Reis <gdr@integrable-solutions.net>
* semantics.c (finish_id_expression): Tidy diagnostic message.
diff --git a/gcc/cp/Make-lang.in b/gcc/cp/Make-lang.in
index 812f3cb5c7e..9d92f648bee 100644
--- a/gcc/cp/Make-lang.in
+++ b/gcc/cp/Make-lang.in
@@ -317,7 +317,7 @@ cp/except.o: cp/except.c $(CXX_TREE_H) $(TM_H) $(FLAGS_H) \
cp/cfns.h $(TREE_INLINE_H) $(TARGET_H) gt-cp-except.h
cp/expr.o: cp/expr.c $(CXX_TREE_H) $(TM_H) $(FLAGS_H) $(TM_P_H)
cp/pt.o: cp/pt.c $(CXX_TREE_H) $(TM_H) cp/decl.h cp/cp-objcp-common.h \
- toplev.h $(TREE_INLINE_H) pointer-set.h gt-cp-pt.h vecprim.h intl.h \
+ toplev.h $(TREE_INLINE_H) pointer-set.h gt-cp-pt.h intl.h \
c-family/c-objc.h
cp/error.o: cp/error.c $(CXX_TREE_H) $(TM_H) $(DIAGNOSTIC_H) \
$(FLAGS_H) $(REAL_H) $(LANGHOOKS_DEF_H) $(CXX_PRETTY_PRINT_H) \
diff --git a/gcc/cp/call.c b/gcc/cp/call.c
index 77bd28882d6..35a3f919997 100644
--- a/gcc/cp/call.c
+++ b/gcc/cp/call.c
@@ -169,14 +169,14 @@ static tree build_this (tree);
static struct z_candidate *splice_viable (struct z_candidate *, bool, bool *);
static bool any_strictly_viable (struct z_candidate *);
static struct z_candidate *add_template_candidate
- (struct z_candidate **, tree, tree, tree, tree, const VEC(tree,gc) *,
+ (struct z_candidate **, tree, tree, tree, tree, const vec<tree, va_gc> *,
tree, tree, tree, int, unification_kind_t, tsubst_flags_t);
static struct z_candidate *add_template_candidate_real
- (struct z_candidate **, tree, tree, tree, tree, const VEC(tree,gc) *,
+ (struct z_candidate **, tree, tree, tree, tree, const vec<tree, va_gc> *,
tree, tree, tree, int, tree, unification_kind_t, tsubst_flags_t);
static struct z_candidate *add_template_conv_candidate
- (struct z_candidate **, tree, tree, tree, const VEC(tree,gc) *, tree,
- tree, tree, tsubst_flags_t);
+ (struct z_candidate **, tree, tree, tree, const vec<tree, va_gc> *,
+ tree, tree, tree, tsubst_flags_t);
static void add_builtin_candidates
(struct z_candidate **, enum tree_code, enum tree_code,
tree, tree *, int, tsubst_flags_t);
@@ -188,10 +188,10 @@ static void build_builtin_candidate
(struct z_candidate **, tree, tree, tree, tree *, tree *,
int, tsubst_flags_t);
static struct z_candidate *add_conv_candidate
- (struct z_candidate **, tree, tree, tree, const VEC(tree,gc) *, tree,
+ (struct z_candidate **, tree, tree, tree, const vec<tree, va_gc> *, tree,
tree, tsubst_flags_t);
static struct z_candidate *add_function_candidate
- (struct z_candidate **, tree, tree, tree, const VEC(tree,gc) *, tree,
+ (struct z_candidate **, tree, tree, tree, const vec<tree, va_gc> *, tree,
tree, int, tsubst_flags_t);
static conversion *implicit_conversion (tree, tree, tree, bool, int,
tsubst_flags_t);
@@ -205,7 +205,7 @@ static bool is_subseq (conversion *, conversion *);
static conversion *maybe_handle_ref_bind (conversion **);
static void maybe_handle_implicit_object (conversion **);
static struct z_candidate *add_candidate
- (struct z_candidate **, tree, tree, const VEC(tree,gc) *, size_t,
+ (struct z_candidate **, tree, tree, const vec<tree, va_gc> *, size_t,
conversion **, tree, tree, int, struct rejection_reason *);
static tree source_type (conversion *);
static void add_warning (struct z_candidate *, struct z_candidate *);
@@ -215,8 +215,8 @@ static bool promoted_arithmetic_type_p (tree);
static conversion *conditional_conversion (tree, tree, tsubst_flags_t);
static char *name_as_c_string (tree, tree, bool *);
static tree prep_operand (tree);
-static void add_candidates (tree, tree, const VEC(tree,gc) *, tree, tree, bool,
- tree, tree, int, struct z_candidate **,
+static void add_candidates (tree, tree, const vec<tree, va_gc> *, tree, tree,
+ bool, tree, tree, int, struct z_candidate **,
tsubst_flags_t);
static conversion *merge_conversion_sequences (conversion *, conversion *);
static bool magic_varargs_p (tree);
@@ -511,7 +511,7 @@ struct z_candidate {
/* The rest of the arguments to use when calling this function. If
there are no further arguments this may be NULL or it may be an
empty vector. */
- const VEC(tree,gc) *args;
+ const vec<tree, va_gc> *args;
/* The implicit conversion sequences for each of the arguments to
FN. */
conversion **convs;
@@ -1772,7 +1772,7 @@ implicit_conversion (tree to, tree from, tree expr, bool c_cast_p,
static struct z_candidate *
add_candidate (struct z_candidate **candidates,
- tree fn, tree first_arg, const VEC(tree,gc) *args,
+ tree fn, tree first_arg, const vec<tree, va_gc> *args,
size_t num_convs, conversion **convs,
tree access_path, tree conversion_path,
int viable, struct rejection_reason *reason)
@@ -1822,7 +1822,7 @@ remaining_arguments (tree arg)
static struct z_candidate *
add_function_candidate (struct z_candidate **candidates,
tree fn, tree ctype, tree first_arg,
- const VEC(tree,gc) *args, tree access_path,
+ const vec<tree, va_gc> *args, tree access_path,
tree conversion_path, int flags,
tsubst_flags_t complain)
{
@@ -1855,7 +1855,7 @@ add_function_candidate (struct z_candidate **candidates,
else
skip = 0;
- len = VEC_length (tree, args) - skip + (first_arg != NULL_TREE ? 1 : 0);
+ len = vec_safe_length (args) - skip + (first_arg != NULL_TREE ? 1 : 0);
convs = alloc_conversions (len);
/* 13.3.2 - Viable functions [over.match.viable]
@@ -1927,8 +1927,7 @@ add_function_candidate (struct z_candidate **candidates,
arg = first_arg;
else
arg = CONST_CAST_TREE (
- VEC_index (tree, args,
- i + skip - (first_arg != NULL_TREE ? 1 : 0)));
+ (*args)[i + skip - (first_arg != NULL_TREE ? 1 : 0)]);
argtype = lvalue_type (arg);
is_this = (i == 0 && DECL_NONSTATIC_MEMBER_FUNCTION_P (fn)
@@ -2032,7 +2031,7 @@ add_function_candidate (struct z_candidate **candidates,
static struct z_candidate *
add_conv_candidate (struct z_candidate **candidates, tree fn, tree obj,
- tree first_arg, const VEC(tree,gc) *arglist,
+ tree first_arg, const vec<tree, va_gc> *arglist,
tree access_path, tree conversion_path,
tsubst_flags_t complain)
{
@@ -2046,7 +2045,7 @@ add_conv_candidate (struct z_candidate **candidates, tree fn, tree obj,
parmlist = TREE_TYPE (parmlist);
parmlist = TYPE_ARG_TYPES (parmlist);
- len = VEC_length (tree, arglist) + (first_arg != NULL_TREE ? 1 : 0) + 1;
+ len = vec_safe_length (arglist) + (first_arg != NULL_TREE ? 1 : 0) + 1;
convs = alloc_conversions (len);
parmnode = parmlist;
viable = 1;
@@ -2067,8 +2066,7 @@ add_conv_candidate (struct z_candidate **candidates, tree fn, tree obj,
else if (i == 1 && first_arg != NULL_TREE)
arg = first_arg;
else
- arg = VEC_index (tree, arglist,
- i - (first_arg != NULL_TREE ? 1 : 0) - 1);
+ arg = (*arglist)[i - (first_arg != NULL_TREE ? 1 : 0) - 1];
argtype = lvalue_type (arg);
if (i == 0)
@@ -2676,7 +2674,7 @@ add_builtin_candidates (struct z_candidate **candidates, enum tree_code code,
tree type, argtypes[3], t;
/* TYPES[i] is the set of possible builtin-operator parameter types
we will consider for the Ith argument. */
- VEC(tree,gc) *types[2];
+ vec<tree, va_gc> *types[2];
unsigned ix;
for (i = 0; i < 3; ++i)
@@ -2758,11 +2756,9 @@ add_builtin_candidates (struct z_candidate **candidates, enum tree_code code,
if (code == COND_EXPR)
{
if (real_lvalue_p (args[i]))
- VEC_safe_push (tree, gc, types[i],
- build_reference_type (argtypes[i]));
+ vec_safe_push (types[i], build_reference_type (argtypes[i]));
- VEC_safe_push (tree, gc, types[i],
- TYPE_MAIN_VARIANT (argtypes[i]));
+ vec_safe_push (types[i], TYPE_MAIN_VARIANT (argtypes[i]));
}
else if (! convs)
@@ -2778,49 +2774,48 @@ add_builtin_candidates (struct z_candidate **candidates, enum tree_code code,
continue;
if (code == COND_EXPR && TREE_CODE (type) == REFERENCE_TYPE)
- VEC_safe_push (tree, gc, types[i], type);
+ vec_safe_push (types[i], type);
type = non_reference (type);
if (i != 0 || ! ref1)
{
type = cv_unqualified (type_decays_to (type));
if (enum_p && TREE_CODE (type) == ENUMERAL_TYPE)
- VEC_safe_push (tree, gc, types[i], type);
+ vec_safe_push (types[i], type);
if (INTEGRAL_OR_UNSCOPED_ENUMERATION_TYPE_P (type))
type = type_promotes_to (type);
}
if (! vec_member (type, types[i]))
- VEC_safe_push (tree, gc, types[i], type);
+ vec_safe_push (types[i], type);
}
}
else
{
if (code == COND_EXPR && real_lvalue_p (args[i]))
- VEC_safe_push (tree, gc, types[i],
- build_reference_type (argtypes[i]));
+ vec_safe_push (types[i], build_reference_type (argtypes[i]));
type = non_reference (argtypes[i]);
if (i != 0 || ! ref1)
{
type = cv_unqualified (type_decays_to (type));
if (enum_p && UNSCOPED_ENUM_P (type))
- VEC_safe_push (tree, gc, types[i], type);
+ vec_safe_push (types[i], type);
if (INTEGRAL_OR_UNSCOPED_ENUMERATION_TYPE_P (type))
type = type_promotes_to (type);
}
- VEC_safe_push (tree, gc, types[i], type);
+ vec_safe_push (types[i], type);
}
}
/* Run through the possible parameter types of both arguments,
creating candidates with those parameter types. */
- FOR_EACH_VEC_ELT_REVERSE (tree, types[0], ix, t)
+ FOR_EACH_VEC_ELT_REVERSE (*(types[0]), ix, t)
{
unsigned jx;
tree u;
- if (!VEC_empty (tree, types[1]))
- FOR_EACH_VEC_ELT_REVERSE (tree, types[1], jx, u)
+ if (!types[1]->is_empty ())
+ FOR_EACH_VEC_ELT_REVERSE (*(types[1]), jx, u)
add_builtin_candidate
(candidates, code, code2, fnname, t,
u, args, argtypes, flags, complain);
@@ -2848,14 +2843,14 @@ add_builtin_candidates (struct z_candidate **candidates, enum tree_code code,
static struct z_candidate*
add_template_candidate_real (struct z_candidate **candidates, tree tmpl,
tree ctype, tree explicit_targs, tree first_arg,
- const VEC(tree,gc) *arglist, tree return_type,
+ const vec<tree, va_gc> *arglist, tree return_type,
tree access_path, tree conversion_path,
int flags, tree obj, unification_kind_t strict,
tsubst_flags_t complain)
{
int ntparms = DECL_NTPARMS (tmpl);
tree targs = make_tree_vec (ntparms);
- unsigned int len = VEC_length (tree, arglist);
+ unsigned int len = vec_safe_length (arglist);
unsigned int nargs = (first_arg == NULL_TREE ? 0 : 1) + len;
unsigned int skip_without_in_chrg = 0;
tree first_arg_without_in_chrg = first_arg;
@@ -2901,7 +2896,7 @@ add_template_candidate_real (struct z_candidate **candidates, tree tmpl,
++ia;
}
for (ix = skip_without_in_chrg;
- VEC_iterate (tree, arglist, ix, arg);
+ vec_safe_iterate (arglist, ix, &arg);
++ix)
{
args_without_in_chrg[ia] = arg;
@@ -3002,7 +2997,7 @@ add_template_candidate_real (struct z_candidate **candidates, tree tmpl,
static struct z_candidate *
add_template_candidate (struct z_candidate **candidates, tree tmpl, tree ctype,
tree explicit_targs, tree first_arg,
- const VEC(tree,gc) *arglist, tree return_type,
+ const vec<tree, va_gc> *arglist, tree return_type,
tree access_path, tree conversion_path, int flags,
unification_kind_t strict, tsubst_flags_t complain)
{
@@ -3017,7 +3012,7 @@ add_template_candidate (struct z_candidate **candidates, tree tmpl, tree ctype,
static struct z_candidate *
add_template_conv_candidate (struct z_candidate **candidates, tree tmpl,
tree obj, tree first_arg,
- const VEC(tree,gc) *arglist,
+ const vec<tree, va_gc> *arglist,
tree return_type, tree access_path,
tree conversion_path, tsubst_flags_t complain)
{
@@ -3336,7 +3331,7 @@ merge_conversion_sequences (conversion *user_seq, conversion *std_seq)
non-list constructor.
Parameters are as for add_candidates, except that the arguments are in
- the form of a CONSTRUCTOR (the initializer list) rather than a VEC, and
+ the form of a CONSTRUCTOR (the initializer list) rather than a vector, and
the RETURN_TYPE parameter is replaced by TOTYPE, the desired type. */
static void
@@ -3348,7 +3343,7 @@ add_list_candidates (tree fns, tree first_arg,
struct z_candidate **candidates,
tsubst_flags_t complain)
{
- VEC(tree,gc) *args;
+ vec<tree, va_gc> *args;
gcc_assert (*candidates == NULL);
@@ -3403,7 +3398,7 @@ build_user_type_conversion_1 (tree totype, tree expr, int flags,
tree conv_fns = NULL_TREE;
conversion *conv = NULL;
tree first_arg = NULL_TREE;
- VEC(tree,gc) *args = NULL;
+ vec<tree, va_gc> *args = NULL;
bool any_viable_p;
int convflags;
@@ -3734,13 +3729,13 @@ build_integral_nontype_arg_conv (tree type, tree expr, tsubst_flags_t complain)
/* Do any initial processing on the arguments to a function call. */
-static VEC(tree,gc) *
-resolve_args (VEC(tree,gc) *args, tsubst_flags_t complain)
+static vec<tree, va_gc> *
+resolve_args (vec<tree, va_gc> *args, tsubst_flags_t complain)
{
unsigned int ix;
tree arg;
- FOR_EACH_VEC_ELT (tree, args, ix, arg)
+ FOR_EACH_VEC_SAFE_ELT (args, ix, arg)
{
if (error_operand_p (arg))
return NULL;
@@ -3770,7 +3765,7 @@ resolve_args (VEC(tree,gc) *args, tsubst_flags_t complain)
static struct z_candidate *
perform_overload_resolution (tree fn,
- const VEC(tree,gc) *args,
+ const vec<tree, va_gc> *args,
struct z_candidate **candidates,
bool *any_viable_p, tsubst_flags_t complain)
{
@@ -3823,7 +3818,7 @@ perform_overload_resolution (tree fn,
functions. */
static void
-print_error_for_call_failure (tree fn, VEC(tree,gc) *args, bool any_viable_p,
+print_error_for_call_failure (tree fn, vec<tree, va_gc> *args, bool any_viable_p,
struct z_candidate *candidates)
{
tree name = DECL_NAME (OVL_CURRENT (fn));
@@ -3844,7 +3839,7 @@ print_error_for_call_failure (tree fn, VEC(tree,gc) *args, bool any_viable_p,
ARGS. */
tree
-build_new_function_call (tree fn, VEC(tree,gc) **args, bool koenig_p,
+build_new_function_call (tree fn, vec<tree, va_gc> **args, bool koenig_p,
tsubst_flags_t complain)
{
struct z_candidate *candidates, *cand;
@@ -3929,7 +3924,7 @@ build_new_function_call (tree fn, VEC(tree,gc) **args, bool koenig_p,
function called. */
tree
-build_operator_new_call (tree fnname, VEC(tree,gc) **args,
+build_operator_new_call (tree fnname, vec<tree, va_gc> **args,
tree *size, tree *cookie_size, tree size_check,
tree *fn, tsubst_flags_t complain)
{
@@ -3945,7 +3940,7 @@ build_operator_new_call (tree fnname, VEC(tree,gc) **args,
if (size_check != NULL_TREE)
*size = fold_build3 (COND_EXPR, sizetype, size_check,
original_size, TYPE_MAX_VALUE (sizetype));
- VEC_safe_insert (tree, gc, *args, 0, *size);
+ vec_safe_insert (*args, 0, *size);
*args = resolve_args (*args, complain);
if (*args == NULL)
return error_mark_node;
@@ -3985,9 +3980,8 @@ build_operator_new_call (tree fnname, VEC(tree,gc) **args,
/* In G++ 3.2, the check was implemented incorrectly; it
looked at the placement expression, rather than the
type of the function. */
- if (VEC_length (tree, *args) == 2
- && same_type_p (TREE_TYPE (VEC_index (tree, *args, 1)),
- ptr_type_node))
+ if ((*args)->length () == 2
+ && same_type_p (TREE_TYPE ((**args)[1]), ptr_type_node))
use_cookie = false;
}
else
@@ -4014,7 +4008,7 @@ build_operator_new_call (tree fnname, VEC(tree,gc) **args,
*size = fold_build3 (COND_EXPR, sizetype, size_check,
*size, TYPE_MAX_VALUE (sizetype));
/* Update the argument list to reflect the adjusted size. */
- VEC_replace (tree, *args, 0, *size);
+ (**args)[0] = *size;
}
else
*cookie_size = NULL_TREE;
@@ -4031,7 +4025,7 @@ build_operator_new_call (tree fnname, VEC(tree,gc) **args,
/* Build a new call to operator(). This may change ARGS. */
static tree
-build_op_call_1 (tree obj, VEC(tree,gc) **args, tsubst_flags_t complain)
+build_op_call_1 (tree obj, vec<tree, va_gc> **args, tsubst_flags_t complain)
{
struct z_candidate *candidates = 0, *cand;
tree fns, convs, first_mem_arg = NULL_TREE;
@@ -4165,7 +4159,7 @@ build_op_call_1 (tree obj, VEC(tree,gc) **args, tsubst_flags_t complain)
/* Wrapper for above. */
tree
-build_op_call (tree obj, VEC(tree,gc) **args, tsubst_flags_t complain)
+build_op_call (tree obj, vec<tree, va_gc> **args, tsubst_flags_t complain)
{
tree ret;
bool subtime = timevar_cond_start (TV_OVERLOAD);
@@ -4899,7 +4893,7 @@ prep_operand (tree operand)
add_function_candidate. */
static void
-add_candidates (tree fns, tree first_arg, const VEC(tree,gc) *args,
+add_candidates (tree fns, tree first_arg, const vec<tree, va_gc> *args,
tree return_type,
tree explicit_targs, bool template_only,
tree conversion_path, tree access_path,
@@ -4908,7 +4902,7 @@ add_candidates (tree fns, tree first_arg, const VEC(tree,gc) *args,
tsubst_flags_t complain)
{
tree ctype;
- const VEC(tree,gc) *non_static_args;
+ const vec<tree, va_gc> *non_static_args;
bool check_list_ctor;
bool check_converting;
unification_kind_t strict;
@@ -4964,7 +4958,7 @@ add_candidates (tree fns, tree first_arg, const VEC(tree,gc) *args,
for (; fns; fns = OVL_NEXT (fns))
{
tree fn_first_arg;
- const VEC(tree,gc) *fn_args;
+ const vec<tree, va_gc> *fn_args;
fn = OVL_CURRENT (fns);
@@ -4982,12 +4976,12 @@ add_candidates (tree fns, tree first_arg, const VEC(tree,gc) *args,
{
unsigned int ix;
tree arg;
- VEC(tree,gc) *tempvec
- = VEC_alloc (tree, gc, VEC_length (tree, args) - 1);
- for (ix = 1; VEC_iterate (tree, args, ix, arg); ++ix)
- VEC_quick_push (tree, tempvec, arg);
+ vec<tree, va_gc> *tempvec;
+ vec_alloc (tempvec, args->length () - 1);
+ for (ix = 1; args->iterate (ix, &arg); ++ix)
+ tempvec->quick_push (arg);
non_static_args = tempvec;
- first_arg = build_this (VEC_index (tree, args, 0));
+ first_arg = build_this ((*args)[0]);
}
fn_first_arg = first_arg;
@@ -5031,7 +5025,7 @@ build_new_op_1 (location_t loc, enum tree_code code, int flags, tree arg1,
tree arg2, tree arg3, tree *overload, tsubst_flags_t complain)
{
struct z_candidate *candidates = 0, *cand;
- VEC(tree,gc) *arglist;
+ vec<tree, va_gc> *arglist;
tree fnname;
tree args[3];
tree result = NULL_TREE;
@@ -5098,12 +5092,12 @@ build_new_op_1 (location_t loc, enum tree_code code, int flags, tree arg1,
if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
arg2 = integer_zero_node;
- arglist = VEC_alloc (tree, gc, 3);
- VEC_quick_push (tree, arglist, arg1);
+ vec_alloc (arglist, 3);
+ arglist->quick_push (arg1);
if (arg2 != NULL_TREE)
- VEC_quick_push (tree, arglist, arg2);
+ arglist->quick_push (arg2);
if (arg3 != NULL_TREE)
- VEC_quick_push (tree, arglist, arg3);
+ arglist->quick_push (arg3);
/* Get the high-water mark for the CONVERSION_OBSTACK. */
p = conversion_obstack_alloc (0);
@@ -5645,12 +5639,13 @@ build_op_delete_call (enum tree_code code, tree addr, tree size,
else
{
tree ret;
- VEC(tree,gc) *args = VEC_alloc (tree, gc, 2);
- VEC_quick_push (tree, args, addr);
+ vec<tree, va_gc> *args;
+ vec_alloc (args, 2);
+ args->quick_push (addr);
if (FUNCTION_ARG_CHAIN (fn) != void_list_node)
- VEC_quick_push (tree, args, size);
+ args->quick_push (size);
ret = cp_build_function_call_vec (fn, &args, complain);
- VEC_free (tree, gc, args);
+ vec_free (args);
return ret;
}
}
@@ -5715,7 +5710,7 @@ build_temp (tree expr, tree type, int flags,
diagnostic_t *diagnostic_kind, tsubst_flags_t complain)
{
int savew, savee;
- VEC(tree,gc) *args;
+ vec<tree, va_gc> *args;
savew = warningcount, savee = errorcount;
args = make_tree_vector_single (expr);
@@ -5965,7 +5960,7 @@ convert_like_real (conversion *convs, tree expr, tree fn, int argnum,
tree new_ctor = build_constructor (init_list_type_node, NULL);
unsigned len = CONSTRUCTOR_NELTS (expr);
tree array, val, field;
- VEC(constructor_elt,gc) *vec = NULL;
+ vec<constructor_elt, va_gc> *vec = NULL;
unsigned ix;
/* Convert all the elements. */
@@ -6340,13 +6335,14 @@ cxx_type_promotes_to (tree type)
zero-based argument number. Do any required conversions. Return
the converted value. */
-static GTY(()) VEC(tree,gc) *default_arg_context;
+static GTY(()) vec<tree, va_gc> *default_arg_context;
void
push_defarg_context (tree fn)
-{ VEC_safe_push (tree, gc, default_arg_context, fn); }
+{ vec_safe_push (default_arg_context, fn); }
+
void
pop_defarg_context (void)
-{ VEC_pop (tree, default_arg_context); }
+{ default_arg_context->pop (); }
tree
convert_default_arg (tree type, tree arg, tree fn, int parmnum,
@@ -6359,7 +6355,7 @@ convert_default_arg (tree type, tree arg, tree fn, int parmnum,
fn = DECL_ORIGIN (fn);
/* Detect recursion. */
- FOR_EACH_VEC_ELT (tree, default_arg_context, i, t)
+ FOR_EACH_VEC_SAFE_ELT (default_arg_context, i, t)
if (t == fn)
{
if (complain & tf_error)
@@ -6581,7 +6577,7 @@ static tree
build_over_call (struct z_candidate *cand, int flags, tsubst_flags_t complain)
{
tree fn = cand->fn;
- const VEC(tree,gc) *args = cand->args;
+ const vec<tree, va_gc> *args = cand->args;
tree first_arg = cand->first_arg;
conversion **convs = cand->convs;
conversion *conv;
@@ -6608,9 +6604,9 @@ build_over_call (struct z_candidate *cand, int flags, tsubst_flags_t complain)
unsigned int nargs;
return_type = TREE_TYPE (TREE_TYPE (fn));
- nargs = VEC_length (tree, args);
+ nargs = vec_safe_length (args);
if (first_arg == NULL_TREE)
- argarray = VEC_address (tree, CONST_CAST (VEC(tree,gc) *, args));
+ argarray = args->address ();
else
{
tree *alcarray;
@@ -6620,7 +6616,7 @@ build_over_call (struct z_candidate *cand, int flags, tsubst_flags_t complain)
++nargs;
alcarray = XALLOCAVEC (tree, nargs);
alcarray[0] = first_arg;
- FOR_EACH_VEC_ELT (tree, args, ix, arg)
+ FOR_EACH_VEC_SAFE_ELT (args, ix, arg)
alcarray[ix + 1] = arg;
argarray = alcarray;
}
@@ -6705,7 +6701,7 @@ build_over_call (struct z_candidate *cand, int flags, tsubst_flags_t complain)
/* Find maximum size of vector to hold converted arguments. */
parmlen = list_length (parm);
- nargs = VEC_length (tree, args) + (first_arg != NULL_TREE ? 1 : 0);
+ nargs = vec_safe_length (args) + (first_arg != NULL_TREE ? 1 : 0);
if (parmlen > nargs)
nargs = parmlen;
argarray = XALLOCAVEC (tree, nargs);
@@ -6721,7 +6717,7 @@ build_over_call (struct z_candidate *cand, int flags, tsubst_flags_t complain)
}
else
{
- argarray[j++] = VEC_index (tree, args, arg_index);
+ argarray[j++] = (*args)[arg_index];
++arg_index;
}
parm = TREE_CHAIN (parm);
@@ -6730,7 +6726,7 @@ build_over_call (struct z_candidate *cand, int flags, tsubst_flags_t complain)
if (DECL_HAS_VTT_PARM_P (fn))
{
- argarray[j++] = VEC_index (tree, args, arg_index);
+ argarray[j++] = (*args)[arg_index];
++arg_index;
parm = TREE_CHAIN (parm);
}
@@ -6741,7 +6737,7 @@ build_over_call (struct z_candidate *cand, int flags, tsubst_flags_t complain)
tree parmtype = TREE_VALUE (parm);
tree arg = (first_arg != NULL_TREE
? first_arg
- : VEC_index (tree, args, arg_index));
+ : (*args)[arg_index]);
tree argtype = TREE_TYPE (arg);
tree converted_arg;
tree base_binfo;
@@ -6800,11 +6796,11 @@ build_over_call (struct z_candidate *cand, int flags, tsubst_flags_t complain)
}
gcc_assert (first_arg == NULL_TREE);
- for (; arg_index < VEC_length (tree, args) && parm;
+ for (; arg_index < vec_safe_length (args) && parm;
parm = TREE_CHAIN (parm), ++arg_index, ++i)
{
tree type = TREE_VALUE (parm);
- tree arg = VEC_index (tree, args, arg_index);
+ tree arg = (*args)[arg_index];
bool conversion_warning = true;
conv = convs[i];
@@ -6889,9 +6885,9 @@ build_over_call (struct z_candidate *cand, int flags, tsubst_flags_t complain)
}
/* Ellipsis */
- for (; arg_index < VEC_length (tree, args); ++arg_index)
+ for (; arg_index < vec_safe_length (args); ++arg_index)
{
- tree a = VEC_index (tree, args, arg_index);
+ tree a = (*args)[arg_index];
if (magic_varargs_p (fn))
/* Do no conversions for magic varargs. */
a = mark_type_use (a);
@@ -7221,13 +7217,13 @@ in_charge_arg_for_name (tree name)
store the newly constructed object into a VAR_DECL. */
tree
-build_special_member_call (tree instance, tree name, VEC(tree,gc) **args,
+build_special_member_call (tree instance, tree name, vec<tree, va_gc> **args,
tree binfo, int flags, tsubst_flags_t complain)
{
tree fns;
/* The type of the subobject to be constructed or destroyed. */
tree class_type;
- VEC(tree,gc) *allocated = NULL;
+ vec<tree, va_gc> *allocated = NULL;
tree ret;
gcc_assert (name == complete_ctor_identifier
@@ -7260,7 +7256,7 @@ build_special_member_call (tree instance, tree name, VEC(tree,gc) **args,
if (name == complete_dtor_identifier
|| name == base_dtor_identifier
|| name == deleting_dtor_identifier)
- gcc_assert (args == NULL || VEC_empty (tree, *args));
+ gcc_assert (args == NULL || vec_safe_is_empty (*args));
/* Convert to the base class, if necessary. */
if (!same_type_ignoring_top_level_qualifiers_p
@@ -7318,7 +7314,7 @@ build_special_member_call (tree instance, tree name, VEC(tree,gc) **args,
args = &allocated;
}
- VEC_safe_insert (tree, gc, *args, 0, sub_vtt);
+ vec_safe_insert (*args, 0, sub_vtt);
}
ret = build_new_method_call (instance, fns, args,
@@ -7382,7 +7378,7 @@ name_as_c_string (tree name, tree type, bool *free_p)
This may change ARGS. */
static tree
-build_new_method_call_1 (tree instance, tree fns, VEC(tree,gc) **args,
+build_new_method_call_1 (tree instance, tree fns, vec<tree, va_gc> **args,
tree conversion_path, int flags,
tree *fn_p, tsubst_flags_t complain)
{
@@ -7395,14 +7391,14 @@ build_new_method_call_1 (tree instance, tree fns, VEC(tree,gc) **args,
tree instance_ptr;
tree name;
bool skip_first_for_error;
- VEC(tree,gc) *user_args;
+ vec<tree, va_gc> *user_args;
tree call;
tree fn;
int template_only = 0;
bool any_viable_p;
tree orig_instance;
tree orig_fns;
- VEC(tree,gc) *orig_args = NULL;
+ vec<tree, va_gc> *orig_args = NULL;
void *p;
gcc_assert (instance != NULL_TREE);
@@ -7522,14 +7518,14 @@ build_new_method_call_1 (tree instance, tree fns, VEC(tree,gc) **args,
/* If CONSTRUCTOR_IS_DIRECT_INIT is set, this was a T{ } form
initializer, not T({ }). */
- if (DECL_CONSTRUCTOR_P (fn) && args != NULL && !VEC_empty (tree, *args)
- && BRACE_ENCLOSED_INITIALIZER_P (VEC_index (tree, *args, 0))
- && CONSTRUCTOR_IS_DIRECT_INIT (VEC_index (tree, *args, 0)))
+ if (DECL_CONSTRUCTOR_P (fn) && args != NULL && !vec_safe_is_empty (*args)
+ && BRACE_ENCLOSED_INITIALIZER_P ((**args)[0])
+ && CONSTRUCTOR_IS_DIRECT_INIT ((**args)[0]))
{
- tree init_list = VEC_index (tree, *args, 0);
+ tree init_list = (**args)[0];
tree init = NULL_TREE;
- gcc_assert (VEC_length (tree, *args) == 1
+ gcc_assert ((*args)->length () == 1
&& !(flags & LOOKUP_ONLYCONVERTING));
/* If the initializer list has no elements and T is a class type with
@@ -7732,7 +7728,7 @@ build_new_method_call_1 (tree instance, tree fns, VEC(tree,gc) **args,
/* Wrapper for above. */
tree
-build_new_method_call (tree instance, tree fns, VEC(tree,gc) **args,
+build_new_method_call (tree instance, tree fns, vec<tree, va_gc> **args,
tree conversion_path, int flags,
tree *fn_p, tsubst_flags_t complain)
{
@@ -8896,7 +8892,7 @@ perform_direct_initialization_if_possible (tree type,
ill-formed. */
if (CLASS_TYPE_P (type))
{
- VEC(tree,gc) *args = make_tree_vector_single (expr);
+ vec<tree, va_gc> *args = make_tree_vector_single (expr);
expr = build_special_member_call (NULL_TREE, complete_ctor_identifier,
&args, type, LOOKUP_NORMAL, complain);
release_tree_vector (args);
@@ -9003,7 +8999,7 @@ make_temporary_var_for_ref_to_temp (tree decl, tree type)
code to initialize the new variable is returned through INITP. */
static tree
-set_up_extended_ref_temp (tree decl, tree expr, VEC(tree,gc) **cleanups,
+set_up_extended_ref_temp (tree decl, tree expr, vec<tree, va_gc> **cleanups,
tree *initp)
{
tree init;
@@ -9076,7 +9072,7 @@ set_up_extended_ref_temp (tree decl, tree expr, VEC(tree,gc) **cleanups,
{
tree cleanup = cxx_maybe_build_cleanup (var, tf_warning_or_error);
if (cleanup)
- VEC_safe_push (tree, gc, *cleanups, cleanup);
+ vec_safe_push (*cleanups, cleanup);
}
/* We must be careful to destroy the temporary only
@@ -9170,7 +9166,7 @@ initialize_reference (tree type, tree expr,
which is bound either to a reference or a std::initializer_list. */
static tree
-extend_ref_init_temps_1 (tree decl, tree init, VEC(tree,gc) **cleanups)
+extend_ref_init_temps_1 (tree decl, tree init, vec<tree, va_gc> **cleanups)
{
tree sub = init;
tree *p;
@@ -9201,7 +9197,7 @@ extend_ref_init_temps_1 (tree decl, tree init, VEC(tree,gc) **cleanups)
lifetime to match that of DECL. */
tree
-extend_ref_init_temps (tree decl, tree init, VEC(tree,gc) **cleanups)
+extend_ref_init_temps (tree decl, tree init, vec<tree, va_gc> **cleanups)
{
tree type = TREE_TYPE (init);
if (processing_template_decl)
@@ -9226,8 +9222,8 @@ extend_ref_init_temps (tree decl, tree init, VEC(tree,gc) **cleanups)
{
unsigned i;
constructor_elt *p;
- VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (init);
- FOR_EACH_VEC_ELT (constructor_elt, elts, i, p)
+ vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
+ FOR_EACH_VEC_SAFE_ELT (elts, i, p)
p->value = extend_ref_init_temps (decl, p->value, cleanups);
}
diff --git a/gcc/cp/class.c b/gcc/cp/class.c
index cdc02ae7d7a..56fe1d1073c 100644
--- a/gcc/cp/class.c
+++ b/gcc/cp/class.c
@@ -79,13 +79,13 @@ typedef struct vtbl_init_data_s
tree rtti_binfo;
/* The negative-index vtable initializers built up so far. These
are in order from least negative index to most negative index. */
- VEC(constructor_elt,gc) *inits;
+ vec<constructor_elt, va_gc> *inits;
/* The binfo for the virtual base for which we're building
vcall offset initializers. */
tree vbase;
/* The functions in vbase for which we have already provided vcall
offsets. */
- VEC(tree,gc) *fns;
+ vec<tree, va_gc> *fns;
/* The vtable index of the next vcall or vbase offset. */
tree index;
/* Nonzero if we are building the initializer for the primary
@@ -112,7 +112,7 @@ static GTY (()) tree sizeof_biggest_empty_class;
/* An array of all local classes present in this translation unit, in
declaration order. */
-VEC(tree,gc) *local_classes;
+vec<tree, va_gc> *local_classes;
static tree get_vfield_name (tree);
static void finish_struct_anon (tree);
@@ -137,7 +137,7 @@ static tree fixed_type_or_null (tree, int *, int *);
static tree build_simple_base_path (tree expr, tree binfo);
static tree build_vtbl_ref_1 (tree, tree);
static void build_vtbl_initializer (tree, tree, tree, tree, int *,
- VEC(constructor_elt,gc) **);
+ vec<constructor_elt, va_gc> **);
static int count_fields (tree);
static int add_fields_to_record_type (tree, struct sorted_fields_type*, int);
static void insert_into_classtype_sorted_fields (tree, tree, int);
@@ -175,15 +175,15 @@ static void dump_vtable (tree, tree, tree);
static void dump_vtt (tree, tree);
static void dump_thunk (FILE *, int, tree);
static tree build_vtable (tree, tree, tree);
-static void initialize_vtable (tree, VEC(constructor_elt,gc) *);
+static void initialize_vtable (tree, vec<constructor_elt, va_gc> *);
static void layout_nonempty_base_or_field (record_layout_info,
tree, tree, splay_tree);
static tree end_of_class (tree, int);
static bool layout_empty_base (record_layout_info, tree, tree, splay_tree);
static void accumulate_vtbl_inits (tree, tree, tree, tree, tree,
- VEC(constructor_elt,gc) **);
+ vec<constructor_elt, va_gc> **);
static void dfs_accumulate_vtbl_inits (tree, tree, tree, tree, tree,
- VEC(constructor_elt,gc) **);
+ vec<constructor_elt, va_gc> **);
static void build_rtti_vtbl_entries (tree, vtbl_init_data *);
static void build_vcall_and_vbase_vtbl_entries (tree, vtbl_init_data *);
static void clone_constructors_and_destructors (tree);
@@ -192,7 +192,8 @@ static void update_vtable_entry_for_fn (tree, tree, tree, tree *, unsigned);
static void build_ctor_vtbl_group (tree, tree);
static void build_vtt (tree);
static tree binfo_ctor_vtable (tree);
-static void build_vtt_inits (tree, tree, VEC(constructor_elt,gc) **, tree *);
+static void build_vtt_inits (tree, tree, vec<constructor_elt, va_gc> **,
+ tree *);
static tree dfs_build_secondary_vptr_vtt_inits (tree, void *);
static tree dfs_fixup_binfo_vtbls (tree, void *);
static int record_subobject_offset (tree, tree, splay_tree);
@@ -948,7 +949,7 @@ add_method (tree type, tree method, tree using_decl)
tree overload;
bool template_conv_p = false;
bool conv_p;
- VEC(tree,gc) *method_vec;
+ vec<tree, va_gc> *method_vec;
bool complete_p;
bool insert_p = false;
tree current_fns;
@@ -970,10 +971,10 @@ add_method (tree type, tree method, tree using_decl)
allocate at least two (for constructors and destructors), and
we're going to end up with an assignment operator at some
point as well. */
- method_vec = VEC_alloc (tree, gc, 8);
+ vec_alloc (method_vec, 8);
/* Create slots for constructors and destructors. */
- VEC_quick_push (tree, method_vec, NULL_TREE);
- VEC_quick_push (tree, method_vec, NULL_TREE);
+ method_vec->quick_push (NULL_TREE);
+ method_vec->quick_push (NULL_TREE);
CLASSTYPE_METHOD_VEC (type) = method_vec;
}
@@ -1004,7 +1005,7 @@ add_method (tree type, tree method, tree using_decl)
insert_p = true;
/* See if we already have an entry with this name. */
for (slot = CLASSTYPE_FIRST_CONVERSION_SLOT;
- VEC_iterate (tree, method_vec, slot, m);
+ vec_safe_iterate (method_vec, slot, &m);
++slot)
{
m = OVL_CURRENT (m);
@@ -1028,7 +1029,7 @@ add_method (tree type, tree method, tree using_decl)
break;
}
}
- current_fns = insert_p ? NULL_TREE : VEC_index (tree, method_vec, slot);
+ current_fns = insert_p ? NULL_TREE : (*method_vec)[slot];
/* Check to see if we've already got this method. */
for (fns = current_fns; fns; fns = OVL_NEXT (fns))
@@ -1175,19 +1176,19 @@ add_method (tree type, tree method, tree using_decl)
/* We only expect to add few methods in the COMPLETE_P case, so
just make room for one more method in that case. */
if (complete_p)
- reallocated = VEC_reserve_exact (tree, gc, method_vec, 1);
+ reallocated = vec_safe_reserve_exact (method_vec, 1);
else
- reallocated = VEC_reserve (tree, gc, method_vec, 1);
+ reallocated = vec_safe_reserve (method_vec, 1);
if (reallocated)
CLASSTYPE_METHOD_VEC (type) = method_vec;
- if (slot == VEC_length (tree, method_vec))
- VEC_quick_push (tree, method_vec, overload);
+ if (slot == method_vec->length ())
+ method_vec->quick_push (overload);
else
- VEC_quick_insert (tree, method_vec, slot, overload);
+ method_vec->quick_insert (slot, overload);
}
else
/* Replace the current slot. */
- VEC_replace (tree, method_vec, slot, overload);
+ (*method_vec)[slot] = overload;
return true;
}
@@ -1953,15 +1954,15 @@ resort_type_method_vec (void* obj,
gt_pointer_operator new_value,
void* cookie)
{
- VEC(tree,gc) *method_vec = (VEC(tree,gc) *) obj;
- int len = VEC_length (tree, method_vec);
+ vec<tree, va_gc> *method_vec = (vec<tree, va_gc> *) obj;
+ int len = vec_safe_length (method_vec);
size_t slot;
tree fn;
/* The type conversion ops have to live at the front of the vec, so we
can't sort them. */
for (slot = CLASSTYPE_FIRST_CONVERSION_SLOT;
- VEC_iterate (tree, method_vec, slot, fn);
+ vec_safe_iterate (method_vec, slot, &fn);
++slot)
if (!DECL_CONV_FN_P (OVL_CURRENT (fn)))
break;
@@ -1970,7 +1971,7 @@ resort_type_method_vec (void* obj,
{
resort_data.new_value = new_value;
resort_data.cookie = cookie;
- qsort (VEC_address (tree, method_vec) + slot, len - slot, sizeof (tree),
+ qsort (method_vec->address () + slot, len - slot, sizeof (tree),
resort_method_name_cmp);
}
}
@@ -1985,14 +1986,14 @@ static void
finish_struct_methods (tree t)
{
tree fn_fields;
- VEC(tree,gc) *method_vec;
+ vec<tree, va_gc> *method_vec;
int slot, len;
method_vec = CLASSTYPE_METHOD_VEC (t);
if (!method_vec)
return;
- len = VEC_length (tree, method_vec);
+ len = method_vec->length ();
/* Clear DECL_IN_AGGR_P for all functions. */
for (fn_fields = TYPE_METHODS (t); fn_fields;
@@ -2006,12 +2007,12 @@ finish_struct_methods (tree t)
/* The type conversion ops have to live at the front of the vec, so we
can't sort them. */
for (slot = CLASSTYPE_FIRST_CONVERSION_SLOT;
- VEC_iterate (tree, method_vec, slot, fn_fields);
+ method_vec->iterate (slot, &fn_fields);
++slot)
if (!DECL_CONV_FN_P (OVL_CURRENT (fn_fields)))
break;
if (len - slot > 1)
- qsort (VEC_address (tree, method_vec) + slot,
+ qsort (method_vec->address () + slot,
len-slot, sizeof (tree), method_name_cmp);
}
@@ -2103,7 +2104,7 @@ typedef struct find_final_overrider_data_s {
/* The candidate overriders. */
tree candidates;
/* Path to most derived. */
- VEC(tree,heap) *path;
+ vec<tree> path;
} find_final_overrider_data;
/* Add the overrider along the current path to FFOD->CANDIDATES.
@@ -2122,7 +2123,7 @@ dfs_find_final_overrider_1 (tree binfo,
{
depth--;
if (dfs_find_final_overrider_1
- (VEC_index (tree, ffod->path, depth), ffod, depth))
+ (ffod->path[depth], ffod, depth))
return true;
}
@@ -2161,8 +2162,8 @@ dfs_find_final_overrider_pre (tree binfo, void *data)
find_final_overrider_data *ffod = (find_final_overrider_data *) data;
if (binfo == ffod->declaring_base)
- dfs_find_final_overrider_1 (binfo, ffod, VEC_length (tree, ffod->path));
- VEC_safe_push (tree, heap, ffod->path, binfo);
+ dfs_find_final_overrider_1 (binfo, ffod, ffod->path.length ());
+ ffod->path.safe_push (binfo);
return NULL_TREE;
}
@@ -2171,7 +2172,7 @@ static tree
dfs_find_final_overrider_post (tree /*binfo*/, void *data)
{
find_final_overrider_data *ffod = (find_final_overrider_data *) data;
- VEC_pop (tree, ffod->path);
+ ffod->path.pop ();
return NULL_TREE;
}
@@ -2211,12 +2212,12 @@ find_final_overrider (tree derived, tree binfo, tree fn)
ffod.fn = fn;
ffod.declaring_base = binfo;
ffod.candidates = NULL_TREE;
- ffod.path = VEC_alloc (tree, heap, 30);
+ ffod.path.create (30);
dfs_walk_all (derived, dfs_find_final_overrider_pre,
dfs_find_final_overrider_post, &ffod);
- VEC_free (tree, heap, ffod.path);
+ ffod.path.release ();
/* If there was no winner, issue an error message. */
if (!ffod.candidates || TREE_CHAIN (ffod.candidates))
@@ -2231,11 +2232,11 @@ find_final_overrider (tree derived, tree binfo, tree fn)
static tree
get_vcall_index (tree fn, tree type)
{
- VEC(tree_pair_s,gc) *indices = CLASSTYPE_VCALL_INDICES (type);
+ vec<tree_pair_s, va_gc> *indices = CLASSTYPE_VCALL_INDICES (type);
tree_pair_p p;
unsigned ix;
- FOR_EACH_VEC_ELT (tree_pair_s, indices, ix, p)
+ FOR_EACH_VEC_SAFE_ELT (indices, ix, p)
if ((DECL_DESTRUCTOR_P (fn) && DECL_DESTRUCTOR_P (p->purpose))
|| same_signature_p (fn, p->purpose))
return p->value;
@@ -2589,7 +2590,7 @@ get_basefndecls (tree name, tree t)
/* Find virtual functions in T with the indicated NAME. */
i = lookup_fnfields_1 (t, name);
if (i != -1)
- for (methods = VEC_index (tree, CLASSTYPE_METHOD_VEC (t), i);
+ for (methods = (*CLASSTYPE_METHOD_VEC (t))[i];
methods;
methods = OVL_NEXT (methods))
{
@@ -2660,13 +2661,13 @@ check_for_override (tree decl, tree ctype)
static void
warn_hidden (tree t)
{
- VEC(tree,gc) *method_vec = CLASSTYPE_METHOD_VEC (t);
+ vec<tree, va_gc> *method_vec = CLASSTYPE_METHOD_VEC (t);
tree fns;
size_t i;
/* We go through each separately named virtual function. */
for (i = CLASSTYPE_FIRST_CONVERSION_SLOT;
- VEC_iterate (tree, method_vec, i, fns);
+ vec_safe_iterate (method_vec, i, &fns);
++i)
{
tree fn;
@@ -3704,7 +3705,7 @@ walk_subobject_offsets (tree type,
if (abi_version_at_least (2) && CLASSTYPE_VBASECLASSES (type))
{
unsigned ix;
- VEC(tree,gc) *vbases;
+ vec<tree, va_gc> *vbases;
/* Iterate through the virtual base classes of TYPE. In G++
3.2, we included virtual bases in the direct base class
@@ -3713,7 +3714,7 @@ walk_subobject_offsets (tree type,
working with the most derived type. */
if (vbases_p)
for (vbases = CLASSTYPE_VBASECLASSES (type), ix = 0;
- VEC_iterate (tree, vbases, ix, binfo); ix++)
+ vec_safe_iterate (vbases, ix, &binfo); ix++)
{
r = walk_subobject_offsets (binfo,
f,
@@ -4221,7 +4222,7 @@ check_methods (tree t)
{
TYPE_POLYMORPHIC_P (t) = 1;
if (DECL_PURE_VIRTUAL_P (x))
- VEC_safe_push (tree, gc, CLASSTYPE_PURE_VIRTUALS (t), x);
+ vec_safe_push (CLASSTYPE_PURE_VIRTUALS (t), x);
}
/* All user-provided destructors are non-trivial.
Constructors and assignment ops are handled in
@@ -5620,7 +5621,7 @@ static tree
end_of_class (tree t, int include_virtuals_p)
{
tree result = size_zero_node;
- VEC(tree,gc) *vbases;
+ vec<tree, va_gc> *vbases;
tree binfo;
tree base_binfo;
tree offset;
@@ -5643,7 +5644,7 @@ end_of_class (tree t, int include_virtuals_p)
/* G++ 3.2 did not check indirect virtual bases. */
if (abi_version_at_least (2) && include_virtuals_p)
for (vbases = CLASSTYPE_VBASECLASSES (t), i = 0;
- VEC_iterate (tree, vbases, i, base_binfo); i++)
+ vec_safe_iterate (vbases, i, &base_binfo); i++)
{
offset = end_of_base (base_binfo);
if (INT_CST_LT_UNSIGNED (result, offset))
@@ -5667,7 +5668,7 @@ static void
warn_about_ambiguous_bases (tree t)
{
int i;
- VEC(tree,gc) *vbases;
+ vec<tree, va_gc> *vbases;
tree basetype;
tree binfo;
tree base_binfo;
@@ -5690,7 +5691,7 @@ warn_about_ambiguous_bases (tree t)
/* Check for ambiguous virtual bases. */
if (extra_warnings)
for (vbases = CLASSTYPE_VBASECLASSES (t), i = 0;
- VEC_iterate (tree, vbases, i, binfo); i++)
+ vec_safe_iterate (vbases, i, &binfo); i++)
{
basetype = BINFO_TYPE (binfo);
@@ -6520,7 +6521,7 @@ finish_struct (tree t, tree attributes)
CLASSTYPE_PURE_VIRTUALS (t) = NULL;
for (x = TYPE_METHODS (t); x; x = DECL_CHAIN (x))
if (DECL_PURE_VIRTUAL_P (x))
- VEC_safe_push (tree, gc, CLASSTYPE_PURE_VIRTUALS (t), x);
+ vec_safe_push (CLASSTYPE_PURE_VIRTUALS (t), x);
complete_vars (t);
/* We need to add the target functions to the CLASSTYPE_METHOD_VEC if
an enclosing scope is a template class, so that this function be
@@ -6766,7 +6767,7 @@ init_class_processing (void)
current_class_stack_size = 10;
current_class_stack
= XNEWVEC (struct class_stack_node, current_class_stack_size);
- local_classes = VEC_alloc (tree, gc, 8);
+ vec_alloc (local_classes, 8);
sizeof_biggest_empty_class = size_zero_node;
ridpointers[(int) RID_PUBLIC] = access_public_node;
@@ -7028,7 +7029,7 @@ pop_nested_class (void)
int
current_lang_depth (void)
{
- return VEC_length (tree, current_lang_base);
+ return vec_safe_length (current_lang_base);
}
/* Set global variables CURRENT_LANG_NAME to appropriate value
@@ -7037,7 +7038,7 @@ current_lang_depth (void)
void
push_lang_context (tree name)
{
- VEC_safe_push (tree, gc, current_lang_base, current_lang_name);
+ vec_safe_push (current_lang_base, current_lang_name);
if (name == lang_name_cplusplus)
{
@@ -7072,7 +7073,7 @@ push_lang_context (tree name)
void
pop_lang_context (void)
{
- current_lang_name = VEC_pop (tree, current_lang_base);
+ current_lang_name = current_lang_base->pop ();
}
/* Type instantiation routines. */
@@ -8051,7 +8052,7 @@ static void
finish_vtbls (tree t)
{
tree vbase;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
tree vtable = BINFO_VTABLE (TYPE_BINFO (t));
/* We lay out the primary and secondary vtables in one contiguous
@@ -8075,11 +8076,11 @@ finish_vtbls (tree t)
/* Initialize the vtable for BINFO with the INITS. */
static void
-initialize_vtable (tree binfo, VEC(constructor_elt,gc) *inits)
+initialize_vtable (tree binfo, vec<constructor_elt, va_gc> *inits)
{
tree decl;
- layout_vtable_decl (binfo, VEC_length (constructor_elt, inits));
+ layout_vtable_decl (binfo, vec_safe_length (inits));
decl = get_vtbl_decl_for_binfo (binfo);
initialize_artificial_var (decl, inits);
dump_vtable (BINFO_TYPE (binfo), binfo, decl);
@@ -8104,7 +8105,7 @@ build_vtt (tree t)
tree type;
tree vtt;
tree index;
- VEC(constructor_elt,gc) *inits;
+ vec<constructor_elt, va_gc> *inits;
/* Build up the initializers for the VTT. */
inits = NULL;
@@ -8117,7 +8118,7 @@ build_vtt (tree t)
/* Figure out the type of the VTT. */
type = build_array_of_n_type (const_ptr_type_node,
- VEC_length (constructor_elt, inits));
+ inits->length ());
/* Now, build the VTT object itself. */
vtt = build_vtable (t, mangle_vtt_for_type (t), type);
@@ -8163,7 +8164,7 @@ typedef struct secondary_vptr_vtt_init_data_s
tree index;
/* Vector of initializers built up. */
- VEC(constructor_elt,gc) *inits;
+ vec<constructor_elt, va_gc> *inits;
/* The type being constructed by this secondary VTT. */
tree type_being_constructed;
@@ -8178,7 +8179,8 @@ typedef struct secondary_vptr_vtt_init_data_s
vtables for the BINFO-in-T variant. */
static void
-build_vtt_inits (tree binfo, tree t, VEC(constructor_elt,gc) **inits, tree *index)
+build_vtt_inits (tree binfo, tree t, vec<constructor_elt, va_gc> **inits,
+ tree *index)
{
int i;
tree b;
@@ -8338,7 +8340,7 @@ build_ctor_vtbl_group (tree binfo, tree t)
tree vtbl;
tree id;
tree vbase;
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *v;
/* See if we've already created this construction vtable group. */
id = mangle_ctor_vtbl_for_type (t, binfo);
@@ -8372,8 +8374,7 @@ build_ctor_vtbl_group (tree binfo, tree t)
}
/* Figure out the type of the construction vtable. */
- type = build_array_of_n_type (vtable_entry_type,
- VEC_length (constructor_elt, v));
+ type = build_array_of_n_type (vtable_entry_type, v->length ());
layout_type (type);
TREE_TYPE (vtbl) = type;
DECL_SIZE (vtbl) = DECL_SIZE_UNIT (vtbl) = NULL_TREE;
@@ -8401,7 +8402,7 @@ accumulate_vtbl_inits (tree binfo,
tree rtti_binfo,
tree vtbl,
tree t,
- VEC(constructor_elt,gc) **inits)
+ vec<constructor_elt, va_gc> **inits)
{
int i;
tree base_binfo;
@@ -8449,7 +8450,7 @@ dfs_accumulate_vtbl_inits (tree binfo,
tree rtti_binfo,
tree orig_vtbl,
tree t,
- VEC(constructor_elt,gc) **l)
+ vec<constructor_elt, va_gc> **l)
{
tree vtbl = NULL_TREE;
int ctor_vtbl_p = !SAME_BINFO_TYPE_P (BINFO_TYPE (rtti_binfo), t);
@@ -8509,7 +8510,7 @@ dfs_accumulate_vtbl_inits (tree binfo,
else if (!BINFO_NEW_VTABLE_MARKED (orig_binfo))
return;
- n_inits = VEC_length (constructor_elt, *l);
+ n_inits = vec_safe_length (*l);
if (!vtbl)
{
@@ -8535,7 +8536,7 @@ dfs_accumulate_vtbl_inits (tree binfo,
BINFO_VTABLE (binfo) = tree_cons (rtti_binfo, vtbl, BINFO_VTABLE (binfo));
else if (BINFO_PRIMARY_P (binfo) && BINFO_VIRTUAL_P (binfo))
/* Throw away any unneeded intializers. */
- VEC_truncate (constructor_elt, *l, n_inits);
+ (*l)->truncate (n_inits);
else
/* For an ordinary vtable, set BINFO_VTABLE. */
BINFO_VTABLE (binfo) = vtbl;
@@ -8572,13 +8573,13 @@ build_vtbl_initializer (tree binfo,
tree t,
tree rtti_binfo,
int* non_fn_entries_p,
- VEC(constructor_elt,gc) **inits)
+ vec<constructor_elt, va_gc> **inits)
{
tree v;
vtbl_init_data vid;
unsigned ix, jx;
tree vbinfo;
- VEC(tree,gc) *vbases;
+ vec<tree, va_gc> *vbases;
constructor_elt *e;
/* Initialize VID. */
@@ -8598,41 +8599,39 @@ build_vtbl_initializer (tree binfo,
/* Create an array for keeping track of the functions we've
processed. When we see multiple functions with the same
signature, we share the vcall offsets. */
- vid.fns = VEC_alloc (tree, gc, 32);
+ vec_alloc (vid.fns, 32);
/* Add the vcall and vbase offset entries. */
build_vcall_and_vbase_vtbl_entries (binfo, &vid);
/* Clear BINFO_VTABLE_PATH_MARKED; it's set by
build_vbase_offset_vtbl_entries. */
for (vbases = CLASSTYPE_VBASECLASSES (t), ix = 0;
- VEC_iterate (tree, vbases, ix, vbinfo); ix++)
+ vec_safe_iterate (vbases, ix, &vbinfo); ix++)
BINFO_VTABLE_PATH_MARKED (vbinfo) = 0;
/* If the target requires padding between data entries, add that now. */
if (TARGET_VTABLE_DATA_ENTRY_DISTANCE > 1)
{
- int n_entries = VEC_length (constructor_elt, vid.inits);
+ int n_entries = vec_safe_length (vid.inits);
- VEC_safe_grow (constructor_elt, gc, vid.inits,
- TARGET_VTABLE_DATA_ENTRY_DISTANCE * n_entries);
+ vec_safe_grow (vid.inits, TARGET_VTABLE_DATA_ENTRY_DISTANCE * n_entries);
/* Move data entries into their new positions and add padding
after the new positions. Iterate backwards so we don't
overwrite entries that we would need to process later. */
for (ix = n_entries - 1;
- VEC_iterate (constructor_elt, vid.inits, ix, e);
+ vid.inits->iterate (ix, &e);
ix--)
{
int j;
int new_position = (TARGET_VTABLE_DATA_ENTRY_DISTANCE * ix
+ (TARGET_VTABLE_DATA_ENTRY_DISTANCE - 1));
- VEC_replace (constructor_elt, vid.inits, new_position, *e);
+ (*vid.inits)[new_position] = *e;
for (j = 1; j < TARGET_VTABLE_DATA_ENTRY_DISTANCE; ++j)
{
- constructor_elt *f = &VEC_index (constructor_elt, vid.inits,
- new_position - j);
+ constructor_elt *f = &(*vid.inits)[new_position - j];
f->index = NULL_TREE;
f->value = build1 (NOP_EXPR, vtable_entry_type,
null_pointer_node);
@@ -8641,19 +8640,18 @@ build_vtbl_initializer (tree binfo,
}
if (non_fn_entries_p)
- *non_fn_entries_p = VEC_length (constructor_elt, vid.inits);
+ *non_fn_entries_p = vec_safe_length (vid.inits);
/* The initializers for virtual functions were built up in reverse
order. Straighten them out and add them to the running list in one
step. */
- jx = VEC_length (constructor_elt, *inits);
- VEC_safe_grow (constructor_elt, gc, *inits,
- (jx + VEC_length (constructor_elt, vid.inits)));
+ jx = vec_safe_length (*inits);
+ vec_safe_grow (*inits, jx + vid.inits->length ());
- for (ix = VEC_length (constructor_elt, vid.inits) - 1;
- VEC_iterate (constructor_elt, vid.inits, ix, e);
+ for (ix = vid.inits->length () - 1;
+ vid.inits->iterate (ix, &e);
ix--, jx++)
- VEC_replace (constructor_elt, *inits, jx, *e);
+ (**inits)[jx] = *e;
/* Go through all the ordinary virtual functions, building up
initializers. */
@@ -9053,7 +9051,7 @@ add_vcall_offset (tree orig_fn, tree binfo, vtbl_init_data *vid)
signature as FN, then we do not need a second vcall offset.
Check the list of functions already present in the derived
class vtable. */
- FOR_EACH_VEC_ELT (tree, vid->fns, i, derived_entry)
+ FOR_EACH_VEC_SAFE_ELT (vid->fns, i, derived_entry)
{
if (same_signature_p (derived_entry, orig_fn)
/* We only use one vcall offset for virtual destructors,
@@ -9069,8 +9067,7 @@ add_vcall_offset (tree orig_fn, tree binfo, vtbl_init_data *vid)
if (vid->binfo == TYPE_BINFO (vid->derived))
{
tree_pair_s elt = {orig_fn, vid->index};
- VEC_safe_push (tree_pair_s, gc, CLASSTYPE_VCALL_INDICES (vid->derived),
- elt);
+ vec_safe_push (CLASSTYPE_VCALL_INDICES (vid->derived), elt);
}
/* The next vcall offset will be found at a more negative
@@ -9079,7 +9076,7 @@ add_vcall_offset (tree orig_fn, tree binfo, vtbl_init_data *vid)
ssize_int (TARGET_VTABLE_DATA_ENTRY_DISTANCE));
/* Keep track of this function. */
- VEC_safe_push (tree, gc, vid->fns, orig_fn);
+ vec_safe_push (vid->fns, orig_fn);
if (vid->generate_vcall_entries)
{
diff --git a/gcc/cp/cp-gimplify.c b/gcc/cp/cp-gimplify.c
index f715e963c49..cde480d3548 100644
--- a/gcc/cp/cp-gimplify.c
+++ b/gcc/cp/cp-gimplify.c
@@ -824,7 +824,7 @@ omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
struct cp_genericize_data
{
struct pointer_set_t *p_set;
- VEC (tree, heap) *bind_expr_stack;
+ vec<tree> bind_expr_stack;
struct cp_genericize_omp_taskreg *omp_ctx;
};
@@ -1015,10 +1015,10 @@ cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
: OMP_CLAUSE_DEFAULT_PRIVATE);
}
}
- VEC_safe_push (tree, heap, wtd->bind_expr_stack, stmt);
+ wtd->bind_expr_stack.safe_push (stmt);
cp_walk_tree (&BIND_EXPR_BODY (stmt),
cp_genericize_r, data, NULL);
- VEC_pop (tree, wtd->bind_expr_stack);
+ wtd->bind_expr_stack.pop ();
}
else if (TREE_CODE (stmt) == USING_STMT)
@@ -1028,12 +1028,11 @@ cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
/* Get the innermost inclosing GIMPLE_BIND that has a non NULL
BLOCK, and append an IMPORTED_DECL to its
BLOCK_VARS chained list. */
- if (wtd->bind_expr_stack)
+ if (wtd->bind_expr_stack.exists ())
{
int i;
- for (i = VEC_length (tree, wtd->bind_expr_stack) - 1; i >= 0; i--)
- if ((block = BIND_EXPR_BLOCK (VEC_index (tree,
- wtd->bind_expr_stack, i))))
+ for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
+ if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
break;
}
if (block)
@@ -1151,11 +1150,11 @@ cp_genericize_tree (tree* t_p)
struct cp_genericize_data wtd;
wtd.p_set = pointer_set_create ();
- wtd.bind_expr_stack = NULL;
+ wtd.bind_expr_stack.create (0);
wtd.omp_ctx = NULL;
cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
pointer_set_destroy (wtd.p_set);
- VEC_free (tree, heap, wtd.bind_expr_stack);
+ wtd.bind_expr_stack.release ();
}
void
diff --git a/gcc/cp/cp-tree.h b/gcc/cp/cp-tree.h
index 7dd6679868c..8592125e4cf 100644
--- a/gcc/cp/cp-tree.h
+++ b/gcc/cp/cp-tree.h
@@ -535,7 +535,7 @@ typedef enum impl_conv_void {
struct GTY (()) tree_default_arg {
struct tree_common common;
struct cp_token_cache *tokens;
- VEC(tree,gc) *instantiations;
+ vec<tree, va_gc> *instantiations;
};
@@ -709,7 +709,7 @@ struct GTY (()) tree_lambda_expr
tree return_type;
tree extra_scope;
tree closure;
- VEC(tree,gc)* pending_proxies;
+ vec<tree, va_gc> *pending_proxies;
location_t locus;
enum cp_lambda_default_capture_mode_type default_capture_mode;
int discriminator;
@@ -735,8 +735,6 @@ struct GTY(()) qualified_typedef_usage_s {
location_t locus;
};
typedef struct qualified_typedef_usage_s qualified_typedef_usage_t;
-DEF_VEC_O (qualified_typedef_usage_t);
-DEF_VEC_ALLOC_O (qualified_typedef_usage_t,gc);
/* Non-zero if this template specialization has access violations that
should be rechecked when the function is instantiated outside argument
@@ -748,7 +746,7 @@ DEF_VEC_ALLOC_O (qualified_typedef_usage_t,gc);
struct GTY(()) tree_template_info {
struct tree_common common;
- VEC(qualified_typedef_usage_t,gc) *typedefs_needing_access_checking;
+ vec<qualified_typedef_usage_t, va_gc> *typedefs_needing_access_checking;
};
enum cp_tree_node_structure_enum {
@@ -986,14 +984,14 @@ extern GTY(()) tree cp_global_trees[CPTI_MAX];
/* Global state. */
struct GTY(()) saved_scope {
- VEC(cxx_saved_binding,gc) *old_bindings;
+ vec<cxx_saved_binding, va_gc> *old_bindings;
tree old_namespace;
- VEC(tree,gc) *decl_ns_list;
+ vec<tree, va_gc> *decl_ns_list;
tree class_name;
tree class_type;
tree access_specifier;
tree function_decl;
- VEC(tree,gc) *lang_base;
+ vec<tree, va_gc> *lang_base;
tree lang_name;
tree template_parms;
cp_binding_level *x_previous_class_level;
@@ -1102,7 +1100,7 @@ struct GTY(()) language_function {
htab_t GTY((param_is(struct named_label_entry))) x_named_labels;
cp_binding_level *bindings;
- VEC(tree,gc) *x_local_names;
+ vec<tree, va_gc> *x_local_names;
htab_t GTY((param_is (struct cxx_int_tree_map))) extern_decl_map;
};
@@ -1304,8 +1302,6 @@ typedef struct GTY (()) tree_pair_s {
tree value;
} tree_pair_s;
typedef tree_pair_s *tree_pair_p;
-DEF_VEC_O (tree_pair_s);
-DEF_VEC_ALLOC_O (tree_pair_s,gc);
/* This is a few header flags for 'struct lang_type'. Actually,
all but the first are used only for lang_type_class; they
@@ -1401,15 +1397,15 @@ struct GTY(()) lang_type_class {
unsigned dummy : 2;
tree primary_base;
- VEC(tree_pair_s,gc) *vcall_indices;
+ vec<tree_pair_s, va_gc> *vcall_indices;
tree vtables;
tree typeinfo_var;
- VEC(tree,gc) *vbases;
+ vec<tree, va_gc> *vbases;
binding_table nested_udts;
tree as_base;
- VEC(tree,gc) *pure_virtuals;
+ vec<tree, va_gc> *pure_virtuals;
tree friend_classes;
- VEC(tree,gc) * GTY((reorder ("resort_type_method_vec"))) methods;
+ vec<tree, va_gc> * GTY((reorder ("resort_type_method_vec"))) methods;
tree key_method;
tree decl_list;
tree template_info;
@@ -1603,7 +1599,7 @@ struct GTY((variable_size)) lang_type {
/* A FUNCTION_DECL or OVERLOAD for the constructors for NODE. These
are the constructors that take an in-charge parameter. */
#define CLASSTYPE_CONSTRUCTORS(NODE) \
- (VEC_index (tree, CLASSTYPE_METHOD_VEC (NODE), CLASSTYPE_CONSTRUCTOR_SLOT))
+ ((*CLASSTYPE_METHOD_VEC (NODE))[CLASSTYPE_CONSTRUCTOR_SLOT])
/* A FUNCTION_DECL for the destructor for NODE. These are the
destructors that take an in-charge parameter. If
@@ -1611,7 +1607,7 @@ struct GTY((variable_size)) lang_type {
until the destructor is created with lazily_declare_fn. */
#define CLASSTYPE_DESTRUCTORS(NODE) \
(CLASSTYPE_METHOD_VEC (NODE) \
- ? VEC_index (tree, CLASSTYPE_METHOD_VEC (NODE), CLASSTYPE_DESTRUCTOR_SLOT) \
+ ? (*CLASSTYPE_METHOD_VEC (NODE))[CLASSTYPE_DESTRUCTOR_SLOT] \
: NULL_TREE)
/* A dictionary of the nested user-defined-types (class-types, or enums)
@@ -1665,7 +1661,7 @@ struct GTY((variable_size)) lang_type {
#define TYPE_JAVA_INTERFACE(NODE) \
(LANG_TYPE_CLASS_CHECK (NODE)->java_interface)
-/* A VEC(tree) of virtual functions which cannot be inherited by
+/* A vec<tree> of virtual functions which cannot be inherited by
derived classes. When deriving from this type, the derived
class must provide its own definition for each of these functions. */
#define CLASSTYPE_PURE_VIRTUALS(NODE) \
@@ -1815,7 +1811,7 @@ struct GTY((variable_size)) lang_type {
/* Used by various search routines. */
#define IDENTIFIER_MARKED(NODE) TREE_LANG_FLAG_0 (NODE)
-/* A VEC(tree_pair_s) of the vcall indices associated with the class
+/* A vec<tree_pair_s> of the vcall indices associated with the class
NODE. The PURPOSE of each element is a FUNCTION_DECL for a virtual
function. The VALUE is the index into the virtual table where the
vcall offset for that function is stored, when NODE is a virtual
@@ -3345,8 +3341,7 @@ more_aggr_init_expr_args_p (const aggr_init_expr_arg_iterator *iter)
(TREE_CODE (NODE) == CONSTRUCTOR && TREE_HAS_CONSTRUCTOR (NODE))
#define EMPTY_CONSTRUCTOR_P(NODE) (TREE_CODE (NODE) == CONSTRUCTOR \
- && VEC_empty (constructor_elt, \
- CONSTRUCTOR_ELTS (NODE)) \
+ && vec_safe_is_empty(CONSTRUCTOR_ELTS(NODE))\
&& !TREE_HAS_CONSTRUCTOR (NODE))
/* True if NODE is a init-list used as a direct-initializer, i.e.
@@ -4281,7 +4276,7 @@ extern int current_class_depth;
/* An array of all local classes present in this translation unit, in
declaration order. */
-extern GTY(()) VEC(tree,gc) *local_classes;
+extern GTY(()) vec<tree, va_gc> *local_classes;
/* Here's where we control how name mangling takes place. */
@@ -4917,20 +4912,20 @@ extern bool sufficient_parms_p (const_tree);
extern tree type_decays_to (tree);
extern tree build_user_type_conversion (tree, tree, int,
tsubst_flags_t);
-extern tree build_new_function_call (tree, VEC(tree,gc) **, bool,
+extern tree build_new_function_call (tree, vec<tree, va_gc> **, bool,
tsubst_flags_t);
-extern tree build_operator_new_call (tree, VEC(tree,gc) **, tree *,
+extern tree build_operator_new_call (tree, vec<tree, va_gc> **, tree *,
tree *, tree, tree *,
tsubst_flags_t);
-extern tree build_new_method_call (tree, tree, VEC(tree,gc) **,
+extern tree build_new_method_call (tree, tree, vec<tree, va_gc> **,
tree, int, tree *,
tsubst_flags_t);
-extern tree build_special_member_call (tree, tree, VEC(tree,gc) **,
+extern tree build_special_member_call (tree, tree, vec<tree, va_gc> **,
tree, int, tsubst_flags_t);
extern tree build_new_op (location_t, enum tree_code,
int, tree, tree, tree, tree *,
tsubst_flags_t);
-extern tree build_op_call (tree, VEC(tree,gc) **,
+extern tree build_op_call (tree, vec<tree, va_gc> **,
tsubst_flags_t);
extern tree build_op_delete_call (enum tree_code, tree, tree,
bool, tree, tree,
@@ -4954,7 +4949,7 @@ extern tree convert_for_arg_passing (tree, tree, tsubst_flags_t);
extern bool is_properly_derived_from (tree, tree);
extern tree initialize_reference (tree, tree, int,
tsubst_flags_t);
-extern tree extend_ref_init_temps (tree, tree, VEC(tree,gc)**);
+extern tree extend_ref_init_temps (tree, tree, vec<tree, va_gc>**);
extern tree make_temporary_var_for_ref_to_temp (tree, tree);
extern tree strip_top_quals (tree);
extern bool reference_related_p (tree, tree);
@@ -5161,13 +5156,13 @@ extern tree check_elaborated_type_specifier (enum tag_types, tree, bool);
extern void warn_extern_redeclared_static (tree, tree);
extern tree cxx_comdat_group (tree);
extern bool cp_missing_noreturn_ok_p (tree);
-extern void initialize_artificial_var (tree, VEC(constructor_elt,gc) *);
+extern void initialize_artificial_var (tree, vec<constructor_elt, va_gc> *);
extern tree check_var_type (tree, tree);
extern tree reshape_init (tree, tree, tsubst_flags_t);
extern tree next_initializable_field (tree);
extern bool defer_mark_used_calls;
-extern GTY(()) VEC(tree, gc) *deferred_mark_used_calls;
+extern GTY(()) vec<tree, va_gc> *deferred_mark_used_calls;
extern tree finish_case_label (location_t, tree, tree);
extern tree cxx_maybe_build_cleanup (tree, tsubst_flags_t);
@@ -5199,7 +5194,7 @@ extern void determine_visibility (tree);
extern void constrain_class_visibility (tree);
extern void import_export_decl (tree);
extern tree build_cleanup (tree);
-extern tree build_offset_ref_call_from_tree (tree, VEC(tree,gc) **,
+extern tree build_offset_ref_call_from_tree (tree, vec<tree, va_gc> **,
tsubst_flags_t);
extern bool decl_constant_var_p (tree);
extern bool decl_maybe_constant_var_p (tree);
@@ -5285,8 +5280,8 @@ extern tree build_zero_init (tree, tree, bool);
extern tree build_value_init (tree, tsubst_flags_t);
extern tree build_value_init_noctor (tree, tsubst_flags_t);
extern tree build_offset_ref (tree, tree, bool);
-extern tree build_new (VEC(tree,gc) **, tree, tree,
- VEC(tree,gc) **, int,
+extern tree build_new (vec<tree, va_gc> **, tree, tree,
+ vec<tree, va_gc> **, int,
tsubst_flags_t);
extern tree get_temp_regvar (tree, tree);
extern tree build_vec_init (tree, tree, tree, bool, int,
@@ -5408,7 +5403,7 @@ extern tree make_pack_expansion (tree);
extern bool check_for_bare_parameter_packs (tree);
extern tree build_template_info (tree, tree);
extern tree get_template_info (const_tree);
-extern VEC(qualified_typedef_usage_t,gc)* get_types_needing_access_check (tree);
+extern vec<qualified_typedef_usage_t, va_gc> *get_types_needing_access_check (tree);
extern int template_class_depth (tree);
extern int is_specialization_of (tree, tree);
extern bool is_specialization_of_friend (tree, tree);
@@ -5435,7 +5430,7 @@ extern bool any_dependent_template_arguments_p (const_tree);
extern bool dependent_template_p (tree);
extern bool dependent_template_id_p (tree, tree);
extern bool type_dependent_expression_p (tree);
-extern bool any_type_dependent_arguments_p (const VEC(tree,gc) *);
+extern bool any_type_dependent_arguments_p (const vec<tree, va_gc> *);
extern bool any_type_dependent_elements_p (const_tree);
extern bool type_dependent_expression_p_push (tree);
extern bool value_dependent_expression_p (tree);
@@ -5445,7 +5440,7 @@ extern bool dependent_omp_for_p (tree, tree, tree, tree);
extern tree resolve_typename_type (tree, bool);
extern tree template_for_substitution (tree);
extern tree build_non_dependent_expr (tree);
-extern void make_args_non_dependent (VEC(tree,gc) *);
+extern void make_args_non_dependent (vec<tree, va_gc> *);
extern bool reregister_specialization (tree, tree, tree);
extern tree fold_non_dependent_expr (tree);
extern tree fold_non_dependent_expr_sfinae (tree, tsubst_flags_t);
@@ -5476,7 +5471,7 @@ extern void finish_repo (void);
/* in rtti.c */
/* A vector of all tinfo decls that haven't been emitted yet. */
-extern GTY(()) VEC(tree,gc) *unemitted_tinfo_decls;
+extern GTY(()) vec<tree, va_gc> *unemitted_tinfo_decls;
extern void init_rtti_processing (void);
extern tree build_typeid (tree);
@@ -5543,17 +5538,15 @@ typedef struct GTY(()) deferred_access_check {
/* The location of this access. */
location_t loc;
} deferred_access_check;
-DEF_VEC_O(deferred_access_check);
-DEF_VEC_ALLOC_O(deferred_access_check,gc);
/* in semantics.c */
extern void push_deferring_access_checks (deferring_kind);
extern void resume_deferring_access_checks (void);
extern void stop_deferring_access_checks (void);
extern void pop_deferring_access_checks (void);
-extern VEC (deferred_access_check,gc)* get_deferred_access_checks (void);
+extern vec<deferred_access_check, va_gc> *get_deferred_access_checks (void);
extern void pop_to_parent_deferring_access_checks (void);
-extern bool perform_access_checks (VEC (deferred_access_check,gc)*,
+extern bool perform_access_checks (vec<deferred_access_check, va_gc> *,
tsubst_flags_t);
extern bool perform_deferred_access_checks (tsubst_flags_t);
extern bool perform_or_defer_access_check (tree, tree, tree,
@@ -5617,7 +5610,7 @@ extern tree maybe_constant_init (tree);
extern bool is_sub_constant_expr (tree);
extern bool reduced_constant_expression_p (tree);
extern void explain_invalid_constexpr_fn (tree);
-extern VEC(tree,heap)* cx_error_context (void);
+extern vec<tree> cx_error_context (void);
enum {
BCS_NO_SCOPE = 1,
@@ -5638,9 +5631,9 @@ extern tree finish_stmt_expr_expr (tree, tree);
extern tree finish_stmt_expr (tree, bool);
extern tree stmt_expr_value_expr (tree);
bool empty_expr_stmt_p (tree);
-extern tree perform_koenig_lookup (tree, VEC(tree,gc) *, bool,
+extern tree perform_koenig_lookup (tree, vec<tree, va_gc> *, bool,
tsubst_flags_t);
-extern tree finish_call_expr (tree, VEC(tree,gc) **, bool,
+extern tree finish_call_expr (tree, vec<tree, va_gc> **, bool,
bool, tsubst_flags_t);
extern tree finish_increment_expr (tree, enum tree_code);
extern tree finish_this_expr (void);
@@ -5766,7 +5759,7 @@ extern tree build_min (enum tree_code, tree, ...);
extern tree build_min_nt_loc (location_t, enum tree_code,
...);
extern tree build_min_non_dep (enum tree_code, tree, ...);
-extern tree build_min_non_dep_call_vec (tree, tree, VEC(tree,gc) *);
+extern tree build_min_non_dep_call_vec (tree, tree, vec<tree, va_gc> *);
extern tree build_cplus_new (tree, tree, tsubst_flags_t);
extern tree build_aggr_init_expr (tree, tree, tsubst_flags_t);
extern tree get_target_expr (tree);
@@ -5887,7 +5880,7 @@ extern tree get_member_function_from_ptrfunc (tree *, tree, tsubst_flags_t);
extern tree cp_build_function_call (tree, tree, tsubst_flags_t);
extern tree cp_build_function_call_nary (tree, tsubst_flags_t, ...)
ATTRIBUTE_SENTINEL;
-extern tree cp_build_function_call_vec (tree, VEC(tree,gc) **,
+extern tree cp_build_function_call_vec (tree, vec<tree, va_gc> **,
tsubst_flags_t);
extern tree build_x_binary_op (location_t,
enum tree_code, tree,
@@ -5908,8 +5901,8 @@ extern tree build_x_conditional_expr (location_t, tree, tree, tree,
tsubst_flags_t);
extern tree build_x_compound_expr_from_list (tree, expr_list_kind,
tsubst_flags_t);
-extern tree build_x_compound_expr_from_vec (VEC(tree,gc) *, const char *,
- tsubst_flags_t);
+extern tree build_x_compound_expr_from_vec (vec<tree, va_gc> *,
+ const char *, tsubst_flags_t);
extern tree build_x_compound_expr (location_t, tree, tree,
tsubst_flags_t);
extern tree build_compound_expr (location_t, tree, tree);
@@ -5985,7 +5978,7 @@ extern void complete_type_check_abstract (tree);
extern int abstract_virtuals_error (tree, tree);
extern int abstract_virtuals_error_sfinae (tree, tree, tsubst_flags_t);
-extern tree store_init_value (tree, tree, VEC(tree,gc)**, int);
+extern tree store_init_value (tree, tree, vec<tree, va_gc>**, int);
extern void check_narrowing (tree, tree);
extern tree digest_init (tree, tree, tsubst_flags_t);
extern tree digest_init_flags (tree, tree, int);
diff --git a/gcc/cp/cvt.c b/gcc/cp/cvt.c
index d30c7e5309e..1dc789855cc 100644
--- a/gcc/cp/cvt.c
+++ b/gcc/cp/cvt.c
@@ -561,7 +561,7 @@ force_rvalue (tree expr, tsubst_flags_t complain)
tree type = TREE_TYPE (expr);
if (MAYBE_CLASS_TYPE_P (type) && TREE_CODE (expr) != TARGET_EXPR)
{
- VEC(tree,gc) *args = make_tree_vector_single (expr);
+ vec<tree, va_gc> *args = make_tree_vector_single (expr);
expr = build_special_member_call (NULL_TREE, complete_ctor_identifier,
&args, type, LOOKUP_NORMAL, complain);
release_tree_vector (args);
@@ -855,7 +855,7 @@ ocp_convert (tree type, tree expr, int convtype, int flags,
ctor = build_user_type_conversion (type, ctor, flags, complain);
else
{
- VEC(tree,gc) *ctor_vec = make_tree_vector_single (ctor);
+ vec<tree, va_gc> *ctor_vec = make_tree_vector_single (ctor);
ctor = build_special_member_call (NULL_TREE,
complete_ctor_identifier,
&ctor_vec,
diff --git a/gcc/cp/decl.c b/gcc/cp/decl.c
index 045e99b2080..bae48cefdf4 100644
--- a/gcc/cp/decl.c
+++ b/gcc/cp/decl.c
@@ -91,7 +91,7 @@ static tree lookup_and_check_tag (enum tag_types, tree, tag_scope, bool);
static int walk_namespaces_r (tree, walk_namespaces_fn, void *);
static void maybe_deduce_size_from_array_init (tree, tree);
static void layout_var_decl (tree);
-static tree check_initializer (tree, tree, int, VEC(tree,gc) **);
+static tree check_initializer (tree, tree, int, vec<tree, va_gc> **);
static void make_rtl_for_nonlocal_decl (tree, tree, const char *);
static void save_function_data (tree);
static void copy_type_enum (tree , tree);
@@ -218,7 +218,7 @@ struct GTY(()) named_label_entry {
tree names_in_scope;
/* A vector of all decls from all binding levels that would be
crossed by a backward branch to the label. */
- VEC(tree,gc) *bad_decls;
+ vec<tree, va_gc> *bad_decls;
/* A list of uses of the label, before the label is defined. */
struct named_label_use_entry *uses;
@@ -241,7 +241,7 @@ int function_depth;
/* To avoid unwanted recursion, finish_function defers all mark_used calls
encountered during its execution until it finishes. */
bool defer_mark_used_calls;
-VEC(tree, gc) *deferred_mark_used_calls;
+vec<tree, va_gc> *deferred_mark_used_calls;
/* States indicating how grokdeclarator() should handle declspecs marked
with __attribute__((deprecated)). An object declared as
@@ -258,10 +258,8 @@ typedef struct GTY(()) incomplete_var_d {
tree incomplete_type;
} incomplete_var;
-DEF_VEC_O(incomplete_var);
-DEF_VEC_ALLOC_O(incomplete_var,gc);
-static GTY(()) VEC(incomplete_var,gc) *incomplete_vars;
+static GTY(()) vec<incomplete_var, va_gc> *incomplete_vars;
/* Returns the kind of template specialization we are currently
processing, given that it's declaration contained N_CLASS_SCOPES
@@ -486,7 +484,7 @@ poplevel_named_label_1 (void **slot, void *data)
? DECL_CHAIN (decl)
: TREE_CHAIN (decl)))
if (decl_jump_unsafe (decl))
- VEC_safe_push (tree, gc, ent->bad_decls, decl);
+ vec_safe_push (ent->bad_decls, decl);
ent->binding_level = obl;
ent->names_in_scope = obl->names;
@@ -567,8 +565,7 @@ poplevel (int keep, int reverse, int functionbody)
functionbody = 0;
subblocks = functionbody >= 0 ? current_binding_level->blocks : 0;
- gcc_assert (!VEC_length(cp_class_binding,
- current_binding_level->class_shadowed));
+ gcc_assert (!vec_safe_length (current_binding_level->class_shadowed));
/* We used to use KEEP == 2 to indicate that the new block should go
at the beginning of the list of blocks at this binding level,
@@ -708,9 +705,9 @@ poplevel (int keep, int reverse, int functionbody)
/* Add it to the list of dead variables in the next
outermost binding to that we can remove these when we
leave that binding. */
- VEC_safe_push (tree, gc,
- current_binding_level->level_chain->dead_vars_from_for,
- link);
+ vec_safe_push (
+ current_binding_level->level_chain->dead_vars_from_for,
+ link);
/* Although we don't pop the cxx_binding, we do clear
its SCOPE since the scope is going away now. */
@@ -739,8 +736,8 @@ poplevel (int keep, int reverse, int functionbody)
/* Remove declarations for any `for' variables from inner scopes
that we kept around. */
- FOR_EACH_VEC_ELT_REVERSE (tree, current_binding_level->dead_vars_from_for,
- ix, decl)
+ FOR_EACH_VEC_SAFE_ELT_REVERSE (current_binding_level->dead_vars_from_for,
+ ix, decl)
pop_binding (DECL_NAME (decl), decl);
/* Restore the IDENTIFIER_TYPE_VALUEs. */
@@ -749,9 +746,8 @@ poplevel (int keep, int reverse, int functionbody)
SET_IDENTIFIER_TYPE_VALUE (TREE_PURPOSE (link), TREE_VALUE (link));
/* Restore the IDENTIFIER_LABEL_VALUEs for local labels. */
- FOR_EACH_VEC_ELT_REVERSE (cp_label_binding,
- current_binding_level->shadowed_labels,
- ix, label_bind)
+ FOR_EACH_VEC_SAFE_ELT_REVERSE (current_binding_level->shadowed_labels,
+ ix, label_bind)
pop_local_label (label_bind->label, label_bind->prev_value);
/* There may be OVERLOADs (wrapped in TREE_LISTs) on the BLOCK_VARs
@@ -866,9 +862,9 @@ int
wrapup_globals_for_namespace (tree name_space, void* data)
{
cp_binding_level *level = NAMESPACE_LEVEL (name_space);
- VEC(tree,gc) *statics = level->static_decls;
- tree *vec = VEC_address (tree, statics);
- int len = VEC_length (tree, statics);
+ vec<tree, va_gc> *statics = level->static_decls;
+ tree *vec = statics->address ();
+ int len = statics->length ();
int last_time = (data != 0);
if (last_time)
@@ -917,10 +913,10 @@ push_local_name (tree decl)
name = DECL_NAME (decl);
- nelts = VEC_length (tree, local_names);
+ nelts = vec_safe_length (local_names);
for (i = 0; i < nelts; i++)
{
- t = VEC_index (tree, local_names, i);
+ t = (*local_names)[i];
if (DECL_NAME (t) == name)
{
if (!DECL_LANG_SPECIFIC (decl))
@@ -931,13 +927,13 @@ push_local_name (tree decl)
else
DECL_DISCRIMINATOR (decl) = 1;
- VEC_replace (tree, local_names, i, decl);
+ (*local_names)[i] = decl;
timevar_stop (TV_NAME_LOOKUP);
return;
}
}
- VEC_safe_push (tree, gc, local_names, decl);
+ vec_safe_push (local_names, decl);
timevar_stop (TV_NAME_LOOKUP);
}
@@ -2706,8 +2702,7 @@ declare_local_label (tree id)
decl = make_label_decl (id, /*local_p=*/1);
bind.label = decl;
- VEC_safe_push (cp_label_binding, gc, current_binding_level->shadowed_labels,
- bind);
+ vec_safe_push (current_binding_level->shadowed_labels, bind);
return decl;
}
@@ -2890,14 +2885,14 @@ check_goto (tree decl)
}
if (ent->in_try_scope || ent->in_catch_scope
- || ent->in_omp_scope || !VEC_empty (tree, ent->bad_decls))
+ || ent->in_omp_scope || !vec_safe_is_empty (ent->bad_decls))
{
permerror (input_location, "jump to label %q+D", decl);
permerror (input_location, " from here");
identified = true;
}
- FOR_EACH_VEC_ELT (tree, ent->bad_decls, ix, bad)
+ FOR_EACH_VEC_SAFE_ELT (ent->bad_decls, ix, bad)
{
int u = decl_jump_unsafe (bad);
@@ -4824,10 +4819,10 @@ maybe_deduce_size_from_array_init (tree decl, tree init)
initializer. */
if (initializer && TREE_CODE (initializer) == CONSTRUCTOR)
{
- VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (initializer);
+ vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (initializer);
constructor_elt *ce;
HOST_WIDE_INT i;
- FOR_EACH_VEC_ELT (constructor_elt, v, i, ce)
+ FOR_EACH_VEC_SAFE_ELT (v, i, ce)
if (!check_array_designated_initializer (ce, i))
failure = 1;
}
@@ -5308,7 +5303,7 @@ reshape_init_r (tree type, reshape_iter *d, bool first_initializer_p,
}
else if (first_initializer_p && d->cur != d->end)
{
- VEC(constructor_elt, gc) *v = 0;
+ vec<constructor_elt, va_gc> *v = 0;
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, d->cur->value);
if (has_designator_problem (d, complain))
@@ -5382,10 +5377,9 @@ reshape_init_r (tree type, reshape_iter *d, bool first_initializer_p,
element (as allowed by [dcl.init.string]). */
if (!first_initializer_p
&& TREE_CODE (str_init) == CONSTRUCTOR
- && VEC_length (constructor_elt, CONSTRUCTOR_ELTS (str_init)) == 1)
+ && vec_safe_length (CONSTRUCTOR_ELTS (str_init)) == 1)
{
- str_init = VEC_index (constructor_elt,
- CONSTRUCTOR_ELTS (str_init), 0).value;
+ str_init = (*CONSTRUCTOR_ELTS (str_init))[0].value;
}
/* If it's a string literal, then it's the initializer for the array
@@ -5453,15 +5447,15 @@ reshape_init_r (tree type, reshape_iter *d, bool first_initializer_p,
struct S { int a; int b; };
struct S a[] = { 1, 2, 3, 4 };
- Here INIT will hold a VEC of four elements, rather than a
- VEC of two elements, each itself a VEC of two elements. This
+ Here INIT will hold a vector of four elements, rather than a
+ vector of two elements, each itself a vector of two elements. This
routine transforms INIT from the former form into the latter. The
revised CONSTRUCTOR node is returned. */
tree
reshape_init (tree type, tree init, tsubst_flags_t complain)
{
- VEC(constructor_elt, gc) *v;
+ vec<constructor_elt, va_gc> *v;
reshape_iter d;
tree new_init;
@@ -5471,12 +5465,12 @@ reshape_init (tree type, tree init, tsubst_flags_t complain)
/* An empty constructor does not need reshaping, and it is always a valid
initializer. */
- if (VEC_empty (constructor_elt, v))
+ if (vec_safe_is_empty (v))
return init;
/* Recurse on this CONSTRUCTOR. */
- d.cur = &VEC_index (constructor_elt, v, 0);
- d.end = d.cur + VEC_length (constructor_elt, v);
+ d.cur = &(*v)[0];
+ d.end = d.cur + v->length ();
new_init = reshape_init_r (type, &d, true, complain);
if (new_init == error_mark_node)
@@ -5546,7 +5540,7 @@ build_aggr_init_full_exprs (tree decl, tree init, int flags)
evaluated dynamically to initialize DECL. */
static tree
-check_initializer (tree decl, tree init, int flags, VEC(tree,gc) **cleanups)
+check_initializer (tree decl, tree init, int flags, vec<tree, va_gc> **cleanups)
{
tree type = TREE_TYPE (decl);
tree init_code = NULL;
@@ -5586,7 +5580,7 @@ check_initializer (tree decl, tree init, int flags, VEC(tree,gc) **cleanups)
if (init && BRACE_ENCLOSED_INITIALIZER_P (init))
{
- int init_len = VEC_length (constructor_elt, CONSTRUCTOR_ELTS (init));
+ int init_len = vec_safe_length (CONSTRUCTOR_ELTS (init));
if (SCALAR_TYPE_P (type))
{
if (init_len == 0)
@@ -5991,7 +5985,7 @@ initialize_local_var (tree decl, tree init)
back end. */
void
-initialize_artificial_var (tree decl, VEC(constructor_elt,gc) *v)
+initialize_artificial_var (tree decl, vec<constructor_elt, va_gc> *v)
{
tree init;
gcc_assert (DECL_ARTIFICIAL (decl));
@@ -6017,15 +6011,14 @@ type_dependent_init_p (tree init)
else if (TREE_CODE (init) == CONSTRUCTOR)
/* A brace-enclosed initializer, e.g.: int i = { 3 }; ? */
{
- VEC(constructor_elt, gc) *elts;
+ vec<constructor_elt, va_gc> *elts;
size_t nelts;
size_t i;
elts = CONSTRUCTOR_ELTS (init);
- nelts = VEC_length (constructor_elt, elts);
+ nelts = vec_safe_length (elts);
for (i = 0; i < nelts; ++i)
- if (type_dependent_init_p (VEC_index (constructor_elt,
- elts, i).value))
+ if (type_dependent_init_p ((*elts)[i].value))
return true;
}
else
@@ -6047,15 +6040,14 @@ value_dependent_init_p (tree init)
else if (TREE_CODE (init) == CONSTRUCTOR)
/* A brace-enclosed initializer, e.g.: int i = { 3 }; ? */
{
- VEC(constructor_elt, gc) *elts;
+ vec<constructor_elt, va_gc> *elts;
size_t nelts;
size_t i;
elts = CONSTRUCTOR_ELTS (init);
- nelts = VEC_length (constructor_elt, elts);
+ nelts = vec_safe_length (elts);
for (i = 0; i < nelts; ++i)
- if (value_dependent_init_p (VEC_index (constructor_elt,
- elts, i).value))
+ if (value_dependent_init_p ((*elts)[i].value))
return true;
}
else
@@ -6081,7 +6073,7 @@ cp_finish_decl (tree decl, tree init, bool init_const_expr_p,
tree asmspec_tree, int flags)
{
tree type;
- VEC(tree,gc) *cleanups = NULL;
+ vec<tree, va_gc> *cleanups = NULL;
const char *asmspec = NULL;
int was_readonly = 0;
bool var_definition_p = false;
@@ -6489,7 +6481,7 @@ cp_finish_decl (tree decl, tree init, bool init_const_expr_p,
if (cleanups)
{
unsigned i; tree t;
- FOR_EACH_VEC_ELT (tree, cleanups, i, t)
+ FOR_EACH_VEC_ELT (*cleanups, i, t)
push_cleanup (decl, t, false);
release_tree_vector (cleanups);
}
@@ -6769,7 +6761,7 @@ register_dtor_fn (tree decl)
/* Find the destructor. */
idx = lookup_fnfields_1 (type, complete_dtor_identifier);
gcc_assert (idx >= 0);
- cleanup = VEC_index (tree, CLASSTYPE_METHOD_VEC (type), idx);
+ cleanup = (*CLASSTYPE_METHOD_VEC (type))[idx];
/* Make sure it is accessible. */
perform_or_defer_access_check (TYPE_BINFO (type), cleanup, cleanup,
tf_warning_or_error);
@@ -7053,13 +7045,13 @@ cp_complete_array_type (tree *ptype, tree initial_value, bool do_default)
we should just call reshape_init here? */
if (char_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (*ptype)))
&& TREE_CODE (initial_value) == CONSTRUCTOR
- && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (initial_value)))
+ && !vec_safe_is_empty (CONSTRUCTOR_ELTS (initial_value)))
{
- VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (initial_value);
- tree value = VEC_index (constructor_elt, v, 0).value;
+ vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (initial_value);
+ tree value = (*v)[0].value;
if (TREE_CODE (value) == STRING_CST
- && VEC_length (constructor_elt, v) == 1)
+ && v->length () == 1)
initial_value = value;
}
@@ -12108,7 +12100,7 @@ xref_basetypes (tree ref, tree base_list)
if (TREE_TYPE (*basep))
max_vbases++;
if (CLASS_TYPE_P (basetype))
- max_vbases += VEC_length (tree, CLASSTYPE_VBASECLASSES (basetype));
+ max_vbases += vec_safe_length (CLASSTYPE_VBASECLASSES (basetype));
basep = &TREE_CHAIN (*basep);
}
}
@@ -12136,7 +12128,7 @@ xref_basetypes (tree ref, tree base_list)
if (max_bases)
{
- BINFO_BASE_ACCESSES (binfo) = VEC_alloc (tree, gc, max_bases);
+ vec_alloc (BINFO_BASE_ACCESSES (binfo), max_bases);
/* An aggregate cannot have baseclasses. */
CLASSTYPE_NON_AGGREGATE (ref) = 1;
@@ -12158,7 +12150,7 @@ xref_basetypes (tree ref, tree base_list)
if (max_vbases)
{
- CLASSTYPE_VBASECLASSES (ref) = VEC_alloc (tree, gc, max_vbases);
+ vec_alloc (CLASSTYPE_VBASECLASSES (ref), max_vbases);
if (TYPE_FOR_JAVA (ref))
{
@@ -12236,7 +12228,7 @@ xref_basetypes (tree ref, tree base_list)
BINFO_BASE_ACCESS_APPEND (binfo, access);
}
- if (VEC_length (tree, CLASSTYPE_VBASECLASSES (ref)) < max_vbases)
+ if (vec_safe_length (CLASSTYPE_VBASECLASSES (ref)) < max_vbases)
/* If we didn't get max_vbases vbases, we must have shared at
least one of them, and are therefore diamond shaped. */
CLASSTYPE_DIAMOND_SHAPED_P (ref) = 1;
@@ -13919,9 +13911,9 @@ finish_function (int flags)
unsigned int i;
tree decl;
- FOR_EACH_VEC_ELT (tree, deferred_mark_used_calls, i, decl)
+ FOR_EACH_VEC_SAFE_ELT (deferred_mark_used_calls, i, decl)
mark_used (decl);
- VEC_free (tree, gc, deferred_mark_used_calls);
+ vec_free (deferred_mark_used_calls);
}
return fndecl;
@@ -14032,7 +14024,7 @@ maybe_register_incomplete_var (tree var)
&& TYPE_BEING_DEFINED (inner_type)))
{
incomplete_var iv = {var, inner_type};
- VEC_safe_push (incomplete_var, gc, incomplete_vars, iv);
+ vec_safe_push (incomplete_vars, iv);
}
}
}
@@ -14047,7 +14039,7 @@ complete_vars (tree type)
unsigned ix;
incomplete_var *iv;
- for (ix = 0; VEC_iterate (incomplete_var, incomplete_vars, ix, iv); )
+ for (ix = 0; vec_safe_iterate (incomplete_vars, ix, &iv); )
{
if (same_type_p (type, iv->incomplete_type))
{
@@ -14058,7 +14050,7 @@ complete_vars (tree type)
complete_type (type);
cp_apply_type_quals_to_decl (cp_type_quals (type), var);
/* Remove this entry from the list. */
- VEC_unordered_remove (incomplete_var, incomplete_vars, ix);
+ incomplete_vars->unordered_remove (ix);
}
else
ix++;
diff --git a/gcc/cp/decl2.c b/gcc/cp/decl2.c
index 9f20757c30f..9ed53b819fc 100644
--- a/gcc/cp/decl2.c
+++ b/gcc/cp/decl2.c
@@ -90,15 +90,15 @@ static bool decl_defined_p (tree);
/* A list of static class variables. This is needed, because a
static class variable can be declared inside the class without
an initializer, and then initialized, statically, outside the class. */
-static GTY(()) VEC(tree,gc) *pending_statics;
+static GTY(()) vec<tree, va_gc> *pending_statics;
/* A list of functions which were declared inline, but which we
may need to emit outline anyway. */
-static GTY(()) VEC(tree,gc) *deferred_fns;
+static GTY(()) vec<tree, va_gc> *deferred_fns;
/* A list of decls that use types with no linkage, which we need to make
sure are defined. */
-static GTY(()) VEC(tree,gc) *no_linkage_decls;
+static GTY(()) vec<tree, va_gc> *no_linkage_decls;
/* Nonzero if we're done parsing and into end-of-file activities. */
@@ -644,12 +644,12 @@ check_classfn (tree ctype, tree function, tree template_parms)
ix = class_method_index_for_fn (complete_type (ctype), function);
if (ix >= 0)
{
- VEC(tree,gc) *methods = CLASSTYPE_METHOD_VEC (ctype);
+ vec<tree, va_gc> *methods = CLASSTYPE_METHOD_VEC (ctype);
tree fndecls, fndecl = 0;
bool is_conv_op;
const char *format = NULL;
- for (fndecls = VEC_index (tree, methods, ix);
+ for (fndecls = (*methods)[ix];
fndecls; fndecls = OVL_NEXT (fndecls))
{
tree p1, p2;
@@ -705,7 +705,7 @@ check_classfn (tree ctype, tree function, tree template_parms)
if (is_conv_op)
ix = CLASSTYPE_FIRST_CONVERSION_SLOT;
- fndecls = VEC_index (tree, methods, ix);
+ fndecls = (*methods)[ix];
while (fndecls)
{
fndecl = OVL_CURRENT (fndecls);
@@ -713,9 +713,9 @@ check_classfn (tree ctype, tree function, tree template_parms)
if (!fndecls && is_conv_op)
{
- if (VEC_length (tree, methods) > (size_t) ++ix)
+ if (methods->length () > (size_t) ++ix)
{
- fndecls = VEC_index (tree, methods, ix);
+ fndecls = (*methods)[ix];
if (!DECL_CONV_FN_P (OVL_CURRENT (fndecls)))
{
fndecls = NULL_TREE;
@@ -753,7 +753,7 @@ void
note_vague_linkage_fn (tree decl)
{
DECL_DEFER_OUTPUT (decl) = 1;
- VEC_safe_push (tree, gc, deferred_fns, decl);
+ vec_safe_push (deferred_fns, decl);
}
/* We have just processed the DECL, which is a static data member.
@@ -772,7 +772,7 @@ finish_static_data_member_decl (tree decl,
the right thing, namely, to put this decl out straight away. */
if (! processing_template_decl)
- VEC_safe_push (tree, gc, pending_statics, decl);
+ vec_safe_push (pending_statics, decl);
if (LOCAL_CLASS_P (current_class_type)
/* We already complained about the template definition. */
@@ -1879,7 +1879,7 @@ maybe_emit_vtables (tree ctype)
if (TREE_TYPE (DECL_INITIAL (vtbl)) == 0)
{
- VEC(tree,gc)* cleanups = NULL;
+ vec<tree, va_gc> *cleanups = NULL;
tree expr = store_init_value (vtbl, DECL_INITIAL (vtbl), &cleanups,
LOOKUP_NORMAL);
@@ -3063,7 +3063,7 @@ static GTY(()) tree ssdf_decl;
/* All the static storage duration functions created in this
translation unit. */
-static GTY(()) VEC(tree,gc) *ssdf_decls;
+static GTY(()) vec<tree, va_gc> *ssdf_decls;
/* A map from priority levels to information about that priority
level. There may be many such levels, so efficient lookup is
@@ -3108,7 +3108,7 @@ start_static_storage_duration_function (unsigned count)
static constructors and destructors. */
if (!ssdf_decls)
{
- ssdf_decls = VEC_alloc (tree, gc, 32);
+ vec_alloc (ssdf_decls, 32);
/* Take this opportunity to initialize the map from priority
numbers to information about that priority level. */
@@ -3124,7 +3124,7 @@ start_static_storage_duration_function (unsigned count)
get_priority_info (DEFAULT_INIT_PRIORITY);
}
- VEC_safe_push (tree, gc, ssdf_decls, ssdf_decl);
+ vec_safe_push (ssdf_decls, ssdf_decl);
/* Create the argument list. */
initialize_p_decl = cp_build_parm_decl
@@ -3568,7 +3568,7 @@ generate_ctor_or_dtor_function (bool constructor_p, int priority,
/* Call the static storage duration function with appropriate
arguments. */
- FOR_EACH_VEC_ELT (tree, ssdf_decls, i, fndecl)
+ FOR_EACH_VEC_SAFE_ELT (ssdf_decls, i, fndecl)
{
/* Calls to pure or const functions will expand to nothing. */
if (! (flags_from_decl_or_type (fndecl) & (ECF_CONST | ECF_PURE)))
@@ -3941,7 +3941,7 @@ cp_write_global_declarations (void)
/* Bad parse errors. Just forget about it. */
if (! global_bindings_p () || current_class_type
- || !VEC_empty (tree,decl_namespace_list))
+ || !vec_safe_is_empty (decl_namespace_list))
return;
if (pch_file)
@@ -4031,12 +4031,12 @@ cp_write_global_declarations (void)
cause other variables to be needed. New elements will be
appended, and we remove from the vector those that actually
get emitted. */
- for (i = VEC_length (tree, unemitted_tinfo_decls);
- VEC_iterate (tree, unemitted_tinfo_decls, --i, t);)
+ for (i = unemitted_tinfo_decls->length ();
+ unemitted_tinfo_decls->iterate (--i, &t);)
if (emit_tinfo_decl (t))
{
reconsider = true;
- VEC_unordered_remove (tree, unemitted_tinfo_decls, i);
+ unemitted_tinfo_decls->unordered_remove (i);
}
/* The list of objects with static storage duration is built up
@@ -4102,7 +4102,7 @@ cp_write_global_declarations (void)
/* Go through the set of inline functions whose bodies have not
been emitted yet. If out-of-line copies of these functions
are required, emit them. */
- FOR_EACH_VEC_ELT (tree, deferred_fns, i, decl)
+ FOR_EACH_VEC_SAFE_ELT (deferred_fns, i, decl)
{
/* Does it need synthesizing? */
if (DECL_DEFAULTED_FN (decl) && ! DECL_INITIAL (decl)
@@ -4196,7 +4196,7 @@ cp_write_global_declarations (void)
reconsider = true;
/* Static data members are just like namespace-scope globals. */
- FOR_EACH_VEC_ELT (tree, pending_statics, i, decl)
+ FOR_EACH_VEC_SAFE_ELT (pending_statics, i, decl)
{
if (var_finalized_p (decl) || DECL_REALLY_EXTERN (decl)
/* Don't write it out if we haven't seen a definition. */
@@ -4208,9 +4208,9 @@ cp_write_global_declarations (void)
if (DECL_NOT_REALLY_EXTERN (decl) && decl_needed_p (decl))
DECL_EXTERNAL (decl) = 0;
}
- if (VEC_length (tree, pending_statics) != 0
- && wrapup_global_declarations (VEC_address (tree, pending_statics),
- VEC_length (tree, pending_statics)))
+ if (vec_safe_length (pending_statics) != 0
+ && wrapup_global_declarations (pending_statics->address (),
+ pending_statics->length ()))
reconsider = true;
retries++;
@@ -4218,7 +4218,7 @@ cp_write_global_declarations (void)
while (reconsider);
/* All used inline functions must have a definition at this point. */
- FOR_EACH_VEC_ELT (tree, deferred_fns, i, decl)
+ FOR_EACH_VEC_SAFE_ELT (deferred_fns, i, decl)
{
if (/* Check online inline functions that were actually used. */
DECL_ODR_USED (decl) && DECL_DECLARED_INLINE_P (decl)
@@ -4240,7 +4240,7 @@ cp_write_global_declarations (void)
}
/* So must decls that use a type with no linkage. */
- FOR_EACH_VEC_ELT (tree, no_linkage_decls, i, decl)
+ FOR_EACH_VEC_SAFE_ELT (no_linkage_decls, i, decl)
if (!decl_defined_p (decl))
no_linkage_error (decl);
@@ -4292,12 +4292,12 @@ cp_write_global_declarations (void)
/* Now, issue warnings about static, but not defined, functions,
etc., and emit debugging information. */
walk_namespaces (wrapup_globals_for_namespace, /*data=*/&reconsider);
- if (VEC_length (tree, pending_statics) != 0)
+ if (vec_safe_length (pending_statics) != 0)
{
- check_global_declarations (VEC_address (tree, pending_statics),
- VEC_length (tree, pending_statics));
- emit_debug_global_declarations (VEC_address (tree, pending_statics),
- VEC_length (tree, pending_statics));
+ check_global_declarations (pending_statics->address (),
+ pending_statics->length ());
+ emit_debug_global_declarations (pending_statics->address (),
+ pending_statics->length ());
}
perform_deferred_noexcept_checks ();
@@ -4345,11 +4345,11 @@ cp_write_global_declarations (void)
ARGS. */
tree
-build_offset_ref_call_from_tree (tree fn, VEC(tree,gc) **args,
+build_offset_ref_call_from_tree (tree fn, vec<tree, va_gc> **args,
tsubst_flags_t complain)
{
tree orig_fn;
- VEC(tree,gc) *orig_args = NULL;
+ vec<tree, va_gc> *orig_args = NULL;
tree expr;
tree object;
@@ -4375,7 +4375,7 @@ build_offset_ref_call_from_tree (tree fn, VEC(tree,gc) **args,
{
if (TREE_CODE (fn) == DOTSTAR_EXPR)
object = cp_build_addr_expr (object, complain);
- VEC_safe_insert (tree, gc, *args, 0, object);
+ vec_safe_insert (*args, 0, object);
}
/* Now that the arguments are done, transform FN. */
fn = build_non_dependent_expr (fn);
@@ -4393,7 +4393,7 @@ build_offset_ref_call_from_tree (tree fn, VEC(tree,gc) **args,
fn = TREE_OPERAND (fn, 1);
fn = get_member_function_from_ptrfunc (&object_addr, fn,
complain);
- VEC_safe_insert (tree, gc, *args, 0, object_addr);
+ vec_safe_insert (*args, 0, object_addr);
}
if (CLASS_TYPE_P (TREE_TYPE (fn)))
@@ -4519,7 +4519,7 @@ mark_used (tree decl)
finishes, otherwise it might recurse. */
if (defer_mark_used_calls)
{
- VEC_safe_push (tree, gc, deferred_mark_used_calls, decl);
+ vec_safe_push (deferred_mark_used_calls, decl);
return true;
}
@@ -4589,7 +4589,7 @@ mark_used (tree decl)
the vector interferes with GC, so give an error now. */
no_linkage_error (decl);
else
- VEC_safe_push (tree, gc, no_linkage_decls, decl);
+ vec_safe_push (no_linkage_decls, decl);
}
if (TREE_CODE (decl) == FUNCTION_DECL && DECL_DECLARED_INLINE_P (decl)
diff --git a/gcc/cp/error.c b/gcc/cp/error.c
index 76f939f1049..1980cd34bd7 100644
--- a/gcc/cp/error.c
+++ b/gcc/cp/error.c
@@ -84,7 +84,7 @@ static void dump_exception_spec (tree, int);
static void dump_template_argument (tree, int);
static void dump_template_argument_list (tree, int);
static void dump_template_parameter (tree, int);
-static void dump_template_bindings (tree, tree, VEC(tree,gc) *);
+static void dump_template_bindings (tree, tree, vec<tree, va_gc> *);
static void dump_scope (tree, int);
static void dump_template_parms (tree, int, int);
static int get_non_default_template_args_count (tree, int);
@@ -259,7 +259,7 @@ dump_template_parameter (tree parm, int flags)
TREE_VEC. */
static void
-dump_template_bindings (tree parms, tree args, VEC(tree,gc)* typenames)
+dump_template_bindings (tree parms, tree args, vec<tree, va_gc> *typenames)
{
bool need_semicolon = false;
int i;
@@ -310,10 +310,10 @@ dump_template_bindings (tree parms, tree args, VEC(tree,gc)* typenames)
}
/* Don't bother with typenames for a partial instantiation. */
- if (VEC_empty (tree, typenames) || uses_template_parms (args))
+ if (vec_safe_is_empty (typenames) || uses_template_parms (args))
return;
- FOR_EACH_VEC_ELT (tree, typenames, i, t)
+ FOR_EACH_VEC_SAFE_ELT (typenames, i, t)
{
if (need_semicolon)
pp_separate_with_semicolon (cxx_pp);
@@ -1275,13 +1275,13 @@ dump_template_decl (tree t, int flags)
}
/* find_typenames looks through the type of the function template T
- and returns a VEC containing any typedefs, decltypes or TYPENAME_TYPEs
+ and returns a vec containing any typedefs, decltypes or TYPENAME_TYPEs
it finds. */
struct find_typenames_t
{
struct pointer_set_t *p_set;
- VEC (tree,gc) *typenames;
+ vec<tree, va_gc> *typenames;
};
static tree
@@ -1299,7 +1299,7 @@ find_typenames_r (tree *tp, int * /*walk_subtrees*/, void *data)
mv = TYPE_MAIN_VARIANT (*tp);
if (mv && (mv == *tp || !pointer_set_insert (d->p_set, mv)))
- VEC_safe_push (tree, gc, d->typenames, mv);
+ vec_safe_push (d->typenames, mv);
/* Search into class template arguments, which cp_walk_subtrees
doesn't do. */
@@ -1310,7 +1310,7 @@ find_typenames_r (tree *tp, int * /*walk_subtrees*/, void *data)
return NULL_TREE;
}
-static VEC(tree,gc) *
+static vec<tree, va_gc> *
find_typenames (tree t)
{
struct find_typenames_t ft;
@@ -1338,7 +1338,7 @@ dump_function_decl (tree t, int flags)
int show_return = flags & TFF_RETURN_TYPE || flags & TFF_DECL_SPECIFIERS;
int do_outer_scope = ! (flags & TFF_UNQUALIFIED_NAME);
tree exceptions;
- VEC(tree,gc) *typenames = NULL;
+ vec<tree, va_gc> *typenames = NULL;
if (DECL_NAME (t) && LAMBDA_FUNCTION_P (t))
{
@@ -1737,7 +1737,7 @@ dump_expr_list (tree l, int flags)
/* Print out a vector of initializers (subr of dump_expr). */
static void
-dump_expr_init_vec (VEC(constructor_elt,gc) *v, int flags)
+dump_expr_init_vec (vec<constructor_elt, va_gc> *v, int flags)
{
unsigned HOST_WIDE_INT idx;
tree value;
@@ -1745,7 +1745,7 @@ dump_expr_init_vec (VEC(constructor_elt,gc) *v, int flags)
FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
{
dump_expr (value, flags | TFF_EXPR_IN_PARENS);
- if (idx != VEC_length (constructor_elt, v) - 1)
+ if (idx != v->length () - 1)
pp_separate_with_comma (cxx_pp);
}
}
@@ -3208,11 +3208,11 @@ print_instantiation_context (void)
void
maybe_print_constexpr_context (diagnostic_context *context)
{
- VEC(tree,heap) *call_stack = cx_error_context ();
+ vec<tree> call_stack = cx_error_context ();
unsigned ix;
tree t;
- FOR_EACH_VEC_ELT (tree, call_stack, ix, t)
+ FOR_EACH_VEC_ELT (call_stack, ix, t)
{
expanded_location xloc = expand_location (EXPR_LOCATION (t));
const char *s = expr_as_string (t, 0);
diff --git a/gcc/cp/except.c b/gcc/cp/except.c
index ad49211b58b..cbb0235937b 100644
--- a/gcc/cp/except.c
+++ b/gcc/cp/except.c
@@ -824,7 +824,7 @@ build_throw (tree exp)
if (CLASS_TYPE_P (temp_type))
{
int flags = LOOKUP_NORMAL | LOOKUP_ONLYCONVERTING;
- VEC(tree,gc) *exp_vec;
+ vec<tree, va_gc> *exp_vec;
/* Under C++0x [12.8/16 class.copy], a thrown lvalue is sometimes
treated as an rvalue for the purposes of overload resolution
@@ -1176,9 +1176,7 @@ typedef struct GTY(()) pending_noexcept {
tree fn;
location_t loc;
} pending_noexcept;
-DEF_VEC_O(pending_noexcept);
-DEF_VEC_ALLOC_O(pending_noexcept,gc);
-static GTY(()) VEC(pending_noexcept,gc) *pending_noexcept_checks;
+static GTY(()) vec<pending_noexcept, va_gc> *pending_noexcept_checks;
/* FN is a FUNCTION_DECL that caused a noexcept-expr to be false. Warn if
it can't throw. */
@@ -1204,7 +1202,7 @@ perform_deferred_noexcept_checks (void)
int i;
pending_noexcept *p;
location_t saved_loc = input_location;
- FOR_EACH_VEC_ELT (pending_noexcept, pending_noexcept_checks, i, p)
+ FOR_EACH_VEC_SAFE_ELT (pending_noexcept_checks, i, p)
{
input_location = p->loc;
maybe_noexcept_warning (p->fn);
@@ -1248,7 +1246,7 @@ expr_noexcept_p (tree expr, tsubst_flags_t complain)
{
/* Not defined yet; check again at EOF. */
pending_noexcept p = {fn, input_location};
- VEC_safe_push (pending_noexcept, gc, pending_noexcept_checks, p);
+ vec_safe_push (pending_noexcept_checks, p);
}
else
maybe_noexcept_warning (fn);
diff --git a/gcc/cp/init.c b/gcc/cp/init.c
index 9f886c0013e..fea50b5b0ea 100644
--- a/gcc/cp/init.c
+++ b/gcc/cp/init.c
@@ -184,7 +184,7 @@ build_zero_init_1 (tree type, tree nelts, bool static_storage_p,
else if (CLASS_TYPE_P (type))
{
tree field;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* Iterate over the fields, building initializations. */
for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
@@ -233,7 +233,7 @@ build_zero_init_1 (tree type, tree nelts, bool static_storage_p,
else if (TREE_CODE (type) == ARRAY_TYPE)
{
tree max_index;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* Iterate over the array elements, building initializations. */
if (nelts)
@@ -255,7 +255,7 @@ build_zero_init_1 (tree type, tree nelts, bool static_storage_p,
{
constructor_elt ce;
- v = VEC_alloc (constructor_elt, gc, 1);
+ vec_alloc (v, 1);
/* If this is a one element array, we just use a regular init. */
if (tree_int_cst_equal (size_zero_node, max_index))
@@ -267,7 +267,7 @@ build_zero_init_1 (tree type, tree nelts, bool static_storage_p,
ce.value = build_zero_init_1 (TREE_TYPE (type),
/*nelts=*/NULL_TREE,
static_storage_p, NULL_TREE);
- VEC_quick_push (constructor_elt, v, ce);
+ v->quick_push (ce);
}
/* Build a constructor to contain the initializations. */
@@ -391,7 +391,7 @@ build_value_init_noctor (tree type, tsubst_flags_t complain)
if (TREE_CODE (type) != UNION_TYPE)
{
tree field;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* Iterate over the fields, building initializations. */
for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
@@ -428,7 +428,7 @@ build_value_init_noctor (tree type, tsubst_flags_t complain)
}
else if (TREE_CODE (type) == ARRAY_TYPE)
{
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* Iterate over the array elements, building initializations. */
tree max_index = array_type_nelts (type);
@@ -450,7 +450,7 @@ build_value_init_noctor (tree type, tsubst_flags_t complain)
{
constructor_elt ce;
- v = VEC_alloc (constructor_elt, gc, 1);
+ vec_alloc (v, 1);
/* If this is a one element array, we just use a regular init. */
if (tree_int_cst_equal (size_zero_node, max_index))
@@ -459,7 +459,7 @@ build_value_init_noctor (tree type, tsubst_flags_t complain)
ce.index = build2 (RANGE_EXPR, sizetype, size_zero_node, max_index);
ce.value = build_value_init (TREE_TYPE (type), complain);
- VEC_quick_push (constructor_elt, v, ce);
+ v->quick_push (ce);
if (ce.value == error_mark_node)
return error_mark_node;
@@ -628,7 +628,7 @@ perform_member_init (tree member, tree init)
reference member in a constructor’s ctor-initializer (12.6.2)
persists until the constructor exits." */
unsigned i; tree t;
- VEC(tree,gc) *cleanups = make_tree_vector ();
+ vec<tree, va_gc> *cleanups = make_tree_vector ();
if (TREE_CODE (init) == TREE_LIST)
init = build_x_compound_expr_from_list (init, ELK_MEM_INIT,
tf_warning_or_error);
@@ -645,7 +645,7 @@ perform_member_init (tree member, tree init)
init = build_vec_init_expr (type, init, tf_warning_or_error);
init = build2 (INIT_EXPR, type, decl, init);
finish_expr_stmt (init);
- FOR_EACH_VEC_ELT (tree, cleanups, i, t)
+ FOR_EACH_VEC_ELT (*cleanups, i, t)
push_cleanup (decl, t, false);
release_tree_vector (cleanups);
}
@@ -802,7 +802,7 @@ sort_mem_initializers (tree t, tree mem_inits)
tree base, binfo, base_binfo;
tree sorted_inits;
tree next_subobject;
- VEC(tree,gc) *vbases;
+ vec<tree, va_gc> *vbases;
int i;
int uses_unions_p = 0;
@@ -814,7 +814,7 @@ sort_mem_initializers (tree t, tree mem_inits)
/* Process the virtual bases. */
for (vbases = CLASSTYPE_VBASECLASSES (t), i = 0;
- VEC_iterate (tree, vbases, i, base); i++)
+ vec_safe_iterate (vbases, i, &base); i++)
sorted_inits = tree_cons (base, NULL_TREE, sorted_inits);
/* Process the direct bases. */
@@ -1545,7 +1545,7 @@ expand_default_init (tree binfo, tree true_exp, tree exp, tree init, int flags,
followed by initialization by X. If neither of these work
out, then look hard. */
tree rval;
- VEC(tree,gc) *parms;
+ vec<tree, va_gc> *parms;
/* If we have direct-initialization from an initializer list, pull
it out of the TREE_LIST so the code below can see it. */
@@ -1627,7 +1627,7 @@ expand_default_init (tree binfo, tree true_exp, tree exp, tree init, int flags,
{
parms = make_tree_vector ();
for (; init != NULL_TREE; init = TREE_CHAIN (init))
- VEC_safe_push (tree, gc, parms, TREE_VALUE (init));
+ vec_safe_push (parms, TREE_VALUE (init));
}
else
parms = make_tree_vector_single (init);
@@ -1641,11 +1641,11 @@ expand_default_init (tree binfo, tree true_exp, tree exp, tree init, int flags,
tree elt; unsigned i;
/* Unshare the arguments for the second call. */
- VEC(tree,gc) *parms2 = make_tree_vector ();
- FOR_EACH_VEC_ELT (tree, parms, i, elt)
+ vec<tree, va_gc> *parms2 = make_tree_vector ();
+ FOR_EACH_VEC_SAFE_ELT (parms, i, elt)
{
elt = break_out_target_exprs (elt);
- VEC_safe_push (tree, gc, parms2, elt);
+ vec_safe_push (parms2, elt);
}
complete = build_special_member_call (exp, complete_ctor_identifier,
&parms2, binfo, flags,
@@ -1730,7 +1730,7 @@ expand_aggr_init_1 (tree binfo, tree true_exp, tree exp, tree init, int flags,
if (init && TREE_CODE (exp) == VAR_DECL
&& COMPOUND_LITERAL_P (init))
{
- VEC(tree,gc)* cleanups = NULL;
+ vec<tree, va_gc> *cleanups = NULL;
/* If store_init_value returns NULL_TREE, the INIT has been
recorded as the DECL_INITIAL for EXP. That means there's
nothing more we have to do. */
@@ -2062,8 +2062,8 @@ build_builtin_delete_call (tree addr)
creates and returns a NEW_EXPR. */
static tree
-build_raw_new_expr (VEC(tree,gc) *placement, tree type, tree nelts,
- VEC(tree,gc) *init, int use_global_new)
+build_raw_new_expr (vec<tree, va_gc> *placement, tree type, tree nelts,
+ vec<tree, va_gc> *init, int use_global_new)
{
tree init_list;
tree new_expr;
@@ -2074,7 +2074,7 @@ build_raw_new_expr (VEC(tree,gc) *placement, tree type, tree nelts,
int" from an empty initializer "new int()". */
if (init == NULL)
init_list = NULL_TREE;
- else if (VEC_empty (tree, init))
+ else if (init->is_empty ())
init_list = void_zero_node;
else
init_list = build_tree_list_vec (init);
@@ -2165,8 +2165,8 @@ diagnose_uninitialized_cst_or_ref_member (tree type, bool using_new, bool compla
build_raw_new_expr. This may change PLACEMENT and INIT. */
static tree
-build_new_1 (VEC(tree,gc) **placement, tree type, tree nelts,
- VEC(tree,gc) **init, bool globally_qualified_p,
+build_new_1 (vec<tree, va_gc> **placement, tree type, tree nelts,
+ vec<tree, va_gc> **init, bool globally_qualified_p,
tsubst_flags_t complain)
{
tree size, rval;
@@ -2397,13 +2397,12 @@ build_new_1 (VEC(tree,gc) **placement, tree type, tree nelts,
reference, prepare to capture it in a temporary variable. Do
this now, since PLACEMENT will change in the calls below. */
placement_first = NULL_TREE;
- if (VEC_length (tree, *placement) == 1
- && (TREE_CODE (TREE_TYPE (VEC_index (tree, *placement, 0)))
- == POINTER_TYPE))
- placement_first = VEC_index (tree, *placement, 0);
+ if (vec_safe_length (*placement) == 1
+ && (TREE_CODE (TREE_TYPE ((**placement)[0])) == POINTER_TYPE))
+ placement_first = (**placement)[0];
/* Allocate the object. */
- if (VEC_empty (tree, *placement) && TYPE_FOR_JAVA (elt_type))
+ if (vec_safe_is_empty (*placement) && TYPE_FOR_JAVA (elt_type))
{
tree class_addr;
tree class_decl = build_java_class_ref (elt_type);
@@ -2466,7 +2465,7 @@ build_new_1 (VEC(tree,gc) **placement, tree type, tree nelts,
size = fold_build3 (COND_EXPR, sizetype, outer_nelts_check,
size, TYPE_MAX_VALUE (sizetype));
/* Create the argument list. */
- VEC_safe_insert (tree, gc, *placement, 0, size);
+ vec_safe_insert (*placement, 0, size);
/* Do name-lookup to find the appropriate operator. */
fns = lookup_fnfields (elt_type, fnname, /*protect=*/2);
if (fns == NULL_TREE)
@@ -2651,7 +2650,7 @@ build_new_1 (VEC(tree,gc) **placement, tree type, tree nelts,
bool stable;
bool explicit_value_init_p = false;
- if (*init != NULL && VEC_empty (tree, *init))
+ if (*init != NULL && (*init)->is_empty ())
{
*init = NULL;
explicit_value_init_p = true;
@@ -2675,11 +2674,11 @@ build_new_1 (VEC(tree,gc) **placement, tree type, tree nelts,
else if (array_p)
{
tree vecinit = NULL_TREE;
- if (*init && VEC_length (tree, *init) == 1
- && BRACE_ENCLOSED_INITIALIZER_P (VEC_index (tree, *init, 0))
- && CONSTRUCTOR_IS_DIRECT_INIT (VEC_index (tree, *init, 0)))
+ if (vec_safe_length (*init) == 1
+ && BRACE_ENCLOSED_INITIALIZER_P ((**init)[0])
+ && CONSTRUCTOR_IS_DIRECT_INIT ((**init)[0]))
{
- vecinit = VEC_index (tree, *init, 0);
+ vecinit = (**init)[0];
if (CONSTRUCTOR_NELTS (vecinit) == 0)
/* List-value-initialization, leave it alone. */;
else
@@ -2891,25 +2890,25 @@ build_new_1 (VEC(tree,gc) **placement, tree type, tree nelts,
rather than just "new". This may change PLACEMENT and INIT. */
tree
-build_new (VEC(tree,gc) **placement, tree type, tree nelts,
- VEC(tree,gc) **init, int use_global_new, tsubst_flags_t complain)
+build_new (vec<tree, va_gc> **placement, tree type, tree nelts,
+ vec<tree, va_gc> **init, int use_global_new, tsubst_flags_t complain)
{
tree rval;
- VEC(tree,gc) *orig_placement = NULL;
+ vec<tree, va_gc> *orig_placement = NULL;
tree orig_nelts = NULL_TREE;
- VEC(tree,gc) *orig_init = NULL;
+ vec<tree, va_gc> *orig_init = NULL;
if (type == error_mark_node)
return error_mark_node;
- if (nelts == NULL_TREE && VEC_length (tree, *init) == 1
+ if (nelts == NULL_TREE && vec_safe_length (*init) == 1
/* Don't do auto deduction where it might affect mangling. */
&& (!processing_template_decl || at_function_scope_p ()))
{
tree auto_node = type_uses_auto (type);
if (auto_node)
{
- tree d_init = VEC_index (tree, *init, 0);
+ tree d_init = (**init)[0];
d_init = resolve_nondeduced_context (d_init);
type = do_auto_deduction (type, d_init, auto_node);
}
@@ -3308,7 +3307,7 @@ build_vec_init (tree base, tree maxindex, tree init,
&& ((TREE_CODE (init) == CONSTRUCTOR
/* Don't do this if the CONSTRUCTOR might contain something
that might throw and require us to clean up. */
- && (VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (init))
+ && (vec_safe_is_empty (CONSTRUCTOR_ELTS (init))
|| ! TYPE_HAS_NONTRIVIAL_DESTRUCTOR (inner_elt_type)))
|| from_array))
{
@@ -3428,11 +3427,11 @@ build_vec_init (tree base, tree maxindex, tree init,
initialization of any elements with constant initializers even if
some are non-constant. */
bool do_static_init = (DECL_P (obase) && TREE_STATIC (obase));
- VEC(constructor_elt,gc) *new_vec;
+ vec<constructor_elt, va_gc> *new_vec;
from_array = 0;
if (try_const)
- new_vec = VEC_alloc (constructor_elt, gc, CONSTRUCTOR_NELTS (init));
+ vec_alloc (new_vec, CONSTRUCTOR_NELTS (init));
else
new_vec = NULL;
@@ -3506,7 +3505,7 @@ build_vec_init (tree base, tree maxindex, tree init,
else if (do_static_init && saw_const)
DECL_INITIAL (obase) = build_constructor (atype, new_vec);
else
- VEC_free (constructor_elt, gc, new_vec);
+ vec_free (new_vec);
}
/* Clear out INIT so that we don't get confused below. */
@@ -3937,7 +3936,7 @@ push_base_cleanups (void)
int i;
tree member;
tree expr;
- VEC(tree,gc) *vbases;
+ vec<tree, va_gc> *vbases;
/* Run destructors for all virtual baseclasses. */
if (CLASSTYPE_VBASECLASSES (current_class_type))
@@ -3950,7 +3949,7 @@ push_base_cleanups (void)
/* The CLASSTYPE_VBASECLASSES vector is in initialization
order, which is also the right order for pushing cleanups. */
for (vbases = CLASSTYPE_VBASECLASSES (current_class_type), i = 0;
- VEC_iterate (tree, vbases, i, base_binfo); i++)
+ vec_safe_iterate (vbases, i, &base_binfo); i++)
{
if (TYPE_HAS_NONTRIVIAL_DESTRUCTOR (BINFO_TYPE (base_binfo)))
{
diff --git a/gcc/cp/mangle.c b/gcc/cp/mangle.c
index 54a4c9c4bca..2b1d397d5a3 100644
--- a/gcc/cp/mangle.c
+++ b/gcc/cp/mangle.c
@@ -91,7 +91,7 @@ along with GCC; see the file COPYING3. If not see
typedef struct GTY(()) globals {
/* An array of the current substitution candidates, in the order
we've seen them. */
- VEC(tree,gc) *substitutions;
+ vec<tree, va_gc> *substitutions;
/* The entity that is being mangled. */
tree GTY ((skip)) entity;
@@ -311,7 +311,7 @@ dump_substitution_candidates (void)
tree el;
fprintf (stderr, " ++ substitutions ");
- FOR_EACH_VEC_ELT (tree, G.substitutions, i, el)
+ FOR_EACH_VEC_ELT (*G.substitutions, i, el)
{
const char *name = "???";
@@ -391,7 +391,7 @@ add_substitution (tree node)
int i;
tree candidate;
- FOR_EACH_VEC_ELT (tree, G.substitutions, i, candidate)
+ FOR_EACH_VEC_SAFE_ELT (G.substitutions, i, candidate)
{
gcc_assert (!(DECL_P (node) && node == candidate));
gcc_assert (!(TYPE_P (node) && TYPE_P (candidate)
@@ -401,7 +401,7 @@ add_substitution (tree node)
#endif /* ENABLE_CHECKING */
/* Put the decl onto the varray of substitution candidates. */
- VEC_safe_push (tree, gc, G.substitutions, node);
+ vec_safe_push (G.substitutions, node);
if (DEBUG_MANGLE)
dump_substitution_candidates ();
@@ -504,7 +504,7 @@ static int
find_substitution (tree node)
{
int i;
- const int size = VEC_length (tree, G.substitutions);
+ const int size = vec_safe_length (G.substitutions);
tree decl;
tree type;
@@ -612,7 +612,7 @@ find_substitution (tree node)
operation. */
for (i = 0; i < size; ++i)
{
- tree candidate = VEC_index (tree, G.substitutions, i);
+ tree candidate = (*G.substitutions)[i];
/* NODE is a matched to a candidate if it's the same decl node or
if it's the same type. */
if (decl == candidate
@@ -1322,18 +1322,18 @@ write_abi_tags (tree tags)
tags = TREE_VALUE (tags);
- VEC(tree,gc)* vec = make_tree_vector();
+ vec<tree, va_gc> * vec = make_tree_vector();
for (tree t = tags; t; t = TREE_CHAIN (t))
{
tree str = TREE_VALUE (t);
- VEC_safe_push (tree, gc, vec, str);
+ vec_safe_push (vec, str);
}
- VEC_qsort (tree, vec, tree_string_cmp);
+ vec->qsort (tree_string_cmp);
unsigned i; tree str;
- FOR_EACH_VEC_ELT (tree, vec, i, str)
+ FOR_EACH_VEC_ELT (*vec, i, str)
{
write_string ("B");
write_unsigned_number (TREE_STRING_LENGTH (str) - 1);
@@ -1699,7 +1699,7 @@ local_class_index (tree entity)
tree ctx = TYPE_CONTEXT (entity);
for (ix = 0; ; ix++)
{
- tree type = VEC_index (tree, local_classes, ix);
+ tree type = (*local_classes)[ix];
if (type == entity)
return discriminator;
if (TYPE_CONTEXT (type) == ctx
@@ -2801,7 +2801,7 @@ write_expression (tree expr)
}
else if (code == CONSTRUCTOR)
{
- VEC(constructor_elt,gc)* elts = CONSTRUCTOR_ELTS (expr);
+ vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (expr);
unsigned i; tree val;
if (BRACE_ENCLOSED_INITIALIZER_P (expr))
@@ -3320,7 +3320,7 @@ finish_mangling_internal (const bool warn)
G.entity);
/* Clear all the substitutions. */
- VEC_truncate (tree, G.substitutions, 0);
+ vec_safe_truncate (G.substitutions, 0);
/* Null-terminate the string. */
write_char ('\0');
@@ -3354,7 +3354,7 @@ init_mangle (void)
{
gcc_obstack_init (&name_obstack);
name_base = obstack_alloc (&name_obstack, 0);
- G.substitutions = NULL;
+ vec_alloc (G.substitutions, 0);
/* Cache these identifiers for quick comparison when checking for
standard substitutions. */
diff --git a/gcc/cp/method.c b/gcc/cp/method.c
index 8a7d7cbaf3b..64580324909 100644
--- a/gcc/cp/method.c
+++ b/gcc/cp/method.c
@@ -550,7 +550,7 @@ do_build_copy_constructor (tree fndecl)
int i;
tree binfo, base_binfo;
tree init;
- VEC(tree,gc) *vbases;
+ vec<tree, va_gc> *vbases;
/* Initialize all the base-classes with the parameter converted
to their type so that we get their copy constructor and not
@@ -558,7 +558,7 @@ do_build_copy_constructor (tree fndecl)
deal with the binfo's directly as a direct base might be
inaccessible due to ambiguity. */
for (vbases = CLASSTYPE_VBASECLASSES (current_class_type), i = 0;
- VEC_iterate (tree, vbases, i, binfo); i++)
+ vec_safe_iterate (vbases, i, &binfo); i++)
{
member_init_list = add_one_base_init (binfo, parm, move_p, inh,
member_init_list);
@@ -655,7 +655,7 @@ do_build_copy_assign (tree fndecl)
BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
{
tree converted_parm;
- VEC(tree,gc) *parmvec;
+ vec<tree, va_gc> *parmvec;
/* We must convert PARM directly to the base class
explicitly since the base class may be ambiguous. */
@@ -852,7 +852,7 @@ locate_fn_flags (tree type, tree name, tree argtype, int flags,
tsubst_flags_t complain)
{
tree ob, fn, fns, binfo, rval;
- VEC(tree,gc) *args;
+ vec<tree, va_gc> *args;
if (TYPE_P (type))
binfo = TYPE_BINFO (type);
@@ -875,13 +875,13 @@ locate_fn_flags (tree type, tree name, tree argtype, int flags,
if (TREE_CODE (type) != REFERENCE_TYPE)
type = cp_build_reference_type (type, /*rval*/true);
tree arg = build_stub_object (type);
- VEC_safe_push (tree, gc, args, arg);
+ vec_safe_push (args, arg);
}
}
else
{
tree arg = build_stub_object (argtype);
- VEC_quick_push (tree, args, arg);
+ args->quick_push (arg);
}
}
@@ -1157,7 +1157,7 @@ synthesized_method_walk (tree ctype, special_function_kind sfk, bool const_p,
{
tree binfo, base_binfo, scope, fnname, rval, argtype;
bool move_p, copy_arg_p, assign_p, expected_trivial, check_vdtor;
- VEC(tree,gc) *vbases;
+ vec<tree, va_gc> *vbases;
int i, quals, flags;
tsubst_flags_t complain;
bool ctor_p;
@@ -1351,7 +1351,7 @@ synthesized_method_walk (tree ctype, special_function_kind sfk, bool const_p,
{
if (constexpr_p)
*constexpr_p = false;
- FOR_EACH_VEC_ELT (tree, vbases, i, base_binfo)
+ FOR_EACH_VEC_ELT (*vbases, i, base_binfo)
{
tree basetype = BINFO_TYPE (base_binfo);
if (copy_arg_p)
diff --git a/gcc/cp/name-lookup.c b/gcc/cp/name-lookup.c
index f0105604921..5abebe32197 100644
--- a/gcc/cp/name-lookup.c
+++ b/gcc/cp/name-lookup.c
@@ -320,7 +320,7 @@ new_class_binding (tree name, tree value, tree type, cp_binding_level *scope)
{
cp_class_binding cb = {cxx_binding_make (value, type), name};
cxx_binding *binding = cb.base;
- VEC_safe_push (cp_class_binding, gc, scope->class_shadowed, cb);
+ vec_safe_push (scope->class_shadowed, cb);
binding->scope = scope;
return binding;
}
@@ -597,7 +597,7 @@ add_decl_to_level (tree decl, cp_binding_level *b)
&& (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
|| (TREE_CODE (decl) == FUNCTION_DECL
&& (!TREE_PUBLIC (decl) || DECL_DECLARED_INLINE_P (decl))))
- VEC_safe_push (tree, gc, b->static_decls, decl);
+ vec_safe_push (b->static_decls, decl);
}
}
@@ -1506,11 +1506,9 @@ begin_scope (scope_kind kind, tree entity)
case sk_namespace:
NAMESPACE_LEVEL (entity) = scope;
- scope->static_decls =
- VEC_alloc (tree, gc,
- DECL_NAME (entity) == std_identifier
- || DECL_NAME (entity) == global_scope_name
- ? 200 : 10);
+ vec_alloc (scope->static_decls,
+ (DECL_NAME (entity) == std_identifier
+ || DECL_NAME (entity) == global_scope_name) ? 200 : 10);
break;
default:
@@ -1781,12 +1779,12 @@ print_binding_level (cp_binding_level* lvl)
if (i)
fprintf (stderr, "\n");
}
- if (VEC_length (cp_class_binding, lvl->class_shadowed))
+ if (vec_safe_length (lvl->class_shadowed))
{
size_t i;
cp_class_binding *b;
fprintf (stderr, " class-shadowed:");
- FOR_EACH_VEC_ELT (cp_class_binding, lvl->class_shadowed, i, b)
+ FOR_EACH_VEC_ELT (*lvl->class_shadowed, i, b)
fprintf (stderr, " %s ", IDENTIFIER_POINTER (b->identifier));
fprintf (stderr, "\n");
}
@@ -2836,7 +2834,7 @@ poplevel_class (void)
/* Remove the bindings for all of the class-level declarations. */
if (level->class_shadowed)
{
- FOR_EACH_VEC_ELT (cp_class_binding, level->class_shadowed, i, cb)
+ FOR_EACH_VEC_ELT (*level->class_shadowed, i, cb)
{
IDENTIFIER_BINDING (cb->identifier) = cb->base->previous;
cxx_binding_free (cb->base);
@@ -3468,8 +3466,8 @@ current_decl_namespace (void)
{
tree result;
/* If we have been pushed into a different namespace, use it. */
- if (!VEC_empty (tree, decl_namespace_list))
- return VEC_last (tree, decl_namespace_list);
+ if (!vec_safe_is_empty (decl_namespace_list))
+ return decl_namespace_list->last ();
if (current_class_type)
result = decl_namespace_context (current_class_type);
@@ -3674,7 +3672,7 @@ push_decl_namespace (tree decl)
{
if (TREE_CODE (decl) != NAMESPACE_DECL)
decl = decl_namespace_context (decl);
- VEC_safe_push (tree, gc, decl_namespace_list, ORIGINAL_NAMESPACE (decl));
+ vec_safe_push (decl_namespace_list, ORIGINAL_NAMESPACE (decl));
}
/* [namespace.memdef]/2 */
@@ -3682,7 +3680,7 @@ push_decl_namespace (tree decl)
void
pop_decl_namespace (void)
{
- VEC_pop (tree, decl_namespace_list);
+ decl_namespace_list->pop ();
}
/* Return the namespace that is the common ancestor
@@ -4224,19 +4222,19 @@ remove_hidden_names (tree fns)
void
suggest_alternatives_for (location_t location, tree name)
{
- VEC(tree,heap) *candidates = NULL;
- VEC(tree,heap) *namespaces_to_search = NULL;
+ vec<tree> candidates = vec<tree>();
+ vec<tree> namespaces_to_search = vec<tree>();
int max_to_search = PARAM_VALUE (CXX_MAX_NAMESPACES_FOR_DIAGNOSTIC_HELP);
int n_searched = 0;
tree t;
unsigned ix;
- VEC_safe_push (tree, heap, namespaces_to_search, global_namespace);
+ namespaces_to_search.safe_push (global_namespace);
- while (!VEC_empty (tree, namespaces_to_search)
+ while (!namespaces_to_search.is_empty ()
&& n_searched < max_to_search)
{
- tree scope = VEC_pop (tree, namespaces_to_search);
+ tree scope = namespaces_to_search.pop ();
struct scope_binding binding = EMPTY_SCOPE_BINDING;
cp_binding_level *level = NAMESPACE_LEVEL (scope);
@@ -4246,11 +4244,11 @@ suggest_alternatives_for (location_t location, tree name)
n_searched++;
if (binding.value)
- VEC_safe_push (tree, heap, candidates, binding.value);
+ candidates.safe_push (binding.value);
/* Add child namespaces. */
for (t = level->namespaces; t; t = DECL_CHAIN (t))
- VEC_safe_push (tree, heap, namespaces_to_search, t);
+ namespaces_to_search.safe_push (t);
}
/* If we stopped before we could examine all namespaces, inform the
@@ -4258,25 +4256,25 @@ suggest_alternatives_for (location_t location, tree name)
might be more candidates further down that we weren't able to
find. */
if (n_searched >= max_to_search
- && !VEC_empty (tree, namespaces_to_search))
+ && !namespaces_to_search.is_empty ())
inform (location,
"maximum limit of %d namespaces searched for %qE",
max_to_search, name);
- VEC_free (tree, heap, namespaces_to_search);
+ namespaces_to_search.release ();
/* Nothing useful to report. */
- if (VEC_empty (tree, candidates))
+ if (candidates.is_empty ())
return;
- inform_n (location, VEC_length (tree, candidates),
+ inform_n (location, candidates.length (),
"suggested alternative:",
"suggested alternatives:");
- FOR_EACH_VEC_ELT (tree, candidates, ix, t)
+ FOR_EACH_VEC_ELT (candidates, ix, t)
inform (location_of (t), " %qE", t);
- VEC_free (tree, heap, candidates);
+ candidates.release ();
}
/* Unscoped lookup of a global: iterate over current namespaces,
@@ -4408,11 +4406,11 @@ lookup_using_namespace (tree name, struct scope_binding *val,
/* Returns true iff VEC contains TARGET. */
static bool
-tree_vec_contains (VEC(tree,gc)* vec, tree target)
+tree_vec_contains (vec<tree, va_gc> *vec, tree target)
{
unsigned int i;
tree elt;
- FOR_EACH_VEC_ELT (tree,vec,i,elt)
+ FOR_EACH_VEC_SAFE_ELT (vec,i,elt)
if (elt == target)
return true;
return false;
@@ -4428,12 +4426,12 @@ qualified_lookup_using_namespace (tree name, tree scope,
struct scope_binding *result, int flags)
{
/* Maintain a list of namespaces visited... */
- VEC(tree,gc) *seen = NULL;
- VEC(tree,gc) *seen_inline = NULL;
+ vec<tree, va_gc> *seen = NULL;
+ vec<tree, va_gc> *seen_inline = NULL;
/* ... and a list of namespace yet to see. */
- VEC(tree,gc) *todo = NULL;
- VEC(tree,gc) *todo_maybe = NULL;
- VEC(tree,gc) *todo_inline = NULL;
+ vec<tree, va_gc> *todo = NULL;
+ vec<tree, va_gc> *todo_maybe = NULL;
+ vec<tree, va_gc> *todo_inline = NULL;
tree usings;
timevar_start (TV_NAME_LOOKUP);
/* Look through namespace aliases. */
@@ -4443,26 +4441,26 @@ qualified_lookup_using_namespace (tree name, tree scope,
namespaces. For each used namespace, look through its inline
namespace set for any bindings and usings. If no bindings are
found, add any usings seen to the set of used namespaces. */
- VEC_safe_push (tree, gc, todo, scope);
+ vec_safe_push (todo, scope);
- while (VEC_length (tree, todo))
+ while (todo->length ())
{
bool found_here;
- scope = VEC_pop (tree, todo);
+ scope = todo->pop ();
if (tree_vec_contains (seen, scope))
continue;
- VEC_safe_push (tree, gc, seen, scope);
- VEC_safe_push (tree, gc, todo_inline, scope);
+ vec_safe_push (seen, scope);
+ vec_safe_push (todo_inline, scope);
found_here = false;
- while (VEC_length (tree, todo_inline))
+ while (todo_inline->length ())
{
cxx_binding *binding;
- scope = VEC_pop (tree, todo_inline);
+ scope = todo_inline->pop ();
if (tree_vec_contains (seen_inline, scope))
continue;
- VEC_safe_push (tree, gc, seen_inline, scope);
+ vec_safe_push (seen_inline, scope);
binding =
cp_binding_level_find_binding_for_name (NAMESPACE_LEVEL (scope), name);
@@ -4477,23 +4475,23 @@ qualified_lookup_using_namespace (tree name, tree scope,
if (!TREE_INDIRECT_USING (usings))
{
if (is_associated_namespace (scope, TREE_PURPOSE (usings)))
- VEC_safe_push (tree, gc, todo_inline, TREE_PURPOSE (usings));
+ vec_safe_push (todo_inline, TREE_PURPOSE (usings));
else
- VEC_safe_push (tree, gc, todo_maybe, TREE_PURPOSE (usings));
+ vec_safe_push (todo_maybe, TREE_PURPOSE (usings));
}
}
if (found_here)
- VEC_truncate (tree, todo_maybe, 0);
+ vec_safe_truncate (todo_maybe, 0);
else
- while (VEC_length (tree, todo_maybe))
- VEC_safe_push (tree, gc, todo, VEC_pop (tree, todo_maybe));
- }
- VEC_free (tree,gc,todo);
- VEC_free (tree,gc,todo_maybe);
- VEC_free (tree,gc,todo_inline);
- VEC_free (tree,gc,seen);
- VEC_free (tree,gc,seen_inline);
+ while (vec_safe_length (todo_maybe))
+ vec_safe_push (todo, todo_maybe->pop ());
+ }
+ vec_free (todo);
+ vec_free (todo_maybe);
+ vec_free (todo_inline);
+ vec_free (seen);
+ vec_free (seen_inline);
timevar_stop (TV_NAME_LOOKUP);
return result->value != error_mark_node;
}
@@ -4791,7 +4789,7 @@ lookup_name_nonclass (tree name)
}
tree
-lookup_function_nonclass (tree name, VEC(tree,gc) *args, bool block_p)
+lookup_function_nonclass (tree name, vec<tree, va_gc> *args, bool block_p)
{
return
lookup_arg_dependent (name,
@@ -5035,16 +5033,16 @@ lookup_type_current_level (tree name)
struct arg_lookup
{
tree name;
- VEC(tree,gc) *args;
- VEC(tree,gc) *namespaces;
- VEC(tree,gc) *classes;
+ vec<tree, va_gc> *args;
+ vec<tree, va_gc> *namespaces;
+ vec<tree, va_gc> *classes;
tree functions;
struct pointer_set_t *fn_set;
};
static bool arg_assoc (struct arg_lookup*, tree);
static bool arg_assoc_args (struct arg_lookup*, tree);
-static bool arg_assoc_args_vec (struct arg_lookup*, VEC(tree,gc) *);
+static bool arg_assoc_args_vec (struct arg_lookup*, vec<tree, va_gc> *);
static bool arg_assoc_type (struct arg_lookup*, tree);
static bool add_function (struct arg_lookup *, tree);
static bool arg_assoc_namespace (struct arg_lookup *, tree);
@@ -5085,8 +5083,8 @@ add_function (struct arg_lookup *k, tree fn)
bool
is_associated_namespace (tree current, tree scope)
{
- VEC(tree,gc) *seen = make_tree_vector ();
- VEC(tree,gc) *todo = make_tree_vector ();
+ vec<tree, va_gc> *seen = make_tree_vector ();
+ vec<tree, va_gc> *todo = make_tree_vector ();
tree t;
bool ret;
@@ -5097,14 +5095,14 @@ is_associated_namespace (tree current, tree scope)
ret = true;
break;
}
- VEC_safe_push (tree, gc, seen, scope);
+ vec_safe_push (seen, scope);
for (t = DECL_NAMESPACE_ASSOCIATIONS (scope); t; t = TREE_CHAIN (t))
if (!vec_member (TREE_PURPOSE (t), seen))
- VEC_safe_push (tree, gc, todo, TREE_PURPOSE (t));
- if (!VEC_empty (tree, todo))
+ vec_safe_push (todo, TREE_PURPOSE (t));
+ if (!todo->is_empty ())
{
- scope = VEC_last (tree, todo);
- VEC_pop (tree, todo);
+ scope = todo->last ();
+ todo->pop ();
}
else
{
@@ -5129,7 +5127,7 @@ arg_assoc_namespace (struct arg_lookup *k, tree scope)
if (vec_member (scope, k->namespaces))
return false;
- VEC_safe_push (tree, gc, k->namespaces, scope);
+ vec_safe_push (k->namespaces, scope);
/* Check out our super-users. */
for (value = DECL_NAMESPACE_ASSOCIATIONS (scope); value;
@@ -5312,7 +5310,7 @@ arg_assoc_class (struct arg_lookup *k, tree type)
if (vec_member (type, k->classes))
return false;
- VEC_safe_push (tree, gc, k->classes, type);
+ vec_safe_push (k->classes, type);
if (TYPE_CLASS_SCOPE_P (type)
&& arg_assoc_class_only (k, TYPE_CONTEXT (type)))
@@ -5422,12 +5420,12 @@ arg_assoc_args (struct arg_lookup *k, tree args)
on error. */
static bool
-arg_assoc_args_vec (struct arg_lookup *k, VEC(tree,gc) *args)
+arg_assoc_args_vec (struct arg_lookup *k, vec<tree, va_gc> *args)
{
unsigned int ix;
tree arg;
- FOR_EACH_VEC_ELT (tree, args, ix, arg)
+ FOR_EACH_VEC_SAFE_ELT (args, ix, arg)
if (arg_assoc (k, arg))
return true;
return false;
@@ -5495,7 +5493,7 @@ arg_assoc (struct arg_lookup *k, tree n)
are the functions found in normal lookup. */
static tree
-lookup_arg_dependent_1 (tree name, tree fns, VEC(tree,gc) *args,
+lookup_arg_dependent_1 (tree name, tree fns, vec<tree, va_gc> *args,
bool include_std)
{
struct arg_lookup k;
@@ -5560,7 +5558,7 @@ lookup_arg_dependent_1 (tree name, tree fns, VEC(tree,gc) *args,
/* Wrapper for lookup_arg_dependent_1. */
tree
-lookup_arg_dependent (tree name, tree fns, VEC(tree,gc) *args,
+lookup_arg_dependent (tree name, tree fns, vec<tree, va_gc> *args,
bool include_std)
{
tree ret;
@@ -5820,7 +5818,7 @@ pushtag_1 (tree name, tree type, tag_scope scope)
add_decl_expr (decl);
}
else
- VEC_safe_push (tree, gc, local_classes, type);
+ vec_safe_push (local_classes, type);
}
}
if (b->kind == sk_class
@@ -5884,7 +5882,7 @@ store_binding_p (tree id)
have enough space reserved. */
static void
-store_binding (tree id, VEC(cxx_saved_binding,gc) **old_bindings)
+store_binding (tree id, vec<cxx_saved_binding, va_gc> **old_bindings)
{
cxx_saved_binding saved;
@@ -5895,14 +5893,14 @@ store_binding (tree id, VEC(cxx_saved_binding,gc) **old_bindings)
saved.identifier = id;
saved.binding = IDENTIFIER_BINDING (id);
saved.real_type_value = REAL_IDENTIFIER_TYPE_VALUE (id);
- VEC_quick_push (cxx_saved_binding, *old_bindings, saved);
+ (*old_bindings)->quick_push (saved);
IDENTIFIER_BINDING (id) = NULL;
}
static void
-store_bindings (tree names, VEC(cxx_saved_binding,gc) **old_bindings)
+store_bindings (tree names, vec<cxx_saved_binding, va_gc> **old_bindings)
{
- static VEC(tree,heap) *bindings_need_stored = NULL;
+ static vec<tree> bindings_need_stored = vec<tree>();
tree t, id;
size_t i;
@@ -5915,19 +5913,18 @@ store_bindings (tree names, VEC(cxx_saved_binding,gc) **old_bindings)
id = DECL_NAME (t);
if (store_binding_p (id))
- VEC_safe_push(tree, heap, bindings_need_stored, id);
+ bindings_need_stored.safe_push (id);
}
- if (!VEC_empty (tree, bindings_need_stored))
+ if (!bindings_need_stored.is_empty ())
{
- VEC_reserve_exact (cxx_saved_binding, gc, *old_bindings,
- VEC_length (tree, bindings_need_stored));
- for (i = 0; VEC_iterate(tree, bindings_need_stored, i, id); ++i)
+ vec_safe_reserve_exact (*old_bindings, bindings_need_stored.length ());
+ for (i = 0; bindings_need_stored.iterate (i, &id); ++i)
{
/* We can apparently have duplicates in NAMES. */
if (store_binding_p (id))
store_binding (id, old_bindings);
}
- VEC_truncate (tree, bindings_need_stored, 0);
+ bindings_need_stored.truncate (0);
}
timevar_cond_stop (TV_NAME_LOOKUP, subtime);
}
@@ -5936,25 +5933,24 @@ store_bindings (tree names, VEC(cxx_saved_binding,gc) **old_bindings)
objects, rather than a TREE_LIST. */
static void
-store_class_bindings (VEC(cp_class_binding,gc) *names,
- VEC(cxx_saved_binding,gc) **old_bindings)
+store_class_bindings (vec<cp_class_binding, va_gc> *names,
+ vec<cxx_saved_binding, va_gc> **old_bindings)
{
- static VEC(tree,heap) *bindings_need_stored = NULL;
+ static vec<tree> bindings_need_stored = vec<tree>();
size_t i;
cp_class_binding *cb;
bool subtime = timevar_cond_start (TV_NAME_LOOKUP);
- for (i = 0; VEC_iterate(cp_class_binding, names, i, cb); ++i)
+ for (i = 0; vec_safe_iterate (names, i, &cb); ++i)
if (store_binding_p (cb->identifier))
- VEC_safe_push (tree, heap, bindings_need_stored, cb->identifier);
- if (!VEC_empty (tree, bindings_need_stored))
+ bindings_need_stored.safe_push (cb->identifier);
+ if (!bindings_need_stored.is_empty ())
{
tree id;
- VEC_reserve_exact (cxx_saved_binding, gc, *old_bindings,
- VEC_length (tree, bindings_need_stored));
- for (i = 0; VEC_iterate(tree, bindings_need_stored, i, id); ++i)
+ vec_safe_reserve_exact (*old_bindings, bindings_need_stored.length ());
+ for (i = 0; bindings_need_stored.iterate (i, &id); ++i)
store_binding (id, old_bindings);
- VEC_truncate (tree, bindings_need_stored, 0);
+ bindings_need_stored.truncate (0);
}
timevar_cond_stop (TV_NAME_LOOKUP, subtime);
}
@@ -6010,7 +6006,7 @@ push_to_top_level (void)
SET_IDENTIFIER_TYPE_VALUE (TREE_PURPOSE (t), TREE_VALUE (t));
}
- FOR_EACH_VEC_ELT (cxx_saved_binding, s->old_bindings, i, sb)
+ FOR_EACH_VEC_SAFE_ELT (s->old_bindings, i, sb)
IDENTIFIER_MARKED (sb->identifier) = 0;
s->prev = scope_chain;
@@ -6023,7 +6019,7 @@ push_to_top_level (void)
scope_chain = s;
current_function_decl = NULL_TREE;
- current_lang_base = VEC_alloc (tree, gc, 10);
+ vec_alloc (current_lang_base, 10);
current_lang_name = lang_name_cplusplus;
current_namespace = global_namespace;
push_class_stack ();
@@ -6047,7 +6043,7 @@ pop_from_top_level_1 (void)
current_lang_base = 0;
scope_chain = s->prev;
- FOR_EACH_VEC_ELT (cxx_saved_binding, s->old_bindings, i, saved)
+ FOR_EACH_VEC_SAFE_ELT (s->old_bindings, i, saved)
{
tree id = saved->identifier;
diff --git a/gcc/cp/name-lookup.h b/gcc/cp/name-lookup.h
index a37afdb9b33..540e2003302 100644
--- a/gcc/cp/name-lookup.h
+++ b/gcc/cp/name-lookup.h
@@ -86,8 +86,6 @@ typedef struct GTY(()) cxx_saved_binding {
tree real_type_value;
} cxx_saved_binding;
-DEF_VEC_O(cxx_saved_binding);
-DEF_VEC_ALLOC_O(cxx_saved_binding,gc);
extern tree identifier_type_value (tree);
extern void set_identifier_type_value (tree, tree);
@@ -147,8 +145,6 @@ typedef struct GTY(()) cp_class_binding {
tree identifier;
} cp_class_binding;
-DEF_VEC_O(cp_class_binding);
-DEF_VEC_ALLOC_O(cp_class_binding,gc);
typedef struct GTY(()) cp_label_binding {
/* The bound LABEL_DECL. */
@@ -157,8 +153,6 @@ typedef struct GTY(()) cp_label_binding {
tree prev_value;
} cp_label_binding;
-DEF_VEC_O(cp_label_binding);
-DEF_VEC_ALLOC_O(cp_label_binding,gc);
/* For each binding contour we allocate a binding_level structure
which records the names defined in that contour.
@@ -195,7 +189,7 @@ struct GTY(()) cp_binding_level {
tree namespaces;
/* An array of static functions and variables (for namespaces only) */
- VEC(tree,gc) *static_decls;
+ vec<tree, va_gc> *static_decls;
/* A list of USING_DECL nodes. */
tree usings;
@@ -206,7 +200,7 @@ struct GTY(()) cp_binding_level {
/* For the binding level corresponding to a class, the entities
declared in the class or its base classes. */
- VEC(cp_class_binding,gc) *class_shadowed;
+ vec<cp_class_binding, va_gc> *class_shadowed;
/* Similar to class_shadowed, but for IDENTIFIER_TYPE_VALUE, and
is used for all binding levels. The TREE_PURPOSE is the name of
@@ -217,7 +211,7 @@ struct GTY(()) cp_binding_level {
/* Similar to class_shadowed, but for IDENTIFIER_LABEL_VALUE, and
used for all binding levels. */
- VEC(cp_label_binding,gc) *shadowed_labels;
+ vec<cp_label_binding, va_gc> *shadowed_labels;
/* For each level (except not the global one),
a chain of BLOCK nodes for all the levels
@@ -234,7 +228,7 @@ struct GTY(()) cp_binding_level {
/* List of VAR_DECLS saved from a previous for statement.
These would be dead in ISO-conforming code, but might
be referenced in ARM-era code. */
- VEC(tree,gc) *dead_vars_from_for;
+ vec<tree, va_gc> *dead_vars_from_for;
/* STATEMENT_LIST for statements in this binding contour.
Only used at present for SK_CLEANUP temporary bindings. */
@@ -327,7 +321,7 @@ extern tree lookup_qualified_name (tree, tree, bool, bool);
extern tree lookup_name_nonclass (tree);
extern tree lookup_name_innermost_nonclass_level (tree);
extern bool is_local_extern (tree);
-extern tree lookup_function_nonclass (tree, VEC(tree,gc) *, bool);
+extern tree lookup_function_nonclass (tree, vec<tree, va_gc> *, bool);
extern void push_local_binding (tree, tree, int);
extern bool pushdecl_class_level (tree);
extern tree pushdecl_namespace_level (tree, bool);
@@ -343,7 +337,7 @@ extern void do_toplevel_using_decl (tree, tree, tree);
extern void do_local_using_decl (tree, tree, tree);
extern tree do_class_using_decl (tree, tree);
extern void do_using_directive (tree);
-extern tree lookup_arg_dependent (tree, tree, VEC(tree,gc) *, bool);
+extern tree lookup_arg_dependent (tree, tree, vec<tree, va_gc> *, bool);
extern bool is_associated_namespace (tree, tree);
extern void parse_using_directive (tree, tree);
extern tree innermost_non_namespace_value (tree);
diff --git a/gcc/cp/parser.c b/gcc/cp/parser.c
index 7107134639e..9650351580a 100644
--- a/gcc/cp/parser.c
+++ b/gcc/cp/parser.c
@@ -252,7 +252,7 @@ int cp_unevaluated_operand;
highlighted by surrounding it in [[ ]]. */
static void
-cp_lexer_dump_tokens (FILE *file, VEC(cp_token,gc) *buffer,
+cp_lexer_dump_tokens (FILE *file, vec<cp_token, va_gc> *buffer,
cp_token *start_token, unsigned num,
cp_token *curr_token)
{
@@ -260,26 +260,26 @@ cp_lexer_dump_tokens (FILE *file, VEC(cp_token,gc) *buffer,
cp_token *token;
bool do_print;
- fprintf (file, "%u tokens\n", VEC_length (cp_token, buffer));
+ fprintf (file, "%u tokens\n", vec_safe_length (buffer));
if (buffer == NULL)
return;
if (num == 0)
- num = VEC_length (cp_token, buffer);
+ num = buffer->length ();
if (start_token == NULL)
- start_token = VEC_address (cp_token, buffer);
+ start_token = buffer->address ();
- if (start_token > VEC_address (cp_token, buffer))
+ if (start_token > buffer->address ())
{
- cp_lexer_print_token (file, &VEC_index (cp_token, buffer, 0));
+ cp_lexer_print_token (file, &(*buffer)[0]);
fprintf (file, " ... ");
}
do_print = false;
nprinted = 0;
- for (i = 0; VEC_iterate (cp_token, buffer, i, token) && nprinted < num; i++)
+ for (i = 0; buffer->iterate (i, &token) && nprinted < num; i++)
{
if (token == start_token)
do_print = true;
@@ -310,10 +310,10 @@ cp_lexer_dump_tokens (FILE *file, VEC(cp_token,gc) *buffer,
}
}
- if (i == num && i < VEC_length (cp_token, buffer))
+ if (i == num && i < buffer->length ())
{
fprintf (file, " ... ");
- cp_lexer_print_token (file, &VEC_last (cp_token, buffer));
+ cp_lexer_print_token (file, &buffer->last ());
}
fprintf (file, "\n");
@@ -323,7 +323,7 @@ cp_lexer_dump_tokens (FILE *file, VEC(cp_token,gc) *buffer,
/* Dump all tokens in BUFFER to stderr. */
void
-cp_lexer_debug_tokens (VEC(cp_token,gc) *buffer)
+cp_lexer_debug_tokens (vec<cp_token, va_gc> *buffer)
{
cp_lexer_dump_tokens (stderr, buffer, NULL, 0, NULL);
}
@@ -393,8 +393,7 @@ cp_debug_print_unparsed_function (FILE *file, cp_unparsed_functions_entry *uf)
fprintf (file, "\tFunctions with default args:\n");
for (i = 0;
- VEC_iterate (cp_default_arg_entry, uf->funs_with_default_args, i,
- default_arg_fn);
+ vec_safe_iterate (uf->funs_with_default_args, i, &default_arg_fn);
i++)
{
fprintf (file, "\t\tClass type: ");
@@ -406,7 +405,7 @@ cp_debug_print_unparsed_function (FILE *file, cp_unparsed_functions_entry *uf)
fprintf (file, "\n\tFunctions with definitions that require "
"post-processing\n\t\t");
- for (i = 0; VEC_iterate (tree, uf->funs_with_definitions, i, fn); i++)
+ for (i = 0; vec_safe_iterate (uf->funs_with_definitions, i, &fn); i++)
{
print_node_brief (file, "", fn, 0);
fprintf (file, " ");
@@ -415,7 +414,7 @@ cp_debug_print_unparsed_function (FILE *file, cp_unparsed_functions_entry *uf)
fprintf (file, "\n\tNon-static data members with initializers that require "
"post-processing\n\t\t");
- for (i = 0; VEC_iterate (tree, uf->nsdmis, i, fn); i++)
+ for (i = 0; vec_safe_iterate (uf->nsdmis, i, &fn); i++)
{
print_node_brief (file, "", fn, 0);
fprintf (file, " ");
@@ -428,13 +427,13 @@ cp_debug_print_unparsed_function (FILE *file, cp_unparsed_functions_entry *uf)
static void
cp_debug_print_unparsed_queues (FILE *file,
- VEC(cp_unparsed_functions_entry, gc) *s)
+ vec<cp_unparsed_functions_entry, va_gc> *s)
{
unsigned i;
cp_unparsed_functions_entry *uf;
fprintf (file, "Unparsed functions\n");
- for (i = 0; VEC_iterate (cp_unparsed_functions_entry, s, i, uf); i++)
+ for (i = 0; vec_safe_iterate (s, i, &uf); i++)
{
fprintf (file, "#%u:\n", i);
cp_debug_print_unparsed_function (file, uf);
@@ -454,7 +453,7 @@ cp_debug_parser_tokens (FILE *file, cp_parser *parser, int window_size)
file = stderr;
next_token = parser->lexer->next_token;
- first_token = VEC_address (cp_token, parser->lexer->buffer);
+ first_token = parser->lexer->buffer->address ();
start_token = (next_token > first_token + window_size / 2)
? next_token - window_size / 2
: first_token;
@@ -478,7 +477,7 @@ cp_debug_parser (FILE *file, cp_parser *parser)
fprintf (file, "Parser state\n\n");
fprintf (file, "Number of tokens: %u\n",
- VEC_length (cp_token, parser->lexer->buffer));
+ vec_safe_length (parser->lexer->buffer));
cp_debug_print_tree_if_set (file, "Lookup scope", parser->scope);
cp_debug_print_tree_if_set (file, "Object scope",
parser->object_scope);
@@ -563,11 +562,10 @@ cp_lexer_alloc (void)
/* Initially we are not debugging. */
lexer->debugging_p = false;
- lexer->saved_tokens = VEC_alloc (cp_token_position, heap,
- CP_SAVED_TOKEN_STACK);
+ lexer->saved_tokens.create (CP_SAVED_TOKEN_STACK);
/* Create the buffer. */
- lexer->buffer = VEC_alloc (cp_token, gc, CP_LEXER_BUFFER_SIZE);
+ vec_alloc (lexer->buffer, CP_LEXER_BUFFER_SIZE);
return lexer;
}
@@ -590,20 +588,20 @@ cp_lexer_new_main (void)
lexer = cp_lexer_alloc ();
/* Put the first token in the buffer. */
- VEC_quick_push (cp_token, lexer->buffer, token);
+ lexer->buffer->quick_push (token);
/* Get the remaining tokens from the preprocessor. */
while (token.type != CPP_EOF)
{
cp_lexer_get_preprocessor_token (lexer, &token);
- VEC_safe_push (cp_token, gc, lexer->buffer, token);
+ vec_safe_push (lexer->buffer, token);
}
- lexer->last_token = VEC_address (cp_token, lexer->buffer)
- + VEC_length (cp_token, lexer->buffer)
+ lexer->last_token = lexer->buffer->address ()
+ + lexer->buffer->length ()
- 1;
- lexer->next_token = VEC_length (cp_token, lexer->buffer)
- ? VEC_address (cp_token, lexer->buffer)
+ lexer->next_token = lexer->buffer->length ()
+ ? lexer->buffer->address ()
: &eof_token;
/* Subsequent preprocessor diagnostics should use compiler
@@ -629,8 +627,7 @@ cp_lexer_new_from_tokens (cp_token_cache *cache)
lexer->next_token = first == last ? &eof_token : first;
lexer->last_token = last;
- lexer->saved_tokens = VEC_alloc (cp_token_position, heap,
- CP_SAVED_TOKEN_STACK);
+ lexer->saved_tokens.create (CP_SAVED_TOKEN_STACK);
/* Initially we are not debugging. */
lexer->debugging_p = false;
@@ -644,8 +641,8 @@ cp_lexer_new_from_tokens (cp_token_cache *cache)
static void
cp_lexer_destroy (cp_lexer *lexer)
{
- VEC_free (cp_token, gc, lexer->buffer);
- VEC_free (cp_token_position, heap, lexer->saved_tokens);
+ vec_free (lexer->buffer);
+ lexer->saved_tokens.release ();
ggc_free (lexer);
}
@@ -700,7 +697,7 @@ cp_lexer_previous_token (cp_lexer *lexer)
static inline int
cp_lexer_saving_tokens (const cp_lexer* lexer)
{
- return VEC_length (cp_token_position, lexer->saved_tokens) != 0;
+ return lexer->saved_tokens.length () != 0;
}
/* Store the next token from the preprocessor in *TOKEN. Return true
@@ -1060,8 +1057,7 @@ cp_lexer_save_tokens (cp_lexer* lexer)
if (cp_lexer_debugging_p (lexer))
fprintf (cp_lexer_debug_stream, "cp_lexer: saving tokens\n");
- VEC_safe_push (cp_token_position, heap,
- lexer->saved_tokens, lexer->next_token);
+ lexer->saved_tokens.safe_push (lexer->next_token);
}
/* Commit to the portion of the token stream most recently saved. */
@@ -1073,7 +1069,7 @@ cp_lexer_commit_tokens (cp_lexer* lexer)
if (cp_lexer_debugging_p (lexer))
fprintf (cp_lexer_debug_stream, "cp_lexer: committing tokens\n");
- VEC_pop (cp_token_position, lexer->saved_tokens);
+ lexer->saved_tokens.pop ();
}
/* Return all tokens saved since the last call to cp_lexer_save_tokens
@@ -1086,7 +1082,7 @@ cp_lexer_rollback_tokens (cp_lexer* lexer)
if (cp_lexer_debugging_p (lexer))
fprintf (cp_lexer_debug_stream, "cp_lexer: restoring tokens\n");
- lexer->next_token = VEC_pop (cp_token_position, lexer->saved_tokens);
+ lexer->next_token = lexer->saved_tokens.pop ();
}
/* Print a representation of the TOKEN on the STREAM. */
@@ -1735,24 +1731,24 @@ cp_parser_context_new (cp_parser_context* next)
/* Managing the unparsed function queues. */
#define unparsed_funs_with_default_args \
- VEC_last (cp_unparsed_functions_entry, parser->unparsed_queues).funs_with_default_args
+ parser->unparsed_queues->last ().funs_with_default_args
#define unparsed_funs_with_definitions \
- VEC_last (cp_unparsed_functions_entry, parser->unparsed_queues).funs_with_definitions
+ parser->unparsed_queues->last ().funs_with_definitions
#define unparsed_nsdmis \
- VEC_last (cp_unparsed_functions_entry, parser->unparsed_queues).nsdmis
+ parser->unparsed_queues->last ().nsdmis
static void
push_unparsed_function_queues (cp_parser *parser)
{
cp_unparsed_functions_entry e = {NULL, make_tree_vector (), NULL};
- VEC_safe_push (cp_unparsed_functions_entry, gc, parser->unparsed_queues, e);
+ vec_safe_push (parser->unparsed_queues, e);
}
static void
pop_unparsed_function_queues (cp_parser *parser)
{
release_tree_vector (unparsed_funs_with_definitions);
- VEC_pop (cp_unparsed_functions_entry, parser->unparsed_queues);
+ parser->unparsed_queues->pop ();
}
/* Prototypes. */
@@ -1812,7 +1808,7 @@ static tree cp_parser_postfix_open_square_expression
(cp_parser *, tree, bool);
static tree cp_parser_postfix_dot_deref_expression
(cp_parser *, enum cpp_ttype, tree, bool, cp_id_kind *, location_t);
-static VEC(tree,gc) *cp_parser_parenthesized_expression_list
+static vec<tree, va_gc> *cp_parser_parenthesized_expression_list
(cp_parser *, int, bool, bool, bool *);
/* Values for the second parameter of cp_parser_parenthesized_expression_list. */
enum { non_attr = 0, normal_attr = 1, id_attr = 2 };
@@ -1824,7 +1820,7 @@ static enum tree_code cp_parser_unary_operator
(cp_token *);
static tree cp_parser_new_expression
(cp_parser *);
-static VEC(tree,gc) *cp_parser_new_placement
+static vec<tree, va_gc> *cp_parser_new_placement
(cp_parser *);
static tree cp_parser_new_type_id
(cp_parser *, tree *);
@@ -1832,7 +1828,7 @@ static cp_declarator *cp_parser_new_declarator_opt
(cp_parser *);
static cp_declarator *cp_parser_direct_new_declarator
(cp_parser *);
-static VEC(tree,gc) *cp_parser_new_initializer
+static vec<tree, va_gc> *cp_parser_new_initializer
(cp_parser *);
static tree cp_parser_delete_expression
(cp_parser *);
@@ -1964,7 +1960,7 @@ static tree cp_parser_decltype
/* Declarators [gram.dcl.decl] */
static tree cp_parser_init_declarator
- (cp_parser *, cp_decl_specifier_seq *, VEC (deferred_access_check,gc)*, bool, bool, int, bool *, tree *);
+ (cp_parser *, cp_decl_specifier_seq *, vec<deferred_access_check, va_gc> *, bool, bool, int, bool *, tree *);
static cp_declarator *cp_parser_declarator
(cp_parser *, cp_parser_declarator_kind, int *, bool *, bool);
static cp_declarator *cp_parser_direct_declarator
@@ -2004,7 +2000,7 @@ static tree cp_parser_initializer_clause
(cp_parser *, bool *);
static tree cp_parser_braced_list
(cp_parser*, bool*);
-static VEC(constructor_elt,gc) *cp_parser_initializer_list
+static vec<constructor_elt, va_gc> *cp_parser_initializer_list
(cp_parser *, bool *);
static bool cp_parser_ctor_initializer_opt_and_function_body
@@ -2220,9 +2216,9 @@ static tree cp_parser_function_definition_after_declarator
static void cp_parser_template_declaration_after_export
(cp_parser *, bool);
static void cp_parser_perform_template_parameter_access_checks
- (VEC (deferred_access_check,gc)*);
+ (vec<deferred_access_check, va_gc> *);
static tree cp_parser_single_declaration
- (cp_parser *, VEC (deferred_access_check,gc)*, bool, bool, bool *);
+ (cp_parser *, vec<deferred_access_check, va_gc> *, bool, bool, bool *);
static tree cp_parser_functional_cast
(cp_parser *, tree);
static tree cp_parser_save_member_function_body
@@ -3551,7 +3547,7 @@ cp_parser_string_literal (cp_parser *parser, bool translate, bool wide_ok)
/* Look up a literal operator with the name and the exact arguments. */
static tree
-lookup_literal_operator (tree name, VEC(tree,gc) *args)
+lookup_literal_operator (tree name, vec<tree, va_gc> *args)
{
tree decl, fns;
decl = lookup_name (name);
@@ -3567,11 +3563,11 @@ lookup_literal_operator (tree name, VEC(tree,gc) *args)
argtypes = TYPE_ARG_TYPES (TREE_TYPE (fn));
if (argtypes != NULL_TREE)
{
- for (ix = 0; ix < VEC_length (tree, args) && argtypes != NULL_TREE;
+ for (ix = 0; ix < vec_safe_length (args) && argtypes != NULL_TREE;
++ix, argtypes = TREE_CHAIN (argtypes))
{
tree targ = TREE_VALUE (argtypes);
- tree tparm = TREE_TYPE (VEC_index (tree, args, ix));
+ tree tparm = TREE_TYPE ((*args)[ix]);
bool ptr = TREE_CODE (targ) == POINTER_TYPE;
bool arr = TREE_CODE (tparm) == ARRAY_TYPE;
if ((ptr || arr || !same_type_p (targ, tparm))
@@ -3581,7 +3577,7 @@ lookup_literal_operator (tree name, VEC(tree,gc) *args)
found = false;
}
if (found
- && ix == VEC_length (tree, args)
+ && ix == vec_safe_length (args)
/* May be this should be sufficient_parms_p instead,
depending on how exactly should user-defined literals
work in presence of default arguments on the literal
@@ -3609,8 +3605,8 @@ cp_parser_userdef_char_literal (cp_parser *parser)
/* Build up a call to the user-defined operator */
/* Lookup the name we got back from the id-expression. */
- VEC(tree,gc) *args = make_tree_vector ();
- VEC_safe_push (tree, gc, args, value);
+ vec<tree, va_gc> *args = make_tree_vector ();
+ vec_safe_push (args, value);
decl = lookup_literal_operator (name, args);
if (!decl || decl == error_mark_node)
{
@@ -3668,12 +3664,12 @@ cp_parser_userdef_numeric_literal (cp_parser *parser)
tree num_string = USERDEF_LITERAL_NUM_STRING (literal);
tree name = cp_literal_operator_id (IDENTIFIER_POINTER (suffix_id));
tree decl, result;
- VEC(tree,gc) *args;
+ vec<tree, va_gc> *args;
/* Look for a literal operator taking the exact type of numeric argument
as the literal value. */
args = make_tree_vector ();
- VEC_safe_push (tree, gc, args, value);
+ vec_safe_push (args, value);
decl = lookup_literal_operator (name, args);
if (decl && decl != error_mark_node)
{
@@ -3690,7 +3686,7 @@ cp_parser_userdef_numeric_literal (cp_parser *parser)
operator taking a const char* argument consisting of the number
in string format. */
args = make_tree_vector ();
- VEC_safe_push (tree, gc, args, num_string);
+ vec_safe_push (args, num_string);
decl = lookup_literal_operator (name, args);
if (decl && decl != error_mark_node)
{
@@ -3742,9 +3738,9 @@ cp_parser_userdef_string_literal (cp_token *token)
/* Build up a call to the user-defined operator */
/* Lookup the name we got back from the id-expression. */
- VEC(tree,gc) *args = make_tree_vector ();
- VEC_safe_push (tree, gc, args, value);
- VEC_safe_push (tree, gc, args, build_int_cst (size_type_node, len));
+ vec<tree, va_gc> *args = make_tree_vector ();
+ vec_safe_push (args, value);
+ vec_safe_push (args, build_int_cst (size_type_node, len));
decl = lookup_name (name);
if (!decl || decl == error_mark_node)
{
@@ -5496,7 +5492,7 @@ cp_parser_postfix_expression (cp_parser *parser, bool address_p, bool cast_p,
case RID_BUILTIN_SHUFFLE:
{
- VEC(tree,gc)* vec;
+ vec<tree, va_gc> *vec;
unsigned int i;
tree p;
location_t loc = token->location;
@@ -5508,21 +5504,13 @@ cp_parser_postfix_expression (cp_parser *parser, bool address_p, bool cast_p,
if (vec == NULL)
return error_mark_node;
- FOR_EACH_VEC_ELT (tree, vec, i, p)
+ FOR_EACH_VEC_ELT (*vec, i, p)
mark_exp_read (p);
- if (VEC_length (tree, vec) == 2)
- return
- c_build_vec_perm_expr
- (loc, VEC_index (tree, vec, 0),
- NULL_TREE, VEC_index (tree, vec, 1));
-
- else if (VEC_length (tree, vec) == 3)
- return
- c_build_vec_perm_expr
- (loc, VEC_index (tree, vec, 0),
- VEC_index (tree, vec, 1),
- VEC_index (tree, vec, 2));
+ if (vec->length () == 2)
+ return c_build_vec_perm_expr (loc, (*vec)[0], NULL_TREE, (*vec)[1]);
+ else if (vec->length () == 3)
+ return c_build_vec_perm_expr (loc, (*vec)[0], (*vec)[1], (*vec)[2]);
else
{
error_at (loc, "wrong number of arguments to "
@@ -5558,7 +5546,7 @@ cp_parser_postfix_expression (cp_parser *parser, bool address_p, bool cast_p,
if (cp_parser_allow_gnu_extensions_p (parser)
&& cp_lexer_next_token_is (parser->lexer, CPP_OPEN_PAREN))
{
- VEC(constructor_elt,gc) *initializer_list = NULL;
+ vec<constructor_elt, va_gc> *initializer_list = NULL;
bool saved_in_type_id_in_expr_p;
cp_parser_parse_tentatively (parser);
@@ -5666,7 +5654,7 @@ cp_parser_postfix_expression (cp_parser *parser, bool address_p, bool cast_p,
bool is_builtin_constant_p;
bool saved_integral_constant_expression_p = false;
bool saved_non_integral_constant_expression_p = false;
- VEC(tree,gc) *args;
+ vec<tree, va_gc> *args;
is_member_access = false;
@@ -5717,7 +5705,7 @@ cp_parser_postfix_expression (cp_parser *parser, bool address_p, bool cast_p,
{
if (TREE_CODE (postfix_expression) == IDENTIFIER_NODE)
{
- if (!VEC_empty (tree, args))
+ if (!args->is_empty ())
{
koenig_p = true;
if (!any_type_dependent_arguments_p (args))
@@ -5733,7 +5721,7 @@ cp_parser_postfix_expression (cp_parser *parser, bool address_p, bool cast_p,
/* We do not perform argument-dependent lookup if
normal lookup finds a non-function, in accordance
with the expected resolution of DR 218. */
- else if (!VEC_empty (tree, args)
+ else if (!args->is_empty ()
&& is_overloaded_fn (postfix_expression))
{
tree fn = get_first_fn (postfix_expression);
@@ -6154,14 +6142,14 @@ cp_parser_postfix_dot_deref_expression (cp_parser *parser,
NON_CONSTANT_P is non-NULL, *NON_CONSTANT_P indicates whether or
not all of the expressions in the list were constant. */
-static VEC(tree,gc) *
+static vec<tree, va_gc> *
cp_parser_parenthesized_expression_list (cp_parser* parser,
int is_attribute_list,
bool cast_p,
bool allow_expansion_p,
bool *non_constant_p)
{
- VEC(tree,gc) *expression_list;
+ vec<tree, va_gc> *expression_list;
bool fold_expr_p = is_attribute_list != non_attr;
tree identifier = NULL_TREE;
bool saved_greater_than_is_operator_p;
@@ -6242,7 +6230,7 @@ cp_parser_parenthesized_expression_list (cp_parser* parser,
expressions to the list, so that we can still tell if
the correct form for a parenthesized expression-list
is found. That gives better errors. */
- VEC_safe_push (tree, gc, expression_list, expr);
+ vec_safe_push (expression_list, expr);
if (expr == error_mark_node)
goto skip_comma;
@@ -6286,7 +6274,7 @@ cp_parser_parenthesized_expression_list (cp_parser* parser,
= saved_greater_than_is_operator_p;
if (identifier)
- VEC_safe_insert (tree, gc, expression_list, 0, identifier);
+ vec_safe_insert (expression_list, 0, identifier);
return expression_list;
}
@@ -6725,9 +6713,9 @@ static tree
cp_parser_new_expression (cp_parser* parser)
{
bool global_scope_p;
- VEC(tree,gc) *placement;
+ vec<tree, va_gc> *placement;
tree type;
- VEC(tree,gc) *initializer;
+ vec<tree, va_gc> *initializer;
tree nelts = NULL_TREE;
tree ret;
@@ -6819,10 +6807,10 @@ cp_parser_new_expression (cp_parser* parser)
Returns the same representation as for an expression-list. */
-static VEC(tree,gc) *
+static vec<tree, va_gc> *
cp_parser_new_placement (cp_parser* parser)
{
- VEC(tree,gc) *expression_list;
+ vec<tree, va_gc> *expression_list;
/* Parse the expression-list. */
expression_list = (cp_parser_parenthesized_expression_list
@@ -7014,10 +7002,10 @@ cp_parser_direct_new_declarator (cp_parser* parser)
Returns a representation of the expression-list. */
-static VEC(tree,gc) *
+static vec<tree, va_gc> *
cp_parser_new_initializer (cp_parser* parser)
{
- VEC(tree,gc) *expression_list;
+ vec<tree, va_gc> *expression_list;
if (cp_lexer_next_token_is (parser->lexer, CPP_OPEN_BRACE))
{
@@ -8084,9 +8072,7 @@ typedef struct GTY(()) tree_int
tree t;
int i;
} tree_int;
-DEF_VEC_O(tree_int);
-DEF_VEC_ALLOC_O(tree_int,gc);
-static GTY(()) VEC(tree_int,gc) *lambda_scope_stack;
+static GTY(()) vec<tree_int, va_gc> *lambda_scope_stack;
static void
start_lambda_scope (tree decl)
@@ -8099,7 +8085,7 @@ start_lambda_scope (tree decl)
decl = current_function_decl;
ti.t = lambda_scope;
ti.i = lambda_count;
- VEC_safe_push (tree_int, gc, lambda_scope_stack, ti);
+ vec_safe_push (lambda_scope_stack, ti);
if (lambda_scope != decl)
{
/* Don't reset the count if we're still in the same function. */
@@ -8118,13 +8104,13 @@ record_lambda_scope (tree lambda)
static void
finish_lambda_scope (void)
{
- tree_int *p = &VEC_last (tree_int, lambda_scope_stack);
+ tree_int *p = &lambda_scope_stack->last ();
if (lambda_scope != p->t)
{
lambda_scope = p->t;
lambda_count = p->i;
}
- VEC_pop (tree_int, lambda_scope_stack);
+ lambda_scope_stack->pop ();
}
/* Parse a lambda expression.
@@ -9716,10 +9702,10 @@ cp_parser_perform_range_for_lookup (tree range, tree *begin, tree *end)
else
{
/* Use global functions with ADL. */
- VEC(tree,gc) *vec;
+ vec<tree, va_gc> *vec;
vec = make_tree_vector ();
- VEC_safe_push (tree, gc, vec, range);
+ vec_safe_push (vec, range);
member_begin = perform_koenig_lookup (id_begin, vec,
/*include_std=*/true,
@@ -9763,7 +9749,7 @@ static tree
cp_parser_range_for_member_function (tree range, tree identifier)
{
tree member, res;
- VEC(tree,gc) *vec;
+ vec<tree, va_gc> *vec;
member = finish_class_member_access_expr (range, identifier,
false, tf_warning_or_error);
@@ -11671,7 +11657,7 @@ cp_parser_mem_initializer (cp_parser* parser)
}
else
{
- VEC(tree,gc)* vec;
+ vec<tree, va_gc> *vec;
vec = cp_parser_parenthesized_expression_list (parser, non_attr,
/*cast_p=*/false,
/*allow_expansion_p=*/true,
@@ -12566,7 +12552,7 @@ cp_parser_template_id (cp_parser *parser,
tree template_id;
cp_token_position start_of_id = 0;
deferred_access_check *chk;
- VEC (deferred_access_check,gc) *access_check;
+ vec<deferred_access_check, va_gc> *access_check;
cp_token *next_token = NULL, *next_token_2 = NULL;
bool is_identifier;
@@ -12583,7 +12569,7 @@ cp_parser_template_id (cp_parser *parser,
access_check = check_value->checks;
if (access_check)
{
- FOR_EACH_VEC_ELT (deferred_access_check, access_check, i, chk)
+ FOR_EACH_VEC_ELT (*access_check, i, chk)
perform_or_defer_access_check (chk->binfo,
chk->decl,
chk->diag_decl,
@@ -15664,7 +15650,7 @@ cp_parser_asm_definition (cp_parser* parser)
static tree
cp_parser_init_declarator (cp_parser* parser,
cp_decl_specifier_seq *decl_specifiers,
- VEC (deferred_access_check,gc)* checks,
+ vec<deferred_access_check, va_gc> *checks,
bool function_definition_allowed_p,
bool member_p,
int declares_class_or_enum,
@@ -17744,7 +17730,7 @@ cp_parser_initializer (cp_parser* parser, bool* is_direct_init,
}
else if (token->type == CPP_OPEN_PAREN)
{
- VEC(tree,gc) *vec;
+ vec<tree, va_gc> *vec;
vec = cp_parser_parenthesized_expression_list (parser, non_attr,
/*cast_p=*/false,
/*allow_expansion_p=*/true,
@@ -17861,15 +17847,15 @@ cp_parser_braced_list (cp_parser* parser, bool* non_constant_p)
identifier :
[ constant-expression ] =
- Returns a VEC of constructor_elt. The VALUE of each elt is an expression
+ Returns a vec of constructor_elt. The VALUE of each elt is an expression
for the initializer. If the INDEX of the elt is non-NULL, it is the
IDENTIFIER_NODE naming the field to initialize. NON_CONSTANT_P is
as for cp_parser_initializer. */
-static VEC(constructor_elt,gc) *
+static vec<constructor_elt, va_gc> *
cp_parser_initializer_list (cp_parser* parser, bool* non_constant_p)
{
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* Assume all of the expressions are constant. */
*non_constant_p = false;
@@ -18370,8 +18356,7 @@ cp_parser_class_specifier_1 (cp_parser* parser)
};
*/
- FOR_EACH_VEC_ELT (cp_default_arg_entry, unparsed_funs_with_default_args,
- ix, e)
+ FOR_EACH_VEC_SAFE_ELT (unparsed_funs_with_default_args, ix, e)
{
decl = e->decl;
/* If there are default arguments that have not yet been processed,
@@ -18390,11 +18375,11 @@ cp_parser_class_specifier_1 (cp_parser* parser)
/* Remove any template parameters from the symbol table. */
maybe_end_member_template_processing ();
}
- VEC_truncate (cp_default_arg_entry, unparsed_funs_with_default_args, 0);
+ vec_safe_truncate (unparsed_funs_with_default_args, 0);
/* Now parse any NSDMIs. */
save_ccp = current_class_ptr;
save_ccr = current_class_ref;
- FOR_EACH_VEC_ELT (tree, unparsed_nsdmis, ix, decl)
+ FOR_EACH_VEC_SAFE_ELT (unparsed_nsdmis, ix, decl)
{
if (class_type != DECL_CONTEXT (decl))
{
@@ -18406,15 +18391,15 @@ cp_parser_class_specifier_1 (cp_parser* parser)
inject_this_parameter (class_type, TYPE_UNQUALIFIED);
cp_parser_late_parsing_nsdmi (parser, decl);
}
- VEC_truncate (tree, unparsed_nsdmis, 0);
+ vec_safe_truncate (unparsed_nsdmis, 0);
current_class_ptr = save_ccp;
current_class_ref = save_ccr;
if (pushed_scope)
pop_scope (pushed_scope);
/* Now parse the body of the functions. */
- FOR_EACH_VEC_ELT (tree, unparsed_funs_with_definitions, ix, decl)
+ FOR_EACH_VEC_SAFE_ELT (unparsed_funs_with_definitions, ix, decl)
cp_parser_late_parsing_for_member (parser, decl);
- VEC_truncate (tree, unparsed_funs_with_definitions, 0);
+ vec_safe_truncate (unparsed_funs_with_definitions, 0);
}
/* Put back any saved access checks. */
@@ -19453,7 +19438,7 @@ cp_parser_member_declaration (cp_parser* parser)
&& !DECL_C_BIT_FIELD (decl)
&& DECL_INITIAL (decl))
/* Add DECL to the queue of NSDMI to be parsed later. */
- VEC_safe_push (tree, gc, unparsed_nsdmis, decl);
+ vec_safe_push (unparsed_nsdmis, decl);
}
if (assume_semicolon)
@@ -20503,7 +20488,7 @@ cp_parser_gnu_attribute_list (cp_parser* parser)
/* If it's an `(', then parse the attribute arguments. */
if (token->type == CPP_OPEN_PAREN)
{
- VEC(tree,gc) *vec;
+ vec<tree, va_gc> *vec;
int attr_flag = (attribute_takes_identifier_p (identifier)
? id_attr : normal_attr);
vec = cp_parser_parenthesized_expression_list
@@ -20640,7 +20625,7 @@ cp_parser_std_attribute (cp_parser *parser)
return attribute;
{
- VEC(tree, gc) *vec;
+ vec<tree, va_gc> *vec;
int attr_flag = normal_attr;
if (attr_ns == get_identifier ("gnu")
@@ -21672,7 +21657,7 @@ static void
cp_parser_template_declaration_after_export (cp_parser* parser, bool member_p)
{
tree decl = NULL_TREE;
- VEC (deferred_access_check,gc) *checks;
+ vec<deferred_access_check, va_gc> *checks;
tree parameter_list;
bool friend_p = false;
bool need_lang_pop;
@@ -21824,7 +21809,7 @@ cp_parser_template_declaration_after_export (cp_parser* parser, bool member_p)
if (member_p && decl
&& (TREE_CODE (decl) == FUNCTION_DECL
|| DECL_FUNCTION_TEMPLATE_P (decl)))
- VEC_safe_push (tree, gc, unparsed_funs_with_definitions, decl);
+ vec_safe_push (unparsed_funs_with_definitions, decl);
}
/* Perform the deferred access checks from a template-parameter-list.
@@ -21832,7 +21817,7 @@ cp_parser_template_declaration_after_export (cp_parser* parser, bool member_p)
get_deferred_access_checks. */
static void
-cp_parser_perform_template_parameter_access_checks (VEC (deferred_access_check,gc)* checks)
+cp_parser_perform_template_parameter_access_checks (vec<deferred_access_check, va_gc> *checks)
{
++processing_template_parmlist;
perform_access_checks (checks, tf_warning_or_error);
@@ -21848,7 +21833,7 @@ cp_parser_perform_template_parameter_access_checks (VEC (deferred_access_check,g
static tree
cp_parser_single_declaration (cp_parser* parser,
- VEC (deferred_access_check,gc)* checks,
+ vec<deferred_access_check, va_gc> *checks,
bool member_p,
bool explicit_specialization_p,
bool* friend_p)
@@ -21999,7 +21984,7 @@ cp_parser_simple_cast_expression (cp_parser *parser)
static tree
cp_parser_functional_cast (cp_parser* parser, tree type)
{
- VEC(tree,gc) *vec;
+ vec<tree, va_gc> *vec;
tree expression_list;
tree cast;
bool nonconst_p;
@@ -22108,7 +22093,7 @@ cp_parser_save_member_function_body (cp_parser* parser,
DECL_INITIALIZED_IN_CLASS_P (fn) = 1;
/* Add FN to the queue of functions to be parsed later. */
- VEC_safe_push (tree, gc, unparsed_funs_with_definitions, fn);
+ vec_safe_push (unparsed_funs_with_definitions, fn);
return fn;
}
@@ -22319,8 +22304,7 @@ cp_parser_save_default_args (cp_parser* parser, tree decl)
if (TREE_PURPOSE (probe))
{
cp_default_arg_entry entry = {current_class_type, decl};
- VEC_safe_push (cp_default_arg_entry, gc,
- unparsed_funs_with_default_args, entry);
+ vec_safe_push (unparsed_funs_with_default_args, entry);
break;
}
}
@@ -22440,7 +22424,7 @@ cp_parser_late_parsing_default_args (cp_parser *parser, tree fn)
{
tree default_arg = TREE_PURPOSE (parm);
tree parsed_arg;
- VEC(tree,gc) *insts;
+ vec<tree, va_gc> *insts;
tree copy;
unsigned ix;
@@ -22465,7 +22449,7 @@ cp_parser_late_parsing_default_args (cp_parser *parser, tree fn)
/* Update any instantiations we've already created. */
for (insts = DEFARG_INSTANTIATIONS (default_arg), ix = 0;
- VEC_iterate (tree, insts, ix, copy); ix++)
+ vec_safe_iterate (insts, ix, &copy); ix++)
TREE_PURPOSE (copy) = parsed_arg;
}
@@ -23298,7 +23282,7 @@ cp_parser_pre_parsed_nested_name_specifier (cp_parser *parser)
int i;
struct tree_check *check_value;
deferred_access_check *chk;
- VEC (deferred_access_check,gc) *checks;
+ vec<deferred_access_check, va_gc> *checks;
/* Get the stored value. */
check_value = cp_lexer_consume_token (parser->lexer)->u.tree_check_value;
@@ -23306,7 +23290,7 @@ cp_parser_pre_parsed_nested_name_specifier (cp_parser *parser)
checks = check_value->checks;
if (checks)
{
- FOR_EACH_VEC_ELT (deferred_access_check, checks, i, chk)
+ FOR_EACH_VEC_SAFE_ELT (checks, i, chk)
perform_or_defer_access_check (chk->binfo,
chk->decl,
chk->diag_decl, tf_warning_or_error);
@@ -26875,7 +26859,7 @@ cp_parser_omp_for_loop (cp_parser *parser, tree clauses, tree *par_clauses)
location_t loc_first;
bool collapse_err = false;
int i, collapse = 1, nbraces = 0;
- VEC(tree,gc) *for_block = make_tree_vector ();
+ vec<tree, va_gc> *for_block = make_tree_vector ();
for (cl = clauses; cl; cl = OMP_CLAUSE_CHAIN (cl))
if (OMP_CLAUSE_CODE (cl) == OMP_CLAUSE_COLLAPSE)
@@ -26994,7 +26978,7 @@ cp_parser_omp_for_loop (cp_parser *parser, tree clauses, tree *par_clauses)
LOOKUP_ONLYCONVERTING);
if (CLASS_TYPE_P (TREE_TYPE (decl)))
{
- VEC_safe_push (tree, gc, for_block, this_pre_body);
+ vec_safe_push (for_block, this_pre_body);
init = NULL_TREE;
}
else
@@ -27252,8 +27236,8 @@ cp_parser_omp_for_loop (cp_parser *parser, tree clauses, tree *par_clauses)
}
}
- while (!VEC_empty (tree, for_block))
- add_stmt (pop_stmt_list (VEC_pop (tree, for_block)));
+ while (!for_block->is_empty ())
+ add_stmt (pop_stmt_list (for_block->pop ()));
release_tree_vector (for_block);
return ret;
diff --git a/gcc/cp/parser.h b/gcc/cp/parser.h
index 5b95f0892de..cf281c4b187 100644
--- a/gcc/cp/parser.h
+++ b/gcc/cp/parser.h
@@ -30,7 +30,7 @@ struct GTY(()) tree_check {
/* The value associated with the token. */
tree value;
/* The checks that have been associated with value. */
- VEC (deferred_access_check, gc)* checks;
+ vec<deferred_access_check, va_gc> *checks;
/* The token's qualifying scope (used when it is a
CPP_NESTED_NAME_SPECIFIER). */
tree qualifying_scope;
@@ -69,14 +69,9 @@ typedef struct GTY (()) cp_token {
} GTY((desc ("(%1.type == CPP_TEMPLATE_ID) || (%1.type == CPP_NESTED_NAME_SPECIFIER)"))) u;
} cp_token;
-DEF_VEC_O (cp_token);
-DEF_VEC_ALLOC_O (cp_token,gc);
-DEF_VEC_ALLOC_O (cp_token,heap);
/* We use a stack of token pointer for saving token sets. */
typedef struct cp_token *cp_token_position;
-DEF_VEC_P (cp_token_position);
-DEF_VEC_ALLOC_P (cp_token_position,heap);
/* The cp_lexer structure represents the C++ lexer. It is responsible
for managing the token stream from the preprocessor and supplying
@@ -86,7 +81,7 @@ DEF_VEC_ALLOC_P (cp_token_position,heap);
typedef struct GTY (()) cp_lexer {
/* The memory allocated for the buffer. NULL if this lexer does not
own the token buffer. */
- VEC(cp_token,gc) *buffer;
+ vec<cp_token, va_gc> *buffer;
/* A pointer just past the last available token. The tokens
in this lexer are [buffer, last_token). */
@@ -100,7 +95,7 @@ typedef struct GTY (()) cp_lexer {
called. The top entry is the most recent position at which we
began saving tokens. If the stack is non-empty, we are saving
tokens. */
- VEC(cp_token_position,heap) *GTY ((skip)) saved_tokens;
+ vec<cp_token_position> GTY ((skip)) saved_tokens;
/* The next lexer in a linked list of lexers. */
struct cp_lexer *next;
@@ -113,8 +108,6 @@ typedef struct GTY (()) cp_lexer {
bool in_pragma;
} cp_lexer;
-DEF_VEC_O (cp_lexer);
-DEF_VEC_ALLOC_O (cp_lexer,heap);
/* cp_token_cache is a range of tokens. There is no need to represent
allocate heap memory for it, since tokens are never removed from the
@@ -131,8 +124,6 @@ typedef struct GTY(()) cp_token_cache {
} cp_token_cache;
typedef cp_token_cache *cp_token_cache_ptr;
-DEF_VEC_P (cp_token_cache_ptr);
-DEF_VEC_ALLOC_P (cp_token_cache_ptr,gc);
struct cp_token_ident_d
{
@@ -156,27 +147,23 @@ typedef struct GTY(()) cp_default_arg_entry_d {
tree decl;
} cp_default_arg_entry;
-DEF_VEC_O(cp_default_arg_entry);
-DEF_VEC_ALLOC_O(cp_default_arg_entry,gc);
/* An entry in a stack for member functions of local classes. */
typedef struct GTY(()) cp_unparsed_functions_entry_d {
/* Functions with default arguments that require post-processing.
Functions appear in this list in declaration order. */
- VEC(cp_default_arg_entry,gc) *funs_with_default_args;
+ vec<cp_default_arg_entry, va_gc> *funs_with_default_args;
/* Functions with defintions that require post-processing. Functions
appear in this list in declaration order. */
- VEC(tree,gc) *funs_with_definitions;
+ vec<tree, va_gc> *funs_with_definitions;
/* Non-static data members with initializers that require post-processing.
FIELD_DECLs appear in this list in declaration order. */
- VEC(tree,gc) *nsdmis;
+ vec<tree, va_gc> *nsdmis;
} cp_unparsed_functions_entry;
-DEF_VEC_O(cp_unparsed_functions_entry);
-DEF_VEC_ALLOC_O(cp_unparsed_functions_entry,gc);
/* The status of a tentative parse. */
@@ -344,7 +331,7 @@ typedef struct GTY(()) cp_parser {
/* A stack used for member functions of local classes. The lists
contained in an individual entry can only be processed once the
outermost class being defined is complete. */
- VEC(cp_unparsed_functions_entry,gc) *unparsed_queues;
+ vec<cp_unparsed_functions_entry, va_gc> *unparsed_queues;
/* The number of classes whose definitions are currently in
progress. */
@@ -356,7 +343,7 @@ typedef struct GTY(()) cp_parser {
} cp_parser;
/* In parser.c */
-extern void cp_lexer_debug_tokens (VEC(cp_token,gc) *);
+extern void cp_lexer_debug_tokens (vec<cp_token, va_gc> *);
extern void cp_debug_parser (FILE *, cp_parser *);
#endif /* GCC_CP_PARSER_H */
diff --git a/gcc/cp/pt.c b/gcc/cp/pt.c
index 101b22d9bcf..ecb013ecaee 100644
--- a/gcc/cp/pt.c
+++ b/gcc/cp/pt.c
@@ -43,7 +43,6 @@ along with GCC; see the file COPYING3. If not see
#include "toplev.h"
#include "timevar.h"
#include "tree-iterator.h"
-#include "vecprim.h"
/* The type of functions taking a tree, and some additional data, and
returning an int. */
@@ -64,7 +63,7 @@ int processing_template_parmlist;
static int template_header_count;
static GTY(()) tree saved_trees;
-static VEC(int,heap) *inline_parm_levels;
+static vec<int> inline_parm_levels;
static GTY(()) struct tinst_level *current_tinst_level;
@@ -100,7 +99,7 @@ static GTY ((param_is (spec_entry)))
the TEMPLATE_TYPE_IDX of the template parameter. Each element is a
TREE_LIST, whose TREE_VALUEs contain the canonical template
parameters of various types and levels. */
-static GTY(()) VEC(tree,gc) *canonical_template_parms;
+static GTY(()) vec<tree, va_gc> *canonical_template_parms;
#define UNIFY_ALLOW_NONE 0
#define UNIFY_ALLOW_MORE_CV_QUAL 1
@@ -473,7 +472,7 @@ maybe_begin_member_template_processing (tree decl)
/* Remember how many levels of template parameters we pushed so that
we can pop them later. */
- VEC_safe_push (int, heap, inline_parm_levels, levels);
+ inline_parm_levels.safe_push (levels);
}
/* Undo the effects of maybe_begin_member_template_processing. */
@@ -484,10 +483,10 @@ maybe_end_member_template_processing (void)
int i;
int last;
- if (VEC_length (int, inline_parm_levels) == 0)
+ if (inline_parm_levels.length () == 0)
return;
- last = VEC_pop (int, inline_parm_levels);
+ last = inline_parm_levels.pop ();
for (i = 0; i < last; ++i)
{
--processing_template_decl;
@@ -1018,7 +1017,7 @@ retrieve_specialization (tree tmpl, tree args, hashval_t hash)
{
tree class_template;
tree class_specialization;
- VEC(tree,gc) *methods;
+ vec<tree, va_gc> *methods;
tree fns;
int idx;
@@ -1038,7 +1037,7 @@ retrieve_specialization (tree tmpl, tree args, hashval_t hash)
/* Iterate through the methods with the indicated name, looking
for the one that has an instance of TMPL. */
methods = CLASSTYPE_METHOD_VEC (class_specialization);
- for (fns = VEC_index (tree, methods, idx); fns; fns = OVL_NEXT (fns))
+ for (fns = (*methods)[idx]; fns; fns = OVL_NEXT (fns))
{
tree fn = OVL_CURRENT (fns);
if (DECL_TEMPLATE_INFO (fn) && DECL_TI_TEMPLATE (fn) == tmpl
@@ -2548,11 +2547,11 @@ check_explicit_specialization (tree declarator,
{
idx = lookup_fnfields_1 (ctype, name);
if (idx >= 0)
- fns = VEC_index (tree, CLASSTYPE_METHOD_VEC (ctype), idx);
+ fns = (*CLASSTYPE_METHOD_VEC (ctype))[idx];
}
else
{
- VEC(tree,gc) *methods;
+ vec<tree, va_gc> *methods;
tree ovl;
/* For a type-conversion operator, we cannot do a
@@ -2565,7 +2564,7 @@ check_explicit_specialization (tree declarator,
methods = CLASSTYPE_METHOD_VEC (ctype);
if (methods)
for (idx = CLASSTYPE_FIRST_CONVERSION_SLOT;
- VEC_iterate (tree, methods, idx, ovl);
+ methods->iterate (idx, &ovl);
++idx)
{
if (!DECL_CONV_FN_P (OVL_CURRENT (ovl)))
@@ -3513,12 +3512,12 @@ canonical_type_parameter (tree type)
tree list;
int idx = TEMPLATE_TYPE_IDX (type);
if (!canonical_template_parms)
- canonical_template_parms = VEC_alloc (tree, gc, idx+1);
+ vec_alloc (canonical_template_parms, idx+1);
- while (VEC_length (tree, canonical_template_parms) <= (unsigned)idx)
- VEC_safe_push (tree, gc, canonical_template_parms, NULL_TREE);
+ while (canonical_template_parms->length () <= (unsigned)idx)
+ vec_safe_push (canonical_template_parms, NULL_TREE);
- list = VEC_index (tree, canonical_template_parms, idx);
+ list = (*canonical_template_parms)[idx];
while (list && !comptypes (type, TREE_VALUE (list), COMPARE_STRUCTURAL))
list = TREE_CHAIN (list);
@@ -3526,9 +3525,9 @@ canonical_type_parameter (tree type)
return TREE_VALUE (list);
else
{
- VEC_replace(tree, canonical_template_parms, idx,
- tree_cons (NULL_TREE, type,
- VEC_index (tree, canonical_template_parms, idx)));
+ (*canonical_template_parms)[idx]
+ = tree_cons (NULL_TREE, type,
+ (*canonical_template_parms)[idx]);
return type;
}
}
@@ -8457,7 +8456,7 @@ static void
perform_typedefs_access_check (tree tmpl, tree targs)
{
location_t saved_location;
- int i;
+ unsigned i;
qualified_typedef_usage_t *iter;
if (!tmpl
@@ -8466,9 +8465,7 @@ perform_typedefs_access_check (tree tmpl, tree targs)
return;
saved_location = input_location;
- FOR_EACH_VEC_ELT (qualified_typedef_usage_t,
- get_types_needing_access_check (tmpl),
- i, iter)
+ FOR_EACH_VEC_SAFE_ELT (get_types_needing_access_check (tmpl), i, iter)
{
tree type_decl = iter->typedef_decl;
tree type_scope = iter->context;
@@ -10729,8 +10726,7 @@ tsubst_arg_types (tree arg_types,
argument in a call of this function. */
remaining_arg_types =
tree_cons (default_arg, type, remaining_arg_types);
- VEC_safe_push (tree, gc, DEFARG_INSTANTIATIONS (default_arg),
- remaining_arg_types);
+ vec_safe_push (DEFARG_INSTANTIATIONS(default_arg), remaining_arg_types);
}
else
remaining_arg_types =
@@ -13626,8 +13622,8 @@ tsubst_copy_and_build (tree t,
{
tree placement = RECUR (TREE_OPERAND (t, 0));
tree init = RECUR (TREE_OPERAND (t, 3));
- VEC(tree,gc) *placement_vec;
- VEC(tree,gc) *init_vec;
+ vec<tree, va_gc> *placement_vec;
+ vec<tree, va_gc> *init_vec;
tree ret;
if (placement == NULL_TREE)
@@ -13636,7 +13632,7 @@ tsubst_copy_and_build (tree t,
{
placement_vec = make_tree_vector ();
for (; placement != NULL_TREE; placement = TREE_CHAIN (placement))
- VEC_safe_push (tree, gc, placement_vec, TREE_VALUE (placement));
+ vec_safe_push (placement_vec, TREE_VALUE (placement));
}
/* If there was an initializer in the original tree, but it
@@ -13655,7 +13651,7 @@ tsubst_copy_and_build (tree t,
else
{
for (; init != NULL_TREE; init = TREE_CHAIN (init))
- VEC_safe_push (tree, gc, init_vec, TREE_VALUE (init));
+ vec_safe_push (init_vec, TREE_VALUE (init));
}
}
@@ -13691,7 +13687,7 @@ tsubst_copy_and_build (tree t,
case CALL_EXPR:
{
tree function;
- VEC(tree,gc) *call_args;
+ vec<tree, va_gc> *call_args;
unsigned int nargs, i;
bool qualified_p;
bool koenig_p;
@@ -13751,8 +13747,7 @@ tsubst_copy_and_build (tree t,
tree arg = CALL_EXPR_ARG (t, i);
if (!PACK_EXPANSION_P (arg))
- VEC_safe_push (tree, gc, call_args,
- RECUR (CALL_EXPR_ARG (t, i)));
+ vec_safe_push (call_args, RECUR (CALL_EXPR_ARG (t, i)));
else
{
/* Expand the pack expansion and push each entry onto
@@ -13768,13 +13763,13 @@ tsubst_copy_and_build (tree t,
tree value = TREE_VEC_ELT (arg, j);
if (value != NULL_TREE)
value = convert_from_reference (value);
- VEC_safe_push (tree, gc, call_args, value);
+ vec_safe_push (call_args, value);
}
}
else
{
/* A partial substitution. Add one entry. */
- VEC_safe_push (tree, gc, call_args, arg);
+ vec_safe_push (call_args, arg);
}
}
}
@@ -14123,7 +14118,7 @@ tsubst_copy_and_build (tree t,
case CONSTRUCTOR:
{
- VEC(constructor_elt,gc) *n;
+ vec<constructor_elt, va_gc> *n;
constructor_elt *ce;
unsigned HOST_WIDE_INT idx;
tree type = tsubst (TREE_TYPE (t), args, complain, in_decl);
@@ -14144,9 +14139,9 @@ tsubst_copy_and_build (tree t,
looked up by digest_init. */
process_index_p = !(type && MAYBE_CLASS_TYPE_P (type));
- n = VEC_copy (constructor_elt, gc, CONSTRUCTOR_ELTS (t));
- newlen = VEC_length (constructor_elt, n);
- FOR_EACH_VEC_ELT (constructor_elt, n, idx, ce)
+ n = vec_safe_copy (CONSTRUCTOR_ELTS (t));
+ newlen = vec_safe_length (n);
+ FOR_EACH_VEC_SAFE_ELT (n, idx, ce)
{
if (ce->index && process_index_p)
ce->index = RECUR (ce->index);
@@ -14178,10 +14173,10 @@ tsubst_copy_and_build (tree t,
if (need_copy_p)
{
- VEC(constructor_elt,gc) *old_n = n;
+ vec<constructor_elt, va_gc> *old_n = n;
- n = VEC_alloc (constructor_elt, gc, newlen);
- FOR_EACH_VEC_ELT (constructor_elt, old_n, idx, ce)
+ vec_alloc (n, newlen);
+ FOR_EACH_VEC_ELT (*old_n, idx, ce)
{
if (TREE_CODE (ce->value) == TREE_VEC)
{
@@ -19791,12 +19786,12 @@ type_dependent_expression_p_push (tree expr)
/* Returns TRUE if ARGS contains a type-dependent expression. */
bool
-any_type_dependent_arguments_p (const VEC(tree,gc) *args)
+any_type_dependent_arguments_p (const vec<tree, va_gc> *args)
{
unsigned int i;
tree arg;
- FOR_EACH_VEC_ELT (tree, args, i, arg)
+ FOR_EACH_VEC_SAFE_ELT (args, i, arg)
{
if (type_dependent_expression_p (arg))
return true;
@@ -20249,16 +20244,16 @@ build_non_dependent_expr (tree expr)
This modifies ARGS in place. */
void
-make_args_non_dependent (VEC(tree,gc) *args)
+make_args_non_dependent (vec<tree, va_gc> *args)
{
unsigned int ix;
tree arg;
- FOR_EACH_VEC_ELT (tree, args, ix, arg)
+ FOR_EACH_VEC_SAFE_ELT (args, ix, arg)
{
tree newarg = build_non_dependent_expr (arg);
if (newarg != arg)
- VEC_replace (tree, args, ix, newarg);
+ (*args)[ix] = newarg;
}
}
@@ -20459,11 +20454,11 @@ type_uses_auto (tree type)
Those typedefs were added to T by the function
append_type_to_template_for_access_check. */
-VEC(qualified_typedef_usage_t,gc)*
+vec<qualified_typedef_usage_t, va_gc> *
get_types_needing_access_check (tree t)
{
tree ti;
- VEC(qualified_typedef_usage_t,gc) *result = NULL;
+ vec<qualified_typedef_usage_t, va_gc> *result = NULL;
if (!t || t == error_mark_node)
return NULL;
@@ -20521,9 +20516,7 @@ append_type_to_template_for_access_check_1 (tree t,
typedef_usage.context = scope;
typedef_usage.locus = location;
- VEC_safe_push (qualified_typedef_usage_t, gc,
- TI_TYPEDEFS_NEEDING_ACCESS_CHECKING (ti),
- typedef_usage);
+ vec_safe_push (TI_TYPEDEFS_NEEDING_ACCESS_CHECKING (ti), typedef_usage);
}
/* Append TYPE_DECL to the template TEMPL.
@@ -20562,14 +20555,12 @@ append_type_to_template_for_access_check (tree templ,
location_t location)
{
qualified_typedef_usage_t *iter;
- int i;
+ unsigned i;
gcc_assert (type_decl && (TREE_CODE (type_decl) == TYPE_DECL));
/* Make sure we don't append the type to the template twice. */
- FOR_EACH_VEC_ELT (qualified_typedef_usage_t,
- get_types_needing_access_check (templ),
- i, iter)
+ FOR_EACH_VEC_SAFE_ELT (get_types_needing_access_check (templ), i, iter)
if (iter->typedef_decl == type_decl && scope == iter->context)
return;
diff --git a/gcc/cp/repo.c b/gcc/cp/repo.c
index ca971b61b59..06e26937fbd 100644
--- a/gcc/cp/repo.c
+++ b/gcc/cp/repo.c
@@ -43,7 +43,7 @@ static FILE *open_repo_file (const char *);
static char *afgets (FILE *);
static FILE *reopen_repo_file_for_write (void);
-static GTY(()) VEC(tree,gc) *pending_repo;
+static GTY(()) vec<tree, va_gc> *pending_repo;
static char *repo_name;
static const char *old_args, *old_dir, *old_main;
@@ -268,7 +268,7 @@ finish_repo (void)
fprintf (repo_file, "\n");
}
- FOR_EACH_VEC_ELT_REVERSE (tree, pending_repo, ix, val)
+ FOR_EACH_VEC_SAFE_ELT_REVERSE (pending_repo, ix, val)
{
tree name = DECL_ASSEMBLER_NAME (val);
char type = IDENTIFIER_REPO_CHOSEN (name) ? 'C' : 'O';
@@ -353,7 +353,7 @@ repo_emit_p (tree decl)
if (!DECL_REPO_AVAILABLE_P (decl))
{
DECL_REPO_AVAILABLE_P (decl) = 1;
- VEC_safe_push (tree, gc, pending_repo, decl);
+ vec_safe_push (pending_repo, decl);
}
return IDENTIFIER_REPO_CHOSEN (DECL_ASSEMBLER_NAME (decl)) ? 1 : ret;
diff --git a/gcc/cp/rtti.c b/gcc/cp/rtti.c
index 95518cffe1d..b13ec171bbb 100644
--- a/gcc/cp/rtti.c
+++ b/gcc/cp/rtti.c
@@ -72,8 +72,6 @@ typedef struct GTY (()) tinfo_s {
the type_info derived type. */
} tinfo_s;
-DEF_VEC_O(tinfo_s);
-DEF_VEC_ALLOC_O(tinfo_s,gc);
typedef enum tinfo_kind
{
@@ -92,12 +90,12 @@ typedef enum tinfo_kind
} tinfo_kind;
/* A vector of all tinfo decls that haven't yet been emitted. */
-VEC(tree,gc) *unemitted_tinfo_decls;
+vec<tree, va_gc> *unemitted_tinfo_decls;
/* A vector of all type_info derived types we need. The first few are
fixed and created early. The remainder are for multiple inheritance
and are generated as needed. */
-static GTY (()) VEC(tinfo_s,gc) *tinfo_descs;
+static GTY (()) vec<tinfo_s, va_gc> *tinfo_descs;
static tree ifnonnull (tree, tree, tsubst_flags_t);
static tree tinfo_name (tree, bool);
@@ -155,7 +153,7 @@ init_rtti_processing (void)
= cp_build_qualified_type (type_info_type, TYPE_QUAL_CONST);
type_info_ptr_type = build_pointer_type (const_type_info_type_node);
- unemitted_tinfo_decls = VEC_alloc (tree, gc, 124);
+ vec_alloc (unemitted_tinfo_decls, 124);
create_tinfo_types ();
}
@@ -294,8 +292,7 @@ typeid_ok_p (void)
return false;
}
- pseudo_type_info
- = VEC_index (tinfo_s, tinfo_descs, TK_TYPE_INFO_TYPE).type;
+ pseudo_type_info = (*tinfo_descs)[TK_TYPE_INFO_TYPE].type;
type_info_type = TYPE_MAIN_VARIANT (const_type_info_type_node);
/* Make sure abi::__type_info_pseudo has the same alias set
@@ -422,7 +419,7 @@ get_tinfo_decl (tree type)
if (!d)
{
int ix = get_pseudo_ti_index (type);
- tinfo_s *ti = &VEC_index (tinfo_s, tinfo_descs, ix);
+ tinfo_s *ti = &(*tinfo_descs)[ix];
d = build_lang_decl (VAR_DECL, name, ti->type);
SET_DECL_ASSEMBLER_NAME (d, name);
@@ -444,7 +441,7 @@ get_tinfo_decl (tree type)
CLASSTYPE_TYPEINFO_VAR (TYPE_MAIN_VARIANT (type)) = d;
/* Add decl to the global array of tinfo decls. */
- VEC_safe_push (tree, gc, unemitted_tinfo_decls, d);
+ vec_safe_push (unemitted_tinfo_decls, d);
}
return d;
@@ -873,7 +870,7 @@ tinfo_base_init (tinfo_s *ti, tree target)
tree init;
tree name_decl;
tree vtable_ptr;
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *v;
{
tree name_name, name_string;
@@ -935,7 +932,7 @@ tinfo_base_init (tinfo_s *ti, tree target)
ti->vtable = vtable_ptr;
}
- v = VEC_alloc (constructor_elt, gc, 2);
+ vec_alloc (v, 2);
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, vtable_ptr);
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE,
decay_conversion (name_decl, tf_warning_or_error));
@@ -973,7 +970,8 @@ ptr_initializer (tinfo_s *ti, tree target)
tree to = TREE_TYPE (target);
int flags = qualifier_flags (to);
bool incomplete = target_incomplete_p (to);
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 3);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, 3);
if (incomplete)
flags |= 8;
@@ -1001,7 +999,8 @@ ptm_initializer (tinfo_s *ti, tree target)
tree klass = TYPE_PTRMEM_CLASS_TYPE (target);
int flags = qualifier_flags (to);
bool incomplete = target_incomplete_p (to);
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 4);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, 4);
if (incomplete)
flags |= 0x8;
@@ -1029,7 +1028,8 @@ class_initializer (tinfo_s *ti, tree target, unsigned n, ...)
tree init = tinfo_base_init (ti, target);
va_list extra_inits;
unsigned i;
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, n+1);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, n+1);
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
va_start (extra_inits, n);
@@ -1079,7 +1079,7 @@ typeinfo_in_lib_p (tree type)
static tree
get_pseudo_ti_init (tree type, unsigned tk_index)
{
- tinfo_s *ti = &VEC_index (tinfo_s, tinfo_descs, tk_index);
+ tinfo_s *ti = &(*tinfo_descs)[tk_index];
gcc_assert (at_eof);
switch (tk_index)
@@ -1105,7 +1105,7 @@ get_pseudo_ti_init (tree type, unsigned tk_index)
tree tinfo = get_tinfo_ptr (BINFO_TYPE (base_binfo));
/* get_tinfo_ptr might have reallocated the tinfo_descs vector. */
- ti = &VEC_index (tinfo_s, tinfo_descs, tk_index);
+ ti = &(*tinfo_descs)[tk_index];
return class_initializer (ti, type, 1, tinfo);
}
@@ -1115,16 +1115,16 @@ get_pseudo_ti_init (tree type, unsigned tk_index)
| (CLASSTYPE_DIAMOND_SHAPED_P (type) << 1));
tree binfo = TYPE_BINFO (type);
int nbases = BINFO_N_BASE_BINFOS (binfo);
- VEC(tree,gc) *base_accesses = BINFO_BASE_ACCESSES (binfo);
+ vec<tree, va_gc> *base_accesses = BINFO_BASE_ACCESSES (binfo);
tree offset_type = integer_types[itk_long];
tree base_inits = NULL_TREE;
int ix;
- VEC(constructor_elt,gc) *init_vec = NULL;
+ vec<constructor_elt, va_gc> *init_vec = NULL;
constructor_elt *e;
gcc_assert (tk_index >= TK_FIXED);
- VEC_safe_grow (constructor_elt, gc, init_vec, nbases);
+ vec_safe_grow (init_vec, nbases);
/* Generate the base information initializer. */
for (ix = nbases; ix--;)
{
@@ -1133,9 +1133,9 @@ get_pseudo_ti_init (tree type, unsigned tk_index)
int flags = 0;
tree tinfo;
tree offset;
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *v;
- if (VEC_index (tree, base_accesses, ix) == access_public_node)
+ if ((*base_accesses)[ix] == access_public_node)
flags |= 2;
tinfo = get_tinfo_ptr (BINFO_TYPE (base_binfo));
if (BINFO_VIRTUAL_P (base_binfo))
@@ -1156,18 +1156,18 @@ get_pseudo_ti_init (tree type, unsigned tk_index)
offset = fold_build2_loc (input_location,
BIT_IOR_EXPR, offset_type, offset,
build_int_cst (offset_type, flags));
- v = VEC_alloc (constructor_elt, gc, 2);
+ vec_alloc (v, 2);
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, tinfo);
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, offset);
base_init = build_constructor (init_list_type_node, v);
- e = &VEC_index (constructor_elt, init_vec, ix);
+ e = &(*init_vec)[ix];
e->index = NULL_TREE;
e->value = base_init;
}
base_inits = build_constructor (init_list_type_node, init_vec);
/* get_tinfo_ptr might have reallocated the tinfo_descs vector. */
- ti = &VEC_index (tinfo_s, tinfo_descs, tk_index);
+ ti = &(*tinfo_descs)[tk_index];
return class_initializer (ti, type, 3,
build_int_cst (NULL_TREE, hint),
build_int_cst (NULL_TREE, nbases),
@@ -1213,8 +1213,7 @@ create_pseudo_type_info (int tk, const char *real_name, ...)
/* First field is the pseudo type_info base class. */
fields = build_decl (input_location,
FIELD_DECL, NULL_TREE,
- VEC_index (tinfo_s, tinfo_descs,
- TK_TYPE_INFO_TYPE).type);
+ (*tinfo_descs)[TK_TYPE_INFO_TYPE].type);
/* Now add the derived fields. */
while ((field_decl = va_arg (ap, tree)))
@@ -1228,7 +1227,7 @@ create_pseudo_type_info (int tk, const char *real_name, ...)
finish_builtin_struct (pseudo_type, pseudo_name, fields, NULL_TREE);
CLASSTYPE_AS_BASE (pseudo_type) = pseudo_type;
- ti = &VEC_index (tinfo_s, tinfo_descs, tk);
+ ti = &(*tinfo_descs)[tk];
ti->type = cp_build_qualified_type (pseudo_type, TYPE_QUAL_CONST);
ti->name = get_identifier (real_name);
ti->vtable = NULL_TREE;
@@ -1293,12 +1292,12 @@ get_pseudo_ti_index (tree type)
else
{
tree binfo = TYPE_BINFO (type);
- VEC(tree,gc) *base_accesses = BINFO_BASE_ACCESSES (binfo);
+ vec<tree, va_gc> *base_accesses = BINFO_BASE_ACCESSES (binfo);
tree base_binfo = BINFO_BASE_BINFO (binfo, 0);
int num_bases = BINFO_N_BASE_BINFOS (binfo);
if (num_bases == 1
- && VEC_index (tree, base_accesses, 0) == access_public_node
+ && (*base_accesses)[0] == access_public_node
&& !BINFO_VIRTUAL_P (base_binfo)
&& integer_zerop (BINFO_OFFSET (base_binfo)))
{
@@ -1312,16 +1311,16 @@ get_pseudo_ti_index (tree type)
tree array_domain, base_array;
ix = TK_FIXED + num_bases;
- if (VEC_length (tinfo_s, tinfo_descs) <= ix)
+ if (vec_safe_length (tinfo_descs) <= ix)
{
/* too short, extend. */
- unsigned len = VEC_length (tinfo_s, tinfo_descs);
+ unsigned len = vec_safe_length (tinfo_descs);
- VEC_safe_grow (tinfo_s, gc, tinfo_descs, ix + 1);
- while (VEC_iterate (tinfo_s, tinfo_descs, len++, ti))
+ vec_safe_grow (tinfo_descs, ix + 1);
+ while (tinfo_descs->iterate (len++, &ti))
ti->type = ti->vtable = ti->name = NULL_TREE;
}
- else if (VEC_index (tinfo_s, tinfo_descs, ix).type)
+ else if ((*tinfo_descs)[ix].type)
/* already created. */
break;
@@ -1333,10 +1332,8 @@ get_pseudo_ti_index (tree type)
array_domain = build_index_type (size_int (num_bases - 1));
else
array_domain = build_index_type (size_int (num_bases));
- base_array =
- build_array_type (VEC_index (tinfo_s, tinfo_descs,
- TK_BASE_TYPE).type,
- array_domain);
+ base_array = build_array_type ((*tinfo_descs)[TK_BASE_TYPE].type,
+ array_domain);
push_abi_namespace ();
create_pseudo_type_info
@@ -1369,7 +1366,7 @@ create_tinfo_types (void)
gcc_assert (!tinfo_descs);
- VEC_safe_grow (tinfo_s, gc, tinfo_descs, TK_FIXED);
+ vec_safe_grow (tinfo_descs, TK_FIXED);
push_abi_namespace ();
@@ -1387,7 +1384,7 @@ create_tinfo_types (void)
DECL_CHAIN (field) = fields;
fields = field;
- ti = &VEC_index (tinfo_s, tinfo_descs, TK_TYPE_INFO_TYPE);
+ ti = &(*tinfo_descs)[TK_TYPE_INFO_TYPE];
ti->type = make_class_type (RECORD_TYPE);
ti->vtable = NULL_TREE;
ti->name = NULL_TREE;
@@ -1427,7 +1424,7 @@ create_tinfo_types (void)
DECL_CHAIN (field) = fields;
fields = field;
- ti = &VEC_index (tinfo_s, tinfo_descs, TK_BASE_TYPE);
+ ti = &(*tinfo_descs)[TK_BASE_TYPE];
ti->type = make_class_type (RECORD_TYPE);
ti->vtable = NULL_TREE;
diff --git a/gcc/cp/search.c b/gcc/cp/search.c
index 1614f49350e..92234a52be5 100644
--- a/gcc/cp/search.c
+++ b/gcc/cp/search.c
@@ -639,14 +639,14 @@ dfs_access_in_type (tree binfo, void *data)
{
int i;
tree base_binfo;
- VEC(tree,gc) *accesses;
+ vec<tree, va_gc> *accesses;
/* Otherwise, scan our baseclasses, and pick the most favorable
access. */
accesses = BINFO_BASE_ACCESSES (binfo);
for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
{
- tree base_access = VEC_index (tree, accesses, i);
+ tree base_access = (*accesses)[i];
access_kind base_access_now = BINFO_ACCESS (base_binfo);
if (base_access_now == ak_none || base_access_now == ak_private)
@@ -1316,10 +1316,10 @@ lookup_conversion_operator (tree class_type, tree type)
{
int i;
tree fn;
- VEC(tree,gc) *methods = CLASSTYPE_METHOD_VEC (class_type);
+ vec<tree, va_gc> *methods = CLASSTYPE_METHOD_VEC (class_type);
for (i = CLASSTYPE_FIRST_CONVERSION_SLOT;
- VEC_iterate (tree, methods, i, fn); ++i)
+ vec_safe_iterate (methods, i, &fn); ++i)
{
/* All the conversion operators come near the beginning of
the class. Therefore, if FN is not a conversion
@@ -1348,7 +1348,7 @@ lookup_conversion_operator (tree class_type, tree type)
static int
lookup_fnfields_idx_nolazy (tree type, tree name)
{
- VEC(tree,gc) *method_vec;
+ vec<tree, va_gc> *method_vec;
tree fn;
tree tmp;
size_t i;
@@ -1380,7 +1380,7 @@ lookup_fnfields_idx_nolazy (tree type, tree name)
/* Skip the conversion operators. */
for (i = CLASSTYPE_FIRST_CONVERSION_SLOT;
- VEC_iterate (tree, method_vec, i, fn);
+ vec_safe_iterate (method_vec, i, &fn);
++i)
if (!DECL_CONV_FN_P (OVL_CURRENT (fn)))
break;
@@ -1392,7 +1392,7 @@ lookup_fnfields_idx_nolazy (tree type, tree name)
int hi;
lo = i;
- hi = VEC_length (tree, method_vec);
+ hi = method_vec->length ();
while (lo < hi)
{
i = (lo + hi) / 2;
@@ -1400,7 +1400,7 @@ lookup_fnfields_idx_nolazy (tree type, tree name)
if (GATHER_STATISTICS)
n_outer_fields_searched++;
- tmp = VEC_index (tree, method_vec, i);
+ tmp = (*method_vec)[i];
tmp = DECL_NAME (OVL_CURRENT (tmp));
if (tmp > name)
hi = i;
@@ -1411,7 +1411,7 @@ lookup_fnfields_idx_nolazy (tree type, tree name)
}
}
else
- for (; VEC_iterate (tree, method_vec, i, fn); ++i)
+ for (; vec_safe_iterate (method_vec, i, &fn); ++i)
{
if (GATHER_STATISTICS)
n_outer_fields_searched++;
@@ -1471,7 +1471,7 @@ lookup_fnfields_slot (tree type, tree name)
int ix = lookup_fnfields_1 (complete_type (type), name);
if (ix < 0)
return NULL_TREE;
- return VEC_index (tree, CLASSTYPE_METHOD_VEC (type), ix);
+ return (*CLASSTYPE_METHOD_VEC (type))[ix];
}
/* As above, but avoid lazily declaring functions. */
@@ -1482,7 +1482,7 @@ lookup_fnfields_slot_nolazy (tree type, tree name)
int ix = lookup_fnfields_idx_nolazy (complete_type (type), name);
if (ix < 0)
return NULL_TREE;
- return VEC_index (tree, CLASSTYPE_METHOD_VEC (type), ix);
+ return (*CLASSTYPE_METHOD_VEC (type))[ix];
}
/* Like lookup_fnfields_1, except that the name is extracted from
@@ -1701,12 +1701,12 @@ dfs_walk_once (tree binfo, tree (*pre_fn) (tree, void *),
/* We are at the top of the hierarchy, and can use the
CLASSTYPE_VBASECLASSES list for unmarking the virtual
bases. */
- VEC(tree,gc) *vbases;
+ vec<tree, va_gc> *vbases;
unsigned ix;
tree base_binfo;
for (vbases = CLASSTYPE_VBASECLASSES (BINFO_TYPE (binfo)), ix = 0;
- VEC_iterate (tree, vbases, ix, base_binfo); ix++)
+ vec_safe_iterate (vbases, ix, &base_binfo); ix++)
BINFO_MARKED (base_binfo) = 0;
}
else
@@ -1809,12 +1809,12 @@ dfs_walk_once_accessible (tree binfo, bool friends_p,
/* We are at the top of the hierarchy, and can use the
CLASSTYPE_VBASECLASSES list for unmarking the virtual
bases. */
- VEC(tree,gc) *vbases;
+ vec<tree, va_gc> *vbases;
unsigned ix;
tree base_binfo;
for (vbases = CLASSTYPE_VBASECLASSES (BINFO_TYPE (binfo)), ix = 0;
- VEC_iterate (tree, vbases, ix, base_binfo); ix++)
+ vec_safe_iterate (vbases, ix, &base_binfo); ix++)
BINFO_MARKED (base_binfo) = 0;
}
else
@@ -2019,7 +2019,7 @@ look_for_overrides_here (tree type, tree fndecl)
ix = lookup_fnfields_1 (type, DECL_NAME (fndecl));
if (ix >= 0)
{
- tree fns = VEC_index (tree, CLASSTYPE_METHOD_VEC (type), ix);
+ tree fns = (*CLASSTYPE_METHOD_VEC (type))[ix];
for (; fns; fns = OVL_NEXT (fns))
{
@@ -2090,8 +2090,7 @@ dfs_get_pure_virtuals (tree binfo, void *data)
virtuals;
virtuals = TREE_CHAIN (virtuals))
if (DECL_PURE_VIRTUAL_P (BV_FN (virtuals)))
- VEC_safe_push (tree, gc, CLASSTYPE_PURE_VIRTUALS (type),
- BV_FN (virtuals));
+ vec_safe_push (CLASSTYPE_PURE_VIRTUALS (type), BV_FN (virtuals));
}
return NULL_TREE;
@@ -2364,7 +2363,7 @@ lookup_conversions_r (tree binfo,
tree child_tpl_convs = NULL_TREE;
unsigned i;
tree base_binfo;
- VEC(tree,gc) *method_vec = CLASSTYPE_METHOD_VEC (BINFO_TYPE (binfo));
+ vec<tree, va_gc> *method_vec = CLASSTYPE_METHOD_VEC (BINFO_TYPE (binfo));
tree conv;
/* If we have no conversion operators, then don't look. */
@@ -2380,7 +2379,7 @@ lookup_conversions_r (tree binfo,
/* First, locate the unhidden ones at this level. */
for (i = CLASSTYPE_FIRST_CONVERSION_SLOT;
- VEC_iterate (tree, method_vec, i, conv);
+ vec_safe_iterate (method_vec, i, &conv);
++i)
{
tree cur = OVL_CURRENT (conv);
@@ -2622,10 +2621,10 @@ binfo_for_vbase (tree base, tree t)
{
unsigned ix;
tree binfo;
- VEC(tree,gc) *vbases;
+ vec<tree, va_gc> *vbases;
for (vbases = CLASSTYPE_VBASECLASSES (t), ix = 0;
- VEC_iterate (tree, vbases, ix, binfo); ix++)
+ vec_safe_iterate (vbases, ix, &binfo); ix++)
if (SAME_BINFO_TYPE_P (BINFO_TYPE (binfo), base))
return binfo;
return NULL;
diff --git a/gcc/cp/semantics.c b/gcc/cp/semantics.c
index de6461c37c2..bbafa3aa359 100644
--- a/gcc/cp/semantics.c
+++ b/gcc/cp/semantics.c
@@ -92,7 +92,7 @@ static tree capture_decltype (tree);
2. When a declaration such as a type, or a variable, is encountered,
the function `perform_or_defer_access_check' is called. It
- maintains a VEC of all deferred checks.
+ maintains a vector of all deferred checks.
3. The global `current_class_type' or `current_function_decl' is then
setup by the parser. `enforce_access' relies on these information
@@ -100,14 +100,14 @@ static tree capture_decltype (tree);
4. Upon exiting the context mentioned in step 1,
`perform_deferred_access_checks' is called to check all declaration
- stored in the VEC. `pop_deferring_access_checks' is then
+ stored in the vector. `pop_deferring_access_checks' is then
called to restore the previous access checking mode.
In case of parsing error, we simply call `pop_deferring_access_checks'
without `perform_deferred_access_checks'. */
typedef struct GTY(()) deferred_access {
- /* A VEC representing name-lookups for which we have deferred
+ /* A vector representing name-lookups for which we have deferred
checking access controls. We cannot check the accessibility of
names used in a decl-specifier-seq until we know what is being
declared because code like:
@@ -120,17 +120,15 @@ typedef struct GTY(()) deferred_access {
A::B* A::f() { return 0; }
is valid, even though `A::B' is not generally accessible. */
- VEC (deferred_access_check,gc)* GTY(()) deferred_access_checks;
+ vec<deferred_access_check, va_gc> * GTY(()) deferred_access_checks;
/* The current mode of access checks. */
enum deferring_kind deferring_access_checks_kind;
} deferred_access;
-DEF_VEC_O (deferred_access);
-DEF_VEC_ALLOC_O (deferred_access,gc);
/* Data for deferred access checking. */
-static GTY(()) VEC(deferred_access,gc) *deferred_access_stack;
+static GTY(()) vec<deferred_access, va_gc> *deferred_access_stack;
static GTY(()) unsigned deferred_access_no_check;
/* Save the current deferred access states and start deferred
@@ -146,7 +144,7 @@ push_deferring_access_checks (deferring_kind deferring)
else
{
deferred_access e = {NULL, deferring};
- VEC_safe_push (deferred_access, gc, deferred_access_stack, e);
+ vec_safe_push (deferred_access_stack, e);
}
}
@@ -157,8 +155,7 @@ void
resume_deferring_access_checks (void)
{
if (!deferred_access_no_check)
- VEC_last (deferred_access, deferred_access_stack)
- .deferring_access_checks_kind = dk_deferred;
+ deferred_access_stack->last().deferring_access_checks_kind = dk_deferred;
}
/* Stop deferring access checks. */
@@ -167,8 +164,7 @@ void
stop_deferring_access_checks (void)
{
if (!deferred_access_no_check)
- VEC_last (deferred_access, deferred_access_stack)
- .deferring_access_checks_kind = dk_no_deferred;
+ deferred_access_stack->last().deferring_access_checks_kind = dk_no_deferred;
}
/* Discard the current deferred access checks and restore the
@@ -180,7 +176,7 @@ pop_deferring_access_checks (void)
if (deferred_access_no_check)
deferred_access_no_check--;
else
- VEC_pop (deferred_access, deferred_access_stack);
+ deferred_access_stack->pop ();
}
/* Returns a TREE_LIST representing the deferred checks.
@@ -188,14 +184,13 @@ pop_deferring_access_checks (void)
access occurred; the TREE_VALUE is the declaration named.
*/
-VEC (deferred_access_check,gc)*
+vec<deferred_access_check, va_gc> *
get_deferred_access_checks (void)
{
if (deferred_access_no_check)
return NULL;
else
- return (VEC_last (deferred_access, deferred_access_stack)
- .deferred_access_checks);
+ return (deferred_access_stack->last().deferred_access_checks);
}
/* Take current deferred checks and combine with the
@@ -209,14 +204,13 @@ pop_to_parent_deferring_access_checks (void)
deferred_access_no_check--;
else
{
- VEC (deferred_access_check,gc) *checks;
+ vec<deferred_access_check, va_gc> *checks;
deferred_access *ptr;
- checks = (VEC_last (deferred_access, deferred_access_stack)
- .deferred_access_checks);
+ checks = (deferred_access_stack->last ().deferred_access_checks);
- VEC_pop (deferred_access, deferred_access_stack);
- ptr = &VEC_last (deferred_access, deferred_access_stack);
+ deferred_access_stack->pop ();
+ ptr = &deferred_access_stack->last ();
if (ptr->deferring_access_checks_kind == dk_no_deferred)
{
/* Check access. */
@@ -228,10 +222,9 @@ pop_to_parent_deferring_access_checks (void)
int i, j;
deferred_access_check *chk, *probe;
- FOR_EACH_VEC_ELT (deferred_access_check, checks, i, chk)
+ FOR_EACH_VEC_SAFE_ELT (checks, i, chk)
{
- FOR_EACH_VEC_ELT (deferred_access_check,
- ptr->deferred_access_checks, j, probe)
+ FOR_EACH_VEC_SAFE_ELT (ptr->deferred_access_checks, j, probe)
{
if (probe->binfo == chk->binfo &&
probe->decl == chk->decl &&
@@ -239,8 +232,7 @@ pop_to_parent_deferring_access_checks (void)
goto found;
}
/* Insert into parent's checks. */
- VEC_safe_push (deferred_access_check, gc,
- ptr->deferred_access_checks, *chk);
+ vec_safe_push (ptr->deferred_access_checks, *chk);
found:;
}
}
@@ -254,7 +246,7 @@ pop_to_parent_deferring_access_checks (void)
otherwise FALSE. */
bool
-perform_access_checks (VEC (deferred_access_check,gc)* checks,
+perform_access_checks (vec<deferred_access_check, va_gc> *checks,
tsubst_flags_t complain)
{
int i;
@@ -265,7 +257,7 @@ perform_access_checks (VEC (deferred_access_check,gc)* checks,
if (!checks)
return true;
- FOR_EACH_VEC_ELT (deferred_access_check, checks, i, chk)
+ FOR_EACH_VEC_SAFE_ELT (checks, i, chk)
{
input_location = chk->loc;
ok &= enforce_access (chk->binfo, chk->decl, chk->diag_decl, complain);
@@ -317,7 +309,7 @@ perform_or_defer_access_check (tree binfo, tree decl, tree diag_decl,
gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
- ptr = &VEC_last (deferred_access, deferred_access_stack);
+ ptr = &deferred_access_stack->last ();
/* If we are not supposed to defer access checks, just check now. */
if (ptr->deferring_access_checks_kind == dk_no_deferred)
@@ -327,8 +319,7 @@ perform_or_defer_access_check (tree binfo, tree decl, tree diag_decl,
}
/* See if we are already going to perform this check. */
- FOR_EACH_VEC_ELT (deferred_access_check,
- ptr->deferred_access_checks, i, chk)
+ FOR_EACH_VEC_SAFE_ELT (ptr->deferred_access_checks, i, chk)
{
if (chk->decl == decl && chk->binfo == binfo &&
chk->diag_decl == diag_decl)
@@ -338,8 +329,7 @@ perform_or_defer_access_check (tree binfo, tree decl, tree diag_decl,
}
/* If not, record the check. */
deferred_access_check new_access = {binfo, decl, diag_decl, input_location};
- VEC_safe_push (deferred_access_check, gc, ptr->deferred_access_checks,
- new_access);
+ vec_safe_push (ptr->deferred_access_checks, new_access);
return true;
}
@@ -375,7 +365,7 @@ add_stmt (tree t)
/* Add T to the statement-tree. Non-side-effect statements need to be
recorded during statement expressions. */
- gcc_checking_assert (!VEC_empty (tree, stmt_list_stack));
+ gcc_checking_assert (!stmt_list_stack->is_empty ());
append_to_statement_list_force (t, &cur_stmt_list);
return t;
@@ -1972,7 +1962,7 @@ empty_expr_stmt_p (tree expr_stmt)
Returns the functions to be considered by overload resolution. */
tree
-perform_koenig_lookup (tree fn, VEC(tree,gc) *args, bool include_std,
+perform_koenig_lookup (tree fn, vec<tree, va_gc> *args, bool include_std,
tsubst_flags_t complain)
{
tree identifier = NULL_TREE;
@@ -2038,12 +2028,12 @@ perform_koenig_lookup (tree fn, VEC(tree,gc) *args, bool include_std,
Returns code for the call. */
tree
-finish_call_expr (tree fn, VEC(tree,gc) **args, bool disallow_virtual,
+finish_call_expr (tree fn, vec<tree, va_gc> **args, bool disallow_virtual,
bool koenig_p, tsubst_flags_t complain)
{
tree result;
tree orig_fn;
- VEC(tree,gc) *orig_args = NULL;
+ vec<tree, va_gc> *orig_args = NULL;
if (fn == error_mark_node)
return error_mark_node;
@@ -2172,7 +2162,7 @@ finish_call_expr (tree fn, VEC(tree,gc) **args, bool disallow_virtual,
if (!result)
{
if (warn_sizeof_pointer_memaccess
- && !VEC_empty(tree, *args)
+ && !vec_safe_is_empty (*args)
&& !processing_template_decl)
{
location_t sizeof_arg_loc[3];
@@ -2184,9 +2174,9 @@ finish_call_expr (tree fn, VEC(tree,gc) **args, bool disallow_virtual,
sizeof_arg_loc[i] = UNKNOWN_LOCATION;
sizeof_arg[i] = NULL_TREE;
- if (i >= VEC_length (tree, *args))
+ if (i >= (*args)->length ())
continue;
- t = VEC_index (tree, *args, i);
+ t = (**args)[i];
if (TREE_CODE (t) != SIZEOF_EXPR)
continue;
if (SIZEOF_EXPR_TYPE_P (t))
@@ -2206,7 +2196,7 @@ finish_call_expr (tree fn, VEC(tree,gc) **args, bool disallow_virtual,
}
else if (TREE_CODE (fn) == PSEUDO_DTOR_EXPR)
{
- if (!VEC_empty (tree, *args))
+ if (!vec_safe_is_empty (*args))
error ("arguments to destructor are not allowed");
/* Mark the pseudo-destructor call as having side-effects so
that we do not issue warnings about its use. */
@@ -3448,9 +3438,9 @@ finish_underlying_type (tree type)
tree
calculate_direct_bases (tree type)
{
- VEC(tree, gc) *vector = make_tree_vector();
+ vec<tree, va_gc> *vector = make_tree_vector();
tree bases_vec = NULL_TREE;
- VEC(tree, none) *base_binfos;
+ vec<tree, va_gc> *base_binfos;
tree binfo;
unsigned i;
@@ -3462,29 +3452,29 @@ calculate_direct_bases (tree type)
base_binfos = BINFO_BASE_BINFOS (TYPE_BINFO (type));
/* Virtual bases are initialized first */
- for (i = 0; VEC_iterate (tree, base_binfos, i, binfo); i++)
+ for (i = 0; base_binfos->iterate (i, &binfo); i++)
{
if (BINFO_VIRTUAL_P (binfo))
{
- VEC_safe_push (tree, gc, vector, binfo);
+ vec_safe_push (vector, binfo);
}
}
/* Now non-virtuals */
- for (i = 0; VEC_iterate (tree, base_binfos, i, binfo); i++)
+ for (i = 0; base_binfos->iterate (i, &binfo); i++)
{
if (!BINFO_VIRTUAL_P (binfo))
{
- VEC_safe_push (tree, gc, vector, binfo);
+ vec_safe_push (vector, binfo);
}
}
- bases_vec = make_tree_vec (VEC_length (tree, vector));
+ bases_vec = make_tree_vec (vector->length ());
- for (i = 0; i < VEC_length (tree, vector); ++i)
+ for (i = 0; i < vector->length (); ++i)
{
- TREE_VEC_ELT (bases_vec, i) = BINFO_TYPE (VEC_index (tree, vector, i));
+ TREE_VEC_ELT (bases_vec, i) = BINFO_TYPE ((*vector)[i]);
}
return bases_vec;
}
@@ -3505,19 +3495,19 @@ dfs_calculate_bases_pre (tree binfo, void * /*data_*/)
static tree
dfs_calculate_bases_post (tree binfo, void *data_)
{
- VEC(tree, gc) **data = (VEC(tree, gc) **) data_;
+ vec<tree, va_gc> **data = ((vec<tree, va_gc> **) data_);
if (!BINFO_VIRTUAL_P (binfo))
{
- VEC_safe_push (tree, gc, *data, BINFO_TYPE (binfo));
+ vec_safe_push (*data, BINFO_TYPE (binfo));
}
return NULL_TREE;
}
/* Calculates the morally non-virtual base classes of a class */
-static VEC(tree, gc) *
+static vec<tree, va_gc> *
calculate_bases_helper (tree type)
{
- VEC(tree, gc) *vector = make_tree_vector();
+ vec<tree, va_gc> *vector = make_tree_vector();
/* Now add non-virtual base classes in order of construction */
dfs_walk_all (TYPE_BINFO (type),
@@ -3528,11 +3518,11 @@ calculate_bases_helper (tree type)
tree
calculate_bases (tree type)
{
- VEC(tree, gc) *vector = make_tree_vector();
+ vec<tree, va_gc> *vector = make_tree_vector();
tree bases_vec = NULL_TREE;
unsigned i;
- VEC(tree, gc) *vbases;
- VEC(tree, gc) *nonvbases;
+ vec<tree, va_gc> *vbases;
+ vec<tree, va_gc> *nonvbases;
tree binfo;
complete_type (type);
@@ -3542,24 +3532,25 @@ calculate_bases (tree type)
/* First go through virtual base classes */
for (vbases = CLASSTYPE_VBASECLASSES (type), i = 0;
- VEC_iterate (tree, vbases, i, binfo); i++)
+ vec_safe_iterate (vbases, i, &binfo); i++)
{
- VEC(tree, gc) *vbase_bases = calculate_bases_helper (BINFO_TYPE (binfo));
- VEC_safe_splice (tree, gc, vector, vbase_bases);
+ vec<tree, va_gc> *vbase_bases;
+ vbase_bases = calculate_bases_helper (BINFO_TYPE (binfo));
+ vec_safe_splice (vector, vbase_bases);
release_tree_vector (vbase_bases);
}
/* Now for the non-virtual bases */
nonvbases = calculate_bases_helper (type);
- VEC_safe_splice (tree, gc, vector, nonvbases);
+ vec_safe_splice (vector, nonvbases);
release_tree_vector (nonvbases);
/* Last element is entire class, so don't copy */
- bases_vec = make_tree_vec (VEC_length (tree, vector) - 1);
+ bases_vec = make_tree_vec (vector->length () - 1);
- for (i = 0; i < VEC_length (tree, vector) - 1; ++i)
+ for (i = 0; i < vector->length () - 1; ++i)
{
- TREE_VEC_ELT (bases_vec, i) = VEC_index (tree, vector, i);
+ TREE_VEC_ELT (bases_vec, i) = (*vector)[i];
}
release_tree_vector (vector);
return bases_vec;
@@ -5010,7 +5001,7 @@ void
finish_omp_barrier (void)
{
tree fn = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
- VEC(tree,gc) *vec = make_tree_vector ();
+ vec<tree, va_gc> *vec = make_tree_vector ();
tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
release_tree_vector (vec);
finish_expr_stmt (stmt);
@@ -5020,7 +5011,7 @@ void
finish_omp_flush (void)
{
tree fn = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
- VEC(tree,gc) *vec = make_tree_vector ();
+ vec<tree, va_gc> *vec = make_tree_vector ();
tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
release_tree_vector (vec);
finish_expr_stmt (stmt);
@@ -5030,7 +5021,7 @@ void
finish_omp_taskwait (void)
{
tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
- VEC(tree,gc) *vec = make_tree_vector ();
+ vec<tree, va_gc> *vec = make_tree_vector ();
tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
release_tree_vector (vec);
finish_expr_stmt (stmt);
@@ -5040,7 +5031,7 @@ void
finish_omp_taskyield (void)
{
tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
- VEC(tree,gc) *vec = make_tree_vector ();
+ vec<tree, va_gc> *vec = make_tree_vector ();
tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
release_tree_vector (vec);
finish_expr_stmt (stmt);
@@ -5380,7 +5371,7 @@ classtype_has_nothrow_assign_or_copy_p (tree type, bool assign_p)
ix = lookup_fnfields_1 (type, ansi_assopname (NOP_EXPR));
if (ix < 0)
return false;
- fns = VEC_index (tree, CLASSTYPE_METHOD_VEC (type), ix);
+ fns = (*CLASSTYPE_METHOD_VEC (type))[ix];
}
else if (TYPE_HAS_COPY_CTOR (type))
{
@@ -5806,7 +5797,7 @@ is_valid_constexpr_fn (tree fun, bool complain)
to the existing initialization pair INITS. */
static bool
-build_data_member_initialization (tree t, VEC(constructor_elt,gc) **vec)
+build_data_member_initialization (tree t, vec<constructor_elt, va_gc> **vec)
{
tree member, init;
if (TREE_CODE (t) == CLEANUP_POINT_EXPR)
@@ -5933,13 +5924,13 @@ check_constexpr_ctor_body (tree last, tree list)
return ok;
}
-/* VEC is a vector of constructor elements built up for the base and member
+/* V is a vector of constructor elements built up for the base and member
initializers of a constructor for TYPE. They need to be in increasing
offset order, which they might not be yet if TYPE has a primary base
which is not first in the base-clause. */
-static VEC(constructor_elt,gc) *
-sort_constexpr_mem_initializers (tree type, VEC(constructor_elt,gc) *vec)
+static vec<constructor_elt, va_gc> *
+sort_constexpr_mem_initializers (tree type, vec<constructor_elt, va_gc> *v)
{
tree pri = CLASSTYPE_PRIMARY_BINFO (type);
constructor_elt elt;
@@ -5947,21 +5938,21 @@ sort_constexpr_mem_initializers (tree type, VEC(constructor_elt,gc) *vec)
if (pri == NULL_TREE
|| pri == BINFO_BASE_BINFO (TYPE_BINFO (type), 0))
- return vec;
+ return v;
/* Find the element for the primary base and move it to the beginning of
the vec. */
- VEC(constructor_elt,gc) &v = *vec;
+ vec<constructor_elt, va_gc> &vref = *v;
pri = BINFO_TYPE (pri);
for (i = 1; ; ++i)
- if (TREE_TYPE (v[i].index) == pri)
+ if (TREE_TYPE (vref[i].index) == pri)
break;
- elt = v[i];
+ elt = vref[i];
for (; i > 0; --i)
- v[i] = v[i-1];
- v[0] = elt;
- return vec;
+ vref[i] = vref[i-1];
+ vref[0] = elt;
+ return v;
}
/* Build compile-time evalable representations of member-initializer list
@@ -5970,7 +5961,7 @@ sort_constexpr_mem_initializers (tree type, VEC(constructor_elt,gc) *vec)
static tree
build_constexpr_constructor_member_initializers (tree type, tree body)
{
- VEC(constructor_elt,gc) *vec = NULL;
+ vec<constructor_elt, va_gc> *vec = NULL;
bool ok = true;
if (TREE_CODE (body) == MUST_NOT_THROW_EXPR
|| TREE_CODE (body) == EH_SPEC_BLOCK)
@@ -6015,14 +6006,14 @@ build_constexpr_constructor_member_initializers (tree type, tree body)
gcc_assert (errorcount > 0);
if (ok)
{
- if (VEC_length (constructor_elt, vec) > 0)
+ if (vec_safe_length (vec) > 0)
{
/* In a delegating constructor, return the target. */
- constructor_elt *ce = &VEC_index (constructor_elt, vec, 0);
+ constructor_elt *ce = &(*vec)[0];
if (ce->index == current_class_ptr)
{
body = ce->value;
- VEC_free (constructor_elt, gc, vec);
+ vec_free (vec);
return body;
}
}
@@ -6519,7 +6510,7 @@ cxx_bind_parameters_in_call (const constexpr_call *old_call, tree t,
These do not need to be marked for PCH or GC. */
/* FIXME remember and print actual constant arguments. */
-static VEC(tree,heap) *call_stack = NULL;
+static vec<tree> call_stack = vec<tree>();
static int call_stack_tick;
static int last_cx_error_tick;
@@ -6529,8 +6520,8 @@ push_cx_call_context (tree call)
++call_stack_tick;
if (!EXPR_HAS_LOCATION (call))
SET_EXPR_LOCATION (call, input_location);
- VEC_safe_push (tree, heap, call_stack, call);
- if (VEC_length (tree, call_stack) > (unsigned) max_constexpr_depth)
+ call_stack.safe_push (call);
+ if (call_stack.length () > (unsigned) max_constexpr_depth)
return false;
return true;
}
@@ -6539,15 +6530,15 @@ static void
pop_cx_call_context (void)
{
++call_stack_tick;
- VEC_pop (tree, call_stack);
+ call_stack.pop ();
}
-VEC(tree,heap) *
+vec<tree>
cx_error_context (void)
{
- VEC(tree,heap) *r = NULL;
+ vec<tree> r = vec<tree>();
if (call_stack_tick != last_cx_error_tick
- && !VEC_empty (tree, call_stack))
+ && !call_stack.is_empty ())
r = call_stack;
last_cx_error_tick = call_stack_tick;
return r;
@@ -6893,7 +6884,7 @@ cxx_eval_array_reference (const constexpr_call *call, tree t,
}
i = tree_low_cst (index, 0);
if (TREE_CODE (ary) == CONSTRUCTOR)
- return VEC_index (constructor_elt, CONSTRUCTOR_ELTS (ary), i).value;
+ return (*CONSTRUCTOR_ELTS (ary))[i].value;
else if (elem_nchars == 1)
return build_int_cst (cv_unqualified (TREE_TYPE (TREE_TYPE (ary))),
TREE_STRING_POINTER (ary)[i]);
@@ -7076,7 +7067,7 @@ cxx_eval_logical_expression (const constexpr_call *call, tree t,
initialization of the field. */
static constructor_elt *
-base_field_constructor_elt (VEC(constructor_elt,gc) *v, tree ref)
+base_field_constructor_elt (vec<constructor_elt, va_gc> *v, tree ref)
{
tree aggr = TREE_OPERAND (ref, 0);
tree field = TREE_OPERAND (ref, 1);
@@ -7092,7 +7083,7 @@ base_field_constructor_elt (VEC(constructor_elt,gc) *v, tree ref)
v = CONSTRUCTOR_ELTS (base_ce->value);
}
- for (i = 0; VEC_iterate (constructor_elt, v, i, ce); ++i)
+ for (i = 0; vec_safe_iterate (v, i, &ce); ++i)
if (ce->index == field)
return ce;
@@ -7109,14 +7100,14 @@ cxx_eval_bare_aggregate (const constexpr_call *call, tree t,
bool allow_non_constant, bool addr,
bool *non_constant_p)
{
- VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (t);
- VEC(constructor_elt,gc) *n = VEC_alloc (constructor_elt, gc,
- VEC_length (constructor_elt, v));
+ vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (t);
+ vec<constructor_elt, va_gc> *n;
+ vec_alloc (n, vec_safe_length (v));
constructor_elt *ce;
HOST_WIDE_INT i;
bool changed = false;
gcc_assert (!BRACE_ENCLOSED_INITIALIZER_P (t));
- for (i = 0; VEC_iterate (constructor_elt, v, i, ce); ++i)
+ for (i = 0; vec_safe_iterate (v, i, &ce); ++i)
{
tree elt = cxx_eval_constant_expression (call, ce->value,
allow_non_constant, addr,
@@ -7146,7 +7137,7 @@ cxx_eval_bare_aggregate (const constexpr_call *call, tree t,
if (*non_constant_p || !changed)
{
fail:
- VEC_free (constructor_elt, gc, n);
+ vec_free (n);
return t;
}
t = build_constructor (TREE_TYPE (t), n);
@@ -7173,7 +7164,8 @@ cxx_eval_vec_init_1 (const constexpr_call *call, tree atype, tree init,
{
tree elttype = TREE_TYPE (atype);
int max = tree_low_cst (array_type_nelts (atype), 0);
- VEC(constructor_elt,gc) *n = VEC_alloc (constructor_elt, gc, max + 1);
+ vec<constructor_elt, va_gc> *n;
+ vec_alloc (n, max + 1);
bool pre_init = false;
int i;
@@ -7193,7 +7185,7 @@ cxx_eval_vec_init_1 (const constexpr_call *call, tree atype, tree init,
}
else if (!init)
{
- VEC(tree,gc) *argvec = make_tree_vector ();
+ vec<tree, va_gc> *argvec = make_tree_vector ();
init = build_special_member_call (NULL_TREE, complete_ctor_identifier,
&argvec, elttype, LOOKUP_NORMAL,
tf_warning_or_error);
@@ -7234,7 +7226,7 @@ cxx_eval_vec_init_1 (const constexpr_call *call, tree atype, tree init,
else
{
/* Copying an element. */
- VEC(tree,gc) *argvec;
+ vec<tree, va_gc> *argvec;
gcc_assert (same_type_ignoring_top_level_qualifiers_p
(atype, TREE_TYPE (init)));
eltinit = cp_build_array_ref (input_location, init, idx,
@@ -7242,7 +7234,7 @@ cxx_eval_vec_init_1 (const constexpr_call *call, tree atype, tree init,
if (!real_lvalue_p (init))
eltinit = move (eltinit);
argvec = make_tree_vector ();
- VEC_quick_push (tree, argvec, eltinit);
+ argvec->quick_push (eltinit);
eltinit = (build_special_member_call
(NULL_TREE, complete_ctor_identifier, &argvec,
elttype, LOOKUP_NORMAL, tf_warning_or_error));
@@ -7263,7 +7255,7 @@ cxx_eval_vec_init_1 (const constexpr_call *call, tree atype, tree init,
}
fail:
- VEC_free (constructor_elt, gc, n);
+ vec_free (n);
return init;
}
@@ -8517,9 +8509,9 @@ potential_constant_expression_1 (tree t, bool want_rval, tsubst_flags_t flags)
case CONSTRUCTOR:
{
- VEC(constructor_elt, gc) *v = CONSTRUCTOR_ELTS (t);
+ vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (t);
constructor_elt *ce;
- for (i = 0; VEC_iterate (constructor_elt, v, i, ce); ++i)
+ for (i = 0; vec_safe_iterate (v, i, &ce); ++i)
if (!potential_constant_expression_1 (ce->value, want_rval, flags))
return false;
return true;
@@ -8747,7 +8739,7 @@ build_lambda_object (tree lambda_expr)
/* Build aggregate constructor call.
- cp_parser_braced_list
- cp_parser_functional_cast */
- VEC(constructor_elt,gc) *elts = NULL;
+ vec<constructor_elt, va_gc> *elts = NULL;
tree node, expr, type;
location_t saved_loc;
@@ -9067,8 +9059,7 @@ insert_capture_proxy (tree var)
/* And put a DECL_EXPR in the STATEMENT_LIST for the same block. */
var = build_stmt (DECL_SOURCE_LOCATION (var), DECL_EXPR, var);
- stmt_list = VEC_index (tree, stmt_list_stack,
- VEC_length (tree, stmt_list_stack) - 1 - skip);
+ stmt_list = (*stmt_list_stack)[stmt_list_stack->length () - 1 - skip];
gcc_assert (stmt_list);
append_to_statement_list_force (var, &stmt_list);
}
@@ -9081,7 +9072,7 @@ void
insert_pending_capture_proxies (void)
{
tree lam;
- VEC(tree,gc) *proxies;
+ vec<tree, va_gc> *proxies;
unsigned i;
if (!current_function_decl || !LAMBDA_FUNCTION_P (current_function_decl))
@@ -9089,9 +9080,9 @@ insert_pending_capture_proxies (void)
lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
proxies = LAMBDA_EXPR_PENDING_PROXIES (lam);
- for (i = 0; i < VEC_length (tree, proxies); ++i)
+ for (i = 0; i < vec_safe_length (proxies); ++i)
{
- tree var = VEC_index (tree, proxies, i);
+ tree var = (*proxies)[i];
insert_capture_proxy (var);
}
release_tree_vector (LAMBDA_EXPR_PENDING_PROXIES (lam));
@@ -9158,7 +9149,7 @@ build_capture_proxy (tree member)
if (fn == current_function_decl)
insert_capture_proxy (var);
else
- VEC_safe_push (tree, gc, LAMBDA_EXPR_PENDING_PROXIES (lam), var);
+ vec_safe_push (LAMBDA_EXPR_PENDING_PROXIES (lam), var);
return var;
}
@@ -9405,7 +9396,7 @@ maybe_add_lambda_conv_op (tree type)
tree callop = lambda_function (type);
tree rettype, name, fntype, fn, body, compound_stmt;
tree thistype, stattype, statfn, convfn, call, arg;
- VEC (tree, gc) *argvec;
+ vec<tree, va_gc> *argvec;
if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
return;
@@ -9495,14 +9486,13 @@ maybe_add_lambda_conv_op (tree type)
arg = build1 (NOP_EXPR, TREE_TYPE (DECL_ARGUMENTS (callop)),
null_pointer_node);
argvec = make_tree_vector ();
- VEC_quick_push (tree, argvec, arg);
+ argvec->quick_push (arg);
for (arg = DECL_ARGUMENTS (statfn); arg; arg = DECL_CHAIN (arg))
{
mark_exp_read (arg);
- VEC_safe_push (tree, gc, argvec, arg);
+ vec_safe_push (argvec, arg);
}
- call = build_call_a (callop, VEC_length (tree, argvec),
- VEC_address (tree, argvec));
+ call = build_call_a (callop, argvec->length (), argvec->address ());
CALL_FROM_THUNK_P (call) = 1;
if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call)))
call = build_cplus_new (TREE_TYPE (call), call, tf_warning_or_error);
diff --git a/gcc/cp/tree.c b/gcc/cp/tree.c
index bc81daccb8e..58725f3bd58 100644
--- a/gcc/cp/tree.c
+++ b/gcc/cp/tree.c
@@ -508,7 +508,7 @@ static tree
build_vec_init_elt (tree type, tree init, tsubst_flags_t complain)
{
tree inner_type = strip_array_types (type);
- VEC(tree,gc) *argvec;
+ vec<tree, va_gc> *argvec;
if (integer_zerop (array_type_nelts_total (type))
|| !CLASS_TYPE_P (inner_type))
@@ -527,7 +527,7 @@ build_vec_init_elt (tree type, tree init, tsubst_flags_t complain)
tree dummy = build_dummy_object (inner_type);
if (!real_lvalue_p (init))
dummy = move (dummy);
- VEC_quick_push (tree, argvec, dummy);
+ argvec->quick_push (dummy);
}
init = build_special_member_call (NULL_TREE, complete_ctor_identifier,
&argvec, inner_type, LOOKUP_NORMAL,
@@ -1286,13 +1286,13 @@ strip_typedefs_expr (tree t)
case TREE_LIST:
{
- VEC(tree,gc) *vec = make_tree_vector ();
+ vec<tree, va_gc> *vec = make_tree_vector ();
bool changed = false;
tree it;
for (it = t; it; it = TREE_CHAIN (it))
{
tree val = strip_typedefs_expr (TREE_VALUE (t));
- VEC_safe_push (tree, gc, vec, val);
+ vec_safe_push (vec, val);
if (val != TREE_VALUE (t))
changed = true;
gcc_assert (TREE_PURPOSE (it) == NULL_TREE);
@@ -1300,7 +1300,7 @@ strip_typedefs_expr (tree t)
if (changed)
{
r = NULL_TREE;
- FOR_EACH_VEC_ELT_REVERSE (tree, vec, i, it)
+ FOR_EACH_VEC_ELT_REVERSE (*vec, i, it)
r = tree_cons (NULL_TREE, it, r);
}
else
@@ -1312,13 +1312,13 @@ strip_typedefs_expr (tree t)
case TREE_VEC:
{
bool changed = false;
- VEC(tree,gc)* vec = make_tree_vector ();
+ vec<tree, va_gc> *vec = make_tree_vector ();
n = TREE_VEC_LENGTH (t);
- VEC_reserve (tree, gc, vec, n);
+ vec_safe_reserve (vec, n);
for (i = 0; i < n; ++i)
{
tree op = strip_typedefs_expr (TREE_VEC_ELT (t, i));
- VEC_quick_push (tree, vec, op);
+ vec->quick_push (op);
if (op != TREE_VEC_ELT (t, i))
changed = true;
}
@@ -1326,7 +1326,7 @@ strip_typedefs_expr (tree t)
{
r = copy_node (t);
for (i = 0; i < n; ++i)
- TREE_VEC_ELT (r, i) = VEC_index (tree, vec, i);
+ TREE_VEC_ELT (r, i) = (*vec)[i];
}
else
r = t;
@@ -1337,13 +1337,13 @@ strip_typedefs_expr (tree t)
case CONSTRUCTOR:
{
bool changed = false;
- VEC(constructor_elt,gc) *vec
- = VEC_copy (constructor_elt, gc, CONSTRUCTOR_ELTS (t));
+ vec<constructor_elt, va_gc> *vec
+ = vec_safe_copy (CONSTRUCTOR_ELTS (t));
n = CONSTRUCTOR_NELTS (t);
type = strip_typedefs (TREE_TYPE (t));
for (i = 0; i < n; ++i)
{
- constructor_elt *e = &VEC_index (constructor_elt, vec, i);
+ constructor_elt *e = &(*vec)[i];
tree op = strip_typedefs_expr (e->value);
if (op != e->value)
{
@@ -1355,7 +1355,7 @@ strip_typedefs_expr (tree t)
if (!changed && type == TREE_TYPE (t))
{
- VEC_free (constructor_elt, gc, vec);
+ vec_free (vec);
return t;
}
else
@@ -1484,7 +1484,7 @@ copy_binfo (tree binfo, tree type, tree t, tree *igo_prev, int virt)
{
/* Push it onto the list after any virtual bases it contains
will have been pushed. */
- VEC_quick_push (tree, CLASSTYPE_VBASECLASSES (t), new_binfo);
+ CLASSTYPE_VBASECLASSES (t)->quick_push (new_binfo);
BINFO_VIRTUAL_P (new_binfo) = 1;
BINFO_INHERITANCE_CHAIN (new_binfo) = TYPE_BINFO (t);
}
@@ -2184,9 +2184,9 @@ bot_replace (tree* t, int* /*walk_subtrees*/, void* data)
/* In an NSDMI build_base_path defers building conversions to virtual
bases, and we handle it here. */
tree basetype = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (*t)));
- VEC(tree,gc) *vbases = CLASSTYPE_VBASECLASSES (current_class_type);
+ vec<tree, va_gc> *vbases = CLASSTYPE_VBASECLASSES (current_class_type);
int i; tree binfo;
- FOR_EACH_VEC_ELT (tree, vbases, i, binfo)
+ FOR_EACH_VEC_SAFE_ELT (vbases, i, binfo)
if (BINFO_TYPE (binfo) == basetype)
break;
*t = build_base_path (PLUS_EXPR, TREE_OPERAND (*t, 0), binfo, true,
@@ -2327,7 +2327,7 @@ build_min_non_dep (enum tree_code code, tree non_dep, ...)
that has been built. */
tree
-build_min_non_dep_call_vec (tree non_dep, tree fn, VEC(tree,gc) *argvec)
+build_min_non_dep_call_vec (tree non_dep, tree fn, vec<tree, va_gc> *argvec)
{
tree t = build_nt_call_vec (fn, argvec);
if (REFERENCE_REF_P (non_dep))
@@ -3742,8 +3742,8 @@ stabilize_init (tree init, tree *initp)
unsigned i;
constructor_elt *ce;
bool good = true;
- VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (t);
- for (i = 0; VEC_iterate (constructor_elt, v, i, ce); ++i)
+ vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (t);
+ for (i = 0; vec_safe_iterate (v, i, &ce); ++i)
{
tree type = TREE_TYPE (ce->value);
tree subinit;
@@ -3827,7 +3827,7 @@ cp_fix_function_decl_p (tree decl)
/* Don't fix same_body aliases. Although they don't have their own
CFG, they share it with what they alias to. */
if (!node || !node->alias
- || !VEC_length (ipa_ref_t, node->symbol.ref_list.references))
+ || !vec_safe_length (node->symbol.ref_list.references))
return true;
}
diff --git a/gcc/cp/typeck.c b/gcc/cp/typeck.c
index 5d8c27d534c..1cbab617167 100644
--- a/gcc/cp/typeck.c
+++ b/gcc/cp/typeck.c
@@ -59,7 +59,7 @@ static bool casts_away_constness (tree, tree, tsubst_flags_t);
static void maybe_warn_about_returning_address_of_local (tree);
static tree lookup_destructor (tree, tree, tree);
static void warn_args_num (location_t, tree, bool);
-static int convert_arguments (tree, VEC(tree,gc) **, tree, int,
+static int convert_arguments (tree, vec<tree, va_gc> **, tree, int,
tsubst_flags_t);
/* Do `exp = require_complete_type (exp);' to make sure exp
@@ -3270,10 +3270,10 @@ build_function_call (location_t /*loc*/,
/* Used by the C-common bits. */
tree
build_function_call_vec (location_t /*loc*/,
- tree function, VEC(tree,gc) *params,
- VEC(tree,gc) * /*origtypes*/)
+ tree function, vec<tree, va_gc> *params,
+ vec<tree, va_gc> * /*origtypes*/)
{
- VEC(tree,gc) *orig_params = params;
+ vec<tree, va_gc> *orig_params = params;
tree ret = cp_build_function_call_vec (function, &params,
tf_warning_or_error);
@@ -3290,12 +3290,12 @@ build_function_call_vec (location_t /*loc*/,
tree
cp_build_function_call (tree function, tree params, tsubst_flags_t complain)
{
- VEC(tree,gc) *vec;
+ vec<tree, va_gc> *vec;
tree ret;
vec = make_tree_vector ();
for (; params != NULL_TREE; params = TREE_CHAIN (params))
- VEC_safe_push (tree, gc, vec, TREE_VALUE (params));
+ vec_safe_push (vec, TREE_VALUE (params));
ret = cp_build_function_call_vec (function, &vec, complain);
release_tree_vector (vec);
return ret;
@@ -3306,14 +3306,14 @@ cp_build_function_call (tree function, tree params, tsubst_flags_t complain)
tree
cp_build_function_call_nary (tree function, tsubst_flags_t complain, ...)
{
- VEC(tree,gc) *vec;
+ vec<tree, va_gc> *vec;
va_list args;
tree ret, t;
vec = make_tree_vector ();
va_start (args, complain);
for (t = va_arg (args, tree); t != NULL_TREE; t = va_arg (args, tree))
- VEC_safe_push (tree, gc, vec, t);
+ vec_safe_push (vec, t);
va_end (args);
ret = cp_build_function_call_vec (function, &vec, complain);
release_tree_vector (vec);
@@ -3325,7 +3325,7 @@ cp_build_function_call_nary (tree function, tsubst_flags_t complain, ...)
PARAMS. */
tree
-cp_build_function_call_vec (tree function, VEC(tree,gc) **params,
+cp_build_function_call_vec (tree function, vec<tree, va_gc> **params,
tsubst_flags_t complain)
{
tree fntype, fndecl;
@@ -3334,14 +3334,13 @@ cp_build_function_call_vec (tree function, VEC(tree,gc) **params,
int nargs;
tree *argarray;
tree parm_types;
- VEC(tree,gc) *allocated = NULL;
+ vec<tree, va_gc> *allocated = NULL;
tree ret;
/* For Objective-C, convert any calls via a cast to OBJC_TYPE_REF
expressions, like those used for ObjC messenger dispatches. */
- if (params != NULL && !VEC_empty (tree, *params))
- function = objc_rewrite_function_call (function,
- VEC_index (tree, *params, 0));
+ if (params != NULL && !vec_safe_is_empty (*params))
+ function = objc_rewrite_function_call (function, (**params)[0]);
/* build_c_cast puts on a NOP_EXPR to make the result not an lvalue.
Strip such NOP_EXPRs, since FUNCTION is used in non-lvalue context. */
@@ -3421,7 +3420,7 @@ cp_build_function_call_vec (tree function, VEC(tree,gc) **params,
if (nargs < 0)
return error_mark_node;
- argarray = VEC_address (tree, *params);
+ argarray = (*params)->address ();
/* Check for errors in format strings and inappropriately
null parameters. */
@@ -3499,7 +3498,7 @@ warn_args_num (location_t loc, tree fndecl, bool too_many_p)
default arguments, if such were specified. Do so here. */
static int
-convert_arguments (tree typelist, VEC(tree,gc) **values, tree fndecl,
+convert_arguments (tree typelist, vec<tree, va_gc> **values, tree fndecl,
int flags, tsubst_flags_t complain)
{
tree typetail;
@@ -3509,11 +3508,11 @@ convert_arguments (tree typelist, VEC(tree,gc) **values, tree fndecl,
flags |= LOOKUP_ONLYCONVERTING;
for (i = 0, typetail = typelist;
- i < VEC_length (tree, *values);
+ i < vec_safe_length (*values);
i++)
{
tree type = typetail ? TREE_VALUE (typetail) : 0;
- tree val = VEC_index (tree, *values, i);
+ tree val = (**values)[i];
if (val == error_mark_node || type == error_mark_node)
return -1;
@@ -3575,7 +3574,7 @@ convert_arguments (tree typelist, VEC(tree,gc) **values, tree fndecl,
if (parmval == error_mark_node)
return -1;
- VEC_replace (tree, *values, i, parmval);
+ (**values)[i] = parmval;
}
else
{
@@ -3588,7 +3587,7 @@ convert_arguments (tree typelist, VEC(tree,gc) **values, tree fndecl,
else
val = convert_arg_to_ellipsis (val, complain);
- VEC_replace (tree, *values, i, val);
+ (**values)[i] = val;
}
if (typetail)
@@ -3617,7 +3616,7 @@ convert_arguments (tree typelist, VEC(tree,gc) **values, tree fndecl,
if (parmval == error_mark_node)
return -1;
- VEC_safe_push (tree, gc, *values, parmval);
+ vec_safe_push (*values, parmval);
typetail = TREE_CHAIN (typetail);
/* ends with `...'. */
if (typetail == NULL_TREE)
@@ -5880,13 +5879,13 @@ build_x_compound_expr_from_list (tree list, expr_list_kind exp,
/* Like build_x_compound_expr_from_list, but using a VEC. */
tree
-build_x_compound_expr_from_vec (VEC(tree,gc) *vec, const char *msg,
+build_x_compound_expr_from_vec (vec<tree, va_gc> *vec, const char *msg,
tsubst_flags_t complain)
{
- if (VEC_empty (tree, vec))
+ if (vec_safe_is_empty (vec))
return NULL_TREE;
- else if (VEC_length (tree, vec) == 1)
- return VEC_index (tree, vec, 0);
+ else if (vec->length () == 1)
+ return (*vec)[0];
else
{
tree expr;
@@ -5903,8 +5902,8 @@ build_x_compound_expr_from_vec (VEC(tree,gc) *vec, const char *msg,
return error_mark_node;
}
- expr = VEC_index (tree, vec, 0);
- for (ix = 1; VEC_iterate (tree, vec, ix, t); ++ix)
+ expr = (*vec)[0];
+ for (ix = 1; vec->iterate (ix, &t); ++ix)
expr = build_x_compound_expr (EXPR_LOCATION (t), expr,
t, complain);
@@ -7090,7 +7089,7 @@ cp_build_modify_expr (tree lhs, enum tree_code modifycode, tree rhs,
/* Do the default thing. */;
else
{
- VEC(tree,gc) *rhs_vec = make_tree_vector_single (rhs);
+ vec<tree, va_gc> *rhs_vec = make_tree_vector_single (rhs);
result = build_special_member_call (lhs, complete_ctor_identifier,
&rhs_vec, lhstype, LOOKUP_NORMAL,
complain);
@@ -7457,7 +7456,7 @@ build_ptrmemfunc1 (tree type, tree delta, tree pfn)
tree u = NULL_TREE;
tree delta_field;
tree pfn_field;
- VEC(constructor_elt, gc) *v;
+ vec<constructor_elt, va_gc> *v;
/* Pull the FIELD_DECLs out of the type. */
pfn_field = TYPE_FIELDS (type);
@@ -7470,7 +7469,7 @@ build_ptrmemfunc1 (tree type, tree delta, tree pfn)
pfn = fold_convert (TREE_TYPE (pfn_field), pfn);
/* Finish creating the initializer. */
- v = VEC_alloc(constructor_elt, gc, 2);
+ vec_alloc (v, 2);
CONSTRUCTOR_APPEND_ELT(v, pfn_field, pfn);
CONSTRUCTOR_APPEND_ELT(v, delta_field, delta);
u = build_constructor (type, v);
diff --git a/gcc/cp/typeck2.c b/gcc/cp/typeck2.c
index 3478886dea1..c7262f427cd 100644
--- a/gcc/cp/typeck2.c
+++ b/gcc/cp/typeck2.c
@@ -253,7 +253,7 @@ complete_type_check_abstract (tree type)
int
abstract_virtuals_error_sfinae (tree decl, tree type, tsubst_flags_t complain)
{
- VEC(tree,gc) *pure;
+ vec<tree, va_gc> *pure;
/* This function applies only to classes. Any other entity can never
be abstract. */
@@ -331,7 +331,7 @@ abstract_virtuals_error_sfinae (tree decl, tree type, tsubst_flags_t complain)
error ("cannot allocate an object of abstract type %qT", type);
/* Only go through this once. */
- if (VEC_length (tree, pure))
+ if (pure->length ())
{
unsigned ix;
tree fn;
@@ -340,7 +340,7 @@ abstract_virtuals_error_sfinae (tree decl, tree type, tsubst_flags_t complain)
" because the following virtual functions are pure within %qT:",
type);
- FOR_EACH_VEC_ELT (tree, pure, ix, fn)
+ FOR_EACH_VEC_ELT (*pure, ix, fn)
if (! DECL_CLONED_FUNCTION_P (fn)
|| DECL_COMPLETE_DESTRUCTOR_P (fn))
inform (input_location, "\t%+#D", fn);
@@ -348,7 +348,7 @@ abstract_virtuals_error_sfinae (tree decl, tree type, tsubst_flags_t complain)
/* Now truncate the vector. This leaves it non-null, so we know
there are pure virtuals, but empty so we don't list them out
again. */
- VEC_truncate (tree, pure, 0);
+ pure->truncate (0);
}
else
inform (DECL_SOURCE_LOCATION (TYPE_MAIN_DECL (type)),
@@ -566,8 +566,7 @@ split_nonconstant_init_1 (tree dest, tree init)
split_non_constant_init into process_init_constructor_array,
that is separating constants from non-constants while building
the vector. */
- VEC_ordered_remove (constructor_elt, CONSTRUCTOR_ELTS (init),
- idx);
+ CONSTRUCTOR_ELTS (init)->ordered_remove (idx);
--idx;
if (array_type_p)
@@ -661,7 +660,7 @@ split_nonconstant_init (tree dest, tree init)
for static variable. In that case, caller must emit the code. */
tree
-store_init_value (tree decl, tree init, VEC(tree,gc)** cleanups, int flags)
+store_init_value (tree decl, tree init, vec<tree, va_gc>** cleanups, int flags)
{
tree value, type;
@@ -1047,7 +1046,7 @@ process_init_constructor_array (tree type, tree init,
int flags = 0;
bool unbounded = false;
constructor_elt *ce;
- VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (init);
+ vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (init);
gcc_assert (TREE_CODE (type) == ARRAY_TYPE
|| TREE_CODE (type) == VECTOR_TYPE);
@@ -1070,7 +1069,7 @@ process_init_constructor_array (tree type, tree init,
len = TYPE_VECTOR_SUBPARTS (type);
/* There must not be more initializers than needed. */
- if (!unbounded && VEC_length (constructor_elt, v) > len)
+ if (!unbounded && vec_safe_length (v) > len)
{
if (complain & tf_error)
error ("too many initializers for %qT", type);
@@ -1078,7 +1077,7 @@ process_init_constructor_array (tree type, tree init,
return PICFLAG_ERRONEOUS;
}
- FOR_EACH_VEC_ELT (constructor_elt, v, i, ce)
+ FOR_EACH_VEC_SAFE_ELT (v, i, ce)
{
if (ce->index)
{
@@ -1142,7 +1141,7 @@ static int
process_init_constructor_record (tree type, tree init,
tsubst_flags_t complain)
{
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
int flags = 0;
tree field;
unsigned HOST_WIDE_INT idx = 0;
@@ -1176,10 +1175,9 @@ process_init_constructor_record (tree type, tree init,
if (DECL_BIT_FIELD_TYPE (field))
type = DECL_BIT_FIELD_TYPE (field);
- if (idx < VEC_length (constructor_elt, CONSTRUCTOR_ELTS (init)))
+ if (idx < vec_safe_length (CONSTRUCTOR_ELTS (init)))
{
- constructor_elt *ce = &VEC_index (constructor_elt,
- CONSTRUCTOR_ELTS (init), idx);
+ constructor_elt *ce = &(*CONSTRUCTOR_ELTS (init))[idx];
if (ce->index)
{
/* We can have either a FIELD_DECL or an IDENTIFIER_NODE. The
@@ -1269,7 +1267,7 @@ process_init_constructor_record (tree type, tree init,
CONSTRUCTOR_APPEND_ELT (v, field, next);
}
- if (idx < VEC_length (constructor_elt, CONSTRUCTOR_ELTS (init)))
+ if (idx < vec_safe_length (CONSTRUCTOR_ELTS (init)))
{
if (complain & tf_error)
error ("too many initializers for %qT", type);
@@ -1293,19 +1291,19 @@ process_init_constructor_union (tree type, tree init,
int len;
/* If the initializer was empty, use default zero initialization. */
- if (VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (init)))
+ if (vec_safe_is_empty (CONSTRUCTOR_ELTS (init)))
return 0;
- len = VEC_length (constructor_elt, CONSTRUCTOR_ELTS (init));
+ len = CONSTRUCTOR_ELTS (init)->length ();
if (len > 1)
{
if (!(complain & tf_error))
return PICFLAG_ERRONEOUS;
error ("too many initializers for %qT", type);
- VEC_block_remove (constructor_elt, CONSTRUCTOR_ELTS (init), 1, len-1);
+ CONSTRUCTOR_ELTS (init)->block_remove (1, len-1);
}
- ce = &VEC_index (constructor_elt, CONSTRUCTOR_ELTS (init), 0);
+ ce = &(*CONSTRUCTOR_ELTS (init))[0];
/* If this element specifies a field, initialize via that field. */
if (ce->index)
@@ -1476,7 +1474,7 @@ build_x_arrow (location_t loc, tree expr, tsubst_flags_t complain)
tree orig_expr = expr;
tree type = TREE_TYPE (expr);
tree last_rval = NULL_TREE;
- VEC(tree,gc) *types_memoized = NULL;
+ vec<tree, va_gc> *types_memoized = NULL;
if (type == error_mark_node)
return error_mark_node;
@@ -1511,7 +1509,7 @@ build_x_arrow (location_t loc, tree expr, tsubst_flags_t complain)
return error_mark_node;
}
- VEC_safe_push (tree, gc, types_memoized, TREE_TYPE (expr));
+ vec_safe_push (types_memoized, TREE_TYPE (expr));
last_rval = expr;
}
@@ -1665,7 +1663,7 @@ build_functional_cast (tree exp, tree parms, tsubst_flags_t complain)
/* The type to which we are casting. */
tree type;
- VEC(tree,gc) *parmvec;
+ vec<tree, va_gc> *parmvec;
if (exp == error_mark_node || parms == error_mark_node)
return error_mark_node;
@@ -1762,7 +1760,7 @@ build_functional_cast (tree exp, tree parms, tsubst_flags_t complain)
/* Call the constructor. */
parmvec = make_tree_vector ();
for (; parms != NULL_TREE; parms = TREE_CHAIN (parms))
- VEC_safe_push (tree, gc, parmvec, TREE_VALUE (parms));
+ vec_safe_push (parmvec, TREE_VALUE (parms));
exp = build_special_member_call (NULL_TREE, complete_ctor_identifier,
&parmvec, type, LOOKUP_NORMAL, complain);
release_tree_vector (parmvec);
diff --git a/gcc/cprop.c b/gcc/cprop.c
index 56a4f9285f3..94852508be4 100644
--- a/gcc/cprop.c
+++ b/gcc/cprop.c
@@ -65,8 +65,6 @@ struct occr
};
typedef struct occr *occr_t;
-DEF_VEC_P (occr_t);
-DEF_VEC_ALLOC_P (occr_t, heap);
/* Hash table entry for assignment expressions. */
diff --git a/gcc/data-streamer.h b/gcc/data-streamer.h
index 705713cd1bd..2a592927253 100644
--- a/gcc/data-streamer.h
+++ b/gcc/data-streamer.h
@@ -31,8 +31,6 @@ along with GCC; see the file COPYING3. If not see
static unsigned const BITS_PER_BITPACK_WORD = HOST_BITS_PER_WIDE_INT;
typedef unsigned HOST_WIDE_INT bitpack_word_t;
-DEF_VEC_I(bitpack_word_t);
-DEF_VEC_ALLOC_I(bitpack_word_t, heap);
struct bitpack_d
{
diff --git a/gcc/dbxout.c b/gcc/dbxout.c
index 5492c7011ba..1d6d71c8478 100644
--- a/gcc/dbxout.c
+++ b/gcc/dbxout.c
@@ -2184,7 +2184,7 @@ dbxout_type (tree type, int full)
{
int i;
tree child;
- VEC(tree,gc) *accesses = BINFO_BASE_ACCESSES (binfo);
+ vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
if (use_gnu_debug_info_extensions)
{
@@ -2197,8 +2197,7 @@ dbxout_type (tree type, int full)
}
for (i = 0; BINFO_BASE_ITERATE (binfo, i, child); i++)
{
- tree access = (accesses ? VEC_index (tree, accesses, i)
- : access_public_node);
+ tree access = (accesses ? (*accesses)[i] : access_public_node);
if (use_gnu_debug_info_extensions)
{
@@ -2541,7 +2540,7 @@ static int
output_used_types_helper (void **slot, void *data)
{
tree type = (tree) *slot;
- VEC(tree, heap) **types_p = (VEC(tree, heap) **) data;
+ vec<tree> *types_p = (vec<tree> *) data;
if ((TREE_CODE (type) == RECORD_TYPE
|| TREE_CODE (type) == UNION_TYPE
@@ -2550,10 +2549,10 @@ output_used_types_helper (void **slot, void *data)
&& TYPE_STUB_DECL (type)
&& DECL_P (TYPE_STUB_DECL (type))
&& ! DECL_IGNORED_P (TYPE_STUB_DECL (type)))
- VEC_quick_push (tree, *types_p, TYPE_STUB_DECL (type));
+ types_p->quick_push (TYPE_STUB_DECL (type));
else if (TYPE_NAME (type)
&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL)
- VEC_quick_push (tree, *types_p, TYPE_NAME (type));
+ types_p->quick_push (TYPE_NAME (type));
return 1;
}
@@ -2593,20 +2592,20 @@ output_used_types (void)
{
if (cfun && cfun->used_types_hash)
{
- VEC(tree, heap) *types;
+ vec<tree> types;
int i;
tree type;
- types = VEC_alloc (tree, heap, htab_elements (cfun->used_types_hash));
+ types.create (htab_elements (cfun->used_types_hash));
htab_traverse (cfun->used_types_hash, output_used_types_helper, &types);
/* Sort by UID to prevent dependence on hash table ordering. */
- VEC_qsort (tree, types, output_types_sort);
+ types.qsort (output_types_sort);
- FOR_EACH_VEC_ELT (tree, types, i, type)
+ FOR_EACH_VEC_ELT (types, i, type)
debug_queue_symbol (type);
- VEC_free (tree, heap, types);
+ types.release ();
}
}
diff --git a/gcc/dce.c b/gcc/dce.c
index 7e4addaef3a..c9ac8b219a9 100644
--- a/gcc/dce.c
+++ b/gcc/dce.c
@@ -52,7 +52,7 @@ static bool can_alter_cfg = false;
/* Instructions that have been marked but whose dependencies have not
yet been processed. */
-static VEC(rtx,heap) *worklist;
+static vec<rtx> worklist;
/* Bitmap of instructions marked as needed indexed by INSN_UID. */
static sbitmap marked;
@@ -182,7 +182,7 @@ mark_insn (rtx insn, bool fast)
if (!marked_insn_p (insn))
{
if (!fast)
- VEC_safe_push (rtx, heap, worklist, insn);
+ worklist.safe_push (insn);
bitmap_set_bit (marked, INSN_UID (insn));
if (dump_file)
fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn));
@@ -760,12 +760,12 @@ rest_of_handle_ud_dce (void)
prescan_insns_for_dce (false);
mark_artificial_uses ();
- while (VEC_length (rtx, worklist) > 0)
+ while (worklist.length () > 0)
{
- insn = VEC_pop (rtx, worklist);
+ insn = worklist.pop ();
mark_reg_dependencies (insn);
}
- VEC_free (rtx, heap, worklist);
+ worklist.release ();
if (MAY_HAVE_DEBUG_INSNS)
reset_unmarked_insns_debug_uses ();
diff --git a/gcc/df-core.c b/gcc/df-core.c
index 2bc3d7f9e23..94b10d3069e 100644
--- a/gcc/df-core.c
+++ b/gcc/df-core.c
@@ -991,12 +991,12 @@ df_worklist_dataflow_doublequeue (struct dataflow *dataflow,
bitmap worklist = BITMAP_ALLOC (&df_bitmap_obstack);
int age = 0;
bool changed;
- VEC(int, heap) *last_visit_age = NULL;
+ vec<int> last_visit_age = vec<int>();
int prev_age;
basic_block bb;
int i;
- VEC_safe_grow_cleared (int, heap, last_visit_age, n_blocks);
+ last_visit_age.safe_grow_cleared (n_blocks);
/* Double-queueing. Worklist is for the current iteration,
and pending is for the next. */
@@ -1018,7 +1018,7 @@ df_worklist_dataflow_doublequeue (struct dataflow *dataflow,
bitmap_clear_bit (pending, index);
bb_index = blocks_in_postorder[index];
bb = BASIC_BLOCK (bb_index);
- prev_age = VEC_index (int, last_visit_age, index);
+ prev_age = last_visit_age[index];
if (dir == DF_FORWARD)
changed = df_worklist_propagate_forward (dataflow, bb_index,
bbindex_to_postorder,
@@ -1029,7 +1029,7 @@ df_worklist_dataflow_doublequeue (struct dataflow *dataflow,
bbindex_to_postorder,
pending, considered,
prev_age);
- VEC_replace (int, last_visit_age, index, ++age);
+ last_visit_age[index] = ++age;
if (changed)
bb->aux = (void *)(ptrdiff_t)age;
}
@@ -1040,7 +1040,7 @@ df_worklist_dataflow_doublequeue (struct dataflow *dataflow,
BITMAP_FREE (worklist);
BITMAP_FREE (pending);
- VEC_free (int, heap, last_visit_age);
+ last_visit_age.release ();
/* Dump statistics. */
if (dump_file)
diff --git a/gcc/df-problems.c b/gcc/df-problems.c
index 452926128fc..89a6189325f 100644
--- a/gcc/df-problems.c
+++ b/gcc/df-problems.c
@@ -43,7 +43,6 @@ along with GCC; see the file COPYING3. If not see
#include "df.h"
#include "except.h"
#include "dce.h"
-#include "vecprim.h"
#include "valtrack.h"
#include "dumpfile.h"
diff --git a/gcc/df-scan.c b/gcc/df-scan.c
index 566f2375be5..93a06379b17 100644
--- a/gcc/df-scan.c
+++ b/gcc/df-scan.c
@@ -45,18 +45,9 @@ along with GCC; see the file COPYING3. If not see
#include "df.h"
#include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
-DEF_VEC_P(df_ref);
-DEF_VEC_ALLOC_P_STACK(df_ref);
-
-#define VEC_df_ref_stack_alloc(alloc) VEC_stack_alloc (df_ref, alloc)
typedef struct df_mw_hardreg *df_mw_hardreg_ptr;
-DEF_VEC_P(df_mw_hardreg_ptr);
-DEF_VEC_ALLOC_P_STACK(df_mw_hardreg_ptr);
-
-#define VEC_df_mw_hardreg_ptr_stack_alloc(alloc) \
- VEC_stack_alloc (df_mw_hardreg_ptr, alloc)
#ifndef HAVE_epilogue
#define HAVE_epilogue 0
@@ -96,10 +87,10 @@ static HARD_REG_SET elim_reg_set;
struct df_collection_rec
{
- VEC(df_ref,stack) *def_vec;
- VEC(df_ref,stack) *use_vec;
- VEC(df_ref,stack) *eq_use_vec;
- VEC(df_mw_hardreg_ptr,stack) *mw_vec;
+ vec<df_ref, va_stack> def_vec;
+ vec<df_ref, va_stack> use_vec;
+ vec<df_ref, va_stack> eq_use_vec;
+ vec<df_mw_hardreg_ptr, va_stack> mw_vec;
};
static df_ref df_null_ref_rec[1];
@@ -1181,19 +1172,19 @@ df_free_collection_rec (struct df_collection_rec *collection_rec)
df_ref ref;
struct df_mw_hardreg *mw;
- FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
+ FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
df_free_ref (ref);
- FOR_EACH_VEC_ELT (df_ref, collection_rec->use_vec, ix, ref)
+ FOR_EACH_VEC_ELT (collection_rec->use_vec, ix, ref)
df_free_ref (ref);
- FOR_EACH_VEC_ELT (df_ref, collection_rec->eq_use_vec, ix, ref)
+ FOR_EACH_VEC_ELT (collection_rec->eq_use_vec, ix, ref)
df_free_ref (ref);
- FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw)
+ FOR_EACH_VEC_ELT (collection_rec->mw_vec, ix, mw)
pool_free (problem_data->mw_reg_pool, mw);
- VEC_free (df_ref, stack, collection_rec->def_vec);
- VEC_free (df_ref, stack, collection_rec->use_vec);
- VEC_free (df_ref, stack, collection_rec->eq_use_vec);
- VEC_free (df_mw_hardreg_ptr, stack, collection_rec->mw_vec);
+ collection_rec->def_vec.release ();
+ collection_rec->use_vec.release ();
+ collection_rec->eq_use_vec.release ();
+ collection_rec->mw_vec.release ();
}
/* Rescan INSN. Return TRUE if the rescanning produced any changes. */
@@ -1245,10 +1236,10 @@ df_insn_rescan (rtx insn)
return false;
}
- collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
- collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
- collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
- collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
+ vec_stack_alloc (df_ref, collection_rec.def_vec, 128);
+ vec_stack_alloc (df_ref, collection_rec.use_vec, 32);
+ vec_stack_alloc (df_ref, collection_rec.eq_use_vec, 32);
+ vec_stack_alloc (df_mw_hardreg_ptr, collection_rec.mw_vec, 32);
bitmap_clear_bit (&df->insns_to_delete, uid);
bitmap_clear_bit (&df->insns_to_rescan, uid);
@@ -1287,10 +1278,10 @@ df_insn_rescan (rtx insn)
if (!DEBUG_INSN_P (insn))
df_set_bb_dirty (bb);
- VEC_free (df_ref, stack, collection_rec.def_vec);
- VEC_free (df_ref, stack, collection_rec.use_vec);
- VEC_free (df_ref, stack, collection_rec.eq_use_vec);
- VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
+ collection_rec.def_vec.release ();
+ collection_rec.use_vec.release ();
+ collection_rec.eq_use_vec.release ();
+ collection_rec.mw_vec.release ();
return true;
}
@@ -2198,8 +2189,8 @@ df_notes_rescan (rtx insn)
unsigned int mw_len;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
- collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
+ vec_stack_alloc (df_ref, collection_rec.eq_use_vec, 32);
+ vec_stack_alloc (df_mw_hardreg_ptr, collection_rec.mw_vec, 32);
num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
df_ref_chain_delete (insn_info->eq_uses);
@@ -2223,7 +2214,7 @@ df_notes_rescan (rtx insn)
/* Find some place to put any new mw_hardregs. */
df_canonize_collection_rec (&collection_rec);
- mw_len = VEC_length (df_mw_hardreg_ptr, collection_rec.mw_vec);
+ mw_len = collection_rec.mw_vec.length ();
if (mw_len)
{
unsigned int count = 0;
@@ -2246,7 +2237,7 @@ df_notes_rescan (rtx insn)
count + 1 + mw_len);
}
memcpy (&insn_info->mw_hardregs[count],
- VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
+ collection_rec.mw_vec.address (),
mw_len * sizeof (struct df_mw_hardreg *));
insn_info->mw_hardregs[count + mw_len] = NULL;
qsort (insn_info->mw_hardregs, count + mw_len,
@@ -2258,16 +2249,16 @@ df_notes_rescan (rtx insn)
insn_info->mw_hardregs
= XNEWVEC (struct df_mw_hardreg*, 1 + mw_len);
memcpy (insn_info->mw_hardregs,
- VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
+ collection_rec.mw_vec.address (),
mw_len * sizeof (struct df_mw_hardreg *));
insn_info->mw_hardregs[mw_len] = NULL;
}
}
/* Get rid of the mw_rec so that df_refs_add_to_chains will
ignore it. */
- VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
+ collection_rec.mw_vec.release ();
df_refs_add_to_chains (&collection_rec, bb, insn);
- VEC_free (df_ref, stack, collection_rec.eq_use_vec);
+ collection_rec.eq_use_vec.release ();
}
else
df_insn_rescan (insn);
@@ -2367,31 +2358,31 @@ df_ref_compare (const void *r1, const void *r2)
}
static void
-df_swap_refs (VEC(df_ref,stack) **ref_vec, int i, int j)
+df_swap_refs (vec<df_ref, va_stack> *ref_vec, int i, int j)
{
- df_ref tmp = VEC_index (df_ref, *ref_vec, i);
- VEC_replace (df_ref, *ref_vec, i, VEC_index (df_ref, *ref_vec, j));
- VEC_replace (df_ref, *ref_vec, j, tmp);
+ df_ref tmp = (*ref_vec)[i];
+ (*ref_vec)[i] = (*ref_vec)[j];
+ (*ref_vec)[j] = tmp;
}
/* Sort and compress a set of refs. */
static void
-df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
+df_sort_and_compress_refs (vec<df_ref, va_stack> *ref_vec)
{
unsigned int count;
unsigned int i;
unsigned int dist = 0;
- count = VEC_length (df_ref, *ref_vec);
+ count = ref_vec->length ();
/* If there are 1 or 0 elements, there is nothing to do. */
if (count < 2)
return;
else if (count == 2)
{
- df_ref r0 = VEC_index (df_ref, *ref_vec, 0);
- df_ref r1 = VEC_index (df_ref, *ref_vec, 1);
+ df_ref r0 = (*ref_vec)[0];
+ df_ref r1 = (*ref_vec)[1];
if (df_ref_compare (&r0, &r1) > 0)
df_swap_refs (ref_vec, 0, 1);
}
@@ -2399,8 +2390,8 @@ df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
{
for (i = 0; i < count - 1; i++)
{
- df_ref r0 = VEC_index (df_ref, *ref_vec, i);
- df_ref r1 = VEC_index (df_ref, *ref_vec, i + 1);
+ df_ref r0 = (*ref_vec)[i];
+ df_ref r1 = (*ref_vec)[i + 1];
if (df_ref_compare (&r0, &r1) >= 0)
break;
}
@@ -2413,27 +2404,26 @@ df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
of DF_REF_COMPARE. */
if (i == count - 1)
return;
- VEC_qsort (df_ref, *ref_vec, df_ref_compare);
+ ref_vec->qsort (df_ref_compare);
}
for (i=0; i<count-dist; i++)
{
/* Find the next ref that is not equal to the current ref. */
while (i + dist + 1 < count
- && df_ref_equal_p (VEC_index (df_ref, *ref_vec, i),
- VEC_index (df_ref, *ref_vec, i + dist + 1)))
+ && df_ref_equal_p ((*ref_vec)[i],
+ (*ref_vec)[i + dist + 1]))
{
- df_free_ref (VEC_index (df_ref, *ref_vec, i + dist + 1));
+ df_free_ref ((*ref_vec)[i + dist + 1]);
dist++;
}
/* Copy it down to the next position. */
if (dist && i + dist + 1 < count)
- VEC_replace (df_ref, *ref_vec, i + 1,
- VEC_index (df_ref, *ref_vec, i + dist + 1));
+ (*ref_vec)[i + 1] = (*ref_vec)[i + dist + 1];
}
count -= dist;
- VEC_truncate (df_ref, *ref_vec, count);
+ ref_vec->truncate (count);
}
@@ -2487,7 +2477,7 @@ df_mw_compare (const void *m1, const void *m2)
/* Sort and compress a set of refs. */
static void
-df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr,stack) **mw_vec)
+df_sort_and_compress_mws (vec<df_mw_hardreg_ptr, va_stack> *mw_vec)
{
unsigned int count;
struct df_scan_problem_data *problem_data
@@ -2495,45 +2485,40 @@ df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr,stack) **mw_vec)
unsigned int i;
unsigned int dist = 0;
- count = VEC_length (df_mw_hardreg_ptr, *mw_vec);
+ count = mw_vec->length ();
if (count < 2)
return;
else if (count == 2)
{
- struct df_mw_hardreg *m0 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 0);
- struct df_mw_hardreg *m1 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 1);
+ struct df_mw_hardreg *m0 = (*mw_vec)[0];
+ struct df_mw_hardreg *m1 = (*mw_vec)[1];
if (df_mw_compare (&m0, &m1) > 0)
{
- struct df_mw_hardreg *tmp = VEC_index (df_mw_hardreg_ptr,
- *mw_vec, 0);
- VEC_replace (df_mw_hardreg_ptr, *mw_vec, 0,
- VEC_index (df_mw_hardreg_ptr, *mw_vec, 1));
- VEC_replace (df_mw_hardreg_ptr, *mw_vec, 1, tmp);
+ struct df_mw_hardreg *tmp = (*mw_vec)[0];
+ (*mw_vec)[0] = (*mw_vec)[1];
+ (*mw_vec)[1] = tmp;
}
}
else
- VEC_qsort (df_mw_hardreg_ptr, *mw_vec, df_mw_compare);
+ mw_vec->qsort (df_mw_compare);
for (i=0; i<count-dist; i++)
{
/* Find the next ref that is not equal to the current ref. */
while (i + dist + 1 < count
- && df_mw_equal_p (VEC_index (df_mw_hardreg_ptr, *mw_vec, i),
- VEC_index (df_mw_hardreg_ptr, *mw_vec,
- i + dist + 1)))
+ && df_mw_equal_p ((*mw_vec)[i], (*mw_vec)[i + dist + 1]))
{
pool_free (problem_data->mw_reg_pool,
- VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
+ (*mw_vec)[i + dist + 1]);
dist++;
}
/* Copy it down to the next position. */
if (dist && i + dist + 1 < count)
- VEC_replace (df_mw_hardreg_ptr, *mw_vec, i + 1,
- VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
+ (*mw_vec)[i + 1] = (*mw_vec)[i + dist + 1];
}
count -= dist;
- VEC_truncate (df_mw_hardreg_ptr, *mw_vec, count);
+ mw_vec->truncate (count);
}
@@ -2603,14 +2588,14 @@ df_install_ref (df_ref this_ref,
static df_ref *
df_install_refs (basic_block bb,
- VEC(df_ref,stack)* old_vec,
+ vec<df_ref, va_stack> old_vec,
struct df_reg_info **reg_info,
struct df_ref_info *ref_info,
bool is_notes)
{
unsigned int count;
- count = VEC_length (df_ref, old_vec);
+ count = old_vec.length ();
if (count)
{
df_ref *new_vec = XNEWVEC (df_ref, count + 1);
@@ -2641,7 +2626,7 @@ df_install_refs (basic_block bb,
if (add_to_table && df->analyze_subset)
add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
- FOR_EACH_VEC_ELT (df_ref, old_vec, ix, this_ref)
+ FOR_EACH_VEC_ELT (old_vec, ix, this_ref)
{
new_vec[ix] = this_ref;
df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
@@ -2660,16 +2645,16 @@ df_install_refs (basic_block bb,
insn. */
static struct df_mw_hardreg **
-df_install_mws (VEC(df_mw_hardreg_ptr,stack) *old_vec)
+df_install_mws (vec<df_mw_hardreg_ptr, va_stack> old_vec)
{
unsigned int count;
- count = VEC_length (df_mw_hardreg_ptr, old_vec);
+ count = old_vec.length ();
if (count)
{
struct df_mw_hardreg **new_vec
= XNEWVEC (struct df_mw_hardreg*, count + 1);
- memcpy (new_vec, VEC_address (df_mw_hardreg_ptr, old_vec),
+ memcpy (new_vec, old_vec.address (),
sizeof (struct df_mw_hardreg*) * count);
new_vec[count] = NULL;
return new_vec;
@@ -2692,7 +2677,7 @@ df_refs_add_to_chains (struct df_collection_rec *collection_rec,
/* If there is a vector in the collection rec, add it to the
insn. A null rec is a signal that the caller will handle the
chain specially. */
- if (collection_rec->def_vec)
+ if (collection_rec->def_vec.exists ())
{
df_scan_free_ref_vec (insn_rec->defs);
insn_rec->defs
@@ -2700,7 +2685,7 @@ df_refs_add_to_chains (struct df_collection_rec *collection_rec,
df->def_regs,
&df->def_info, false);
}
- if (collection_rec->use_vec)
+ if (collection_rec->use_vec.exists ())
{
df_scan_free_ref_vec (insn_rec->uses);
insn_rec->uses
@@ -2708,7 +2693,7 @@ df_refs_add_to_chains (struct df_collection_rec *collection_rec,
df->use_regs,
&df->use_info, false);
}
- if (collection_rec->eq_use_vec)
+ if (collection_rec->eq_use_vec.exists ())
{
df_scan_free_ref_vec (insn_rec->eq_uses);
insn_rec->eq_uses
@@ -2716,7 +2701,7 @@ df_refs_add_to_chains (struct df_collection_rec *collection_rec,
df->eq_use_regs,
&df->use_info, true);
}
- if (collection_rec->mw_vec)
+ if (collection_rec->mw_vec.exists ())
{
df_scan_free_mws_vec (insn_rec->mw_hardregs);
insn_rec->mw_hardregs
@@ -2812,11 +2797,11 @@ df_ref_create_structure (enum df_ref_class cl,
if (collection_rec)
{
if (DF_REF_REG_DEF_P (this_ref))
- VEC_safe_push (df_ref, stack, collection_rec->def_vec, this_ref);
+ collection_rec->def_vec.safe_push (this_ref);
else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
- VEC_safe_push (df_ref, stack, collection_rec->eq_use_vec, this_ref);
+ collection_rec->eq_use_vec.safe_push (this_ref);
else
- VEC_safe_push (df_ref, stack, collection_rec->use_vec, this_ref);
+ collection_rec->use_vec.safe_push (this_ref);
}
else
df_install_ref_incremental (this_ref);
@@ -2879,8 +2864,7 @@ df_ref_record (enum df_ref_class cl,
hardreg->start_regno = regno;
hardreg->end_regno = endregno - 1;
hardreg->mw_order = df->ref_order++;
- VEC_safe_push (df_mw_hardreg_ptr, stack, collection_rec->mw_vec,
- hardreg);
+ collection_rec->mw_vec.safe_push (hardreg);
}
for (i = regno; i < endregno; i++)
@@ -3352,7 +3336,7 @@ df_get_conditional_uses (struct df_collection_rec *collection_rec)
unsigned int ix;
df_ref ref;
- FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
+ FOR_EACH_VEC_ELT (collection_rec->def_vec, ix, ref)
{
if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
{
@@ -3458,10 +3442,10 @@ df_insn_refs_collect (struct df_collection_rec *collection_rec,
bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
/* Clear out the collection record. */
- VEC_truncate (df_ref, collection_rec->def_vec, 0);
- VEC_truncate (df_ref, collection_rec->use_vec, 0);
- VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
- VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
+ collection_rec->def_vec.truncate (0);
+ collection_rec->use_vec.truncate (0);
+ collection_rec->eq_use_vec.truncate (0);
+ collection_rec->mw_vec.truncate (0);
/* Process REG_EQUIV/REG_EQUAL notes. */
for (note = REG_NOTES (insn_info->insn); note;
@@ -3550,10 +3534,10 @@ df_recompute_luids (basic_block bb)
static void
df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
{
- VEC_truncate (df_ref, collection_rec->def_vec, 0);
- VEC_truncate (df_ref, collection_rec->use_vec, 0);
- VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
- VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
+ collection_rec->def_vec.truncate (0);
+ collection_rec->use_vec.truncate (0);
+ collection_rec->eq_use_vec.truncate (0);
+ collection_rec->mw_vec.truncate (0);
if (bb->index == ENTRY_BLOCK)
{
@@ -3622,10 +3606,10 @@ df_bb_refs_record (int bb_index, bool scan_insns)
return;
df_grow_bb_info (df_scan);
- collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
- collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
- collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
- collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
+ vec_stack_alloc (df_ref, collection_rec.def_vec, 128);
+ vec_stack_alloc (df_ref, collection_rec.use_vec, 32);
+ vec_stack_alloc (df_ref, collection_rec.eq_use_vec, 32);
+ vec_stack_alloc (df_mw_hardreg_ptr, collection_rec.mw_vec, 32);
if (scan_insns)
/* Scan the block an insn at a time from beginning to end. */
@@ -3649,10 +3633,10 @@ df_bb_refs_record (int bb_index, bool scan_insns)
df_bb_refs_collect (&collection_rec, bb);
df_refs_add_to_chains (&collection_rec, bb, NULL);
- VEC_free (df_ref, stack, collection_rec.def_vec);
- VEC_free (df_ref, stack, collection_rec.use_vec);
- VEC_free (df_ref, stack, collection_rec.eq_use_vec);
- VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
+ collection_rec.def_vec.release ();
+ collection_rec.use_vec.release ();
+ collection_rec.eq_use_vec.release ();
+ collection_rec.mw_vec.release ();
/* Now that the block has been processed, set the block as dirty so
LR and LIVE will get it processed. */
@@ -3895,12 +3879,12 @@ df_record_entry_block_defs (bitmap entry_block_defs)
{
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.def_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
+ vec_stack_alloc (df_ref, collection_rec.def_vec, FIRST_PSEUDO_REGISTER);
df_entry_block_defs_collect (&collection_rec, entry_block_defs);
/* Process bb_refs chain */
df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
- VEC_free (df_ref, stack, collection_rec.def_vec);
+ collection_rec.def_vec.release ();
}
@@ -4068,13 +4052,12 @@ df_record_exit_block_uses (bitmap exit_block_uses)
{
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.use_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
-
+ vec_stack_alloc (df_ref, collection_rec.use_vec, FIRST_PSEUDO_REGISTER);
df_exit_block_uses_collect (&collection_rec, exit_block_uses);
/* Process bb_refs chain */
df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
- VEC_free (df_ref, stack, collection_rec.use_vec);
+ collection_rec.use_vec.release ();
}
@@ -4251,7 +4234,7 @@ df_compute_regs_ever_live (bool reset)
df_reg_chain_mark (refs, regno, is_def, is_eq_use)
df_reg_chain_verify_unmarked (refs)
- df_refs_verify (VEC(stack,df_ref)*, ref*, bool)
+ df_refs_verify (vec<stack, va_df_ref>, ref*, bool)
df_mws_verify (mw*, mw*, bool)
df_insn_refs_verify (collection_rec, bb, insn, bool)
df_bb_refs_verify (bb, refs, bool)
@@ -4315,13 +4298,13 @@ df_reg_chain_verify_unmarked (df_ref refs)
/* Verify that NEW_REC and OLD_REC have exactly the same members. */
static bool
-df_refs_verify (VEC(df_ref,stack) *new_rec, df_ref *old_rec,
+df_refs_verify (vec<df_ref, va_stack> new_rec, df_ref *old_rec,
bool abort_if_fail)
{
unsigned int ix;
df_ref new_ref;
- FOR_EACH_VEC_ELT (df_ref, new_rec, ix, new_ref)
+ FOR_EACH_VEC_ELT (new_rec, ix, new_ref)
{
if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
{
@@ -4353,14 +4336,14 @@ df_refs_verify (VEC(df_ref,stack) *new_rec, df_ref *old_rec,
/* Verify that NEW_REC and OLD_REC have exactly the same members. */
static bool
-df_mws_verify (VEC(df_mw_hardreg_ptr,stack) *new_rec,
+df_mws_verify (vec<df_mw_hardreg_ptr, va_stack> new_rec,
struct df_mw_hardreg **old_rec,
bool abort_if_fail)
{
unsigned int ix;
struct df_mw_hardreg *new_reg;
- FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, new_rec, ix, new_reg)
+ FOR_EACH_VEC_ELT (new_rec, ix, new_reg)
{
if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
{
@@ -4438,10 +4421,10 @@ df_bb_verify (basic_block bb)
struct df_collection_rec collection_rec;
memset (&collection_rec, 0, sizeof (struct df_collection_rec));
- collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
- collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
- collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
- collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
+ vec_stack_alloc (df_ref, collection_rec.def_vec, 128);
+ vec_stack_alloc (df_ref, collection_rec.use_vec, 32);
+ vec_stack_alloc (df_ref, collection_rec.eq_use_vec, 32);
+ vec_stack_alloc (df_mw_hardreg_ptr, collection_rec.mw_vec, 32);
gcc_assert (bb_info);
diff --git a/gcc/dominance.c b/gcc/dominance.c
index 683f3f5573f..4168fdea4a5 100644
--- a/gcc/dominance.c
+++ b/gcc/dominance.c
@@ -44,7 +44,6 @@
#include "diagnostic-core.h"
#include "et-forest.h"
#include "timevar.h"
-#include "vecprim.h"
#include "pointer-set.h"
#include "graphds.h"
#include "bitmap.h"
@@ -741,21 +740,21 @@ set_immediate_dominator (enum cdi_direction dir, basic_block bb,
/* Returns the list of basic blocks immediately dominated by BB, in the
direction DIR. */
-VEC (basic_block, heap) *
+vec<basic_block>
get_dominated_by (enum cdi_direction dir, basic_block bb)
{
unsigned int dir_index = dom_convert_dir_to_idx (dir);
struct et_node *node = bb->dom[dir_index], *son = node->son, *ason;
- VEC (basic_block, heap) *bbs = NULL;
+ vec<basic_block> bbs = vec<basic_block>();
gcc_checking_assert (dom_computed[dir_index]);
if (!son)
- return NULL;
+ return vec<basic_block>();
- VEC_safe_push (basic_block, heap, bbs, (basic_block) son->data);
+ bbs.safe_push ((basic_block) son->data);
for (ason = son->right; ason != son; ason = ason->right)
- VEC_safe_push (basic_block, heap, bbs, (basic_block) ason->data);
+ bbs.safe_push ((basic_block) ason->data);
return bbs;
}
@@ -764,13 +763,13 @@ get_dominated_by (enum cdi_direction dir, basic_block bb)
direction DIR) by some block between N_REGION ones stored in REGION,
except for blocks in the REGION itself. */
-VEC (basic_block, heap) *
+vec<basic_block>
get_dominated_by_region (enum cdi_direction dir, basic_block *region,
unsigned n_region)
{
unsigned i;
basic_block dom;
- VEC (basic_block, heap) *doms = NULL;
+ vec<basic_block> doms = vec<basic_block>();
for (i = 0; i < n_region; i++)
region[i]->flags |= BB_DUPLICATED;
@@ -779,7 +778,7 @@ get_dominated_by_region (enum cdi_direction dir, basic_block *region,
dom;
dom = next_dom_son (dir, dom))
if (!(dom->flags & BB_DUPLICATED))
- VEC_safe_push (basic_block, heap, doms, dom);
+ doms.safe_push (dom);
for (i = 0; i < n_region; i++)
region[i]->flags &= ~BB_DUPLICATED;
@@ -791,29 +790,29 @@ get_dominated_by_region (enum cdi_direction dir, basic_block *region,
produce a vector containing all dominated blocks. The vector will be sorted
in preorder. */
-VEC (basic_block, heap) *
+vec<basic_block>
get_dominated_to_depth (enum cdi_direction dir, basic_block bb, int depth)
{
- VEC(basic_block, heap) *bbs = NULL;
+ vec<basic_block> bbs = vec<basic_block>();
unsigned i;
unsigned next_level_start;
i = 0;
- VEC_safe_push (basic_block, heap, bbs, bb);
- next_level_start = 1; /* = VEC_length (basic_block, bbs); */
+ bbs.safe_push (bb);
+ next_level_start = 1; /* = bbs.length (); */
do
{
basic_block son;
- bb = VEC_index (basic_block, bbs, i++);
+ bb = bbs[i++];
for (son = first_dom_son (dir, bb);
son;
son = next_dom_son (dir, son))
- VEC_safe_push (basic_block, heap, bbs, son);
+ bbs.safe_push (son);
if (i == next_level_start && --depth)
- next_level_start = VEC_length (basic_block, bbs);
+ next_level_start = bbs.length ();
}
while (i < next_level_start);
@@ -823,7 +822,7 @@ get_dominated_to_depth (enum cdi_direction dir, basic_block bb, int depth)
/* Returns the list of basic blocks including BB dominated by BB, in the
direction DIR. The vector will be sorted in preorder. */
-VEC (basic_block, heap) *
+vec<basic_block>
get_all_dominated_blocks (enum cdi_direction dir, basic_block bb)
{
return get_dominated_to_depth (dir, bb, 0);
@@ -1088,7 +1087,7 @@ recompute_dominator (enum cdi_direction dir, basic_block bb)
from BBS. */
static void
-prune_bbs_to_update_dominators (VEC (basic_block, heap) *bbs,
+prune_bbs_to_update_dominators (vec<basic_block> bbs,
bool conservative)
{
unsigned i;
@@ -1097,7 +1096,7 @@ prune_bbs_to_update_dominators (VEC (basic_block, heap) *bbs,
edge_iterator ei;
edge e;
- for (i = 0; VEC_iterate (basic_block, bbs, i, bb);)
+ for (i = 0; bbs.iterate (i, &bb);)
{
if (bb == ENTRY_BLOCK_PTR)
goto succeed;
@@ -1140,7 +1139,7 @@ fail:
continue;
succeed:
- VEC_unordered_remove (basic_block, bbs, i);
+ bbs.unordered_remove (i);
}
}
@@ -1159,12 +1158,12 @@ root_of_dom_tree (enum cdi_direction dir, basic_block bb)
blocks. */
static void
-determine_dominators_for_sons (struct graph *g, VEC (basic_block, heap) *bbs,
+determine_dominators_for_sons (struct graph *g, vec<basic_block> bbs,
int y, int *son, int *brother)
{
bitmap gprime;
int i, a, nc;
- VEC (int, heap) **sccs;
+ vec<int> *sccs;
basic_block bb, dom, ybb;
unsigned si;
edge e;
@@ -1172,15 +1171,15 @@ determine_dominators_for_sons (struct graph *g, VEC (basic_block, heap) *bbs,
if (son[y] == -1)
return;
- if (y == (int) VEC_length (basic_block, bbs))
+ if (y == (int) bbs.length ())
ybb = ENTRY_BLOCK_PTR;
else
- ybb = VEC_index (basic_block, bbs, y);
+ ybb = bbs[y];
if (brother[son[y]] == -1)
{
/* Handle the common case Y has just one son specially. */
- bb = VEC_index (basic_block, bbs, son[y]);
+ bb = bbs[son[y]];
set_immediate_dominator (CDI_DOMINATORS, bb,
recompute_dominator (CDI_DOMINATORS, bb));
identify_vertices (g, y, son[y]);
@@ -1194,16 +1193,19 @@ determine_dominators_for_sons (struct graph *g, VEC (basic_block, heap) *bbs,
nc = graphds_scc (g, gprime);
BITMAP_FREE (gprime);
- sccs = XCNEWVEC (VEC (int, heap) *, nc);
+ /* ??? Needed to work around the pre-processor confusion with
+ using a multi-argument template type as macro argument. */
+ typedef vec<int> vec_int_heap;
+ sccs = XCNEWVEC (vec_int_heap, nc);
for (a = son[y]; a != -1; a = brother[a])
- VEC_safe_push (int, heap, sccs[g->vertices[a].component], a);
+ sccs[g->vertices[a].component].safe_push (a);
for (i = nc - 1; i >= 0; i--)
{
dom = NULL;
- FOR_EACH_VEC_ELT (int, sccs[i], si, a)
+ FOR_EACH_VEC_ELT (sccs[i], si, a)
{
- bb = VEC_index (basic_block, bbs, a);
+ bb = bbs[a];
FOR_EACH_EDGE (e, ei, bb->preds)
{
if (root_of_dom_tree (CDI_DOMINATORS, e->src) != ybb)
@@ -1214,15 +1216,15 @@ determine_dominators_for_sons (struct graph *g, VEC (basic_block, heap) *bbs,
}
gcc_assert (dom != NULL);
- FOR_EACH_VEC_ELT (int, sccs[i], si, a)
+ FOR_EACH_VEC_ELT (sccs[i], si, a)
{
- bb = VEC_index (basic_block, bbs, a);
+ bb = bbs[a];
set_immediate_dominator (CDI_DOMINATORS, bb, dom);
}
}
for (i = 0; i < nc; i++)
- VEC_free (int, heap, sccs[i]);
+ sccs[i].release ();
free (sccs);
for (a = son[y]; a != -1; a = brother[a])
@@ -1237,7 +1239,7 @@ determine_dominators_for_sons (struct graph *g, VEC (basic_block, heap) *bbs,
a block of BBS in the current dominance tree dominate it. */
void
-iterate_fix_dominators (enum cdi_direction dir, VEC (basic_block, heap) *bbs,
+iterate_fix_dominators (enum cdi_direction dir, vec<basic_block> bbs,
bool conservative)
{
unsigned i;
@@ -1316,19 +1318,19 @@ iterate_fix_dominators (enum cdi_direction dir, VEC (basic_block, heap) *bbs,
conservatively correct, setting the dominators using the
heuristics in prune_bbs_to_update_dominators could
create cycles in the dominance "tree", and cause ICE. */
- FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
+ FOR_EACH_VEC_ELT (bbs, i, bb)
set_immediate_dominator (CDI_DOMINATORS, bb, NULL);
}
prune_bbs_to_update_dominators (bbs, conservative);
- n = VEC_length (basic_block, bbs);
+ n = bbs.length ();
if (n == 0)
return;
if (n == 1)
{
- bb = VEC_index (basic_block, bbs, 0);
+ bb = bbs[0];
set_immediate_dominator (CDI_DOMINATORS, bb,
recompute_dominator (CDI_DOMINATORS, bb));
return;
@@ -1336,7 +1338,7 @@ iterate_fix_dominators (enum cdi_direction dir, VEC (basic_block, heap) *bbs,
/* Construct the graph G. */
map = pointer_map_create ();
- FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
+ FOR_EACH_VEC_ELT (bbs, i, bb)
{
/* If the dominance tree is conservatively correct, split it now. */
if (conservative)
@@ -1348,7 +1350,7 @@ iterate_fix_dominators (enum cdi_direction dir, VEC (basic_block, heap) *bbs,
g = new_graph (n + 1);
for (y = 0; y < g->n_vertices; y++)
g->vertices[y].data = BITMAP_ALLOC (NULL);
- FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
+ FOR_EACH_VEC_ELT (bbs, i, bb)
{
FOR_EACH_EDGE (e, ei, bb->preds)
{
diff --git a/gcc/domwalk.c b/gcc/domwalk.c
index 00511444414..c81d58da32f 100644
--- a/gcc/domwalk.c
+++ b/gcc/domwalk.c
@@ -162,9 +162,9 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
/* First get some local data, reusing any local data
pointer we may have saved. */
- if (VEC_length (void_p, walk_data->free_block_data) > 0)
+ if (walk_data->free_block_data.length () > 0)
{
- bd = VEC_pop (void_p, walk_data->free_block_data);
+ bd = walk_data->free_block_data.pop ();
recycled = 1;
}
else
@@ -174,7 +174,7 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
}
/* Push the local data into the local data stack. */
- VEC_safe_push (void_p, heap, walk_data->block_data_stack, bd);
+ walk_data->block_data_stack.safe_push (bd);
/* Call the initializer. */
walk_data->initialize_block_local_data (walk_data, bb,
@@ -212,9 +212,9 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
if (walk_data->initialize_block_local_data)
{
/* And finally pop the record off the block local data stack. */
- bd = VEC_pop (void_p, walk_data->block_data_stack);
+ bd = walk_data->block_data_stack.pop ();
/* And save the block data so that we can re-use it. */
- VEC_safe_push (void_p, heap, walk_data->free_block_data, bd);
+ walk_data->free_block_data.safe_push (bd);
}
}
if (sp)
@@ -261,8 +261,8 @@ walk_dominator_tree (struct dom_walk_data *walk_data, basic_block bb)
void
init_walk_dominator_tree (struct dom_walk_data *walk_data)
{
- walk_data->free_block_data = NULL;
- walk_data->block_data_stack = NULL;
+ walk_data->free_block_data.create (0);
+ walk_data->block_data_stack.create (0);
}
void
@@ -270,10 +270,10 @@ fini_walk_dominator_tree (struct dom_walk_data *walk_data)
{
if (walk_data->initialize_block_local_data)
{
- while (VEC_length (void_p, walk_data->free_block_data) > 0)
- free (VEC_pop (void_p, walk_data->free_block_data));
+ while (walk_data->free_block_data.length () > 0)
+ free (walk_data->free_block_data.pop ());
}
- VEC_free (void_p, heap, walk_data->free_block_data);
- VEC_free (void_p, heap, walk_data->block_data_stack);
+ walk_data->free_block_data.release ();
+ walk_data->block_data_stack.release ();
}
diff --git a/gcc/domwalk.h b/gcc/domwalk.h
index 63aeec233a3..e0c11f65d72 100644
--- a/gcc/domwalk.h
+++ b/gcc/domwalk.h
@@ -19,8 +19,6 @@ along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
typedef void *void_p;
-DEF_VEC_P(void_p);
-DEF_VEC_ALLOC_P(void_p,heap);
/* This is the main data structure for the dominator walker. It provides
the callback hooks as well as a convenient place to hang block local
@@ -58,7 +56,7 @@ struct dom_walk_data
/* Stack of any data we need to keep on a per-block basis.
If you have no local data, then BLOCK_DATA_STACK will be NULL. */
- VEC(void_p,heap) *block_data_stack;
+ vec<void_p> block_data_stack;
/* Size of the block local data. If this is zero, then it is assumed
you have no local data and thus no BLOCK_DATA_STACK as well. */
@@ -68,7 +66,7 @@ struct dom_walk_data
information/data outside domwalk.c. */
/* Stack of available block local structures. */
- VEC(void_p,heap) *free_block_data;
+ vec<void_p> free_block_data;
};
void walk_dominator_tree (struct dom_walk_data *, basic_block);
diff --git a/gcc/dse.c b/gcc/dse.c
index d389a426593..c7883f0138b 100644
--- a/gcc/dse.c
+++ b/gcc/dse.c
@@ -550,10 +550,8 @@ static alloc_pool rtx_group_info_pool;
/* Index into the rtx_group_vec. */
static int rtx_group_next_id;
-DEF_VEC_P(group_info_t);
-DEF_VEC_ALLOC_P(group_info_t,heap);
-static VEC(group_info_t,heap) *rtx_group_vec;
+static vec<group_info_t> rtx_group_vec;
/* This structure holds the set of changes that are being deferred
@@ -715,7 +713,7 @@ get_group_info (rtx base)
gi->offset_map_size_p = 0;
gi->offset_map_n = NULL;
gi->offset_map_p = NULL;
- VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
+ rtx_group_vec.safe_push (gi);
}
return clear_alias_group;
}
@@ -741,7 +739,7 @@ get_group_info (rtx base)
gi->offset_map_size_p = 0;
gi->offset_map_n = NULL;
gi->offset_map_p = NULL;
- VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
+ rtx_group_vec.safe_push (gi);
}
return gi;
@@ -1527,7 +1525,7 @@ record_store (rtx body, bb_info_t bb_info)
frame pointer we can do global analysis. */
group_info_t group
- = VEC_index (group_info_t, rtx_group_vec, group_id);
+ = rtx_group_vec[group_id];
tree expr = MEM_EXPR (mem);
store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
@@ -1597,7 +1595,7 @@ record_store (rtx body, bb_info_t bb_info)
else
{
group_info_t group
- = VEC_index (group_info_t, rtx_group_vec, group_id);
+ = rtx_group_vec[group_id];
mem_addr = group->canon_base_addr;
}
if (offset)
@@ -2214,7 +2212,7 @@ check_mem_read_rtx (rtx *loc, void *data)
else
{
group_info_t group
- = VEC_index (group_info_t, rtx_group_vec, group_id);
+ = rtx_group_vec[group_id];
mem_addr = group->canon_base_addr;
}
if (offset)
@@ -2598,8 +2596,7 @@ scan_insn (bb_info_t bb_info, rtx insn)
store_info = store_info->next;
if (store_info->group_id >= 0
- && VEC_index (group_info_t, rtx_group_vec,
- store_info->group_id)->frame_related)
+ && rtx_group_vec[store_info->group_id]->frame_related)
remove_store = true;
}
@@ -2826,7 +2823,7 @@ dse_step1 (void)
if (store_info->group_id >= 0)
{
group_info_t group
- = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
+ = rtx_group_vec[store_info->group_id];
if (group->frame_related && !i_ptr->cannot_delete)
delete_dead_store_insn (i_ptr);
}
@@ -2917,7 +2914,7 @@ dse_step2_init (void)
unsigned int i;
group_info_t group;
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
{
/* For all non stack related bases, we only consider a store to
be deletable if there are two or more stores for that
@@ -2970,7 +2967,7 @@ dse_step2_nospill (void)
/* Position 0 is unused because 0 is used in the maps to mean
unused. */
current_position = 1;
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
{
bitmap_iterator bi;
unsigned int j;
@@ -3084,7 +3081,7 @@ scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
{
HOST_WIDE_INT i;
group_info_t group_info
- = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
+ = rtx_group_vec[store_info->group_id];
if (group_info->process_globally)
for (i = store_info->begin; i < store_info->end; i++)
{
@@ -3138,7 +3135,7 @@ scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
/* If this insn reads the frame, kill all the frame related stores. */
if (insn_info->frame_read)
{
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
if (group->process_globally && group->frame_related)
{
if (kill)
@@ -3153,7 +3150,7 @@ scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
if (kill)
bitmap_ior_into (kill, kill_on_calls);
bitmap_and_compl_into (gen, kill_on_calls);
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
if (group->process_globally && !group->frame_related)
{
if (kill)
@@ -3163,7 +3160,7 @@ scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
}
while (read_info)
{
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
{
if (group->process_globally)
{
@@ -3343,7 +3340,7 @@ dse_step3_exit_block_scan (bb_info_t bb_info)
unsigned int i;
group_info_t group;
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
{
if (group->process_globally && group->frame_related)
bitmap_ior_into (bb_info->gen, group->group_kill);
@@ -3425,7 +3422,7 @@ dse_step3 (bool for_spills)
group_info_t group;
all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
- FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, j, group)
+ FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
bitmap_ior_into (all_ones, group->group_kill);
}
if (!bb_info->out)
@@ -3641,7 +3638,7 @@ dse_step5_nospill (void)
{
HOST_WIDE_INT i;
group_info_t group_info
- = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
+ = rtx_group_vec[store_info->group_id];
for (i = store_info->begin; i < store_info->end; i++)
{
@@ -3840,7 +3837,7 @@ dse_step7 (void)
end_alias_analysis ();
free (bb_table);
rtx_group_table.dispose ();
- VEC_free (group_info_t, heap, rtx_group_vec);
+ rtx_group_vec.release ();
BITMAP_FREE (all_blocks);
BITMAP_FREE (scratch);
diff --git a/gcc/dwarf2cfi.c b/gcc/dwarf2cfi.c
index 7c531603557..3454b1da10a 100644
--- a/gcc/dwarf2cfi.c
+++ b/gcc/dwarf2cfi.c
@@ -78,8 +78,6 @@ typedef struct GTY(()) reg_saved_in_data_struct {
rtx saved_in_reg;
} reg_saved_in_data;
-DEF_VEC_O (reg_saved_in_data);
-DEF_VEC_ALLOC_O (reg_saved_in_data, heap);
/* Since we no longer have a proper CFG, we're going to create a facsimile
of one on the fly while processing the frame-related insns.
@@ -141,7 +139,7 @@ typedef struct
implemented as a flat array because it normally contains zero or 1
entry, depending on the target. IA-64 is the big spender here, using
a maximum of 5 entries. */
- VEC(reg_saved_in_data, heap) *regs_saved_in_regs;
+ vec<reg_saved_in_data> regs_saved_in_regs;
/* An identifier for this trace. Used only for debugging dumps. */
unsigned id;
@@ -153,17 +151,13 @@ typedef struct
bool args_size_undefined;
} dw_trace_info;
-DEF_VEC_O (dw_trace_info);
-DEF_VEC_ALLOC_O (dw_trace_info, heap);
typedef dw_trace_info *dw_trace_info_ref;
-DEF_VEC_P (dw_trace_info_ref);
-DEF_VEC_ALLOC_P (dw_trace_info_ref, heap);
/* The variables making up the pseudo-cfg, as described above. */
-static VEC (dw_trace_info, heap) *trace_info;
-static VEC (dw_trace_info_ref, heap) *trace_work_list;
+static vec<dw_trace_info> trace_info;
+static vec<dw_trace_info_ref> trace_work_list;
static htab_t trace_index;
/* A vector of call frame insns for the CIE. */
@@ -203,10 +197,8 @@ typedef struct {
HOST_WIDE_INT cfa_offset;
} queued_reg_save;
-DEF_VEC_O (queued_reg_save);
-DEF_VEC_ALLOC_O (queued_reg_save, heap);
-static VEC(queued_reg_save, heap) *queued_reg_saves;
+static vec<queued_reg_save> queued_reg_saves;
/* True if any CFI directives were emitted at the current insn. */
static bool any_cfis_emitted;
@@ -383,7 +375,7 @@ copy_cfi_row (dw_cfi_row *src)
dw_cfi_row *dst = ggc_alloc_dw_cfi_row ();
*dst = *src;
- dst->reg_save = VEC_copy (dw_cfi_ref, gc, src->reg_save);
+ dst->reg_save = vec_safe_copy (src->reg_save);
return dst;
}
@@ -415,7 +407,7 @@ add_cfi (dw_cfi_ref cfi)
}
if (add_cfi_vec != NULL)
- VEC_safe_push (dw_cfi_ref, gc, *add_cfi_vec, cfi);
+ vec_safe_push (*add_cfi_vec, cfi);
}
static void
@@ -450,9 +442,9 @@ add_cfi_restore (unsigned reg)
static void
update_row_reg_save (dw_cfi_row *row, unsigned column, dw_cfi_ref cfi)
{
- if (VEC_length (dw_cfi_ref, row->reg_save) <= column)
- VEC_safe_grow_cleared (dw_cfi_ref, gc, row->reg_save, column + 1);
- VEC_replace (dw_cfi_ref, row->reg_save, column, cfi);
+ if (vec_safe_length (row->reg_save) <= column)
+ vec_safe_grow_cleared (row->reg_save, column + 1);
+ (*row->reg_save)[column] = cfi;
}
/* This function fills in aa dw_cfa_location structure from a dwarf location
@@ -677,8 +669,8 @@ cfi_row_equal_p (dw_cfi_row *a, dw_cfi_row *b)
else if (!cfa_equal_p (&a->cfa, &b->cfa))
return false;
- n_a = VEC_length (dw_cfi_ref, a->reg_save);
- n_b = VEC_length (dw_cfi_ref, b->reg_save);
+ n_a = vec_safe_length (a->reg_save);
+ n_b = vec_safe_length (b->reg_save);
n_max = MAX (n_a, n_b);
for (i = 0; i < n_max; ++i)
@@ -686,9 +678,9 @@ cfi_row_equal_p (dw_cfi_row *a, dw_cfi_row *b)
dw_cfi_ref r_a = NULL, r_b = NULL;
if (i < n_a)
- r_a = VEC_index (dw_cfi_ref, a->reg_save, i);
+ r_a = (*a->reg_save)[i];
if (i < n_b)
- r_b = VEC_index (dw_cfi_ref, b->reg_save, i);
+ r_b = (*b->reg_save)[i];
if (!cfi_equal_p (r_a, r_b))
return false;
@@ -927,12 +919,11 @@ record_reg_saved_in_reg (rtx dest, rtx src)
reg_saved_in_data *elt;
size_t i;
- FOR_EACH_VEC_ELT (reg_saved_in_data, cur_trace->regs_saved_in_regs, i, elt)
+ FOR_EACH_VEC_ELT (cur_trace->regs_saved_in_regs, i, elt)
if (compare_reg_or_pc (elt->orig_reg, src))
{
if (dest == NULL)
- VEC_unordered_remove (reg_saved_in_data,
- cur_trace->regs_saved_in_regs, i);
+ cur_trace->regs_saved_in_regs.unordered_remove (i);
else
elt->saved_in_reg = dest;
return;
@@ -942,7 +933,7 @@ record_reg_saved_in_reg (rtx dest, rtx src)
return;
reg_saved_in_data e = {src, dest};
- VEC_safe_push (reg_saved_in_data, heap, cur_trace->regs_saved_in_regs, e);
+ cur_trace->regs_saved_in_regs.safe_push (e);
}
/* Add an entry to QUEUED_REG_SAVES saying that REG is now saved at
@@ -957,14 +948,14 @@ queue_reg_save (rtx reg, rtx sreg, HOST_WIDE_INT offset)
/* Duplicates waste space, but it's also necessary to remove them
for correctness, since the queue gets output in reverse order. */
- FOR_EACH_VEC_ELT (queued_reg_save, queued_reg_saves, i, q)
+ FOR_EACH_VEC_ELT (queued_reg_saves, i, q)
if (compare_reg_or_pc (q->reg, reg))
{
*q = e;
return;
}
- VEC_safe_push (queued_reg_save, heap, queued_reg_saves, e);
+ queued_reg_saves.safe_push (e);
}
/* Output all the entries in QUEUED_REG_SAVES. */
@@ -975,7 +966,7 @@ dwarf2out_flush_queued_reg_saves (void)
queued_reg_save *q;
size_t i;
- FOR_EACH_VEC_ELT (queued_reg_save, queued_reg_saves, i, q)
+ FOR_EACH_VEC_ELT (queued_reg_saves, i, q)
{
unsigned int reg, sreg;
@@ -992,7 +983,7 @@ dwarf2out_flush_queued_reg_saves (void)
reg_save (reg, sreg, q->cfa_offset);
}
- VEC_truncate (queued_reg_save, queued_reg_saves, 0);
+ queued_reg_saves.truncate (0);
}
/* Does INSN clobber any register which QUEUED_REG_SAVES lists a saved
@@ -1006,7 +997,7 @@ clobbers_queued_reg_save (const_rtx insn)
queued_reg_save *q;
size_t iq;
- FOR_EACH_VEC_ELT (queued_reg_save, queued_reg_saves, iq, q)
+ FOR_EACH_VEC_ELT (queued_reg_saves, iq, q)
{
size_t ir;
reg_saved_in_data *rir;
@@ -1014,8 +1005,7 @@ clobbers_queued_reg_save (const_rtx insn)
if (modified_in_p (q->reg, insn))
return true;
- FOR_EACH_VEC_ELT (reg_saved_in_data,
- cur_trace->regs_saved_in_regs, ir, rir)
+ FOR_EACH_VEC_ELT (cur_trace->regs_saved_in_regs, ir, rir)
if (compare_reg_or_pc (q->reg, rir->orig_reg)
&& modified_in_p (rir->saved_in_reg, insn))
return true;
@@ -1034,11 +1024,11 @@ reg_saved_in (rtx reg)
reg_saved_in_data *rir;
size_t i;
- FOR_EACH_VEC_ELT (queued_reg_save, queued_reg_saves, i, q)
+ FOR_EACH_VEC_ELT (queued_reg_saves, i, q)
if (q->saved_reg && regn == REGNO (q->saved_reg))
return q->reg;
- FOR_EACH_VEC_ELT (reg_saved_in_data, cur_trace->regs_saved_in_regs, i, rir)
+ FOR_EACH_VEC_ELT (cur_trace->regs_saved_in_regs, i, rir)
if (regn == REGNO (rir->saved_in_reg))
return rir->orig_reg;
@@ -2049,8 +2039,8 @@ change_cfi_row (dw_cfi_row *old_row, dw_cfi_row *new_row)
add_cfi (cfi);
}
- n_old = VEC_length (dw_cfi_ref, old_row->reg_save);
- n_new = VEC_length (dw_cfi_ref, new_row->reg_save);
+ n_old = vec_safe_length (old_row->reg_save);
+ n_new = vec_safe_length (new_row->reg_save);
n_max = MAX (n_old, n_new);
for (i = 0; i < n_max; ++i)
@@ -2058,9 +2048,9 @@ change_cfi_row (dw_cfi_row *old_row, dw_cfi_row *new_row)
dw_cfi_ref r_old = NULL, r_new = NULL;
if (i < n_old)
- r_old = VEC_index (dw_cfi_ref, old_row->reg_save, i);
+ r_old = (*old_row->reg_save)[i];
if (i < n_new)
- r_new = VEC_index (dw_cfi_ref, new_row->reg_save, i);
+ r_new = (*new_row->reg_save)[i];
if (r_old == r_new)
;
@@ -2124,8 +2114,7 @@ add_cfis_to_fde (void)
if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
{
- fde->dw_fde_switch_cfi_index
- = VEC_length (dw_cfi_ref, fde->dw_fde_cfi);
+ fde->dw_fde_switch_cfi_index = vec_safe_length (fde->dw_fde_cfi);
/* Don't attempt to advance_loc4 between labels
in different sections. */
first = true;
@@ -2158,7 +2147,7 @@ add_cfis_to_fde (void)
xcfi->dw_cfi_opc = (first ? DW_CFA_set_loc
: DW_CFA_advance_loc4);
xcfi->dw_cfi_oprnd1.dw_cfi_addr = label;
- VEC_safe_push (dw_cfi_ref, gc, fde->dw_fde_cfi, xcfi);
+ vec_safe_push (fde->dw_fde_cfi, xcfi);
tmp = emit_note_before (NOTE_INSN_CFI_LABEL, insn);
NOTE_LABEL_NUMBER (tmp) = num;
@@ -2167,8 +2156,7 @@ add_cfis_to_fde (void)
do
{
if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_CFI)
- VEC_safe_push (dw_cfi_ref, gc, fde->dw_fde_cfi,
- NOTE_CFI (insn));
+ vec_safe_push (fde->dw_fde_cfi, NOTE_CFI (insn));
insn = NEXT_INSN (insn);
}
while (insn != next);
@@ -2207,10 +2195,9 @@ maybe_record_trace_start (rtx start, rtx origin)
ti->cfa_store = cur_trace->cfa_store;
ti->cfa_temp = cur_trace->cfa_temp;
- ti->regs_saved_in_regs = VEC_copy (reg_saved_in_data, heap,
- cur_trace->regs_saved_in_regs);
+ ti->regs_saved_in_regs = cur_trace->regs_saved_in_regs.copy ();
- VEC_safe_push (dw_trace_info_ref, heap, trace_work_list, ti);
+ trace_work_list.safe_push (ti);
if (dump_file)
fprintf (dump_file, "\tpush trace %u to worklist\n", ti->id);
@@ -2391,7 +2378,7 @@ scan_trace (dw_trace_info *trace)
if (BARRIER_P (insn))
{
/* Don't bother saving the unneeded queued registers at all. */
- VEC_truncate (queued_reg_save, queued_reg_saves, 0);
+ queued_reg_saves.truncate (0);
break;
}
if (save_point_p (insn))
@@ -2438,13 +2425,12 @@ scan_trace (dw_trace_info *trace)
add_cfi_insn = NULL;
restore_args_size = cur_trace->end_true_args_size;
cur_cfa = &cur_row->cfa;
- save_row_reg_save
- = VEC_copy (dw_cfi_ref, gc, cur_row->reg_save);
+ save_row_reg_save = vec_safe_copy (cur_row->reg_save);
scan_insn_after (elt);
/* ??? Should we instead save the entire row state? */
- gcc_assert (!VEC_length (queued_reg_save, queued_reg_saves));
+ gcc_assert (!queued_reg_saves.length ());
create_trace_edges (control);
@@ -2543,21 +2529,21 @@ create_cfi_notes (void)
{
dw_trace_info *ti;
- gcc_checking_assert (queued_reg_saves == NULL);
- gcc_checking_assert (trace_work_list == NULL);
+ gcc_checking_assert (!queued_reg_saves.exists ());
+ gcc_checking_assert (!trace_work_list.exists ());
/* Always begin at the entry trace. */
- ti = &VEC_index (dw_trace_info, trace_info, 0);
+ ti = &trace_info[0];
scan_trace (ti);
- while (!VEC_empty (dw_trace_info_ref, trace_work_list))
+ while (!trace_work_list.is_empty ())
{
- ti = VEC_pop (dw_trace_info_ref, trace_work_list);
+ ti = trace_work_list.pop ();
scan_trace (ti);
}
- VEC_free (queued_reg_save, heap, queued_reg_saves);
- VEC_free (dw_trace_info_ref, heap, trace_work_list);
+ queued_reg_saves.release ();
+ trace_work_list.release ();
}
/* Return the insn before the first NOTE_INSN_CFI after START. */
@@ -2581,7 +2567,7 @@ before_next_cfi_note (rtx start)
static void
connect_traces (void)
{
- unsigned i, n = VEC_length (dw_trace_info, trace_info);
+ unsigned i, n = trace_info.length ();
dw_trace_info *prev_ti, *ti;
/* ??? Ideally, we should have both queued and processed every trace.
@@ -2594,10 +2580,10 @@ connect_traces (void)
/* Remove all unprocessed traces from the list. */
for (i = n - 1; i > 0; --i)
{
- ti = &VEC_index (dw_trace_info, trace_info, i);
+ ti = &trace_info[i];
if (ti->beg_row == NULL)
{
- VEC_ordered_remove (dw_trace_info, trace_info, i);
+ trace_info.ordered_remove (i);
n -= 1;
}
else
@@ -2606,13 +2592,13 @@ connect_traces (void)
/* Work from the end back to the beginning. This lets us easily insert
remember/restore_state notes in the correct order wrt other notes. */
- prev_ti = &VEC_index (dw_trace_info, trace_info, n - 1);
+ prev_ti = &trace_info[n - 1];
for (i = n - 1; i > 0; --i)
{
dw_cfi_row *old_row;
ti = prev_ti;
- prev_ti = &VEC_index (dw_trace_info, trace_info, i - 1);
+ prev_ti = &trace_info[i - 1];
add_cfi_insn = ti->head;
@@ -2677,13 +2663,13 @@ connect_traces (void)
}
/* Connect args_size between traces that have can_throw_internal insns. */
- if (cfun->eh->lp_array != NULL)
+ if (cfun->eh->lp_array)
{
HOST_WIDE_INT prev_args_size = 0;
for (i = 0; i < n; ++i)
{
- ti = &VEC_index (dw_trace_info, trace_info, i);
+ ti = &trace_info[i];
if (ti->switch_sections)
prev_args_size = 0;
@@ -2716,17 +2702,16 @@ create_pseudo_cfg (void)
/* The first trace begins at the start of the function,
and begins with the CIE row state. */
- trace_info = VEC_alloc (dw_trace_info, heap, 16);
+ trace_info.create (16);
memset (&ti, 0, sizeof (ti));
ti.head = get_insns ();
ti.beg_row = cie_cfi_row;
ti.cfa_store = cie_cfi_row->cfa;
ti.cfa_temp.reg = INVALID_REGNUM;
- VEC_quick_push (dw_trace_info, trace_info, ti);
+ trace_info.quick_push (ti);
if (cie_return_save)
- VEC_safe_push (reg_saved_in_data, heap,
- ti.regs_saved_in_regs, *cie_return_save);
+ ti.regs_saved_in_regs.safe_push (*cie_return_save);
/* Walk all the insns, collecting start of trace locations. */
saw_barrier = false;
@@ -2751,8 +2736,8 @@ create_pseudo_cfg (void)
memset (&ti, 0, sizeof (ti));
ti.head = insn;
ti.switch_sections = switch_sections;
- ti.id = VEC_length (dw_trace_info, trace_info) - 1;
- VEC_safe_push (dw_trace_info, heap, trace_info, ti);
+ ti.id = trace_info.length () - 1;
+ trace_info.safe_push (ti);
saw_barrier = false;
switch_sections = false;
@@ -2761,10 +2746,10 @@ create_pseudo_cfg (void)
/* Create the trace index after we've finished building trace_info,
avoiding stale pointer problems due to reallocation. */
- trace_index = htab_create (VEC_length (dw_trace_info, trace_info),
+ trace_index = htab_create (trace_info.length (),
dw_trace_info_hash, dw_trace_info_eq, NULL);
dw_trace_info *tp;
- FOR_EACH_VEC_ELT (dw_trace_info, trace_info, i, tp)
+ FOR_EACH_VEC_ELT (trace_info, i, tp)
{
void **slot;
@@ -2876,15 +2861,14 @@ create_cie_data (void)
the DW_CFA_offset against the return column, not the intermediate
save register. Save the contents of regs_saved_in_regs so that
we can re-initialize it at the start of each function. */
- switch (VEC_length (reg_saved_in_data, cie_trace.regs_saved_in_regs))
+ switch (cie_trace.regs_saved_in_regs.length ())
{
case 0:
break;
case 1:
cie_return_save = ggc_alloc_reg_saved_in_data ();
- *cie_return_save = VEC_index (reg_saved_in_data,
- cie_trace.regs_saved_in_regs, 0);
- VEC_free (reg_saved_in_data, heap, cie_trace.regs_saved_in_regs);
+ *cie_return_save = cie_trace.regs_saved_in_regs[0];
+ cie_trace.regs_saved_in_regs.release ();
break;
default:
gcc_unreachable ();
@@ -2921,10 +2905,10 @@ execute_dwarf2_frame (void)
size_t i;
dw_trace_info *ti;
- FOR_EACH_VEC_ELT (dw_trace_info, trace_info, i, ti)
- VEC_free (reg_saved_in_data, heap, ti->regs_saved_in_regs);
+ FOR_EACH_VEC_ELT (trace_info, i, ti)
+ ti->regs_saved_in_regs.release ();
}
- VEC_free (dw_trace_info, heap, trace_info);
+ trace_info.release ();
htab_delete (trace_index);
trace_index = NULL;
@@ -3286,7 +3270,7 @@ dump_cfi_row (FILE *f, dw_cfi_row *row)
}
output_cfi_directive (f, cfi);
- FOR_EACH_VEC_ELT (dw_cfi_ref, row->reg_save, i, cfi)
+ FOR_EACH_VEC_SAFE_ELT (row->reg_save, i, cfi)
if (cfi)
output_cfi_directive (f, cfi);
}
diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c
index c5559c869e8..c25b7b74c05 100644
--- a/gcc/dwarf2out.c
+++ b/gcc/dwarf2out.c
@@ -130,20 +130,20 @@ int vms_file_stats_name (const char *, long long *, long *, char *, int *);
/* Array of RTXes referenced by the debugging information, which therefore
must be kept around forever. */
-static GTY(()) VEC(rtx,gc) *used_rtx_array;
+static GTY(()) vec<rtx, va_gc> *used_rtx_array;
/* A pointer to the base of a list of incomplete types which might be
completed at some later time. incomplete_types_list needs to be a
- VEC(tree,gc) because we want to tell the garbage collector about
+ vec<tree, va_gc> *because we want to tell the garbage collector about
it. */
-static GTY(()) VEC(tree,gc) *incomplete_types;
+static GTY(()) vec<tree, va_gc> *incomplete_types;
/* A pointer to the base of a table of references to declaration
scopes. This table is a display which tracks the nesting
of declaration scopes at the current scope and containing
scopes. This table is used to find the proper place to
define type declaration DIE's. */
-static GTY(()) VEC(tree,gc) *decl_scope_table;
+static GTY(()) vec<tree, va_gc> *decl_scope_table;
/* Pointers to various DWARF2 sections. */
static GTY(()) section *debug_info_section;
@@ -190,15 +190,13 @@ static GTY(()) section *debug_frame_section;
#define DWARF_CIE_ID DW_CIE_ID
#endif
-DEF_VEC_P (dw_fde_ref);
-DEF_VEC_ALLOC_P (dw_fde_ref, gc);
/* A vector for a table that contains frame description
information for each routine. */
#define NOT_INDEXED (-1U)
#define NO_INDEX_ASSIGNED (-2U)
-static GTY(()) VEC(dw_fde_ref, gc) *fde_vec;
+static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
struct GTY(()) indirect_string_node {
const char *str;
@@ -616,7 +614,7 @@ output_fde (dw_fde_ref fde, bool for_eh, bool second,
size_t from, until, i;
from = 0;
- until = VEC_length (dw_cfi_ref, fde->dw_fde_cfi);
+ until = vec_safe_length (fde->dw_fde_cfi);
if (fde->dw_fde_second_begin == NULL)
;
@@ -626,7 +624,7 @@ output_fde (dw_fde_ref fde, bool for_eh, bool second,
from = fde->dw_fde_switch_cfi_index;
for (i = from; i < until; i++)
- output_cfi (VEC_index (dw_cfi_ref, fde->dw_fde_cfi, i), fde, for_eh);
+ output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
}
/* If we are to emit a ref/link from function bodies to their frame tables,
@@ -694,7 +692,7 @@ output_call_frame_info (int for_eh)
int dw_cie_version;
/* Don't emit a CIE if there won't be any FDEs. */
- if (fde_vec == NULL)
+ if (!fde_vec)
return;
/* Nothing to do if the assembler's doing it all. */
@@ -711,7 +709,7 @@ output_call_frame_info (int for_eh)
{
bool any_eh_needed = false;
- FOR_EACH_VEC_ELT (dw_fde_ref, fde_vec, i, fde)
+ FOR_EACH_VEC_ELT (*fde_vec, i, fde)
{
if (fde->uses_eh_lsda)
any_eh_needed = any_lsda_needed = true;
@@ -863,7 +861,7 @@ output_call_frame_info (int for_eh)
eh_data_format_name (fde_encoding));
}
- FOR_EACH_VEC_ELT (dw_cfi_ref, cie_cfi_vec, i, cfi)
+ FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
output_cfi (cfi, NULL, for_eh);
/* Pad the CIE out to an address sized boundary. */
@@ -872,7 +870,7 @@ output_call_frame_info (int for_eh)
ASM_OUTPUT_LABEL (asm_out_file, l2);
/* Loop through all of the FDE's. */
- FOR_EACH_VEC_ELT (dw_fde_ref, fde_vec, i, fde)
+ FOR_EACH_VEC_ELT (*fde_vec, i, fde)
{
unsigned int k;
@@ -951,7 +949,7 @@ dwarf2out_alloc_current_fde (void)
fde = ggc_alloc_cleared_dw_fde_node ();
fde->decl = current_function_decl;
fde->funcdef_number = current_function_funcdef_no;
- fde->fde_index = VEC_length (dw_fde_ref, fde_vec);
+ fde->fde_index = vec_safe_length (fde_vec);
fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
fde->uses_eh_lsda = crtl->uses_eh_lsda;
fde->nothrow = crtl->nothrow;
@@ -960,7 +958,7 @@ dwarf2out_alloc_current_fde (void)
/* Record the FDE associated with this function. */
cfun->fde = fde;
- VEC_safe_push (dw_fde_ref, gc, fde_vec, fde);
+ vec_safe_push (fde_vec, fde);
return fde;
}
@@ -1199,13 +1197,9 @@ typedef struct GTY(()) deferred_locations_struct
dw_die_ref die;
} deferred_locations;
-DEF_VEC_O(deferred_locations);
-DEF_VEC_ALLOC_O(deferred_locations,gc);
-static GTY(()) VEC(deferred_locations, gc) *deferred_locations_list;
+static GTY(()) vec<deferred_locations, va_gc> *deferred_locations_list;
-DEF_VEC_P(dw_die_ref);
-DEF_VEC_ALLOC_P(dw_die_ref,heap);
/* Describe an entry into the .debug_addr section. */
@@ -2474,8 +2468,6 @@ typedef struct GTY(()) dw_line_info_struct {
unsigned int val;
} dw_line_info_entry;
-DEF_VEC_O(dw_line_info_entry);
-DEF_VEC_ALLOC_O(dw_line_info_entry, gc);
typedef struct GTY(()) dw_line_info_table_struct {
/* The label that marks the end of this section. */
@@ -2490,13 +2482,11 @@ typedef struct GTY(()) dw_line_info_table_struct {
bool is_stmt;
bool in_use;
- VEC(dw_line_info_entry, gc) *entries;
+ vec<dw_line_info_entry, va_gc> *entries;
} dw_line_info_table;
typedef dw_line_info_table *dw_line_info_table_p;
-DEF_VEC_P(dw_line_info_table_p);
-DEF_VEC_ALLOC_P(dw_line_info_table_p, gc);
/* Each DIE attribute has a field specifying the attribute kind,
a link to the next attribute in the chain, and an attribute value.
@@ -2508,8 +2498,6 @@ typedef struct GTY(()) dw_attr_struct {
}
dw_attr_node;
-DEF_VEC_O(dw_attr_node);
-DEF_VEC_ALLOC_O(dw_attr_node,gc);
/* The Debugging Information Entry (DIE) structure. DIEs form a tree.
The children of each node form a circular list linked by
@@ -2522,7 +2510,7 @@ typedef struct GTY((chain_circular ("%h.die_sib"))) die_struct {
comdat_type_node_ref GTY ((tag ("1"))) die_type_node;
}
GTY ((desc ("%0.comdat_type_p"))) die_id;
- VEC(dw_attr_node,gc) * die_attr;
+ vec<dw_attr_node, va_gc> *die_attr;
dw_die_ref die_parent;
dw_die_ref die_child;
dw_die_ref die_sib;
@@ -2556,8 +2544,6 @@ typedef struct GTY(()) pubname_struct {
}
pubname_entry;
-DEF_VEC_O(pubname_entry);
-DEF_VEC_ALLOC_O(pubname_entry, gc);
struct GTY(()) dw_ranges_struct {
/* If this is positive, it's a block number, otherwise it's a
@@ -2574,8 +2560,6 @@ typedef struct GTY(()) macinfo_struct {
}
macinfo_entry;
-DEF_VEC_O(macinfo_entry);
-DEF_VEC_ALLOC_O(macinfo_entry, gc);
struct GTY(()) dw_ranges_by_label_struct {
const char *begin;
@@ -2721,8 +2705,6 @@ typedef struct GTY(()) die_arg_entry_struct {
tree arg;
} die_arg_entry;
-DEF_VEC_O(die_arg_entry);
-DEF_VEC_ALLOC_O(die_arg_entry,gc);
/* Node of the variable location list. */
struct GTY ((chain_next ("%h.next"))) var_loc_node {
@@ -2786,7 +2768,7 @@ static int tail_call_site_count = -1;
/* Vector mapping block numbers to DW_TAG_{lexical_block,inlined_subroutine}
DIEs. */
-static VEC (dw_die_ref, heap) *block_map;
+static vec<dw_die_ref> block_map;
/* A cached location list. */
struct GTY (()) cached_dw_loc_list_def {
@@ -2830,7 +2812,7 @@ static GTY(()) dw_line_info_table *text_section_line_info;
static GTY(()) dw_line_info_table *cold_text_section_line_info;
/* The set of all non-default tables of line number info. */
-static GTY(()) VEC (dw_line_info_table_p, gc) *separate_line_info;
+static GTY(()) vec<dw_line_info_table_p, va_gc> *separate_line_info;
/* A flag to tell pubnames/types export if there is an info section to
refer to. */
@@ -2838,21 +2820,21 @@ static bool info_section_emitted;
/* A pointer to the base of a table that contains a list of publicly
accessible names. */
-static GTY (()) VEC (pubname_entry, gc) * pubname_table;
+static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
/* A pointer to the base of a table that contains a list of publicly
accessible types. */
-static GTY (()) VEC (pubname_entry, gc) * pubtype_table;
+static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
/* A pointer to the base of a table that contains a list of macro
defines/undefines (and file start/end markers). */
-static GTY (()) VEC (macinfo_entry, gc) * macinfo_table;
+static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
/* True if .debug_macinfo or .debug_macros section is going to be
emitted. */
#define have_macinfo \
(debug_info_level >= DINFO_LEVEL_VERBOSE \
- && !VEC_empty (macinfo_entry, macinfo_table))
+ && !macinfo_table->is_empty ())
/* Array of dies for which we should generate .debug_ranges info. */
static GTY ((length ("ranges_table_allocated"))) dw_ranges_ref ranges_table;
@@ -2898,20 +2880,20 @@ static GTY(()) int label_num;
/* Cached result of previous call to lookup_filename. */
static GTY(()) struct dwarf_file_data * file_table_last_lookup;
-static GTY(()) VEC(die_arg_entry,gc) *tmpl_value_parm_die_table;
+static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
/* Instances of generic types for which we need to generate debug
info that describe their generic parameters and arguments. That
generation needs to happen once all types are properly laid out so
we do it at the end of compilation. */
-static GTY(()) VEC(tree,gc) *generic_type_instances;
+static GTY(()) vec<tree, va_gc> *generic_type_instances;
/* Offset from the "steady-state frame pointer" to the frame base,
within the current function. */
static HOST_WIDE_INT frame_pointer_fb_offset;
static bool frame_pointer_fb_offset_valid;
-static VEC (dw_die_ref, heap) *base_types;
+static vec<dw_die_ref> base_types;
/* Forward declarations for functions defined in this file. */
@@ -3053,7 +3035,7 @@ static void calc_base_type_die_sizes (void);
static void mark_dies (dw_die_ref);
static void unmark_dies (dw_die_ref);
static void unmark_all_dies (dw_die_ref);
-static unsigned long size_of_pubnames (VEC (pubname_entry,gc) *);
+static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
static unsigned long size_of_aranges (void);
static enum dwarf_form value_format (dw_attr_ref);
static void output_value_format (dw_attr_ref);
@@ -3069,7 +3051,7 @@ static void add_pubname (tree, dw_die_ref);
static void add_enumerator_pubname (const char *, dw_die_ref);
static void add_pubname_string (const char *, dw_die_ref);
static void add_pubtype (tree, dw_die_ref);
-static void output_pubnames (VEC (pubname_entry,gc) *);
+static void output_pubnames (vec<pubname_entry, va_gc> *);
static void output_aranges (unsigned long);
static unsigned int add_ranges_num (int);
static unsigned int add_ranges (const_tree);
@@ -3647,9 +3629,8 @@ add_dwarf_attr (dw_die_ref die, dw_attr_ref attr)
if (die == NULL)
return;
- if (die->die_attr == NULL)
- die->die_attr = VEC_alloc (dw_attr_node, gc, 1);
- VEC_safe_push (dw_attr_node, gc, die->die_attr, *attr);
+ vec_safe_reserve (die->die_attr, 1);
+ vec_safe_push (die->die_attr, *attr);
}
static inline enum dw_val_class
@@ -4442,7 +4423,7 @@ get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
if (! die)
return NULL;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
if (a->dw_attr == attr_kind)
return a;
else if (a->dw_attr == DW_AT_specification
@@ -4588,16 +4569,16 @@ remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
if (! die)
return;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
if (a->dw_attr == attr_kind)
{
if (AT_class (a) == dw_val_class_str)
if (a->dw_attr_val.v.val_str->refcount)
a->dw_attr_val.v.val_str->refcount--;
- /* VEC_ordered_remove should help reduce the number of abbrevs
+ /* vec::ordered_remove should help reduce the number of abbrevs
that are needed. */
- VEC_ordered_remove (dw_attr_node, die->die_attr, ix);
+ die->die_attr->ordered_remove (ix);
return;
}
}
@@ -5258,7 +5239,7 @@ print_die (dw_die_ref die, FILE *outfile)
fprintf (outfile, "\n");
}
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
{
print_spaces (outfile);
fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
@@ -5517,7 +5498,7 @@ die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
CHECKSUM (die->die_tag);
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
attr_checksum (a, ctx, mark);
FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
@@ -5831,7 +5812,7 @@ collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
dw_attr_ref a;
unsigned ix;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
{
switch (a->dw_attr)
{
@@ -6268,12 +6249,11 @@ same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
if (die1->die_tag != die2->die_tag)
return 0;
- if (VEC_length (dw_attr_node, die1->die_attr)
- != VEC_length (dw_attr_node, die2->die_attr))
+ if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
return 0;
- FOR_EACH_VEC_ELT (dw_attr_node, die1->die_attr, ix, a1)
- if (!same_attr_p (a1, &VEC_index (dw_attr_node, die2->die_attr, ix), mark))
+ FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
+ if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
return 0;
c1 = die1->die_child;
@@ -6668,7 +6648,7 @@ is_declaration_die (dw_die_ref die)
dw_attr_ref a;
unsigned ix;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
if (a->dw_attr == DW_AT_declaration)
return 1;
@@ -6755,7 +6735,7 @@ clone_die (dw_die_ref die)
clone = ggc_alloc_cleared_die_node ();
clone->die_tag = die->die_tag;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
add_dwarf_attr (clone, a);
return clone;
@@ -6801,7 +6781,7 @@ clone_as_declaration (dw_die_ref die)
clone = ggc_alloc_cleared_die_node ();
clone->die_tag = die->die_tag;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
{
/* We don't want to copy over all attributes.
For example we don't want DW_AT_byte_size because otherwise we will no
@@ -6865,7 +6845,7 @@ copy_declaration_context (dw_die_ref unit, dw_die_ref die)
remove_AT (die, DW_AT_specification);
- FOR_EACH_VEC_ELT (dw_attr_node, decl->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
{
if (a->dw_attr != DW_AT_name
&& a->dw_attr != DW_AT_declaration
@@ -7205,7 +7185,7 @@ copy_decls_walk (dw_die_ref unit, dw_die_ref die, htab_t decl_table)
dw_attr_ref a;
unsigned ix;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
{
if (AT_class (a) == dw_val_class_die_ref)
{
@@ -7326,7 +7306,7 @@ output_location_lists (dw_die_ref die)
dw_attr_ref a;
unsigned ix;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
if (AT_class (a) == dw_val_class_loc_list)
output_loc_list (AT_loc_list (a));
@@ -7411,7 +7391,7 @@ optimize_external_refs_1 (dw_die_ref die, htab_t map)
/* Scan the DIE references, and remember any that refer to DIEs from
other CUs (i.e. those which are not marked). */
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
if (AT_class (a) == dw_val_class_die_ref
&& (c = AT_ref (a))->die_mark == 0
&& is_type_die (c))
@@ -7491,7 +7471,7 @@ build_abbrev_table (dw_die_ref die, htab_t extern_map)
/* Scan the DIE references, and replace any that refer to
DIEs from other CUs (i.e. those which are not marked) with
the local stubs we built in optimize_external_refs. */
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
if (AT_class (a) == dw_val_class_die_ref
&& (c = AT_ref (a))->die_mark == 0)
{
@@ -7518,13 +7498,12 @@ build_abbrev_table (dw_die_ref die, htab_t extern_map)
if ((abbrev->die_child != NULL) != (die->die_child != NULL))
continue;
- if (VEC_length (dw_attr_node, abbrev->die_attr)
- != VEC_length (dw_attr_node, die->die_attr))
+ if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
continue;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, die_a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
{
- abbrev_a = &VEC_index (dw_attr_node, abbrev->die_attr, ix);
+ abbrev_a = &(*abbrev->die_attr)[ix];
if ((abbrev_a->dw_attr != die_a->dw_attr)
|| (value_format (abbrev_a) != value_format (die_a)))
{
@@ -7587,7 +7566,7 @@ size_of_die (dw_die_ref die)
enum dwarf_form form;
size += size_of_uleb128 (die->die_abbrev);
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
{
switch (AT_class (a))
{
@@ -7764,7 +7743,7 @@ calc_base_type_die_sizes (void)
#endif
die_offset += size_of_die (comp_unit_die ());
- for (i = 0; VEC_iterate (dw_die_ref, base_types, i, base_type); i++)
+ for (i = 0; base_types.iterate (i, &base_type); i++)
{
#if ENABLE_ASSERT_CHECKING
gcc_assert (base_type->die_offset == 0
@@ -7823,7 +7802,7 @@ unmark_all_dies (dw_die_ref die)
FOR_EACH_CHILD (die, c, unmark_all_dies (c));
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
if (AT_class (a) == dw_val_class_die_ref)
unmark_all_dies (AT_ref (a));
}
@@ -7832,14 +7811,14 @@ unmark_all_dies (dw_die_ref die)
generated for the compilation unit. */
static unsigned long
-size_of_pubnames (VEC (pubname_entry, gc) * names)
+size_of_pubnames (vec<pubname_entry, va_gc> *names)
{
unsigned long size;
unsigned i;
pubname_ref p;
size = DWARF_PUBNAMES_HEADER_SIZE;
- FOR_EACH_VEC_ELT (pubname_entry, names, i, p)
+ FOR_EACH_VEC_ELT (*names, i, p)
if (names != pubtype_table
|| p->die->die_offset != 0
|| !flag_eliminate_unused_debug_types)
@@ -7868,7 +7847,7 @@ size_of_aranges (void)
unsigned fde_idx;
dw_fde_ref fde;
- FOR_EACH_VEC_ELT (dw_fde_ref, fde_vec, fde_idx, fde)
+ FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
{
if (!fde->in_std_section)
size += 2 * DWARF2_ADDR_SIZE;
@@ -8086,8 +8065,7 @@ output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
else
dw2_asm_output_data (1, DW_children_no, "DW_children_no");
- for (ix = 0; VEC_iterate (dw_attr_node, abbrev->die_attr, ix, a_attr);
- ix++)
+ for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
{
dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
dwarf_attr_name (a_attr->dw_attr));
@@ -8344,7 +8322,7 @@ output_die (dw_die_ref die)
(unsigned long)die->die_offset,
dwarf_tag_name (die->die_tag));
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
{
const char *name = dwarf_attr_name (a->dw_attr);
@@ -8885,7 +8863,7 @@ add_pubname_string (const char *str, dw_die_ref die)
e.die = die;
e.name = xstrdup (str);
- VEC_safe_push (pubname_entry, gc, pubname_table, e);
+ vec_safe_push (pubname_table, e);
}
static void
@@ -8919,7 +8897,7 @@ add_enumerator_pubname (const char *scope_name, dw_die_ref die)
gcc_assert (scope_name);
e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
e.die = die;
- VEC_safe_push (pubname_entry, gc, pubname_table, e);
+ vec_safe_push (pubname_table, e);
}
/* Add a new entry to .debug_pubtypes if appropriate. */
@@ -8962,7 +8940,7 @@ add_pubtype (tree decl, dw_die_ref die)
{
e.die = die;
e.name = concat (scope_name, name, NULL);
- VEC_safe_push (pubname_entry, gc, pubtype_table, e);
+ vec_safe_push (pubtype_table, e);
}
/* Although it might be more consistent to add the pubinfo for the
@@ -8984,7 +8962,7 @@ add_pubtype (tree decl, dw_die_ref die)
visible names; or the public types table used to find type definitions. */
static void
-output_pubnames (VEC (pubname_entry, gc) * names)
+output_pubnames (vec<pubname_entry, va_gc> *names)
{
unsigned i;
unsigned long pubnames_length = size_of_pubnames (names);
@@ -9018,7 +8996,7 @@ output_pubnames (VEC (pubname_entry, gc) * names)
dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
"Compilation Unit Length");
- FOR_EACH_VEC_ELT (pubname_entry, names, i, pub)
+ FOR_EACH_VEC_ELT (*names, i, pub)
{
/* Enumerator names are part of the pubname table, but the parent
DW_TAG_enumeration_type die may have been pruned. Don't output
@@ -9121,7 +9099,7 @@ output_aranges (unsigned long aranges_length)
unsigned fde_idx;
dw_fde_ref fde;
- FOR_EACH_VEC_ELT (dw_fde_ref, fde_vec, fde_idx, fde)
+ FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
{
if (DECL_IGNORED_P (fde->decl))
continue;
@@ -9642,7 +9620,7 @@ output_one_line_info_table (dw_line_info_table *table)
dw_line_info_entry *ent;
size_t i;
- FOR_EACH_VEC_ELT (dw_line_info_entry, table->entries, i, ent)
+ FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
{
switch (ent->opcode)
{
@@ -9828,7 +9806,7 @@ output_line_info (bool prologue_only)
dw_line_info_table *table;
size_t i;
- FOR_EACH_VEC_ELT (dw_line_info_table_p, separate_line_info, i, table)
+ FOR_EACH_VEC_ELT (*separate_line_info, i, table)
if (table->in_use)
{
output_one_line_info_table (table);
@@ -12139,7 +12117,7 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode,
symref:
mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
- VEC_safe_push (rtx, gc, used_rtx_array, rtl);
+ vec_safe_push (used_rtx_array, rtl);
break;
case CONCAT:
@@ -13044,7 +13022,7 @@ loc_descriptor (rtx rtl, enum machine_mode mode,
{
loc_result = new_addr_loc_descr (rtl, dtprel_false);
add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
- VEC_safe_push (rtx, gc, used_rtx_array, rtl);
+ vec_safe_push (used_rtx_array, rtl);
}
break;
@@ -14760,7 +14738,7 @@ add_const_value_attribute (dw_die_ref die, rtx rtl)
loc_result = new_addr_loc_descr (rtl, dtprel_false);
add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
add_AT_loc (die, DW_AT_location, loc_result);
- VEC_safe_push (rtx, gc, used_rtx_array, rtl);
+ vec_safe_push (used_rtx_array, rtl);
return true;
}
return false;
@@ -14913,7 +14891,7 @@ rtl_for_decl_init (tree init, tree type)
case CONSTRUCTOR:
if (TREE_CONSTANT (init))
{
- VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (init);
+ vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
bool constant_p = true;
tree value;
unsigned HOST_WIDE_INT ix;
@@ -15334,7 +15312,7 @@ defer_location (tree variable, dw_die_ref die)
deferred_locations entry;
entry.variable = variable;
entry.die = die;
- VEC_safe_push (deferred_locations, gc, deferred_locations_list, entry);
+ vec_safe_push (deferred_locations_list, entry);
}
/* Helper function for tree_add_const_value_attribute. Natively encode
@@ -15395,7 +15373,7 @@ native_encode_initializer (tree init, unsigned char *array, int size)
min_index = tree_low_cst (TYPE_MIN_VALUE (TYPE_DOMAIN (type)), 0);
memset (array, '\0', size);
- FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (init), cnt, ce)
+ FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
{
tree val = ce->value;
tree index = ce->index;
@@ -15441,7 +15419,7 @@ native_encode_initializer (tree init, unsigned char *array, int size)
if (TREE_CODE (type) == RECORD_TYPE)
field = TYPE_FIELDS (type);
- FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (init), cnt, ce)
+ FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
{
tree val = ce->value;
int pos, fieldsize;
@@ -15581,7 +15559,7 @@ convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
/* ??? Bald assumption that the CIE opcode list does not contain
advance opcodes. */
- FOR_EACH_VEC_ELT (dw_cfi_ref, cie_cfi_vec, ix, cfi)
+ FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
lookup_cfa_1 (cfi, &next_cfa, &remember);
last_cfa = next_cfa;
@@ -15597,7 +15575,7 @@ convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
start_label = last_label = fde->dw_fde_second_begin;
}
- FOR_EACH_VEC_ELT (dw_cfi_ref, fde->dw_fde_cfi, ix, cfi)
+ FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
{
switch (cfi->dw_cfi_opc)
{
@@ -16278,7 +16256,7 @@ add_name_and_src_coords_attributes (dw_die_ref die, tree decl)
{
add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
XEXP (DECL_RTL (decl), 0), false);
- VEC_safe_push (rtx, gc, used_rtx_array, XEXP (DECL_RTL (decl), 0));
+ vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
}
#endif /* VMS_DEBUGGING_INFO */
}
@@ -16320,7 +16298,7 @@ dwarf2out_vms_debug_main_pointer (void)
static void
push_decl_scope (tree scope)
{
- VEC_safe_push (tree, gc, decl_scope_table, scope);
+ vec_safe_push (decl_scope_table, scope);
}
/* Pop a declaration scope. */
@@ -16328,7 +16306,7 @@ push_decl_scope (tree scope)
static inline void
pop_decl_scope (void)
{
- VEC_pop (tree, decl_scope_table);
+ decl_scope_table->pop ();
}
/* walk_tree helper function for uses_local_type, below. */
@@ -16901,10 +16879,9 @@ retry_incomplete_types (void)
{
int i;
- for (i = VEC_length (tree, incomplete_types) - 1; i >= 0; i--)
- if (should_emit_struct_debug (VEC_index (tree, incomplete_types, i),
- DINFO_USAGE_DIR_USE))
- gen_type_die (VEC_index (tree, incomplete_types, i), comp_unit_die ());
+ for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
+ if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
+ gen_type_die ((*incomplete_types)[i], comp_unit_die ());
}
/* Determine what tag to use for a record type. */
@@ -17528,8 +17505,8 @@ gen_call_site_die (tree decl, dw_die_ref subr_die,
&& block != DECL_INITIAL (decl)
&& TREE_CODE (block) == BLOCK)
{
- if (VEC_length (dw_die_ref, block_map) > BLOCK_NUMBER (block))
- stmt_die = VEC_index (dw_die_ref, block_map, BLOCK_NUMBER (block));
+ if (block_map.length () > BLOCK_NUMBER (block))
+ stmt_die = block_map[BLOCK_NUMBER (block)];
if (stmt_die)
break;
block = BLOCK_SUPERCONTEXT (block);
@@ -18613,10 +18590,9 @@ gen_lexical_block_die (tree stmt, dw_die_ref context_die, int depth)
if (call_arg_locations)
{
- if (VEC_length (dw_die_ref, block_map) <= BLOCK_NUMBER (stmt))
- VEC_safe_grow_cleared (dw_die_ref, heap, block_map,
- BLOCK_NUMBER (stmt) + 1);
- VEC_replace (dw_die_ref, block_map, BLOCK_NUMBER (stmt), stmt_die);
+ if (block_map.length () <= BLOCK_NUMBER (stmt))
+ block_map.safe_grow_cleared (BLOCK_NUMBER (stmt) + 1);
+ block_map[BLOCK_NUMBER (stmt)] = stmt_die;
}
if (! BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
@@ -18651,10 +18627,9 @@ gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die, int depth)
if (call_arg_locations)
{
- if (VEC_length (dw_die_ref, block_map) <= BLOCK_NUMBER (stmt))
- VEC_safe_grow_cleared (dw_die_ref, heap, block_map,
- BLOCK_NUMBER (stmt) + 1);
- VEC_replace (dw_die_ref, block_map, BLOCK_NUMBER (stmt), subr_die);
+ if (block_map.length () <= BLOCK_NUMBER (stmt))
+ block_map.safe_grow_cleared (BLOCK_NUMBER (stmt) + 1);
+ block_map[BLOCK_NUMBER (stmt)] = subr_die;
}
add_abstract_origin_attribute (subr_die, decl);
if (TREE_ASM_WRITTEN (stmt))
@@ -18755,8 +18730,6 @@ gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
}
typedef const char *dchar_p; /* For DEF_VEC_P. */
-DEF_VEC_P(dchar_p);
-DEF_VEC_ALLOC_P(dchar_p,heap);
static char *producer_string;
@@ -18767,7 +18740,7 @@ static char *
gen_producer_string (void)
{
size_t j;
- VEC(dchar_p, heap) *switches = NULL;
+ vec<dchar_p> switches = vec<dchar_p>();
const char *language_string = lang_hooks.name;
char *producer, *tail;
const char *p;
@@ -18828,8 +18801,7 @@ gen_producer_string (void)
default:
break;
}
- VEC_safe_push (dchar_p, heap, switches,
- save_decoded_options[j].orig_option_with_args_text);
+ switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
break;
}
@@ -18839,7 +18811,7 @@ gen_producer_string (void)
sprintf (tail, "%s %s", language_string, version_string);
tail += plen;
- FOR_EACH_VEC_ELT (dchar_p, switches, j, p)
+ FOR_EACH_VEC_ELT (switches, j, p)
{
len = strlen (p);
*tail = ' ';
@@ -18848,7 +18820,7 @@ gen_producer_string (void)
}
*tail = '\0';
- VEC_free (dchar_p, heap, switches);
+ switches.release ();
return producer;
}
@@ -18881,7 +18853,7 @@ gen_compile_unit_die (const char *filename)
tree t;
const char *common_lang = NULL;
- FOR_EACH_VEC_ELT (tree, all_translation_units, i, t)
+ FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
{
if (!TRANSLATION_UNIT_LANGUAGE (t))
continue;
@@ -18890,7 +18862,7 @@ gen_compile_unit_die (const char *filename)
else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
;
else if (strncmp (common_lang, "GNU C", 5) == 0
- && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
+ && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
/* Mixing C and C++ is ok, use C++ in that case. */
common_lang = "GNU C++";
else
@@ -19005,14 +18977,14 @@ gen_member_die (tree type, dw_die_ref context_die)
/* First output info about the base classes. */
if (binfo)
{
- VEC(tree,gc) *accesses = BINFO_BASE_ACCESSES (binfo);
+ vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
int i;
tree base;
for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
gen_inheritance_die (base,
- (accesses ? VEC_index (tree, accesses, i)
- : access_public_node), context_die);
+ (accesses ? (*accesses)[i] : access_public_node),
+ context_die);
}
/* Now output info about the data members and type members. */
@@ -19138,7 +19110,7 @@ gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
/* We don't need to do this for function-local types. */
if (TYPE_STUB_DECL (type)
&& ! decl_function_context (TYPE_STUB_DECL (type)))
- VEC_safe_push (tree, gc, incomplete_types, type);
+ vec_safe_push (incomplete_types, type);
}
if (get_AT (type_die, DW_AT_name))
@@ -20386,7 +20358,7 @@ dwarf2out_function_decl (tree decl)
call_arg_loc_last = NULL;
call_site_count = -1;
tail_call_site_count = -1;
- VEC_free (dw_die_ref, heap, block_map);
+ block_map.release ();
htab_empty (decl_loc_table);
htab_empty (cached_dw_loc_list_table);
}
@@ -20537,14 +20509,11 @@ append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
return;
if (!tmpl_value_parm_die_table)
- tmpl_value_parm_die_table
- = VEC_alloc (die_arg_entry, gc, 32);
+ vec_alloc (tmpl_value_parm_die_table, 32);
entry.die = die;
entry.arg = arg;
- VEC_safe_push (die_arg_entry, gc,
- tmpl_value_parm_die_table,
- entry);
+ vec_safe_push (tmpl_value_parm_die_table, entry);
}
/* Return TRUE if T is an instance of generic type, FALSE
@@ -20568,10 +20537,10 @@ schedule_generic_params_dies_gen (tree t)
if (!generic_type_p (t))
return;
- if (generic_type_instances == NULL)
- generic_type_instances = VEC_alloc (tree, gc, 256);
+ if (!generic_type_instances)
+ vec_alloc (generic_type_instances, 256);
- VEC_safe_push (tree, gc, generic_type_instances, t);
+ vec_safe_push (generic_type_instances, t);
}
/* Add a DW_AT_const_value attribute to DIEs that were scheduled
@@ -20586,7 +20555,7 @@ gen_remaining_tmpl_value_param_die_attribute (void)
unsigned i;
die_arg_entry *e;
- FOR_EACH_VEC_ELT (die_arg_entry, tmpl_value_parm_die_table, i, e)
+ FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
tree_add_const_value_attribute (e->die, e->arg);
}
}
@@ -20602,10 +20571,10 @@ gen_scheduled_generic_parms_dies (void)
unsigned i;
tree t;
- if (generic_type_instances == NULL)
+ if (!generic_type_instances)
return;
- FOR_EACH_VEC_ELT (tree, generic_type_instances, i, t)
+ FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
gen_generic_params_dies (t);
}
@@ -20877,7 +20846,7 @@ set_cur_line_info_table (section *sec)
table = new_line_info_table ();
table->end_label = end_label;
- VEC_safe_push (dw_line_info_table_p, gc, separate_line_info, table);
+ vec_safe_push (separate_line_info, table);
}
if (DWARF2_ASM_LINE_DEBUG_INFO)
@@ -20926,7 +20895,7 @@ push_dw_line_info_entry (dw_line_info_table *table,
dw_line_info_entry e;
e.opcode = opcode;
e.val = val;
- VEC_safe_push (dw_line_info_entry, gc, table->entries, e);
+ vec_safe_push (table->entries, e);
}
/* Output a label to mark the beginning of a source code line entry
@@ -21046,7 +21015,7 @@ dwarf2out_start_source_file (unsigned int lineno, const char *filename)
e.code = DW_MACINFO_start_file;
e.lineno = lineno;
e.info = ggc_strdup (filename);
- VEC_safe_push (macinfo_entry, gc, macinfo_table, e);
+ vec_safe_push (macinfo_table, e);
}
}
@@ -21065,7 +21034,7 @@ dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
e.code = DW_MACINFO_end_file;
e.lineno = lineno;
e.info = NULL;
- VEC_safe_push (macinfo_entry, gc, macinfo_table, e);
+ vec_safe_push (macinfo_table, e);
}
}
@@ -21082,17 +21051,17 @@ dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
macinfo_entry e;
/* Insert a dummy first entry to be able to optimize the whole
predefined macro block using DW_MACRO_GNU_transparent_include. */
- if (VEC_empty (macinfo_entry, macinfo_table) && lineno <= 1)
+ if (macinfo_table->is_empty () && lineno <= 1)
{
e.code = 0;
e.lineno = 0;
e.info = NULL;
- VEC_safe_push (macinfo_entry, gc, macinfo_table, e);
+ vec_safe_push (macinfo_table, e);
}
e.code = DW_MACINFO_define;
e.lineno = lineno;
e.info = ggc_strdup (buffer);
- VEC_safe_push (macinfo_entry, gc, macinfo_table, e);
+ vec_safe_push (macinfo_table, e);
}
}
@@ -21109,17 +21078,17 @@ dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
macinfo_entry e;
/* Insert a dummy first entry to be able to optimize the whole
predefined macro block using DW_MACRO_GNU_transparent_include. */
- if (VEC_empty (macinfo_entry, macinfo_table) && lineno <= 1)
+ if (macinfo_table->is_empty () && lineno <= 1)
{
e.code = 0;
e.lineno = 0;
e.info = NULL;
- VEC_safe_push (macinfo_entry, gc, macinfo_table, e);
+ vec_safe_push (macinfo_table, e);
}
e.code = DW_MACINFO_undef;
e.lineno = lineno;
e.info = ggc_strdup (buffer);
- VEC_safe_push (macinfo_entry, gc, macinfo_table, e);
+ vec_safe_push (macinfo_table, e);
}
}
@@ -21230,7 +21199,7 @@ output_macinfo_op (macinfo_entry *ref)
If the define/undef entry should be emitted normally, return 0. */
static unsigned
-optimize_macinfo_range (unsigned int idx, VEC (macinfo_entry, gc) *files,
+optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
htab_t *macinfo_htab)
{
macinfo_entry *first, *second, *cur, *inc;
@@ -21242,8 +21211,8 @@ optimize_macinfo_range (unsigned int idx, VEC (macinfo_entry, gc) *files,
unsigned int i, count, encoded_filename_len, linebuf_len;
void **slot;
- first = &VEC_index (macinfo_entry, macinfo_table, idx);
- second = &VEC_index (macinfo_entry, macinfo_table, idx + 1);
+ first = &(*macinfo_table)[idx];
+ second = &(*macinfo_table)[idx + 1];
/* Optimize only if there are at least two consecutive define/undef ops,
and either all of them are before first DW_MACINFO_start_file
@@ -21251,7 +21220,7 @@ optimize_macinfo_range (unsigned int idx, VEC (macinfo_entry, gc) *files,
in some included header file. */
if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
return 0;
- if (VEC_empty (macinfo_entry, files))
+ if (vec_safe_is_empty (files))
{
if (first->lineno > 1 || second->lineno > 1)
return 0;
@@ -21263,10 +21232,10 @@ optimize_macinfo_range (unsigned int idx, VEC (macinfo_entry, gc) *files,
with first and at the same time compute md5 checksum of their
codes, linenumbers and strings. */
md5_init_ctx (&ctx);
- for (i = idx; VEC_iterate (macinfo_entry, macinfo_table, i, cur); i++)
+ for (i = idx; macinfo_table->iterate (i, &cur); i++)
if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
break;
- else if (VEC_empty (macinfo_entry, files) && cur->lineno > 1)
+ else if (vec_safe_is_empty (files) && cur->lineno > 1)
break;
else
{
@@ -21280,10 +21249,10 @@ optimize_macinfo_range (unsigned int idx, VEC (macinfo_entry, gc) *files,
/* From the containing include filename (if any) pick up just
usable characters from its basename. */
- if (VEC_empty (macinfo_entry, files))
+ if (vec_safe_is_empty (files))
base = "";
else
- base = lbasename (VEC_last (macinfo_entry, files).info);
+ base = lbasename (files->last ().info);
for (encoded_filename_len = 0, i = 0; base[i]; i++)
if (ISIDNUM (base[i]) || base[i] == '.')
encoded_filename_len++;
@@ -21314,7 +21283,7 @@ optimize_macinfo_range (unsigned int idx, VEC (macinfo_entry, gc) *files,
/* Construct a macinfo_entry for DW_MACRO_GNU_transparent_include
in the empty vector entry before the first define/undef. */
- inc = &VEC_index (macinfo_entry, macinfo_table, idx - 1);
+ inc = &(*macinfo_table)[idx - 1];
inc->code = DW_MACRO_GNU_transparent_include;
inc->lineno = 0;
inc->info = ggc_strdup (grp_name);
@@ -21332,10 +21301,7 @@ optimize_macinfo_range (unsigned int idx, VEC (macinfo_entry, gc) *files,
output_macinfo_op (inc);
/* And clear all macinfo_entry in the range to avoid emitting them
in the second pass. */
- for (i = idx;
- VEC_iterate (macinfo_entry, macinfo_table, i, cur)
- && i < idx + count;
- i++)
+ for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
{
cur->code = 0;
cur->info = NULL;
@@ -21361,7 +21327,7 @@ save_macinfo_strings (void)
unsigned i;
macinfo_entry *ref;
- for (i = 0; VEC_iterate (macinfo_entry, macinfo_table, i, ref); i++)
+ for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
{
switch (ref->code)
{
@@ -21392,9 +21358,9 @@ static void
output_macinfo (void)
{
unsigned i;
- unsigned long length = VEC_length (macinfo_entry, macinfo_table);
+ unsigned long length = vec_safe_length (macinfo_table);
macinfo_entry *ref;
- VEC (macinfo_entry, gc) *files = NULL;
+ vec<macinfo_entry, va_gc> *files = NULL;
htab_t macinfo_htab = NULL;
if (! length)
@@ -21427,25 +21393,25 @@ output_macinfo (void)
DW_MACRO_GNU_transparent_include op is emitted and kept in
the vector before the first define/undef in the range and the
whole range of define/undef ops is not emitted and kept. */
- for (i = 0; VEC_iterate (macinfo_entry, macinfo_table, i, ref); i++)
+ for (i = 0; macinfo_table->iterate (i, &ref); i++)
{
switch (ref->code)
{
case DW_MACINFO_start_file:
- VEC_safe_push (macinfo_entry, gc, files, *ref);
+ vec_safe_push (files, *ref);
break;
case DW_MACINFO_end_file:
- if (!VEC_empty (macinfo_entry, files))
- VEC_pop (macinfo_entry, files);
+ if (!vec_safe_is_empty (files))
+ files->pop ();
break;
case DW_MACINFO_define:
case DW_MACINFO_undef:
if (!dwarf_strict
&& HAVE_COMDAT_GROUP
- && VEC_length (macinfo_entry, files) != 1
+ && vec_safe_length (files) != 1
&& i > 0
&& i + 1 < length
- && VEC_index (macinfo_entry, macinfo_table, i - 1).code == 0)
+ && (*macinfo_table)[i - 1].code == 0)
{
unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
if (count)
@@ -21477,7 +21443,7 @@ output_macinfo (void)
DW_MACRO_GNU_transparent_include entries terminate the
current chain and switch to a new comdat .debug_macinfo
section and emit the define/undef entries within it. */
- for (i = 0; VEC_iterate (macinfo_entry, macinfo_table, i, ref); i++)
+ for (i = 0; macinfo_table->iterate (i, &ref); i++)
switch (ref->code)
{
case 0:
@@ -21539,7 +21505,7 @@ dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
cached_dw_loc_list_table_eq, NULL);
/* Allocate the initial hunk of the decl_scope_table. */
- decl_scope_table = VEC_alloc (tree, gc, 256);
+ vec_alloc (decl_scope_table, 256);
/* Allocate the initial hunk of the abbrev_die_table. */
abbrev_die_table = ggc_alloc_cleared_vec_dw_die_ref
@@ -21549,12 +21515,12 @@ dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
abbrev_die_table_in_use = 1;
/* Allocate the pubtypes and pubnames vectors. */
- pubname_table = VEC_alloc (pubname_entry, gc, 32);
- pubtype_table = VEC_alloc (pubname_entry, gc, 32);
+ vec_alloc (pubname_table, 32);
+ vec_alloc (pubtype_table, 32);
- incomplete_types = VEC_alloc (tree, gc, 64);
+ vec_alloc (incomplete_types, 64);
- used_rtx_array = VEC_alloc (rtx, gc, 32);
+ vec_alloc (used_rtx_array, 32);
if (!dwarf_split_debug_info)
{
@@ -21638,7 +21604,7 @@ dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL, 0);
if (debug_info_level >= DINFO_LEVEL_VERBOSE)
- macinfo_table = VEC_alloc (macinfo_entry, gc, 64);
+ vec_alloc (macinfo_table, 64);
switch_to_section (text_section);
ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
@@ -21859,7 +21825,7 @@ prune_unused_types_walk_attribs (dw_die_ref die)
dw_attr_ref a;
unsigned ix;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
{
if (a->dw_attr_val.val_class == dw_val_class_die_ref)
{
@@ -22073,7 +22039,7 @@ prune_unused_types_update_strings (dw_die_ref die)
dw_attr_ref a;
unsigned ix;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
if (AT_class (a) == dw_val_class_str)
{
struct indirect_string_node *s = a->dw_attr_val.v.val_str;
@@ -22180,10 +22146,10 @@ prune_unused_types (void)
are unusual in that they are pubnames that are the children of pubtypes.
They should only be marked via their parent DW_TAG_enumeration_type die,
not as roots in themselves. */
- FOR_EACH_VEC_ELT (pubname_entry, pubname_table, i, pub)
+ FOR_EACH_VEC_ELT (*pubname_table, i, pub)
if (pub->die->die_tag != DW_TAG_enumerator)
prune_unused_types_mark (pub->die, 1);
- for (i = 0; VEC_iterate (dw_die_ref, base_types, i, base_type); i++)
+ for (i = 0; base_types.iterate (i, &base_type); i++)
prune_unused_types_mark (base_type, 1);
if (debug_str_hash)
@@ -22249,24 +22215,24 @@ htab_ct_eq (const void *of1, const void *of2)
static inline void
move_linkage_attr (dw_die_ref die)
{
- unsigned ix = VEC_length (dw_attr_node, die->die_attr);
- dw_attr_node linkage = VEC_index (dw_attr_node, die->die_attr, ix - 1);
+ unsigned ix = vec_safe_length (die->die_attr);
+ dw_attr_node linkage = (*die->die_attr)[ix - 1];
gcc_assert (linkage.dw_attr == DW_AT_linkage_name
|| linkage.dw_attr == DW_AT_MIPS_linkage_name);
while (--ix > 0)
{
- dw_attr_node *prev = &VEC_index (dw_attr_node, die->die_attr, ix - 1);
+ dw_attr_node *prev = &(*die->die_attr)[ix - 1];
if (prev->dw_attr == DW_AT_decl_line || prev->dw_attr == DW_AT_name)
break;
}
- if (ix != VEC_length (dw_attr_node, die->die_attr) - 1)
+ if (ix != vec_safe_length (die->die_attr) - 1)
{
- VEC_pop (dw_attr_node, die->die_attr);
- VEC_quick_insert (dw_attr_node, die->die_attr, ix, linkage);
+ die->die_attr->pop ();
+ die->die_attr->quick_insert (ix, linkage);
}
}
@@ -22305,7 +22271,7 @@ mark_base_types (dw_loc_descr_ref loc)
base_type->die_mark++;
else
{
- VEC_safe_push (dw_die_ref, heap, base_types, base_type);
+ base_types.safe_push (base_type);
base_type->die_mark = 1;
}
}
@@ -22351,12 +22317,12 @@ move_marked_base_types (void)
unsigned int i;
dw_die_ref base_type, die, c;
- if (VEC_empty (dw_die_ref, base_types))
+ if (base_types.is_empty ())
return;
/* Sort by decreasing usage count, they will be added again in that
order later on. */
- VEC_qsort (dw_die_ref, base_types, base_type_cmp);
+ base_types.qsort (base_type_cmp);
die = comp_unit_die ();
c = die->die_child;
do
@@ -22375,7 +22341,7 @@ move_marked_base_types (void)
while (c != die->die_child);
gcc_assert (die->die_child);
c = die->die_child;
- for (i = 0; VEC_iterate (dw_die_ref, base_types, i, base_type); i++)
+ for (i = 0; base_types.iterate (i, &base_type); i++)
{
base_type->die_mark = 0;
base_type->die_sib = c->die_sib;
@@ -22404,7 +22370,7 @@ resolve_one_addr (rtx *addr, void *data ATTRIBUTE_UNUSED)
if (!rtl || !MEM_P (rtl))
return 1;
rtl = XEXP (rtl, 0);
- VEC_safe_push (rtx, gc, used_rtx_array, rtl);
+ vec_safe_push (used_rtx_array, rtl);
*addr = rtl;
return 0;
}
@@ -22562,7 +22528,7 @@ resolve_addr (dw_die_ref die)
dw_loc_list_ref *curr, *start, loc;
unsigned ix;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
switch (AT_class (a))
{
case dw_val_class_loc_list:
@@ -23144,7 +23110,7 @@ optimize_location_lists_1 (dw_die_ref die, htab_t htab)
unsigned ix;
void **slot;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
if (AT_class (a) == dw_val_class_loc_list)
{
dw_loc_list_ref list = AT_loc_list (a);
@@ -23173,7 +23139,7 @@ index_location_lists (dw_die_ref die)
dw_attr_ref a;
unsigned ix;
- FOR_EACH_VEC_ELT (dw_attr_node, die->die_attr, ix, a)
+ FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
if (AT_class (a) == dw_val_class_loc_list)
{
dw_loc_list_ref list = AT_loc_list (a);
@@ -23242,14 +23208,15 @@ dwarf2out_finish (const char *filename)
add_comp_dir_attribute (comp_unit_die ());
}
- for (i = 0; i < VEC_length (deferred_locations, deferred_locations_list); i++)
- {
- add_location_or_const_value_attribute (
- VEC_index (deferred_locations, deferred_locations_list, i).die,
- VEC_index (deferred_locations, deferred_locations_list, i).variable,
- false,
- DW_AT_location);
- }
+ if (deferred_locations_list)
+ for (i = 0; i < deferred_locations_list->length (); i++)
+ {
+ add_location_or_const_value_attribute (
+ (*deferred_locations_list)[i].die,
+ (*deferred_locations_list)[i].variable,
+ false,
+ DW_AT_location);
+ }
/* Traverse the limbo die list, and add parent/child links. The only
dies without parents that should be here are concrete instances of
@@ -23408,7 +23375,7 @@ dwarf2out_finish (const char *filename)
add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
cold_end_label, &range_list_added, true);
- FOR_EACH_VEC_ELT (dw_fde_ref, fde_vec, fde_idx, fde)
+ FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
{
if (DECL_IGNORED_P (fde->decl))
continue;
diff --git a/gcc/dwarf2out.h b/gcc/dwarf2out.h
index 7fa62581177..8027c1e788f 100644
--- a/gcc/dwarf2out.h
+++ b/gcc/dwarf2out.h
@@ -62,11 +62,8 @@ typedef struct GTY(()) dw_cfi_struct {
}
dw_cfi_node;
-DEF_VEC_P (dw_cfi_ref);
-DEF_VEC_ALLOC_P (dw_cfi_ref, heap);
-DEF_VEC_ALLOC_P (dw_cfi_ref, gc);
-typedef VEC(dw_cfi_ref, gc) *cfi_vec;
+typedef vec<dw_cfi_ref, va_gc> *cfi_vec;
typedef struct dw_fde_struct *dw_fde_ref;
diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c
index f39d8616069..aac9b671f34 100644
--- a/gcc/emit-rtl.c
+++ b/gcc/emit-rtl.c
@@ -42,7 +42,6 @@ along with GCC; see the file COPYING3. If not see
#include "flags.h"
#include "function.h"
#include "expr.h"
-#include "vecprim.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
diff --git a/gcc/except.c b/gcc/except.c
index 6256b8dec4b..c0ac835062b 100644
--- a/gcc/except.c
+++ b/gcc/except.c
@@ -189,8 +189,8 @@ static int add_action_record (htab_t, int, int);
static int collect_one_action_chain (htab_t, eh_region);
static int add_call_site (rtx, int, int);
-static void push_uleb128 (VEC (uchar, gc) **, unsigned int);
-static void push_sleb128 (VEC (uchar, gc) **, int);
+static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
+static void push_sleb128 (vec<uchar, va_gc> **, int);
#ifndef HAVE_AS_LEB128
static int dw2_size_of_call_site_table (int);
static int sjlj_size_of_call_site_table (void);
@@ -304,8 +304,8 @@ init_eh_for_function (void)
cfun->eh = ggc_alloc_cleared_eh_status ();
/* Make sure zero'th entries are used. */
- VEC_safe_push (eh_region, gc, cfun->eh->region_array, NULL);
- VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, NULL);
+ vec_safe_push (cfun->eh->region_array, (eh_region)0);
+ vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}
/* Routines to generate the exception tree somewhat directly.
@@ -332,8 +332,8 @@ gen_eh_region (enum eh_region_type type, eh_region outer)
cfun->eh->region_tree = new_eh;
}
- new_eh->index = VEC_length (eh_region, cfun->eh->region_array);
- VEC_safe_push (eh_region, gc, cfun->eh->region_array, new_eh);
+ new_eh->index = vec_safe_length (cfun->eh->region_array);
+ vec_safe_push (cfun->eh->region_array, new_eh);
/* Copy the language's notion of whether to use __cxa_end_cleanup. */
if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
@@ -413,10 +413,10 @@ gen_eh_landing_pad (eh_region region)
lp->next_lp = region->landing_pads;
lp->region = region;
- lp->index = VEC_length (eh_landing_pad, cfun->eh->lp_array);
+ lp->index = vec_safe_length (cfun->eh->lp_array);
region->landing_pads = lp;
- VEC_safe_push (eh_landing_pad, gc, cfun->eh->lp_array, lp);
+ vec_safe_push (cfun->eh->lp_array, lp);
return lp;
}
@@ -424,7 +424,7 @@ gen_eh_landing_pad (eh_region region)
eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
- return VEC_index (eh_region, ifun->eh->region_array, i);
+ return (*ifun->eh->region_array)[i];
}
eh_region
@@ -436,7 +436,7 @@ get_eh_region_from_number (int i)
eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
- return VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
+ return (*ifun->eh->lp_array)[i];
}
eh_landing_pad
@@ -449,13 +449,13 @@ eh_region
get_eh_region_from_lp_number_fn (struct function *ifun, int i)
{
if (i < 0)
- return VEC_index (eh_region, ifun->eh->region_array, -i);
+ return (*ifun->eh->region_array)[-i];
else if (i == 0)
return NULL;
else
{
eh_landing_pad lp;
- lp = VEC_index (eh_landing_pad, ifun->eh->lp_array, i);
+ lp = (*ifun->eh->lp_array)[i];
return lp->region;
}
}
@@ -609,7 +609,7 @@ eh_region_outermost (struct function *ifun, eh_region region_a,
gcc_assert (ifun->eh->region_array);
gcc_assert (ifun->eh->region_tree);
- b_outer = sbitmap_alloc (VEC_length (eh_region, ifun->eh->region_array));
+ b_outer = sbitmap_alloc (ifun->eh->region_array->length());
bitmap_clear (b_outer);
do
@@ -754,10 +754,10 @@ add_ttypes_entry (htab_t ttypes_hash, tree type)
n = XNEW (struct ttypes_filter);
n->t = type;
- n->filter = VEC_length (tree, cfun->eh->ttype_data) + 1;
+ n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
*slot = n;
- VEC_safe_push (tree, gc, cfun->eh->ttype_data, type);
+ vec_safe_push (cfun->eh->ttype_data, type);
}
return n->filter;
@@ -781,9 +781,9 @@ add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
int len;
if (targetm.arm_eabi_unwinder)
- len = VEC_length (tree, cfun->eh->ehspec_data.arm_eabi);
+ len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
else
- len = VEC_length (uchar, cfun->eh->ehspec_data.other);
+ len = vec_safe_length (cfun->eh->ehspec_data.other);
/* Filter value is a -1 based byte index into a uleb128 buffer. */
@@ -796,8 +796,7 @@ add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
for (; list ; list = TREE_CHAIN (list))
{
if (targetm.arm_eabi_unwinder)
- VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi,
- TREE_VALUE (list));
+ vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
else
{
/* Look up each type in the list and encode its filter
@@ -807,9 +806,9 @@ add_ehspec_entry (htab_t ehspec_hash, htab_t ttypes_hash, tree list)
}
}
if (targetm.arm_eabi_unwinder)
- VEC_safe_push (tree, gc, cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
+ vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
else
- VEC_safe_push (uchar, gc, cfun->eh->ehspec_data.other, 0);
+ vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
}
return n->filter;
@@ -828,16 +827,16 @@ assign_filter_values (void)
eh_region r;
eh_catch c;
- cfun->eh->ttype_data = VEC_alloc (tree, gc, 16);
+ vec_alloc (cfun->eh->ttype_data, 16);
if (targetm.arm_eabi_unwinder)
- cfun->eh->ehspec_data.arm_eabi = VEC_alloc (tree, gc, 64);
+ vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
else
- cfun->eh->ehspec_data.other = VEC_alloc (uchar, gc, 64);
+ vec_alloc (cfun->eh->ehspec_data.other, 64);
ttypes = htab_create (31, ttypes_filter_hash, ttypes_filter_eq, free);
ehspec = htab_create (31, ehspec_filter_hash, ehspec_filter_eq, free);
- for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
+ for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
{
if (r == NULL)
continue;
@@ -966,7 +965,7 @@ dw2_build_landing_pads (void)
if (flag_reorder_blocks_and_partition)
e_flags |= EDGE_PRESERVE;
- for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+ for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
{
basic_block bb;
rtx seq;
@@ -1004,7 +1003,7 @@ dw2_build_landing_pads (void)
}
-static VEC (int, heap) *sjlj_lp_call_site_index;
+static vec<int> sjlj_lp_call_site_index;
/* Process all active landing pads. Assign each one a compact dispatch
index, and a call-site index. */
@@ -1016,12 +1015,12 @@ sjlj_assign_call_site_values (void)
int i, disp_index;
eh_landing_pad lp;
- crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);
+ vec_alloc (crtl->eh.action_record_data, 64);
ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
disp_index = 0;
call_site_base = 1;
- for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+ for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
if (lp && lp->post_landing_pad)
{
int action, call_site;
@@ -1041,7 +1040,7 @@ sjlj_assign_call_site_values (void)
/* Otherwise, look it up in the table. */
else
call_site = add_call_site (GEN_INT (disp_index), action, 0);
- VEC_replace (int, sjlj_lp_call_site_index, i, call_site);
+ sjlj_lp_call_site_index[i] = call_site;
disp_index++;
}
@@ -1079,7 +1078,7 @@ sjlj_mark_call_sites (void)
if (nothrow)
continue;
if (lp)
- this_call_site = VEC_index (int, sjlj_lp_call_site_index, lp->index);
+ this_call_site = sjlj_lp_call_site_index[lp->index];
else if (r == NULL)
{
/* Calls (and trapping insns) without notes are outside any
@@ -1240,7 +1239,7 @@ sjlj_emit_dispatch_table (rtx dispatch_label, int num_dispatch)
eh_region r;
edge e;
int i, disp_index;
- VEC(tree, heap) *dispatch_labels = NULL;
+ vec<tree> dispatch_labels = vec<tree>();
fc = crtl->eh.sjlj_fc;
@@ -1287,9 +1286,9 @@ sjlj_emit_dispatch_table (rtx dispatch_label, int num_dispatch)
/* If there's exactly one call site in the function, don't bother
generating a switch statement. */
if (num_dispatch > 1)
- dispatch_labels = VEC_alloc (tree, heap, num_dispatch);
+ dispatch_labels.create (num_dispatch);
- for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+ for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
if (lp && lp->post_landing_pad)
{
rtx seq2, label;
@@ -1305,7 +1304,7 @@ sjlj_emit_dispatch_table (rtx dispatch_label, int num_dispatch)
t_label = create_artificial_label (UNKNOWN_LOCATION);
t = build_int_cst (integer_type_node, disp_index);
case_elt = build_case_label (t, NULL, t_label);
- VEC_quick_push (tree, dispatch_labels, case_elt);
+ dispatch_labels.quick_push (case_elt);
label = label_rtx (t_label);
}
else
@@ -1398,10 +1397,10 @@ sjlj_build_landing_pads (void)
{
int num_dispatch;
- num_dispatch = VEC_length (eh_landing_pad, cfun->eh->lp_array);
+ num_dispatch = vec_safe_length (cfun->eh->lp_array);
if (num_dispatch == 0)
return;
- VEC_safe_grow (int, heap, sjlj_lp_call_site_index, num_dispatch);
+ sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);
num_dispatch = sjlj_assign_call_site_values ();
if (num_dispatch > 0)
@@ -1438,7 +1437,7 @@ sjlj_build_landing_pads (void)
sjlj_emit_function_exit ();
}
- VEC_free (int, heap, sjlj_lp_call_site_index);
+ sjlj_lp_call_site_index.release ();
}
/* After initial rtl generation, call back to finish generating
@@ -1505,7 +1504,7 @@ remove_eh_landing_pad (eh_landing_pad lp)
if (lp->post_landing_pad)
EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
- VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
+ (*cfun->eh->lp_array)[lp->index] = NULL;
}
/* Splice REGION from the region tree. */
@@ -1520,7 +1519,7 @@ remove_eh_handler (eh_region region)
{
if (lp->post_landing_pad)
EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
- VEC_replace (eh_landing_pad, cfun->eh->lp_array, lp->index, NULL);
+ (*cfun->eh->lp_array)[lp->index] = NULL;
}
outer = region->outer;
@@ -1543,7 +1542,7 @@ remove_eh_handler (eh_region region)
}
*pp = region->next_peer;
- VEC_replace (eh_region, cfun->eh->region_array, region->index, NULL);
+ (*cfun->eh->region_array)[region->index] = NULL;
}
/* Invokes CALLBACK for every exception handler landing pad label.
@@ -1555,7 +1554,7 @@ for_each_eh_label (void (*callback) (rtx))
eh_landing_pad lp;
int i;
- for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+ for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
{
if (lp)
{
@@ -1713,10 +1712,10 @@ get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
}
if (lp_nr < 0)
- r = VEC_index (eh_region, cfun->eh->region_array, -lp_nr);
+ r = (*cfun->eh->region_array)[-lp_nr];
else
{
- lp = VEC_index (eh_landing_pad, cfun->eh->lp_array, lp_nr);
+ lp = (*cfun->eh->lp_array)[lp_nr];
r = lp->region;
}
@@ -1950,7 +1949,7 @@ expand_builtin_eh_common (tree region_nr_t)
gcc_assert (host_integerp (region_nr_t, 0));
region_nr = tree_low_cst (region_nr_t, 0);
- region = VEC_index (eh_region, cfun->eh->region_array, region_nr);
+ region = (*cfun->eh->region_array)[region_nr];
/* ??? We shouldn't have been able to delete a eh region without
deleting all the code that depended on it. */
@@ -2247,7 +2246,7 @@ add_action_record (htab_t ar_hash, int filter, int next)
if ((new_ar = *slot) == NULL)
{
new_ar = XNEW (struct action_record);
- new_ar->offset = VEC_length (uchar, crtl->eh.action_record_data) + 1;
+ new_ar->offset = crtl->eh.action_record_data->length () + 1;
new_ar->filter = filter;
new_ar->next = next;
*slot = new_ar;
@@ -2259,7 +2258,7 @@ add_action_record (htab_t ar_hash, int filter, int next)
push_sleb128 (&crtl->eh.action_record_data, filter);
if (next)
- next -= VEC_length (uchar, crtl->eh.action_record_data) + 1;
+ next -= crtl->eh.action_record_data->length () + 1;
push_sleb128 (&crtl->eh.action_record_data, next);
}
@@ -2383,11 +2382,9 @@ add_call_site (rtx landing_pad, int action, int section)
record->landing_pad = landing_pad;
record->action = action;
- VEC_safe_push (call_site_record, gc,
- crtl->eh.call_site_record_v[section], record);
+ vec_safe_push (crtl->eh.call_site_record_v[section], record);
- return call_site_base + VEC_length (call_site_record,
- crtl->eh.call_site_record_v[section]) - 1;
+ return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
}
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
@@ -2410,7 +2407,7 @@ convert_to_eh_region_ranges (void)
rtx last_no_action_insn_before_switch = NULL_RTX;
int saved_call_site_base = call_site_base;
- crtl->eh.action_record_data = VEC_alloc (uchar, gc, 64);
+ vec_alloc (crtl->eh.action_record_data, 64);
ar_hash = htab_create (31, action_record_hash, action_record_eq, free);
@@ -2534,12 +2531,11 @@ convert_to_eh_region_ranges (void)
opening a new one afterwards. */
else if (last_action != -3)
last_landing_pad = pc_rtx;
- call_site_base += VEC_length (call_site_record,
- crtl->eh.call_site_record_v[cur_sec]);
+ if (crtl->eh.call_site_record_v[cur_sec])
+ call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
cur_sec++;
gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
- crtl->eh.call_site_record_v[cur_sec]
- = VEC_alloc (call_site_record, gc, 10);
+ vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
}
if (last_action >= -1 && ! first_no_action_insn)
@@ -2586,7 +2582,7 @@ struct rtl_opt_pass pass_convert_to_eh_region_ranges =
};
static void
-push_uleb128 (VEC (uchar, gc) **data_area, unsigned int value)
+push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
{
do
{
@@ -2594,13 +2590,13 @@ push_uleb128 (VEC (uchar, gc) **data_area, unsigned int value)
value >>= 7;
if (value)
byte |= 0x80;
- VEC_safe_push (uchar, gc, *data_area, byte);
+ vec_safe_push (*data_area, byte);
}
while (value);
}
static void
-push_sleb128 (VEC (uchar, gc) **data_area, int value)
+push_sleb128 (vec<uchar, va_gc> **data_area, int value)
{
unsigned char byte;
int more;
@@ -2613,7 +2609,7 @@ push_sleb128 (VEC (uchar, gc) **data_area, int value)
|| (value == -1 && (byte & 0x40) != 0));
if (more)
byte |= 0x80;
- VEC_safe_push (uchar, gc, *data_area, byte);
+ vec_safe_push (*data_area, byte);
}
while (more);
}
@@ -2623,14 +2619,14 @@ push_sleb128 (VEC (uchar, gc) **data_area, int value)
static int
dw2_size_of_call_site_table (int section)
{
- int n = VEC_length (call_site_record, crtl->eh.call_site_record_v[section]);
+ int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
int size = n * (4 + 4 + 4);
int i;
for (i = 0; i < n; ++i)
{
struct call_site_record_d *cs =
- VEC_index (call_site_record, crtl->eh.call_site_record_v[section], i);
+ (*crtl->eh.call_site_record_v[section])[i];
size += size_of_uleb128 (cs->action);
}
@@ -2640,14 +2636,14 @@ dw2_size_of_call_site_table (int section)
static int
sjlj_size_of_call_site_table (void)
{
- int n = VEC_length (call_site_record, crtl->eh.call_site_record_v[0]);
+ int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
int size = 0;
int i;
for (i = 0; i < n; ++i)
{
struct call_site_record_d *cs =
- VEC_index (call_site_record, crtl->eh.call_site_record_v[0], i);
+ (*crtl->eh.call_site_record_v[0])[i];
size += size_of_uleb128 (INTVAL (cs->landing_pad));
size += size_of_uleb128 (cs->action);
}
@@ -2659,7 +2655,7 @@ sjlj_size_of_call_site_table (void)
static void
dw2_output_call_site_table (int cs_format, int section)
{
- int n = VEC_length (call_site_record, crtl->eh.call_site_record_v[section]);
+ int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
int i;
const char *begin;
@@ -2672,8 +2668,7 @@ dw2_output_call_site_table (int cs_format, int section)
for (i = 0; i < n; ++i)
{
- struct call_site_record_d *cs =
- VEC_index (call_site_record, crtl->eh.call_site_record_v[section], i);
+ struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
char reg_start_lab[32];
char reg_end_lab[32];
char landing_pad_lab[32];
@@ -2721,13 +2716,12 @@ dw2_output_call_site_table (int cs_format, int section)
static void
sjlj_output_call_site_table (void)
{
- int n = VEC_length (call_site_record, crtl->eh.call_site_record_v[0]);
+ int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
int i;
for (i = 0; i < n; ++i)
{
- struct call_site_record_d *cs =
- VEC_index (call_site_record, crtl->eh.call_site_record_v[0], i);
+ struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
"region %d landing pad", i);
@@ -2856,10 +2850,10 @@ output_one_function_exception_table (int section)
int have_tt_data;
int tt_format_size = 0;
- have_tt_data = (VEC_length (tree, cfun->eh->ttype_data)
+ have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
|| (targetm.arm_eabi_unwinder
- ? VEC_length (tree, cfun->eh->ehspec_data.arm_eabi)
- : VEC_length (uchar, cfun->eh->ehspec_data.other)));
+ ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
+ : vec_safe_length (cfun->eh->ehspec_data.other)));
/* Indicate the format of the @TType entries. */
if (! have_tt_data)
@@ -2922,8 +2916,8 @@ output_one_function_exception_table (int section)
before_disp = 1 + 1;
after_disp = (1 + size_of_uleb128 (call_site_len)
+ call_site_len
- + VEC_length (uchar, crtl->eh.action_record_data)
- + (VEC_length (tree, cfun->eh->ttype_data)
+ + vec_safe_length (crtl->eh.action_record_data)
+ + (vec_safe_length (cfun->eh->ttype_data)
* tt_format_size));
disp = after_disp;
@@ -2981,17 +2975,17 @@ output_one_function_exception_table (int section)
/* ??? Decode and interpret the data for flag_debug_asm. */
{
uchar uc;
- FOR_EACH_VEC_ELT (uchar, crtl->eh.action_record_data, i, uc)
+ FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
}
if (have_tt_data)
assemble_align (tt_format_size * BITS_PER_UNIT);
- i = VEC_length (tree, cfun->eh->ttype_data);
+ i = vec_safe_length (cfun->eh->ttype_data);
while (i-- > 0)
{
- tree type = VEC_index (tree, cfun->eh->ttype_data, i);
+ tree type = (*cfun->eh->ttype_data)[i];
output_ttype (type, tt_format, tt_format_size);
}
@@ -3005,14 +2999,14 @@ output_one_function_exception_table (int section)
{
tree type;
for (i = 0;
- VEC_iterate (tree, cfun->eh->ehspec_data.arm_eabi, i, type); ++i)
+ vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
output_ttype (type, tt_format, tt_format_size);
}
else
{
uchar uc;
for (i = 0;
- VEC_iterate (uchar, cfun->eh->ehspec_data.other, i, uc); ++i)
+ vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
dw2_asm_output_data (1, uc,
i ? NULL : "Exception specification table");
}
@@ -3041,7 +3035,7 @@ output_function_exception_table (const char *fnname)
targetm.asm_out.emit_except_table_label (asm_out_file);
output_one_function_exception_table (0);
- if (crtl->eh.call_site_record_v[1] != NULL)
+ if (crtl->eh.call_site_record_v[1])
output_one_function_exception_table (1);
switch_to_section (current_function_section ());
@@ -3232,7 +3226,7 @@ verify_eh_tree (struct function *fun)
return;
count_r = 0;
- for (i = 1; VEC_iterate (eh_region, fun->eh->region_array, i, r); ++i)
+ for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
if (r)
{
if (r->index == i)
@@ -3245,7 +3239,7 @@ verify_eh_tree (struct function *fun)
}
count_lp = 0;
- for (i = 1; VEC_iterate (eh_landing_pad, fun->eh->lp_array, i, lp); ++i)
+ for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
if (lp)
{
if (lp->index == i)
@@ -3262,7 +3256,7 @@ verify_eh_tree (struct function *fun)
r = fun->eh->region_tree;
while (1)
{
- if (VEC_index (eh_region, fun->eh->region_array, r->index) != r)
+ if ((*fun->eh->region_array)[r->index] != r)
{
error ("region_array is corrupted for region %i", r->index);
err = true;
@@ -3281,7 +3275,7 @@ verify_eh_tree (struct function *fun)
for (lp = r->landing_pads; lp ; lp = lp->next_lp)
{
- if (VEC_index (eh_landing_pad, fun->eh->lp_array, lp->index) != lp)
+ if ((*fun->eh->lp_array)[lp->index] != lp)
{
error ("lp_array is corrupted for lp %i", lp->index);
err = true;
diff --git a/gcc/except.h b/gcc/except.h
index 5b3939e0fc4..473bbfceda8 100644
--- a/gcc/except.h
+++ b/gcc/except.h
@@ -27,8 +27,6 @@ along with GCC; see the file COPYING3. If not see
#endif
#include "hashtab.h"
-#include "vecprim.h"
-#include "vecir.h"
struct function;
struct eh_region_d;
@@ -189,12 +187,7 @@ typedef struct eh_landing_pad_d *eh_landing_pad;
typedef struct eh_catch_d *eh_catch;
typedef struct eh_region_d *eh_region;
-DEF_VEC_P(eh_region);
-DEF_VEC_ALLOC_P(eh_region, gc);
-DEF_VEC_ALLOC_P(eh_region, heap);
-DEF_VEC_P(eh_landing_pad);
-DEF_VEC_ALLOC_P(eh_landing_pad, gc);
/* The exception status for each function. */
@@ -205,10 +198,10 @@ struct GTY(()) eh_status
eh_region region_tree;
/* The same information as an indexable array. */
- VEC(eh_region,gc) *region_array;
+ vec<eh_region, va_gc> *region_array;
/* The landing pads as an indexable array. */
- VEC(eh_landing_pad,gc) *lp_array;
+ vec<eh_landing_pad, va_gc> *lp_array;
/* At the gimple level, a mapping from gimple statement to landing pad
or must-not-throw region. See record_stmt_eh_region. */
@@ -216,15 +209,15 @@ struct GTY(()) eh_status
/* All of the runtime type data used by the function. These objects
are emitted to the lang-specific-data-area for the function. */
- VEC(tree,gc) *ttype_data;
+ vec<tree, va_gc> *ttype_data;
/* The table of all action chains. These encode the eh_region tree in
a compact form for use by the runtime, and is also emitted to the
lang-specific-data-area. Note that the ARM EABI uses a different
format for the encoding than all other ports. */
union eh_status_u {
- VEC(tree,gc) * GTY((tag ("1"))) arm_eabi;
- VEC(uchar,gc) * GTY((tag ("0"))) other;
+ vec<tree, va_gc> *GTY((tag ("1"))) arm_eabi;
+ vec<uchar, va_gc> *GTY((tag ("0"))) other;
} GTY ((desc ("targetm.arm_eabi_unwinder"))) ehspec_data;
};
diff --git a/gcc/expr.c b/gcc/expr.c
index b1b83d0e1df..d1da390e282 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -5779,7 +5779,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
register whose mode size isn't equal to SIZE since
clear_storage can't handle this case. */
else if (size > 0
- && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
+ && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
!= fields_length (type))
|| mostly_zeros_p (exp))
&& (!REG_P (target)
@@ -6241,7 +6241,7 @@ store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
/* Store each element of the constructor into the corresponding
element of TARGET, determined by counting the elements. */
for (idx = 0, i = 0;
- VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
+ vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
idx++, i += bitsize / elt_size)
{
HOST_WIDE_INT eltpos;
@@ -7131,7 +7131,7 @@ safe_from_p (const_rtx x, tree exp, int top_p)
constructor_elt *ce;
unsigned HOST_WIDE_INT idx;
- FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
+ FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
|| !safe_from_p (x, ce->value, 0))
return 0;
@@ -9325,9 +9325,9 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
}
if (!tmp)
{
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *v;
unsigned i;
- v = VEC_alloc (constructor_elt, gc, VECTOR_CST_NELTS (exp));
+ vec_alloc (v, VECTOR_CST_NELTS (exp));
for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
tmp = build_constructor (type, v);
diff --git a/gcc/expr.h b/gcc/expr.h
index 0f5e8541fb0..7ad5f8192e4 100644
--- a/gcc/expr.h
+++ b/gcc/expr.h
@@ -740,6 +740,6 @@ rtx get_personality_function (tree);
extern void expand_case (gimple);
/* Like expand_case but special-case for SJLJ exception dispatching. */
-extern void expand_sjlj_dispatch_table (rtx, VEC(tree,heap) *);
+extern void expand_sjlj_dispatch_table (rtx, vec<tree> );
#endif /* GCC_EXPR_H */
diff --git a/gcc/final.c b/gcc/final.c
index fc10dd6d5a6..2bd6aebe4bc 100644
--- a/gcc/final.c
+++ b/gcc/final.c
@@ -77,7 +77,6 @@ along with GCC; see the file COPYING3. If not see
#include "cgraph.h"
#include "coverage.h"
#include "df.h"
-#include "vecprim.h"
#include "ggc.h"
#include "cfgloop.h"
#include "params.h"
@@ -309,7 +308,7 @@ dbr_sequence_length (void)
static int *insn_lengths;
-VEC(int,heap) *insn_addresses_;
+vec<int> insn_addresses_;
/* Max uid for which the above arrays are valid. */
static int insn_lengths_max_uid;
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 56e0554737e..4dcd3220390 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -9612,7 +9612,7 @@ vec_cst_ctor_to_array (tree arg, tree *elts)
{
constructor_elt *elt;
- FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (arg), i, elt)
+ FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
return false;
else
@@ -9657,7 +9657,8 @@ fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
if (need_ctor)
{
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, nelts);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, nelts);
for (i = 0; i < nelts; i++)
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
return build_constructor (type, v);
@@ -14094,15 +14095,16 @@ fold_ternary_loc (location_t loc, enum tree_code code, tree type,
}
else
{
- VEC(constructor_elt, gc) *vals;
+ vec<constructor_elt, va_gc> *vals;
unsigned i;
if (CONSTRUCTOR_NELTS (arg0) == 0)
- return build_constructor (type, NULL);
+ return build_constructor (type,
+ NULL);
if (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (arg0,
0)->value))
!= VECTOR_TYPE)
{
- vals = VEC_alloc (constructor_elt, gc, n);
+ vec_alloc (vals, n);
for (i = 0;
i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
++i)
@@ -14347,15 +14349,15 @@ fold (tree expr)
&& TREE_CODE (op0) == CONSTRUCTOR
&& ! type_contains_placeholder_p (TREE_TYPE (op0)))
{
- VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
- unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
+ vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
+ unsigned HOST_WIDE_INT end = vec_safe_length (elts);
unsigned HOST_WIDE_INT begin = 0;
/* Find a matching index by means of a binary search. */
while (begin != end)
{
unsigned HOST_WIDE_INT middle = (begin + end) / 2;
- tree index = VEC_index (constructor_elt, elts, middle).index;
+ tree index = (*elts)[middle].index;
if (TREE_CODE (index) == INTEGER_CST
&& tree_int_cst_lt (index, op1))
@@ -14370,7 +14372,7 @@ fold (tree expr)
&& tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
end = middle;
else
- return VEC_index (constructor_elt, elts, middle).value;
+ return (*elts)[middle].value;
}
}
diff --git a/gcc/fortran/ChangeLog b/gcc/fortran/ChangeLog
index bb3e70821d1..c704838b723 100644
--- a/gcc/fortran/ChangeLog
+++ b/gcc/fortran/ChangeLog
@@ -1,3 +1,18 @@
+2012-11-17 Diego Novillo <dnovillo@google.com>
+
+ Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
+
+ * frontend-passes.c: Use new vec API in vec.h.
+ * trans-array.c: Likewise.
+ * trans-common.c: Likewise.
+ * trans-decl.c: Likewise.
+ * trans-expr.c: Likewise.
+ * trans-intrinsic.c: Likewise.
+ * trans-openmp.c: Likewise.
+ * trans-stmt.c: Likewise.
+ * trans-types.c: Likewise.
+ * trans.h: Likewise.
+
2012-11-17 Jakub Jelinek <jakub@redhat.com>
PR fortran/55341
@@ -12,6 +27,7 @@
* resolve.c (resolve_typebound_intrinsic_op): Only add typebound
operators to the operator list in the namespace of the derived type.
+
2012-11-12 Jan Hubicka <jh@suse.cz>
* f95-lang.c (ATTR_NOTHROW_LEAF_MALLOC_LIST): New macro.
diff --git a/gcc/fortran/frontend-passes.c b/gcc/fortran/frontend-passes.c
index 0cba9112a08..287807efbc3 100644
--- a/gcc/fortran/frontend-passes.c
+++ b/gcc/fortran/frontend-passes.c
@@ -38,7 +38,7 @@ static bool optimize_comparison (gfc_expr *, gfc_intrinsic_op);
static bool optimize_trim (gfc_expr *);
static bool optimize_lexical_comparison (gfc_expr *);
static void optimize_minmaxloc (gfc_expr **);
-static bool empty_string (gfc_expr *e);
+static bool is_empty_string (gfc_expr *e);
/* How deep we are inside an argument list. */
@@ -742,7 +742,7 @@ optimize_assignment (gfc_code * c)
remove_trim (rhs);
/* Replace a = ' ' by a = '' to optimize away a memcpy. */
- if (empty_string(rhs))
+ if (is_empty_string(rhs))
rhs->value.character.length = 0;
}
@@ -865,7 +865,7 @@ optimize_op (gfc_expr *e)
/* Return true if a constant string contains only blanks. */
static bool
-empty_string (gfc_expr *e)
+is_empty_string (gfc_expr *e)
{
int i;
@@ -967,8 +967,8 @@ optimize_comparison (gfc_expr *e, gfc_intrinsic_op op)
&& (op == INTRINSIC_EQ || op == INTRINSIC_NE))
{
bool empty_op1, empty_op2;
- empty_op1 = empty_string (op1);
- empty_op2 = empty_string (op2);
+ empty_op1 = is_empty_string (op1);
+ empty_op2 = is_empty_string (op2);
if (empty_op1 || empty_op2)
{
diff --git a/gcc/fortran/trans-array.c b/gcc/fortran/trans-array.c
index fc628ca28b6..24adfdeafbe 100644
--- a/gcc/fortran/trans-array.c
+++ b/gcc/fortran/trans-array.c
@@ -1626,7 +1626,7 @@ gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
else
{
/* Collect multiple scalar constants into a constructor. */
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
tree init;
tree bound;
tree tmptype;
@@ -1985,7 +1985,7 @@ gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
gfc_array_spec as;
gfc_se se;
int i;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* First traverse the constructor list, converting the constants
to tree to build an initializer. */
@@ -5317,7 +5317,7 @@ gfc_conv_array_initializer (tree type, gfc_expr * expr)
HOST_WIDE_INT hi;
unsigned HOST_WIDE_INT lo;
tree index, range;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
if (expr->expr_type == EXPR_VARIABLE
&& expr->symtree->n.sym->attr.flavor == FL_PARAMETER
diff --git a/gcc/fortran/trans-common.c b/gcc/fortran/trans-common.c
index 86cf0070ed3..474774fe8f6 100644
--- a/gcc/fortran/trans-common.c
+++ b/gcc/fortran/trans-common.c
@@ -487,7 +487,7 @@ get_init_field (segment_info *head, tree union_type, tree *field_init,
tree tmp, field;
tree init;
unsigned char *data, *chk;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
tree type = unsigned_char_type_node;
int i;
@@ -644,7 +644,7 @@ create_common (gfc_common_head *com, segment_info *head, bool saw_equiv)
if (is_init)
{
tree ctor, tmp;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
if (field != NULL_TREE && field_init != NULL_TREE)
CONSTRUCTOR_APPEND_ELT (v, field, field_init);
@@ -664,7 +664,7 @@ create_common (gfc_common_head *com, segment_info *head, bool saw_equiv)
}
}
- gcc_assert (!VEC_empty (constructor_elt, v));
+ gcc_assert (!v->is_empty ());
ctor = build_constructor (union_type, v);
TREE_CONSTANT (ctor) = 1;
TREE_STATIC (ctor) = 1;
diff --git a/gcc/fortran/trans-decl.c b/gcc/fortran/trans-decl.c
index 77502170c3c..c661fb358ac 100644
--- a/gcc/fortran/trans-decl.c
+++ b/gcc/fortran/trans-decl.c
@@ -2284,8 +2284,8 @@ build_entry_thunks (gfc_namespace * ns, bool global)
gfc_save_backend_locus (&old_loc);
for (el = ns->entries; el; el = el->next)
{
- VEC(tree,gc) *args = NULL;
- VEC(tree,gc) *string_args = NULL;
+ vec<tree, va_gc> *args = NULL;
+ vec<tree, va_gc> *string_args = NULL;
thunk_sym = el->sym;
@@ -2300,16 +2300,16 @@ build_entry_thunks (gfc_namespace * ns, bool global)
/* Pass extra parameter identifying this entry point. */
tmp = build_int_cst (gfc_array_index_type, el->id);
- VEC_safe_push (tree, gc, args, tmp);
+ vec_safe_push (args, tmp);
if (thunk_sym->attr.function)
{
if (gfc_return_by_reference (ns->proc_name))
{
tree ref = DECL_ARGUMENTS (current_function_decl);
- VEC_safe_push (tree, gc, args, ref);
+ vec_safe_push (args, ref);
if (ns->proc_name->ts.type == BT_CHARACTER)
- VEC_safe_push (tree, gc, args, DECL_CHAIN (ref));
+ vec_safe_push (args, DECL_CHAIN (ref));
}
}
@@ -2333,27 +2333,27 @@ build_entry_thunks (gfc_namespace * ns, bool global)
{
/* Pass the argument. */
DECL_ARTIFICIAL (thunk_formal->sym->backend_decl) = 1;
- VEC_safe_push (tree, gc, args, thunk_formal->sym->backend_decl);
+ vec_safe_push (args, thunk_formal->sym->backend_decl);
if (formal->sym->ts.type == BT_CHARACTER)
{
tmp = thunk_formal->sym->ts.u.cl->backend_decl;
- VEC_safe_push (tree, gc, string_args, tmp);
+ vec_safe_push (string_args, tmp);
}
}
else
{
/* Pass NULL for a missing argument. */
- VEC_safe_push (tree, gc, args, null_pointer_node);
+ vec_safe_push (args, null_pointer_node);
if (formal->sym->ts.type == BT_CHARACTER)
{
tmp = build_int_cst (gfc_charlen_type_node, 0);
- VEC_safe_push (tree, gc, string_args, tmp);
+ vec_safe_push (string_args, tmp);
}
}
}
/* Call the master function. */
- VEC_safe_splice (tree, gc, args, string_args);
+ vec_safe_splice (args, string_args);
tmp = ns->proc_name->backend_decl;
tmp = build_call_expr_loc_vec (input_location, tmp, args);
if (ns->proc_name->attr.mixed_entry_master)
@@ -2616,7 +2616,7 @@ static tree
build_library_function_decl_1 (tree name, const char *spec,
tree rettype, int nargs, va_list p)
{
- VEC(tree,gc) *arglist;
+ vec<tree, va_gc> *arglist;
tree fntype;
tree fndecl;
int n;
@@ -2625,11 +2625,11 @@ build_library_function_decl_1 (tree name, const char *spec,
gcc_assert (current_function_decl == NULL_TREE);
/* Create a list of the argument types. */
- arglist = VEC_alloc (tree, gc, abs (nargs));
+ vec_alloc (arglist, abs (nargs));
for (n = abs (nargs); n > 0; n--)
{
tree argtype = va_arg (p, tree);
- VEC_quick_push (tree, arglist, argtype);
+ arglist->quick_push (argtype);
}
/* Build the function type and decl. */
@@ -5005,7 +5005,7 @@ create_main_function (tree fndecl)
language standard parameters. */
{
tree array_type, array, var;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* Passing a new option to the library requires four modifications:
+ add it to the tree_cons list below
diff --git a/gcc/fortran/trans-expr.c b/gcc/fortran/trans-expr.c
index b0bd7f57004..d6410d3ac49 100644
--- a/gcc/fortran/trans-expr.c
+++ b/gcc/fortran/trans-expr.c
@@ -661,7 +661,7 @@ gfc_copy_class_to_class (tree from, tree to, tree nelems)
tree to_data;
tree to_ref;
tree from_ref;
- VEC(tree,gc) *args;
+ vec<tree, va_gc> *args;
tree tmp;
tree index;
stmtblock_t loopbody;
@@ -696,13 +696,13 @@ gfc_copy_class_to_class (tree from, tree to, tree nelems)
if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (from_data)))
{
from_ref = gfc_get_class_array_ref (index, from);
- VEC_safe_push (tree, gc, args, from_ref);
+ vec_safe_push (args, from_ref);
}
else
- VEC_safe_push (tree, gc, args, from_data);
+ vec_safe_push (args, from_data);
to_ref = gfc_get_class_array_ref (index, to);
- VEC_safe_push (tree, gc, args, to_ref);
+ vec_safe_push (args, to_ref);
tmp = build_call_vec (fcn_type, fcn, args);
@@ -724,8 +724,8 @@ gfc_copy_class_to_class (tree from, tree to, tree nelems)
else
{
gcc_assert (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (from_data)));
- VEC_safe_push (tree, gc, args, from_data);
- VEC_safe_push (tree, gc, args, to_data);
+ vec_safe_push (args, from_data);
+ vec_safe_push (args, to_data);
tmp = build_call_vec (fcn_type, fcn, args);
}
@@ -3822,11 +3822,11 @@ conv_isocbinding_procedure (gfc_se * se, gfc_symbol * sym,
int
gfc_conv_procedure_call (gfc_se * se, gfc_symbol * sym,
gfc_actual_arglist * args, gfc_expr * expr,
- VEC(tree,gc) *append_args)
+ vec<tree, va_gc> *append_args)
{
gfc_interface_mapping mapping;
- VEC(tree,gc) *arglist;
- VEC(tree,gc) *retargs;
+ vec<tree, va_gc> *arglist;
+ vec<tree, va_gc> *retargs;
tree tmp;
tree fntype;
gfc_se parmse;
@@ -3837,7 +3837,7 @@ gfc_conv_procedure_call (gfc_se * se, gfc_symbol * sym,
tree var;
tree len;
tree base_object;
- VEC(tree,gc) *stringargs;
+ vec<tree, va_gc> *stringargs;
tree result = NULL;
gfc_formal_arglist *formal;
gfc_actual_arglist *arg;
@@ -4608,7 +4608,7 @@ gfc_conv_procedure_call (gfc_se * se, gfc_symbol * sym,
/* Character strings are passed as two parameters, a length and a
pointer - except for Bind(c) which only passes the pointer. */
if (parmse.string_length != NULL_TREE && !sym->attr.is_bind_c)
- VEC_safe_push (tree, gc, stringargs, parmse.string_length);
+ vec_safe_push (stringargs, parmse.string_length);
/* For descriptorless coarrays and assumed-shape coarray dummies, we
pass the token and the offset as additional arguments. */
@@ -4618,9 +4618,8 @@ gfc_conv_procedure_call (gfc_se * se, gfc_symbol * sym,
&& e == NULL)
{
/* Token and offset. */
- VEC_safe_push (tree, gc, stringargs, null_pointer_node);
- VEC_safe_push (tree, gc, stringargs,
- build_int_cst (gfc_array_index_type, 0));
+ vec_safe_push (stringargs, null_pointer_node);
+ vec_safe_push (stringargs, build_int_cst (gfc_array_index_type, 0));
gcc_assert (fsym->attr.optional);
}
else if (fsym && fsym->attr.codimension
@@ -4646,7 +4645,7 @@ gfc_conv_procedure_call (gfc_se * se, gfc_symbol * sym,
tmp = GFC_TYPE_ARRAY_CAF_TOKEN (caf_type);
}
- VEC_safe_push (tree, gc, stringargs, tmp);
+ vec_safe_push (stringargs, tmp);
if (GFC_DESCRIPTOR_TYPE_P (caf_type)
&& GFC_TYPE_ARRAY_AKIND (caf_type) == GFC_ARRAY_ALLOCATABLE)
@@ -4692,10 +4691,10 @@ gfc_conv_procedure_call (gfc_se * se, gfc_symbol * sym,
offset = fold_build2_loc (input_location, PLUS_EXPR,
gfc_array_index_type, offset, tmp);
- VEC_safe_push (tree, gc, stringargs, offset);
+ vec_safe_push (stringargs, offset);
}
- VEC_safe_push (tree, gc, arglist, parmse.expr);
+ vec_safe_push (arglist, parmse.expr);
}
gfc_finish_interface_mapping (&mapping, &se->pre, &se->post);
@@ -4719,7 +4718,7 @@ gfc_conv_procedure_call (gfc_se * se, gfc_symbol * sym,
if (ts.deferred)
cl.backend_decl = gfc_create_var (gfc_charlen_type_node, "slen");
else if (!sym->attr.dummy)
- cl.backend_decl = VEC_index (tree, stringargs, 0);
+ cl.backend_decl = (*stringargs)[0];
else
{
formal = sym->ns->proc_name->formal;
@@ -4796,7 +4795,7 @@ gfc_conv_procedure_call (gfc_se * se, gfc_symbol * sym,
else
result = build_fold_indirect_ref_loc (input_location,
se->expr);
- VEC_safe_push (tree, gc, retargs, se->expr);
+ vec_safe_push (retargs, se->expr);
}
else if (comp && comp->attr.dimension)
{
@@ -4832,7 +4831,7 @@ gfc_conv_procedure_call (gfc_se * se, gfc_symbol * sym,
/* Pass the temporary as the first argument. */
result = info->descriptor;
tmp = gfc_build_addr_expr (NULL_TREE, result);
- VEC_safe_push (tree, gc, retargs, tmp);
+ vec_safe_push (retargs, tmp);
}
else if (!comp && sym->result->attr.dimension)
{
@@ -4868,7 +4867,7 @@ gfc_conv_procedure_call (gfc_se * se, gfc_symbol * sym,
/* Pass the temporary as the first argument. */
result = info->descriptor;
tmp = gfc_build_addr_expr (NULL_TREE, result);
- VEC_safe_push (tree, gc, retargs, tmp);
+ vec_safe_push (retargs, tmp);
}
else if (ts.type == BT_CHARACTER)
{
@@ -4899,7 +4898,7 @@ gfc_conv_procedure_call (gfc_se * se, gfc_symbol * sym,
else
var = gfc_conv_string_tmp (se, type, len);
- VEC_safe_push (tree, gc, retargs, var);
+ vec_safe_push (retargs, var);
}
else
{
@@ -4907,7 +4906,7 @@ gfc_conv_procedure_call (gfc_se * se, gfc_symbol * sym,
type = gfc_get_complex_type (ts.kind);
var = gfc_build_addr_expr (NULL_TREE, gfc_create_var (type, "cmplx"));
- VEC_safe_push (tree, gc, retargs, var);
+ vec_safe_push (retargs, var);
}
/* Add the string length to the argument list. */
@@ -4917,28 +4916,28 @@ gfc_conv_procedure_call (gfc_se * se, gfc_symbol * sym,
if (TREE_CODE (tmp) != VAR_DECL)
tmp = gfc_evaluate_now (len, &se->pre);
tmp = gfc_build_addr_expr (NULL_TREE, tmp);
- VEC_safe_push (tree, gc, retargs, tmp);
+ vec_safe_push (retargs, tmp);
}
else if (ts.type == BT_CHARACTER)
- VEC_safe_push (tree, gc, retargs, len);
+ vec_safe_push (retargs, len);
}
gfc_free_interface_mapping (&mapping);
/* We need to glom RETARGS + ARGLIST + STRINGARGS + APPEND_ARGS. */
- arglen = (VEC_length (tree, arglist)
- + VEC_length (tree, stringargs) + VEC_length (tree, append_args));
- VEC_reserve_exact (tree, gc, retargs, arglen);
+ arglen = (vec_safe_length (arglist) + vec_safe_length (stringargs)
+ + vec_safe_length (append_args));
+ vec_safe_reserve (retargs, arglen);
/* Add the return arguments. */
- VEC_splice (tree, retargs, arglist);
+ retargs->splice (arglist);
/* Add the hidden string length parameters to the arguments. */
- VEC_splice (tree, retargs, stringargs);
+ retargs->splice (stringargs);
/* We may want to append extra arguments here. This is used e.g. for
calls to libgfortran_matmul_??, which need extra information. */
- if (!VEC_empty (tree, append_args))
- VEC_splice (tree, retargs, append_args);
+ if (!vec_safe_is_empty (append_args))
+ retargs->splice (append_args);
arglist = retargs;
/* Generate the actual call. */
@@ -5423,7 +5422,8 @@ gfc_conv_function_expr (gfc_se * se, gfc_expr * expr)
if (!sym)
sym = expr->symtree->n.sym;
- gfc_conv_procedure_call (se, sym, expr->value.function.actual, expr, NULL);
+ gfc_conv_procedure_call (se, sym, expr->value.function.actual, expr,
+ NULL);
}
@@ -5965,7 +5965,7 @@ gfc_conv_structure (gfc_se * se, gfc_expr * expr, int init)
tree val;
tree type;
tree tmp;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
gcc_assert (se->ss == NULL);
gcc_assert (expr->expr_type == EXPR_STRUCTURE);
@@ -7139,7 +7139,8 @@ gfc_trans_zero_assign (gfc_expr * expr)
a = {} instead. */
if (!POINTER_TYPE_P (TREE_TYPE (dest)))
return build2_loc (input_location, MODIFY_EXPR, void_type_node,
- dest, build_constructor (TREE_TYPE (dest), NULL));
+ dest, build_constructor (TREE_TYPE (dest),
+ NULL));
/* Convert arguments to the correct types. */
dest = fold_convert (pvoid_type_node, dest);
diff --git a/gcc/fortran/trans-intrinsic.c b/gcc/fortran/trans-intrinsic.c
index a31284ee27c..5d9ce5c4f69 100644
--- a/gcc/fortran/trans-intrinsic.c
+++ b/gcc/fortran/trans-intrinsic.c
@@ -724,7 +724,7 @@ static tree
gfc_get_intrinsic_lib_fndecl (gfc_intrinsic_map_t * m, gfc_expr * expr)
{
tree type;
- VEC(tree,gc) *argtypes;
+ vec<tree, va_gc> *argtypes;
tree fndecl;
gfc_actual_arglist *actual;
tree *pdecl;
@@ -809,7 +809,7 @@ gfc_get_intrinsic_lib_fndecl (gfc_intrinsic_map_t * m, gfc_expr * expr)
for (actual = expr->value.function.actual; actual; actual = actual->next)
{
type = gfc_typenode_for_spec (&actual->expr->ts);
- VEC_safe_push (tree, gc, argtypes, type);
+ vec_safe_push (argtypes, type);
}
type = build_function_type_vec (gfc_typenode_for_spec (ts), argtypes);
fndecl = build_decl (input_location,
@@ -2341,7 +2341,7 @@ static void
gfc_conv_intrinsic_funcall (gfc_se * se, gfc_expr * expr)
{
gfc_symbol *sym;
- VEC(tree,gc) *append_args;
+ vec<tree, va_gc> *append_args;
gcc_assert (!se->ss || se->ss->info->expr == expr);
@@ -2381,19 +2381,19 @@ gfc_conv_intrinsic_funcall (gfc_se * se, gfc_expr * expr)
gemm_fndecl = gfor_fndecl_zgemm;
}
- append_args = VEC_alloc (tree, gc, 3);
- VEC_quick_push (tree, append_args, build_int_cst (cint, 1));
- VEC_quick_push (tree, append_args,
- build_int_cst (cint, gfc_option.blas_matmul_limit));
- VEC_quick_push (tree, append_args,
- gfc_build_addr_expr (NULL_TREE, gemm_fndecl));
+ vec_alloc (append_args, 3);
+ append_args->quick_push (build_int_cst (cint, 1));
+ append_args->quick_push (build_int_cst (cint,
+ gfc_option.blas_matmul_limit));
+ append_args->quick_push (gfc_build_addr_expr (NULL_TREE,
+ gemm_fndecl));
}
else
{
- append_args = VEC_alloc (tree, gc, 3);
- VEC_quick_push (tree, append_args, build_int_cst (cint, 0));
- VEC_quick_push (tree, append_args, build_int_cst (cint, 0));
- VEC_quick_push (tree, append_args, null_pointer_node);
+ vec_alloc (append_args, 3);
+ append_args->quick_push (build_int_cst (cint, 0));
+ append_args->quick_push (build_int_cst (cint, 0));
+ append_args->quick_push (null_pointer_node);
}
}
@@ -4486,7 +4486,7 @@ conv_generic_with_optional_char_arg (gfc_se* se, gfc_expr* expr,
unsigned cur_pos;
gfc_actual_arglist* arg;
gfc_symbol* sym;
- VEC(tree,gc) *append_args;
+ vec<tree, va_gc> *append_args;
/* Find the two arguments given as position. */
cur_pos = 0;
@@ -4516,8 +4516,8 @@ conv_generic_with_optional_char_arg (gfc_se* se, gfc_expr* expr,
tree dummy;
dummy = build_int_cst (gfc_charlen_type_node, 0);
- append_args = VEC_alloc (tree, gc, 1);
- VEC_quick_push (tree, append_args, dummy);
+ vec_alloc (append_args, 1);
+ append_args->quick_push (dummy);
}
/* Build the call itself. */
@@ -5985,7 +5985,7 @@ gfc_conv_intrinsic_sr_kind (gfc_se *se, gfc_expr *expr)
gfc_actual_arglist *actual;
tree type;
gfc_se argse;
- VEC(tree,gc) *args = NULL;
+ vec<tree, va_gc> *args = NULL;
for (actual = expr->value.function.actual; actual; actual = actual->next)
{
@@ -6011,7 +6011,7 @@ gfc_conv_intrinsic_sr_kind (gfc_se *se, gfc_expr *expr)
gfc_add_block_to_block (&se->pre, &argse.pre);
gfc_add_block_to_block (&se->post, &argse.post);
- VEC_safe_push (tree, gc, args, argse.expr);
+ vec_safe_push (args, argse.expr);
}
/* Convert it to the required type. */
diff --git a/gcc/fortran/trans-openmp.c b/gcc/fortran/trans-openmp.c
index e843692e020..a844b08a317 100644
--- a/gcc/fortran/trans-openmp.c
+++ b/gcc/fortran/trans-openmp.c
@@ -1293,8 +1293,6 @@ typedef struct dovar_init_d {
tree init;
} dovar_init;
-DEF_VEC_O(dovar_init);
-DEF_VEC_ALLOC_O(dovar_init,heap);
static tree
gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
@@ -1307,7 +1305,7 @@ gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
stmtblock_t body;
gfc_omp_clauses *clauses = code->ext.omp_clauses;
int i, collapse = clauses->collapse;
- VEC(dovar_init,heap) *inits = NULL;
+ vec<dovar_init> inits = vec<dovar_init>();
dovar_init *di;
unsigned ix;
@@ -1435,7 +1433,7 @@ gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
tmp = fold_build2_loc (input_location, MULT_EXPR, type, count, step);
tmp = fold_build2_loc (input_location, PLUS_EXPR, type, from, tmp);
dovar_init e = {dovar, tmp};
- VEC_safe_push (dovar_init, heap, inits, e);
+ inits.safe_push (e);
}
if (!dovar_found)
@@ -1506,9 +1504,9 @@ gfc_trans_omp_do (gfc_code *code, stmtblock_t *pblock,
gfc_start_block (&body);
- FOR_EACH_VEC_ELT (dovar_init, inits, ix, di)
+ FOR_EACH_VEC_ELT (inits, ix, di)
gfc_add_modify (&body, di->var, di->init);
- VEC_free (dovar_init, heap, inits);
+ inits.release ();
/* Cycle statement is implemented with a goto. Exit statement must not be
present for this loop. */
diff --git a/gcc/fortran/trans-stmt.c b/gcc/fortran/trans-stmt.c
index de22ce036e8..bdc559b4274 100644
--- a/gcc/fortran/trans-stmt.c
+++ b/gcc/fortran/trans-stmt.c
@@ -489,7 +489,8 @@ gfc_trans_call (gfc_code * code, bool dependency_check,
/* Add the subroutine call to the block. */
gfc_conv_procedure_call (&loopse, code->resolved_sym,
- code->ext.actual, code->expr1, NULL);
+ code->ext.actual, code->expr1,
+ NULL);
if (mask && count1)
{
@@ -2094,7 +2095,7 @@ gfc_trans_character_select (gfc_code *code)
gfc_code *c;
gfc_se se, expr1se;
int n, k;
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
tree pchartype = gfc_get_pchar_type (code->expr1->ts.kind);
@@ -2322,7 +2323,7 @@ gfc_trans_character_select (gfc_code *code)
/* Generate the structure describing the branches */
for (d = cp; d; d = d->right)
{
- VEC(constructor_elt,gc) *node = NULL;
+ vec<constructor_elt, va_gc> *node = NULL;
gfc_init_se (&se, NULL);
diff --git a/gcc/fortran/trans-types.c b/gcc/fortran/trans-types.c
index 81b7fa5ca27..35a39c57859 100644
--- a/gcc/fortran/trans-types.c
+++ b/gcc/fortran/trans-types.c
@@ -2690,7 +2690,7 @@ tree
gfc_get_function_type (gfc_symbol * sym)
{
tree type;
- VEC(tree,gc) *typelist;
+ vec<tree, va_gc> *typelist;
gfc_formal_arglist *f;
gfc_symbol *arg;
int alternate_return;
@@ -2713,7 +2713,7 @@ gfc_get_function_type (gfc_symbol * sym)
if (sym->attr.entry_master)
/* Additional parameter for selecting an entry point. */
- VEC_safe_push (tree, gc, typelist, gfc_array_index_type);
+ vec_safe_push (typelist, gfc_array_index_type);
if (sym->result)
arg = sym->result;
@@ -2732,17 +2732,16 @@ gfc_get_function_type (gfc_symbol * sym)
|| arg->ts.type == BT_CHARACTER)
type = build_reference_type (type);
- VEC_safe_push (tree, gc, typelist, type);
+ vec_safe_push (typelist, type);
if (arg->ts.type == BT_CHARACTER)
{
if (!arg->ts.deferred)
/* Transfer by value. */
- VEC_safe_push (tree, gc, typelist, gfc_charlen_type_node);
+ vec_safe_push (typelist, gfc_charlen_type_node);
else
/* Deferred character lengths are transferred by reference
so that the value can be returned. */
- VEC_safe_push (tree, gc, typelist,
- build_pointer_type (gfc_charlen_type_node));
+ vec_safe_push (typelist, build_pointer_type(gfc_charlen_type_node));
}
}
@@ -2780,7 +2779,7 @@ gfc_get_function_type (gfc_symbol * sym)
used without an explicit interface, and cannot be passed as
actual parameters for a dummy procedure. */
- VEC_safe_push (tree, gc, typelist, type);
+ vec_safe_push (typelist, type);
}
else
{
@@ -2803,11 +2802,11 @@ gfc_get_function_type (gfc_symbol * sym)
so that the value can be returned. */
type = build_pointer_type (gfc_charlen_type_node);
- VEC_safe_push (tree, gc, typelist, type);
+ vec_safe_push (typelist, type);
}
}
- if (!VEC_empty (tree, typelist)
+ if (!vec_safe_is_empty (typelist)
|| sym->attr.is_main_program
|| sym->attr.if_source != IFSRC_UNKNOWN)
is_varargs = false;
diff --git a/gcc/fortran/trans.h b/gcc/fortran/trans.h
index 652893ee60c..954dcd3400f 100644
--- a/gcc/fortran/trans.h
+++ b/gcc/fortran/trans.h
@@ -427,7 +427,7 @@ int gfc_is_intrinsic_libcall (gfc_expr *);
/* Used to call ordinary functions/subroutines
and procedure pointer components. */
int gfc_conv_procedure_call (gfc_se *, gfc_symbol *, gfc_actual_arglist *,
- gfc_expr *, VEC(tree,gc) *);
+ gfc_expr *, vec<tree, va_gc> *);
void gfc_conv_subref_array_arg (gfc_se *, gfc_expr *, int, sym_intent, bool);
diff --git a/gcc/function.c b/gcc/function.c
index 08f0b89c022..876e1c6297c 100644
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -61,7 +61,6 @@ along with GCC; see the file COPYING3. If not see
#include "tree-pass.h"
#include "predict.h"
#include "df.h"
-#include "vecprim.h"
#include "params.h"
#include "bb-reorder.h"
@@ -115,14 +114,14 @@ static GTY((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
htab_t types_used_by_vars_hash = NULL;
-VEC(tree,gc) *types_used_by_cur_var_decl;
+vec<tree, va_gc> *types_used_by_cur_var_decl;
/* Forward declarations. */
static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, enum machine_mode, tree);
-static void reorder_blocks_1 (rtx, tree, VEC(tree,heap) **);
+static void reorder_blocks_1 (rtx, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
@@ -140,9 +139,7 @@ static void set_insn_locations (rtx, int) ATTRIBUTE_UNUSED;
typedef struct function *function_p;
-DEF_VEC_P(function_p);
-DEF_VEC_ALLOC_P(function_p,heap);
-static VEC(function_p,heap) *function_context_stack;
+static vec<function_p> function_context_stack;
/* Save the current context for compilation of a nested function.
This is called from language-specific code. */
@@ -153,7 +150,7 @@ push_function_context (void)
if (cfun == 0)
allocate_struct_function (NULL, false);
- VEC_safe_push (function_p, heap, function_context_stack, cfun);
+ function_context_stack.safe_push (cfun);
set_cfun (NULL);
}
@@ -163,7 +160,7 @@ push_function_context (void)
void
pop_function_context (void)
{
- struct function *p = VEC_pop (function_p, function_context_stack);
+ struct function *p = function_context_stack.pop ();
set_cfun (p);
current_function_decl = p->decl;
@@ -593,10 +590,10 @@ insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
static struct temp_slot **
temp_slots_at_level (int level)
{
- if (level >= (int) VEC_length (temp_slot_p, used_temp_slots))
- VEC_safe_grow_cleared (temp_slot_p, gc, used_temp_slots, level + 1);
+ if (level >= (int) vec_safe_length (used_temp_slots))
+ vec_safe_grow_cleared (used_temp_slots, level + 1);
- return &(VEC_address (temp_slot_p, used_temp_slots)[level]);
+ return &(*used_temp_slots)[level];
}
/* Returns the maximal temporary slot level. */
@@ -607,7 +604,7 @@ max_slot_level (void)
if (!used_temp_slots)
return -1;
- return VEC_length (temp_slot_p, used_temp_slots) - 1;
+ return used_temp_slots->length () - 1;
}
/* Moves temporary slot TEMP to LEVEL. */
@@ -1198,7 +1195,7 @@ init_temp_slots (void)
{
/* We have not allocated any temporaries yet. */
avail_temp_slots = 0;
- used_temp_slots = 0;
+ vec_alloc (used_temp_slots, 0);
temp_slot_level = 0;
n_temp_slots_in_use = 0;
@@ -1877,7 +1874,7 @@ instantiate_decls (tree fndecl)
FOR_EACH_LOCAL_DECL (cfun, ix, decl)
if (DECL_RTL_SET_P (decl))
instantiate_decl_rtl (DECL_RTL (decl));
- VEC_free (tree, gc, cfun->local_decls);
+ vec_free (cfun->local_decls);
}
/* Pass through the INSNS of function FNDECL and convert virtual register
@@ -2215,12 +2212,12 @@ assign_parms_initialize_all (struct assign_parm_data_all *all)
needed, else the old list. */
static void
-split_complex_args (VEC(tree, heap) **args)
+split_complex_args (vec<tree> *args)
{
unsigned i;
tree p;
- FOR_EACH_VEC_ELT (tree, *args, i, p)
+ FOR_EACH_VEC_ELT (*args, i, p)
{
tree type = TREE_TYPE (p);
if (TREE_CODE (type) == COMPLEX_TYPE
@@ -2245,7 +2242,7 @@ split_complex_args (VEC(tree, heap) **args)
DECL_IGNORED_P (p) = addressable;
TREE_ADDRESSABLE (p) = 0;
layout_decl (p, 0);
- VEC_replace (tree, *args, i, p);
+ (*args)[i] = p;
/* Build a second synthetic decl. */
decl = build_decl (EXPR_LOCATION (p),
@@ -2254,7 +2251,7 @@ split_complex_args (VEC(tree, heap) **args)
DECL_ARTIFICIAL (decl) = addressable;
DECL_IGNORED_P (decl) = addressable;
layout_decl (decl, 0);
- VEC_safe_insert (tree, heap, *args, ++i, decl);
+ args->safe_insert (++i, decl);
}
}
}
@@ -2263,16 +2260,16 @@ split_complex_args (VEC(tree, heap) **args)
the hidden struct return argument, and (abi willing) complex args.
Return the new parameter list. */
-static VEC(tree, heap) *
+static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
tree fndecl = current_function_decl;
tree fntype = TREE_TYPE (fndecl);
- VEC(tree, heap) *fnargs = NULL;
+ vec<tree> fnargs = vec<tree>();
tree arg;
for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
- VEC_safe_push (tree, heap, fnargs, arg);
+ fnargs.safe_push (arg);
all->orig_fnargs = DECL_ARGUMENTS (fndecl);
@@ -2293,7 +2290,7 @@ assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
DECL_CHAIN (decl) = all->orig_fnargs;
all->orig_fnargs = decl;
- VEC_safe_insert (tree, heap, fnargs, 0, decl);
+ fnargs.safe_insert (0, decl);
all->function_result_decl = decl;
}
@@ -3259,7 +3256,7 @@ assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
- VEC(tree, heap) *fnargs)
+ vec<tree> fnargs)
{
tree parm;
tree orig_fnargs = all->orig_fnargs;
@@ -3273,8 +3270,8 @@ assign_parms_unsplit_complex (struct assign_parm_data_all *all,
rtx tmp, real, imag;
enum machine_mode inner = GET_MODE_INNER (DECL_MODE (parm));
- real = DECL_RTL (VEC_index (tree, fnargs, i));
- imag = DECL_RTL (VEC_index (tree, fnargs, i + 1));
+ real = DECL_RTL (fnargs[i]);
+ imag = DECL_RTL (fnargs[i + 1]);
if (inner != GET_MODE (real))
{
real = gen_lowpart_SUBREG (inner, real);
@@ -3307,8 +3304,8 @@ assign_parms_unsplit_complex (struct assign_parm_data_all *all,
tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
SET_DECL_RTL (parm, tmp);
- real = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i));
- imag = DECL_INCOMING_RTL (VEC_index (tree, fnargs, i + 1));
+ real = DECL_INCOMING_RTL (fnargs[i]);
+ imag = DECL_INCOMING_RTL (fnargs[i + 1]);
if (inner != GET_MODE (real))
{
real = gen_lowpart_SUBREG (inner, real);
@@ -3329,7 +3326,7 @@ assign_parms (tree fndecl)
{
struct assign_parm_data_all all;
tree parm;
- VEC(tree, heap) *fnargs;
+ vec<tree> fnargs;
unsigned i;
crtl->args.internal_arg_pointer
@@ -3338,7 +3335,7 @@ assign_parms (tree fndecl)
assign_parms_initialize_all (&all);
fnargs = assign_parms_augmented_arg_list (&all);
- FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
+ FOR_EACH_VEC_ELT (fnargs, i, parm)
{
struct assign_parm_data_one data;
@@ -3413,7 +3410,7 @@ assign_parms (tree fndecl)
if (targetm.calls.split_complex_arg)
assign_parms_unsplit_complex (&all, fnargs);
- VEC_free (tree, heap, fnargs);
+ fnargs.release ();
/* Output all parameter conversion instructions (possibly including calls)
now that all parameters have been copied out of hard registers. */
@@ -3578,13 +3575,13 @@ gimplify_parameters (void)
struct assign_parm_data_all all;
tree parm;
gimple_seq stmts = NULL;
- VEC(tree, heap) *fnargs;
+ vec<tree> fnargs;
unsigned i;
assign_parms_initialize_all (&all);
fnargs = assign_parms_augmented_arg_list (&all);
- FOR_EACH_VEC_ELT (tree, fnargs, i, parm)
+ FOR_EACH_VEC_ELT (fnargs, i, parm)
{
struct assign_parm_data_one data;
@@ -3667,7 +3664,7 @@ gimplify_parameters (void)
}
}
- VEC_free (tree, heap, fnargs);
+ fnargs.release ();
return stmts;
}
@@ -4095,12 +4092,12 @@ void
reorder_blocks (void)
{
tree block = DECL_INITIAL (current_function_decl);
- VEC(tree,heap) *block_stack;
+ vec<tree> block_stack;
if (block == NULL_TREE)
return;
- block_stack = VEC_alloc (tree, heap, 10);
+ block_stack.create (10);
/* Reset the TREE_ASM_WRITTEN bit for all blocks. */
clear_block_marks (block);
@@ -4113,7 +4110,7 @@ reorder_blocks (void)
reorder_blocks_1 (get_insns (), block, &block_stack);
BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
- VEC_free (tree, heap, block_stack);
+ block_stack.release ();
}
/* Helper function for reorder_blocks. Reset TREE_ASM_WRITTEN. */
@@ -4130,7 +4127,7 @@ clear_block_marks (tree block)
}
static void
-reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
+reorder_blocks_1 (rtx insns, tree current_block, vec<tree> *p_block_stack)
{
rtx insn;
tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
@@ -4185,11 +4182,11 @@ reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
(origin))
== current_block);
- if (VEC_empty (tree, *p_block_stack))
+ if (p_block_stack->is_empty ())
super = current_block;
else
{
- super = VEC_last (tree, *p_block_stack);
+ super = p_block_stack->last ();
gcc_assert (super == current_block
|| BLOCK_FRAGMENT_ORIGIN (super)
== current_block);
@@ -4199,11 +4196,11 @@ reorder_blocks_1 (rtx insns, tree current_block, VEC(tree,heap) **p_block_stack)
BLOCK_SUBBLOCKS (current_block) = block;
current_block = origin;
}
- VEC_safe_push (tree, heap, *p_block_stack, block);
+ p_block_stack->safe_push (block);
}
else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
{
- NOTE_BLOCK (insn) = VEC_pop (tree, *p_block_stack);
+ NOTE_BLOCK (insn) = p_block_stack->pop ();
current_block = BLOCK_SUPERCONTEXT (current_block);
if (BLOCK_FRAGMENT_ORIGIN (current_block))
current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
@@ -4411,7 +4408,7 @@ set_cfun (struct function *new_cfun)
/* Initialized with NOGC, making this poisonous to the garbage collector. */
-static VEC(function_p,heap) *cfun_stack;
+static vec<function_p> cfun_stack;
/* Push the current cfun onto the stack, and set cfun to new_cfun. Also set
current_function_decl accordingly. */
@@ -4421,7 +4418,7 @@ push_cfun (struct function *new_cfun)
{
gcc_assert ((!cfun && !current_function_decl)
|| (cfun && current_function_decl == cfun->decl));
- VEC_safe_push (function_p, heap, cfun_stack, cfun);
+ cfun_stack.safe_push (cfun);
current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
set_cfun (new_cfun);
}
@@ -4431,7 +4428,7 @@ push_cfun (struct function *new_cfun)
void
pop_cfun (void)
{
- struct function *new_cfun = VEC_pop (function_p, cfun_stack);
+ struct function *new_cfun = cfun_stack.pop ();
/* When in_dummy_function, we do have a cfun but current_function_decl is
NULL. We also allow pushing NULL cfun and subsequently changing
current_function_decl to something else and have both restored by
@@ -4527,7 +4524,7 @@ push_struct_function (tree fndecl)
gcc_assert (in_dummy_function
|| (!cfun && !current_function_decl)
|| (cfun && current_function_decl == cfun->decl));
- VEC_safe_push (function_p, heap, cfun_stack, cfun);
+ cfun_stack.safe_push (cfun);
current_function_decl = fndecl;
allocate_struct_function (fndecl, false);
}
@@ -5705,25 +5702,25 @@ active_insn_between (rtx head, rtx tail)
/* LAST_BB is a block that exits, and empty of active instructions.
Examine its predecessors for jumps that can be converted to
(conditional) returns. */
-static VEC (edge, heap) *
+static vec<edge>
convert_jumps_to_returns (basic_block last_bb, bool simple_p,
- VEC (edge, heap) *unconverted ATTRIBUTE_UNUSED)
+ vec<edge> unconverted ATTRIBUTE_UNUSED)
{
int i;
basic_block bb;
rtx label;
edge_iterator ei;
edge e;
- VEC(basic_block,heap) *src_bbs;
+ vec<basic_block> src_bbs;
- src_bbs = VEC_alloc (basic_block, heap, EDGE_COUNT (last_bb->preds));
+ src_bbs.create (EDGE_COUNT (last_bb->preds));
FOR_EACH_EDGE (e, ei, last_bb->preds)
if (e->src != ENTRY_BLOCK_PTR)
- VEC_quick_push (basic_block, src_bbs, e->src);
+ src_bbs.quick_push (e->src);
label = BB_HEAD (last_bb);
- FOR_EACH_VEC_ELT (basic_block, src_bbs, i, bb)
+ FOR_EACH_VEC_ELT (src_bbs, i, bb)
{
rtx jump = BB_END (bb);
@@ -5768,7 +5765,7 @@ convert_jumps_to_returns (basic_block last_bb, bool simple_p,
if (dump_file)
fprintf (dump_file,
"Failed to redirect bb %d branch.\n", bb->index);
- VEC_safe_push (edge, heap, unconverted, e);
+ unconverted.safe_push (e);
}
#endif
continue;
@@ -5791,7 +5788,7 @@ convert_jumps_to_returns (basic_block last_bb, bool simple_p,
if (dump_file)
fprintf (dump_file,
"Failed to redirect bb %d branch.\n", bb->index);
- VEC_safe_push (edge, heap, unconverted, e);
+ unconverted.safe_push (e);
}
#endif
continue;
@@ -5801,7 +5798,7 @@ convert_jumps_to_returns (basic_block last_bb, bool simple_p,
redirect_edge_succ (e, EXIT_BLOCK_PTR);
e->flags &= ~EDGE_CROSSING;
}
- VEC_free (basic_block, heap, src_bbs);
+ src_bbs.release ();
return unconverted;
}
@@ -5877,7 +5874,7 @@ thread_prologue_and_epilogue_insns (void)
{
bool inserted;
#ifdef HAVE_simple_return
- VEC (edge, heap) *unconverted_simple_returns = NULL;
+ vec<edge> unconverted_simple_returns = vec<edge>();
bool nonempty_prologue;
bitmap_head bb_flags;
unsigned max_grow_size;
@@ -5975,7 +5972,7 @@ thread_prologue_and_epilogue_insns (void)
HARD_REG_SET prologue_clobbered, prologue_used, live_on_edge;
struct hard_reg_set_container set_up_by_prologue;
rtx p_insn;
- VEC(basic_block, heap) *vec;
+ vec<basic_block> vec;
basic_block bb;
bitmap_head bb_antic_flags;
bitmap_head bb_on_list;
@@ -6011,7 +6008,7 @@ thread_prologue_and_epilogue_insns (void)
/* Find the set of basic blocks that require a stack frame,
and blocks that are too big to be duplicated. */
- vec = VEC_alloc (basic_block, heap, n_basic_blocks);
+ vec.create (n_basic_blocks);
CLEAR_HARD_REG_SET (set_up_by_prologue.set);
add_to_hard_reg_set (&set_up_by_prologue.set, Pmode,
@@ -6051,7 +6048,7 @@ thread_prologue_and_epilogue_insns (void)
if (bb == entry_edge->dest)
goto fail_shrinkwrap;
bitmap_set_bit (&bb_flags, bb->index);
- VEC_quick_push (basic_block, vec, bb);
+ vec.quick_push (bb);
break;
}
else if (size <= max_grow_size)
@@ -6069,23 +6066,23 @@ thread_prologue_and_epilogue_insns (void)
/* For every basic block that needs a prologue, mark all blocks
reachable from it, so as to ensure they are also seen as
requiring a prologue. */
- while (!VEC_empty (basic_block, vec))
+ while (!vec.is_empty ())
{
- basic_block tmp_bb = VEC_pop (basic_block, vec);
+ basic_block tmp_bb = vec.pop ();
FOR_EACH_EDGE (e, ei, tmp_bb->succs)
if (e->dest != EXIT_BLOCK_PTR
&& bitmap_set_bit (&bb_flags, e->dest->index))
- VEC_quick_push (basic_block, vec, e->dest);
+ vec.quick_push (e->dest);
}
/* Find the set of basic blocks that need no prologue, have a
single successor, can be duplicated, meet a max size
requirement, and go to the exit via like blocks. */
- VEC_quick_push (basic_block, vec, EXIT_BLOCK_PTR);
- while (!VEC_empty (basic_block, vec))
+ vec.quick_push (EXIT_BLOCK_PTR);
+ while (!vec.is_empty ())
{
- basic_block tmp_bb = VEC_pop (basic_block, vec);
+ basic_block tmp_bb = vec.pop ();
FOR_EACH_EDGE (e, ei, tmp_bb->preds)
if (single_succ_p (e->src)
@@ -6104,7 +6101,7 @@ thread_prologue_and_epilogue_insns (void)
&& !bitmap_bit_p (&bb_flags, pe->src->index))
break;
if (pe == NULL && bitmap_set_bit (&bb_tail, e->src->index))
- VEC_quick_push (basic_block, vec, e->src);
+ vec.quick_push (e->src);
}
}
@@ -6121,11 +6118,11 @@ thread_prologue_and_epilogue_insns (void)
FOR_EACH_EDGE (e, ei, bb->preds)
if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
&& bitmap_set_bit (&bb_on_list, e->src->index))
- VEC_quick_push (basic_block, vec, e->src);
+ vec.quick_push (e->src);
}
- while (!VEC_empty (basic_block, vec))
+ while (!vec.is_empty ())
{
- basic_block tmp_bb = VEC_pop (basic_block, vec);
+ basic_block tmp_bb = vec.pop ();
bool all_set = true;
bitmap_clear_bit (&bb_on_list, tmp_bb->index);
@@ -6142,7 +6139,7 @@ thread_prologue_and_epilogue_insns (void)
FOR_EACH_EDGE (e, ei, tmp_bb->preds)
if (!bitmap_bit_p (&bb_antic_flags, e->src->index)
&& bitmap_set_bit (&bb_on_list, e->src->index))
- VEC_quick_push (basic_block, vec, e->src);
+ vec.quick_push (e->src);
}
}
/* Find exactly one edge that leads to a block in ANTIC from
@@ -6210,14 +6207,14 @@ thread_prologue_and_epilogue_insns (void)
some_no_pro = true;
}
if (some_pro && some_no_pro)
- VEC_quick_push (basic_block, vec, bb);
+ vec.quick_push (bb);
else
bitmap_clear_bit (&bb_tail, bb->index);
}
/* Find the head of each tail. */
- while (!VEC_empty (basic_block, vec))
+ while (!vec.is_empty ())
{
- basic_block tbb = VEC_pop (basic_block, vec);
+ basic_block tbb = vec.pop ();
if (!bitmap_bit_p (&bb_tail, tbb->index))
continue;
@@ -6299,7 +6296,7 @@ thread_prologue_and_epilogue_insns (void)
bitmap_clear (&bb_tail);
bitmap_clear (&bb_antic_flags);
bitmap_clear (&bb_on_list);
- VEC_free (basic_block, heap, vec);
+ vec.release ();
}
#endif
@@ -6377,7 +6374,7 @@ thread_prologue_and_epilogue_insns (void)
if (LABEL_P (BB_HEAD (last_bb))
&& !active_insn_between (BB_HEAD (last_bb), BB_END (last_bb)))
- convert_jumps_to_returns (last_bb, false, NULL);
+ convert_jumps_to_returns (last_bb, false, vec<edge>());
if (EDGE_COUNT (last_bb->preds) != 0
&& single_succ_p (last_bb))
@@ -6511,7 +6508,7 @@ epilogue_done:
convert to conditional simple_returns, but couldn't for some
reason, create a block to hold a simple_return insn and redirect
those remaining edges. */
- if (!VEC_empty (edge, unconverted_simple_returns))
+ if (!unconverted_simple_returns.is_empty ())
{
basic_block simple_return_block_hot = NULL;
basic_block simple_return_block_cold = NULL;
@@ -6546,7 +6543,7 @@ epilogue_done:
pending_edge_cold = e;
}
- FOR_EACH_VEC_ELT (edge, unconverted_simple_returns, i, e)
+ FOR_EACH_VEC_ELT (unconverted_simple_returns, i, e)
{
basic_block *pdest_bb;
edge pending;
@@ -6585,7 +6582,7 @@ epilogue_done:
}
redirect_edge_and_branch_force (e, *pdest_bb);
}
- VEC_free (edge, heap, unconverted_simple_returns);
+ unconverted_simple_returns.release ();
}
if (entry_edge != orig_entry_edge)
@@ -6851,10 +6848,12 @@ used_types_insert (tree t)
if (cfun)
used_types_insert_helper (t, cfun);
else
- /* So this might be a type referenced by a global variable.
- Record that type so that we can later decide to emit its debug
- information. */
- VEC_safe_push (tree, gc, types_used_by_cur_var_decl, t);
+ {
+ /* So this might be a type referenced by a global variable.
+ Record that type so that we can later decide to emit its
+ debug information. */
+ vec_safe_push (types_used_by_cur_var_decl, t);
+ }
}
}
diff --git a/gcc/function.h b/gcc/function.h
index 59e729dc6bf..63704d4fb09 100644
--- a/gcc/function.h
+++ b/gcc/function.h
@@ -24,8 +24,6 @@ along with GCC; see the file COPYING3. If not see
#include "hashtab.h"
#include "vec.h"
-#include "vecprim.h"
-#include "vecir.h"
#include "machmode.h"
#include "tm.h" /* For CUMULATIVE_ARGS. */
#include "hard-reg-set.h" /* For HARD_REG_SET in struct rtl_data. */
@@ -143,8 +141,6 @@ struct GTY(()) expr_status {
};
typedef struct call_site_record_d *call_site_record;
-DEF_VEC_P(call_site_record);
-DEF_VEC_ALLOC_P(call_site_record, gc);
/* RTL representation of exception handling. */
struct GTY(()) rtl_eh {
@@ -155,9 +151,9 @@ struct GTY(()) rtl_eh {
rtx sjlj_fc;
rtx sjlj_exit_after;
- VEC(uchar,gc) *action_record_data;
+ vec<uchar, va_gc> *action_record_data;
- VEC(call_site_record,gc) *call_site_record_v[2];
+ vec<call_site_record, va_gc> *call_site_record_v[2];
};
#define pending_stack_adjust (crtl->expr.x_pending_stack_adjust)
@@ -173,13 +169,9 @@ typedef struct temp_slot *temp_slot_p;
struct call_site_record_d;
struct dw_fde_struct;
-DEF_VEC_P(temp_slot_p);
-DEF_VEC_ALLOC_P(temp_slot_p,gc);
struct ipa_opt_pass_d;
typedef struct ipa_opt_pass_d *ipa_opt_pass;
-DEF_VEC_P(ipa_opt_pass);
-DEF_VEC_ALLOC_P(ipa_opt_pass,heap);
struct GTY(()) varasm_status {
/* If we're using a per-function constant pool, this is it. */
@@ -316,7 +308,7 @@ struct GTY(()) rtl_data {
rtx x_parm_birth_insn;
/* List of all used temporaries allocated, by level. */
- VEC(temp_slot_p,gc) *x_used_temp_slots;
+ vec<temp_slot_p, va_gc> *x_used_temp_slots;
/* List of available temp slots. */
struct temp_slot *x_avail_temp_slots;
@@ -554,7 +546,7 @@ struct GTY(()) function {
tree nonlocal_goto_save_area;
/* Vector of function local variables, functions, types and constants. */
- VEC(tree,gc) *local_decls;
+ vec<tree, va_gc> *local_decls;
/* For md files. */
@@ -661,11 +653,11 @@ struct GTY(()) function {
static inline void
add_local_decl (struct function *fun, tree d)
{
- VEC_safe_push (tree, gc, fun->local_decls, d);
+ vec_safe_push (fun->local_decls, d);
}
#define FOR_EACH_LOCAL_DECL(FUN, I, D) \
- FOR_EACH_VEC_ELT_REVERSE (tree, (FUN)->local_decls, I, D)
+ FOR_EACH_VEC_SAFE_ELT_REVERSE ((FUN)->local_decls, I, D)
/* If va_list_[gf]pr_size is set to this, it means we don't know how
many units need to be saved. */
@@ -705,7 +697,7 @@ void types_used_by_var_decl_insert (tree type, tree var_decl);
/* During parsing of a global variable, this vector contains the types
referenced by the global variable. */
-extern GTY(()) VEC(tree,gc) *types_used_by_cur_var_decl;
+extern GTY(()) vec<tree, va_gc> *types_used_by_cur_var_decl;
/* cfun shouldn't be set directly; use one of these functions instead. */
extern void set_cfun (struct function *new_cfun);
diff --git a/gcc/fwprop.c b/gcc/fwprop.c
index 545bd1532a5..0f2ee49ac14 100644
--- a/gcc/fwprop.c
+++ b/gcc/fwprop.c
@@ -116,11 +116,9 @@ along with GCC; see the file COPYING3. If not see
static int num_changes;
-DEF_VEC_P(df_ref);
-DEF_VEC_ALLOC_P(df_ref,heap);
-static VEC(df_ref,heap) *use_def_ref;
-static VEC(df_ref,heap) *reg_defs;
-static VEC(df_ref,heap) *reg_defs_stack;
+static vec<df_ref> use_def_ref;
+static vec<df_ref> reg_defs;
+static vec<df_ref> reg_defs_stack;
/* The MD bitmaps are trimmed to include only live registers to cut
memory usage on testcases like insn-recog.c. Track live registers
@@ -135,7 +133,7 @@ static bitmap local_lr;
static inline df_ref
get_def_for_use (df_ref use)
{
- return VEC_index (df_ref, use_def_ref, DF_REF_ID (use));
+ return use_def_ref[DF_REF_ID (use)];
}
@@ -154,7 +152,7 @@ process_defs (df_ref *def_rec, int top_flag)
df_ref def;
while ((def = *def_rec++) != NULL)
{
- df_ref curr_def = VEC_index (df_ref, reg_defs, DF_REF_REGNO (def));
+ df_ref curr_def = reg_defs[DF_REF_REGNO (def)];
unsigned int dregno;
if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) != top_flag)
@@ -162,7 +160,7 @@ process_defs (df_ref *def_rec, int top_flag)
dregno = DF_REF_REGNO (def);
if (curr_def)
- VEC_safe_push (df_ref, heap, reg_defs_stack, curr_def);
+ reg_defs_stack.safe_push (curr_def);
else
{
/* Do not store anything if "transitioning" from NULL to NULL. But
@@ -171,18 +169,18 @@ process_defs (df_ref *def_rec, int top_flag)
if (DF_REF_FLAGS (def) & DF_MD_GEN_FLAGS)
;
else
- VEC_safe_push (df_ref, heap, reg_defs_stack, def);
+ reg_defs_stack.safe_push (def);
}
if (DF_REF_FLAGS (def) & DF_MD_GEN_FLAGS)
{
bitmap_set_bit (local_md, dregno);
- VEC_replace (df_ref, reg_defs, dregno, NULL);
+ reg_defs[dregno] = NULL;
}
else
{
bitmap_clear_bit (local_md, dregno);
- VEC_replace (df_ref, reg_defs, dregno, def);
+ reg_defs[dregno] = def;
}
}
}
@@ -201,11 +199,10 @@ process_uses (df_ref *use_rec, int top_flag)
if ((DF_REF_FLAGS (use) & DF_REF_AT_TOP) == top_flag)
{
unsigned int uregno = DF_REF_REGNO (use);
- if (VEC_index (df_ref, reg_defs, uregno)
+ if (reg_defs[uregno]
&& !bitmap_bit_p (local_md, uregno)
&& bitmap_bit_p (local_lr, uregno))
- VEC_replace (df_ref, use_def_ref, DF_REF_ID (use),
- VEC_index (df_ref, reg_defs, uregno));
+ use_def_ref[DF_REF_ID (use)] = reg_defs[uregno];
}
}
@@ -223,7 +220,7 @@ single_def_use_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
bitmap_copy (local_lr, &lr_bb_info->in);
/* Push a marker for the leave_block callback. */
- VEC_safe_push (df_ref, heap, reg_defs_stack, NULL);
+ reg_defs_stack.safe_push (NULL);
process_uses (df_get_artificial_uses (bb_index), DF_REF_AT_TOP);
process_defs (df_get_artificial_defs (bb_index), DF_REF_AT_TOP);
@@ -254,15 +251,15 @@ single_def_use_leave_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
basic_block bb ATTRIBUTE_UNUSED)
{
df_ref saved_def;
- while ((saved_def = VEC_pop (df_ref, reg_defs_stack)) != NULL)
+ while ((saved_def = reg_defs_stack.pop ()) != NULL)
{
unsigned int dregno = DF_REF_REGNO (saved_def);
/* See also process_defs. */
- if (saved_def == VEC_index (df_ref, reg_defs, dregno))
- VEC_replace (df_ref, reg_defs, dregno, NULL);
+ if (saved_def == reg_defs[dregno])
+ reg_defs[dregno] = NULL;
else
- VEC_replace (df_ref, reg_defs, dregno, saved_def);
+ reg_defs[dregno] = saved_def;
}
}
@@ -283,13 +280,13 @@ build_single_def_use_links (void)
df_analyze ();
df_maybe_reorganize_use_refs (DF_REF_ORDER_BY_INSN_WITH_NOTES);
- use_def_ref = VEC_alloc (df_ref, heap, DF_USES_TABLE_SIZE ());
- VEC_safe_grow_cleared (df_ref, heap, use_def_ref, DF_USES_TABLE_SIZE ());
+ use_def_ref.create (DF_USES_TABLE_SIZE ());
+ use_def_ref.safe_grow_cleared (DF_USES_TABLE_SIZE ());
- reg_defs = VEC_alloc (df_ref, heap, max_reg_num ());
- VEC_safe_grow_cleared (df_ref, heap, reg_defs, max_reg_num ());
+ reg_defs.create (max_reg_num ());
+ reg_defs.safe_grow_cleared (max_reg_num ());
- reg_defs_stack = VEC_alloc (df_ref, heap, n_basic_blocks * 10);
+ reg_defs_stack.create (n_basic_blocks * 10);
local_md = BITMAP_ALLOC (NULL);
local_lr = BITMAP_ALLOC (NULL);
@@ -306,8 +303,8 @@ build_single_def_use_links (void)
BITMAP_FREE (local_lr);
BITMAP_FREE (local_md);
- VEC_free (df_ref, heap, reg_defs);
- VEC_free (df_ref, heap, reg_defs_stack);
+ reg_defs.release ();
+ reg_defs_stack.release ();
}
@@ -912,14 +909,13 @@ update_uses (df_ref *use_rec)
int regno = DF_REF_REGNO (use);
/* Set up the use-def chain. */
- if (DF_REF_ID (use) >= (int) VEC_length (df_ref, use_def_ref))
- VEC_safe_grow_cleared (df_ref, heap, use_def_ref,
- DF_REF_ID (use) + 1);
+ if (DF_REF_ID (use) >= (int) use_def_ref.length ())
+ use_def_ref.safe_grow_cleared (DF_REF_ID (use) + 1);
#ifdef ENABLE_CHECKING
gcc_assert (sparseset_bit_p (active_defs_check, regno));
#endif
- VEC_replace (df_ref, use_def_ref, DF_REF_ID (use), active_defs[regno]);
+ use_def_ref[DF_REF_ID (use)] = active_defs[regno];
}
}
@@ -1425,7 +1421,7 @@ fwprop_done (void)
{
loop_optimizer_finalize ();
- VEC_free (df_ref, heap, use_def_ref);
+ use_def_ref.release ();
free (active_defs);
#ifdef ENABLE_CHECKING
sparseset_free (active_defs_check);
diff --git a/gcc/gcc.c b/gcc/gcc.c
index 7a275e133ad..3d515665f7a 100644
--- a/gcc/gcc.c
+++ b/gcc/gcc.c
@@ -1020,23 +1020,21 @@ static const struct compiler default_compilers[] =
static const int n_default_compilers = ARRAY_SIZE (default_compilers) - 1;
typedef char *char_p; /* For DEF_VEC_P. */
-DEF_VEC_P(char_p);
-DEF_VEC_ALLOC_P(char_p,heap);
/* A vector of options to give to the linker.
These options are accumulated by %x,
and substituted into the linker command with %X. */
-static VEC(char_p,heap) *linker_options;
+static vec<char_p> linker_options;
/* A vector of options to give to the assembler.
These options are accumulated by -Wa,
and substituted into the assembler command with %Y. */
-static VEC(char_p,heap) *assembler_options;
+static vec<char_p> assembler_options;
/* A vector of options to give to the preprocessor.
These options are accumulated by -Wp,
and substituted into the preprocessor command with %Z. */
-static VEC(char_p,heap) *preprocessor_options;
+static vec<char_p> preprocessor_options;
static char *
skip_whitespace (char *p)
@@ -1563,12 +1561,10 @@ set_spec (const char *name, const char *spec, bool user_p)
/* Accumulate a command (program name and args), and run it. */
typedef const char *const_char_p; /* For DEF_VEC_P. */
-DEF_VEC_P(const_char_p);
-DEF_VEC_ALLOC_P(const_char_p,heap);
/* Vector of pointers to arguments in the current line of specifications. */
-static VEC(const_char_p,heap) *argbuf;
+static vec<const_char_p> argbuf;
/* Position in the argbuf vector containing the name of the output file
(the value associated with the "-o" flag). */
@@ -1607,7 +1603,7 @@ static int signal_count;
static void
alloc_args (void)
{
- argbuf = VEC_alloc (const_char_p, heap, 10);
+ argbuf.create (10);
}
/* Clear out the vector of arguments (after a command is executed). */
@@ -1615,7 +1611,7 @@ alloc_args (void)
static void
clear_args (void)
{
- VEC_truncate (const_char_p, argbuf, 0);
+ argbuf.truncate (0);
}
/* Add one argument to the vector at the end.
@@ -1628,10 +1624,10 @@ clear_args (void)
static void
store_arg (const char *arg, int delete_always, int delete_failure)
{
- VEC_safe_push (const_char_p, heap, argbuf, arg);
+ argbuf.safe_push (arg);
if (strcmp (arg, "-o") == 0)
- have_o_argbuf_index = VEC_length (const_char_p, argbuf);
+ have_o_argbuf_index = argbuf.length ();
if (delete_always || delete_failure)
{
const char *p;
@@ -2545,14 +2541,14 @@ execute (void)
if (wrapper_string)
{
string = find_a_file (&exec_prefixes,
- VEC_index (const_char_p, argbuf, 0), X_OK, false);
+ argbuf[0], X_OK, false);
if (string)
- VEC_replace (const_char_p, argbuf, 0, string);
+ argbuf[0] = string;
insert_wrapper (wrapper_string);
}
/* Count # of piped commands. */
- for (n_commands = 1, i = 0; VEC_iterate (const_char_p, argbuf, i, arg); i++)
+ for (n_commands = 1, i = 0; argbuf.iterate (i, &arg); i++)
if (strcmp (arg, "|") == 0)
n_commands++;
@@ -2563,10 +2559,10 @@ execute (void)
and record info about each one.
Also search for the programs that are to be run. */
- VEC_safe_push (const_char_p, heap, argbuf, 0);
+ argbuf.safe_push (0);
- commands[0].prog = VEC_index (const_char_p, argbuf, 0); /* first command. */
- commands[0].argv = VEC_address (const_char_p, argbuf);
+ commands[0].prog = argbuf[0]; /* first command. */
+ commands[0].argv = argbuf.address ();
if (!wrapper_string)
{
@@ -2574,17 +2570,17 @@ execute (void)
commands[0].argv[0] = (string) ? string : commands[0].argv[0];
}
- for (n_commands = 1, i = 0; VEC_iterate (const_char_p, argbuf, i, arg); i++)
+ for (n_commands = 1, i = 0; argbuf.iterate (i, &arg); i++)
if (arg && strcmp (arg, "|") == 0)
{ /* each command. */
#if defined (__MSDOS__) || defined (OS2) || defined (VMS)
fatal_error ("-pipe not supported");
#endif
- VEC_replace (const_char_p, argbuf, i, 0); /* Termination of
+ argbuf[i] = 0; /* Termination of
command args. */
- commands[n_commands].prog = VEC_index (const_char_p, argbuf, i + 1);
+ commands[n_commands].prog = argbuf[i + 1];
commands[n_commands].argv
- = &(VEC_address (const_char_p, argbuf))[i + 1];
+ = &(argbuf.address ())[i + 1];
string = find_a_file (&exec_prefixes, commands[n_commands].prog,
X_OK, false);
if (string)
@@ -3074,20 +3070,19 @@ display_help (void)
static void
add_preprocessor_option (const char *option, int len)
{
- VEC_safe_push (char_p, heap, preprocessor_options,
- save_string (option, len));
+ preprocessor_options.safe_push (save_string (option, len));
}
static void
add_assembler_option (const char *option, int len)
{
- VEC_safe_push (char_p, heap, assembler_options, save_string (option, len));
+ assembler_options.safe_push (save_string (option, len));
}
static void
add_linker_option (const char *option, int len)
{
- VEC_safe_push (char_p, heap, linker_options, save_string (option, len));
+ linker_options.safe_push (save_string (option, len));
}
/* Allocate space for an input file in infiles. */
@@ -4195,7 +4190,7 @@ insert_wrapper (const char *wrapper)
int i;
char *buf = xstrdup (wrapper);
char *p = buf;
- unsigned int old_length = VEC_length (const_char_p, argbuf);
+ unsigned int old_length = argbuf.length ();
do
{
@@ -4205,9 +4200,9 @@ insert_wrapper (const char *wrapper)
}
while ((p = strchr (p, ',')) != NULL);
- VEC_safe_grow (const_char_p, heap, argbuf, old_length + n);
- memmove (VEC_address (const_char_p, argbuf) + n,
- VEC_address (const_char_p, argbuf),
+ argbuf.safe_grow (old_length + n);
+ memmove (argbuf.address () + n,
+ argbuf.address (),
old_length * sizeof (const_char_p));
i = 0;
@@ -4219,7 +4214,7 @@ insert_wrapper (const char *wrapper)
*p = 0;
p++;
}
- VEC_replace (const_char_p, argbuf, i, p);
+ argbuf[i] = p;
i++;
}
while ((p = strchr (p, ',')) != NULL);
@@ -4240,13 +4235,13 @@ do_spec (const char *spec)
If -pipe, this forces out the last command if it ended in `|'. */
if (value == 0)
{
- if (VEC_length (const_char_p, argbuf) > 0
- && !strcmp (VEC_last (const_char_p, argbuf), "|"))
- VEC_pop (const_char_p, argbuf);
+ if (argbuf.length () > 0
+ && !strcmp (argbuf.last (), "|"))
+ argbuf.pop ();
set_collect_gcc_options ();
- if (VEC_length (const_char_p, argbuf) > 0)
+ if (argbuf.length () > 0)
value = execute ();
}
@@ -4342,7 +4337,7 @@ do_self_spec (const char *spec)
if ((switches[i].live_cond & SWITCH_IGNORE))
switches[i].live_cond |= SWITCH_IGNORE_PERMANENTLY;
- if (VEC_length (const_char_p, argbuf) > 0)
+ if (argbuf.length () > 0)
{
const char **argbuf_copy;
struct cl_decoded_option *decoded_options;
@@ -4353,12 +4348,12 @@ do_self_spec (const char *spec)
/* Create a copy of argbuf with a dummy argv[0] entry for
decode_cmdline_options_to_array. */
argbuf_copy = XNEWVEC (const char *,
- VEC_length (const_char_p, argbuf) + 1);
+ argbuf.length () + 1);
argbuf_copy[0] = "";
- memcpy (argbuf_copy + 1, VEC_address (const_char_p, argbuf),
- VEC_length (const_char_p, argbuf) * sizeof (const char *));
+ memcpy (argbuf_copy + 1, argbuf.address (),
+ argbuf.length () * sizeof (const char *));
- decode_cmdline_options_to_array (VEC_length (const_char_p, argbuf) + 1,
+ decode_cmdline_options_to_array (argbuf.length () + 1,
argbuf_copy,
CL_DRIVER, &decoded_options,
&decoded_options_count);
@@ -4502,12 +4497,12 @@ compile_input_file_p (struct infile *infile)
/* Process each member of VEC as a spec. */
static void
-do_specs_vec (VEC(char_p,heap) *vec)
+do_specs_vec (vec<char_p> vec)
{
unsigned ix;
char *opt;
- FOR_EACH_VEC_ELT (char_p, vec, ix, opt)
+ FOR_EACH_VEC_ELT (vec, ix, opt)
{
do_spec_1 (opt, 1, NULL);
/* Make each accumulated option a separate argument. */
@@ -4547,8 +4542,8 @@ do_spec_1 (const char *spec, int inswitch, const char *soft_matched_part)
case '\n':
end_going_arg ();
- if (VEC_length (const_char_p, argbuf) > 0
- && !strcmp (VEC_last (const_char_p, argbuf), "|"))
+ if (argbuf.length () > 0
+ && !strcmp (argbuf.last (), "|"))
{
/* A `|' before the newline means use a pipe here,
but only if -pipe was specified.
@@ -4559,12 +4554,12 @@ do_spec_1 (const char *spec, int inswitch, const char *soft_matched_part)
break;
}
else
- VEC_pop (const_char_p, argbuf);
+ argbuf.pop ();
}
set_collect_gcc_options ();
- if (VEC_length (const_char_p, argbuf) > 0)
+ if (argbuf.length () > 0)
{
value = execute ();
if (value)
@@ -5068,7 +5063,7 @@ do_spec_1 (const char *spec, int inswitch, const char *soft_matched_part)
case 'W':
{
- unsigned int cur_index = VEC_length (const_char_p, argbuf);
+ unsigned int cur_index = argbuf.length ();
/* Handle the {...} following the %W. */
if (*p != '{')
fatal_error ("spec %qs has invalid %<%%W%c%>", spec, *p);
@@ -5078,8 +5073,8 @@ do_spec_1 (const char *spec, int inswitch, const char *soft_matched_part)
end_going_arg ();
/* If any args were output, mark the last one for deletion
on failure. */
- if (VEC_length (const_char_p, argbuf) != cur_index)
- record_temp_file (VEC_last (const_char_p, argbuf), 0, 1);
+ if (argbuf.length () != cur_index)
+ record_temp_file (argbuf.last (), 0, 1);
break;
}
@@ -5099,7 +5094,7 @@ do_spec_1 (const char *spec, int inswitch, const char *soft_matched_part)
string = save_string (p1 + 1, p - p1 - 2);
/* See if we already recorded this option. */
- FOR_EACH_VEC_ELT (char_p, linker_options, ix, opt)
+ FOR_EACH_VEC_ELT (linker_options, ix, opt)
if (! strcmp (string, opt))
{
free (string);
@@ -5384,7 +5379,7 @@ eval_spec_function (const char *func, const char *args)
const char *funcval;
/* Saved spec processing context. */
- VEC(const_char_p,heap) *save_argbuf;
+ vec<const_char_p> save_argbuf;
int save_arg_going;
int save_delete_this_arg;
@@ -5434,11 +5429,11 @@ eval_spec_function (const char *func, const char *args)
/* argbuf_index is an index for the next argument to be inserted, and
so contains the count of the args already inserted. */
- funcval = (*sf->func) (VEC_length (const_char_p, argbuf),
- VEC_address (const_char_p, argbuf));
+ funcval = (*sf->func) (argbuf.length (),
+ argbuf.address ());
/* Pop the spec processing context. */
- VEC_free (const_char_p, heap, argbuf);
+ argbuf.release ();
argbuf = save_argbuf;
arg_going = save_arg_going;
@@ -6398,10 +6393,10 @@ main (int argc, char **argv)
&& !no_sysroot_suffix
&& do_spec_2 (sysroot_suffix_spec) == 0)
{
- if (VEC_length (const_char_p, argbuf) > 1)
+ if (argbuf.length () > 1)
error ("spec failure: more than one arg to SYSROOT_SUFFIX_SPEC");
- else if (VEC_length (const_char_p, argbuf) == 1)
- target_sysroot_suffix = xstrdup (VEC_last (const_char_p, argbuf));
+ else if (argbuf.length () == 1)
+ target_sysroot_suffix = xstrdup (argbuf.last ());
}
#ifdef HAVE_LD_SYSROOT
@@ -6422,10 +6417,10 @@ main (int argc, char **argv)
&& !no_sysroot_suffix
&& do_spec_2 (sysroot_hdrs_suffix_spec) == 0)
{
- if (VEC_length (const_char_p, argbuf) > 1)
+ if (argbuf.length () > 1)
error ("spec failure: more than one arg to SYSROOT_HEADERS_SUFFIX_SPEC");
- else if (VEC_length (const_char_p, argbuf) == 1)
- target_sysroot_hdrs_suffix = xstrdup (VEC_last (const_char_p, argbuf));
+ else if (argbuf.length () == 1)
+ target_sysroot_hdrs_suffix = xstrdup (argbuf.last ());
}
/* Look for startfiles in the standard places. */
@@ -6435,7 +6430,7 @@ main (int argc, char **argv)
{
const char *arg;
int ndx;
- FOR_EACH_VEC_ELT (const_char_p, argbuf, ndx, arg)
+ FOR_EACH_VEC_ELT (argbuf, ndx, arg)
add_sysrooted_prefix (&startfile_prefixes, arg, "BINUTILS",
PREFIX_PRIORITY_LAST, 0, 1);
}
@@ -8278,20 +8273,20 @@ compare_debug_dump_opt_spec_function (int arg,
do_spec_2 ("%{fdump-final-insns=*:%*}");
do_spec_1 (" ", 0, NULL);
- if (VEC_length (const_char_p, argbuf) > 0
- && strcmp (argv[VEC_length (const_char_p, argbuf) - 1], "."))
+ if (argbuf.length () > 0
+ && strcmp (argv[argbuf.length () - 1], "."))
{
if (!compare_debug)
return NULL;
- name = xstrdup (argv[VEC_length (const_char_p, argbuf) - 1]);
+ name = xstrdup (argv[argbuf.length () - 1]);
ret = NULL;
}
else
{
const char *ext = NULL;
- if (VEC_length (const_char_p, argbuf) > 0)
+ if (argbuf.length () > 0)
{
do_spec_2 ("%{o*:%*}%{!o:%{!S:%b%O}%{S:%b.s}}");
ext = ".gkd";
@@ -8303,9 +8298,9 @@ compare_debug_dump_opt_spec_function (int arg,
do_spec_1 (" ", 0, NULL);
- gcc_assert (VEC_length (const_char_p, argbuf) > 0);
+ gcc_assert (argbuf.length () > 0);
- name = concat (VEC_last (const_char_p, argbuf), ext, NULL);
+ name = concat (argbuf.last (), ext, NULL);
ret = concat ("-fdump-final-insns=", name, NULL);
}
@@ -8353,9 +8348,9 @@ compare_debug_self_opt_spec_function (int arg,
do_spec_2 ("%{c|S:%{o*:%*}}");
do_spec_1 (" ", 0, NULL);
- if (VEC_length (const_char_p, argbuf) > 0)
+ if (argbuf.length () > 0)
debug_auxbase_opt = concat ("-auxbase-strip ",
- VEC_last (const_char_p, argbuf),
+ argbuf.last (),
NULL);
else
debug_auxbase_opt = NULL;
diff --git a/gcc/gcse.c b/gcc/gcse.c
index 9b7a92f1072..4be7cdc616b 100644
--- a/gcc/gcse.c
+++ b/gcc/gcse.c
@@ -310,8 +310,6 @@ struct occr
};
typedef struct occr *occr_t;
-DEF_VEC_P (occr_t);
-DEF_VEC_ALLOC_P (occr_t, heap);
/* Expression hash tables.
Each hash table is an array of buckets.
@@ -374,7 +372,7 @@ static regset reg_set_bitmap;
/* Array, indexed by basic block number for a list of insns which modify
memory within that block. */
-static VEC (rtx,heap) **modify_mem_list;
+static vec<rtx> *modify_mem_list;
static bitmap modify_mem_list_set;
typedef struct modify_pair_s
@@ -383,12 +381,10 @@ typedef struct modify_pair_s
rtx dest_addr; /* The canonical address of `dest'. */
} modify_pair;
-DEF_VEC_O(modify_pair);
-DEF_VEC_ALLOC_O(modify_pair,heap);
/* This array parallels modify_mem_list, except that it stores MEMs
being set and their canonicalized memory addresses. */
-static VEC (modify_pair,heap) **canon_modify_mem_list;
+static vec<modify_pair> *canon_modify_mem_list;
/* Bitmap indexed by block numbers to record which blocks contain
function calls. */
@@ -611,10 +607,12 @@ alloc_gcse_mem (void)
reg_set_bitmap = ALLOC_REG_SET (NULL);
/* Allocate array to keep a list of insns which modify memory in each
- basic block. */
- modify_mem_list = GCNEWVEC (VEC (rtx,heap) *, last_basic_block);
- canon_modify_mem_list = GCNEWVEC (VEC (modify_pair,heap) *,
- last_basic_block);
+ basic block. The two typedefs are needed to work around the
+ pre-processor limitation with template types in macro arguments. */
+ typedef vec<rtx> vec_rtx_heap;
+ typedef vec<modify_pair> vec_modify_pair_heap;
+ modify_mem_list = GCNEWVEC (vec_rtx_heap, last_basic_block);
+ canon_modify_mem_list = GCNEWVEC (vec_modify_pair_heap, last_basic_block);
modify_mem_list_set = BITMAP_ALLOC (NULL);
blocks_with_calls = BITMAP_ALLOC (NULL);
}
@@ -1004,7 +1002,7 @@ static int
load_killed_in_block_p (const_basic_block bb, int uid_limit, const_rtx x,
int avail_p)
{
- VEC (rtx,heap) *list = modify_mem_list[bb->index];
+ vec<rtx> list = modify_mem_list[bb->index];
rtx setter;
unsigned ix;
@@ -1012,7 +1010,7 @@ load_killed_in_block_p (const_basic_block bb, int uid_limit, const_rtx x,
if (MEM_READONLY_P (x))
return 0;
- FOR_EACH_VEC_ELT_REVERSE (rtx, list, ix, setter)
+ FOR_EACH_VEC_ELT_REVERSE (list, ix, setter)
{
struct mem_conflict_info mci;
@@ -1466,7 +1464,7 @@ canon_list_insert (rtx dest ATTRIBUTE_UNUSED, const_rtx x ATTRIBUTE_UNUSED,
pair.dest = dest;
pair.dest_addr = dest_addr;
- VEC_safe_push (modify_pair, heap, canon_modify_mem_list[bb], pair);
+ canon_modify_mem_list[bb].safe_push (pair);
}
/* Record memory modification information for INSN. We do not actually care
@@ -1480,7 +1478,7 @@ record_last_mem_set_info (rtx insn)
/* load_killed_in_block_p will handle the case of calls clobbering
everything. */
- VEC_safe_push (rtx, heap, modify_mem_list[bb], insn);
+ modify_mem_list[bb].safe_push (insn);
bitmap_set_bit (modify_mem_list_set, bb);
if (CALL_P (insn))
@@ -1622,8 +1620,8 @@ clear_modify_mem_tables (void)
EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
{
- VEC_free (rtx, heap, modify_mem_list[i]);
- VEC_free (modify_pair, heap, canon_modify_mem_list[i]);
+ modify_mem_list[i].release ();
+ canon_modify_mem_list[i].release ();
}
bitmap_clear (modify_mem_list_set);
bitmap_clear (blocks_with_calls);
@@ -1693,12 +1691,12 @@ compute_transp (const_rtx x, int indx, sbitmap *bmap)
blocks_with_calls,
0, bb_index, bi)
{
- VEC (modify_pair,heap) *list
+ vec<modify_pair> list
= canon_modify_mem_list[bb_index];
modify_pair *pair;
unsigned ix;
- FOR_EACH_VEC_ELT_REVERSE (modify_pair, list, ix, pair)
+ FOR_EACH_VEC_ELT_REVERSE (list, ix, pair)
{
rtx dest = pair->dest;
rtx dest_addr = pair->dest_addr;
@@ -3114,9 +3112,9 @@ static int
hoist_code (void)
{
basic_block bb, dominated;
- VEC (basic_block, heap) *dom_tree_walk;
+ vec<basic_block> dom_tree_walk;
unsigned int dom_tree_walk_index;
- VEC (basic_block, heap) *domby;
+ vec<basic_block> domby;
unsigned int i, j, k;
struct expr **index_map;
struct expr *expr;
@@ -3175,11 +3173,11 @@ hoist_code (void)
/* Walk over each basic block looking for potentially hoistable
expressions, nothing gets hoisted from the entry block. */
- FOR_EACH_VEC_ELT (basic_block, dom_tree_walk, dom_tree_walk_index, bb)
+ FOR_EACH_VEC_ELT (dom_tree_walk, dom_tree_walk_index, bb)
{
domby = get_dominated_to_depth (CDI_DOMINATORS, bb, MAX_HOIST_DEPTH);
- if (VEC_length (basic_block, domby) == 0)
+ if (domby.length () == 0)
continue;
/* Examine each expression that is very busy at the exit of this
@@ -3195,7 +3193,7 @@ hoist_code (void)
/* Number of occurrences of EXPR that can be hoisted to BB. */
int hoistable = 0;
/* Occurrences reachable from BB. */
- VEC (occr_t, heap) *occrs_to_hoist = NULL;
+ vec<occr_t> occrs_to_hoist = vec<occr_t>();
/* We want to insert the expression into BB only once, so
note when we've inserted it. */
int insn_inserted_p;
@@ -3224,7 +3222,7 @@ hoist_code (void)
/* We've found a potentially hoistable expression, now
we look at every block BB dominates to see if it
computes the expression. */
- FOR_EACH_VEC_ELT (basic_block, domby, j, dominated)
+ FOR_EACH_VEC_ELT (domby, j, dominated)
{
int max_distance;
@@ -3268,8 +3266,7 @@ hoist_code (void)
hoisted_bbs, occr->insn))
{
hoistable++;
- VEC_safe_push (occr_t, heap,
- occrs_to_hoist, occr);
+ occrs_to_hoist.safe_push (occr);
bitmap_set_bit (from_bbs, dominated->index);
}
}
@@ -3286,11 +3283,10 @@ hoist_code (void)
to nullify any benefit we get from code hoisting. */
if (hoistable > 1 && dbg_cnt (hoist_insn))
{
- /* If (hoistable != VEC_length), then there is
+ /* If (hoistable != vec::length), then there is
an occurrence of EXPR in BB itself. Don't waste
time looking for LCA in this case. */
- if ((unsigned) hoistable
- == VEC_length (occr_t, occrs_to_hoist))
+ if ((unsigned) hoistable == occrs_to_hoist.length ())
{
basic_block lca;
@@ -3299,15 +3295,15 @@ hoist_code (void)
if (lca != bb)
/* Punt, it's better to hoist these occurrences to
LCA. */
- VEC_free (occr_t, heap, occrs_to_hoist);
+ occrs_to_hoist.release ();
}
}
else
 /* Punt, no point hoisting a single occurrence. */
- VEC_free (occr_t, heap, occrs_to_hoist);
+ occrs_to_hoist.release ();
if (flag_ira_hoist_pressure
- && !VEC_empty (occr_t, occrs_to_hoist))
+ && !occrs_to_hoist.is_empty ())
{
/* Increase register pressure of basic blocks to which
expr is hoisted because of extended live range of
@@ -3341,7 +3337,7 @@ hoist_code (void)
/* Walk through occurrences of I'th expressions we want
to hoist to BB and make the transformations. */
- FOR_EACH_VEC_ELT (occr_t, occrs_to_hoist, j, occr)
+ FOR_EACH_VEC_ELT (occrs_to_hoist, j, occr)
{
rtx insn;
rtx set;
@@ -3377,14 +3373,14 @@ hoist_code (void)
}
}
- VEC_free (occr_t, heap, occrs_to_hoist);
+ occrs_to_hoist.release ();
bitmap_clear (from_bbs);
}
}
- VEC_free (basic_block, heap, domby);
+ domby.release ();
}
- VEC_free (basic_block, heap, dom_tree_walk);
+ dom_tree_walk.release ();
BITMAP_FREE (from_bbs);
if (flag_ira_hoist_pressure)
BITMAP_FREE (hoisted_bbs);
diff --git a/gcc/genattr.c b/gcc/genattr.c
index bb5cf9688a0..ade8c52cfb1 100644
--- a/gcc/genattr.c
+++ b/gcc/genattr.c
@@ -32,7 +32,7 @@ along with GCC; see the file COPYING3. If not see
static void gen_attr (rtx);
-static VEC (rtx, heap) *const_attrs, *reservations;
+static vec<rtx> const_attrs, reservations;
static void
@@ -42,7 +42,7 @@ gen_attr (rtx attr)
int is_const = GET_CODE (XEXP (attr, 2)) == CONST;
if (is_const)
- VEC_safe_push (rtx, heap, const_attrs, attr);
+ const_attrs.safe_push (attr);
printf ("#define HAVE_ATTR_%s 1\n", XSTR (attr, 0));
@@ -119,13 +119,13 @@ find_tune_attr (rtx exp)
if (strcmp (XSTR (exp, 0), "alternative") == 0)
return false;
- FOR_EACH_VEC_ELT (rtx, const_attrs, i, attr)
+ FOR_EACH_VEC_ELT (const_attrs, i, attr)
if (strcmp (XSTR (attr, 0), XSTR (exp, 0)) == 0)
{
unsigned int j;
rtx resv;
- FOR_EACH_VEC_ELT (rtx, reservations, j, resv)
+ FOR_EACH_VEC_ELT (reservations, j, resv)
if (! check_tune_attr (XSTR (attr, 0), XEXP (resv, 2)))
return false;
return true;
@@ -204,14 +204,14 @@ main (int argc, char **argv)
else if (GET_CODE (desc) == DEFINE_INSN_RESERVATION)
{
num_insn_reservations++;
- VEC_safe_push (rtx, heap, reservations, desc);
+ reservations.safe_push (desc);
}
}
if (num_insn_reservations > 0)
{
bool has_tune_attr
- = find_tune_attr (XEXP (VEC_index (rtx, reservations, 0), 2));
+ = find_tune_attr (XEXP (reservations[0], 2));
/* Output interface for pipeline hazards recognition based on
 DFA (deterministic finite state automata). */
printf ("\n/* DFA based pipeline interface. */");
diff --git a/gcc/genattrtab.c b/gcc/genattrtab.c
index cef7d406191..7143fc45075 100644
--- a/gcc/genattrtab.c
+++ b/gcc/genattrtab.c
@@ -113,7 +113,6 @@ along with GCC; see the file COPYING3. If not see
#include "errors.h"
#include "read-md.h"
#include "gensupport.h"
-#include "vecprim.h"
#include "fnmatch.h"
#define DEBUG 0
diff --git a/gcc/genautomata.c b/gcc/genautomata.c
index 102e7d2a5a5..faa9bf8bcc5 100644
--- a/gcc/genautomata.c
+++ b/gcc/genautomata.c
@@ -213,20 +213,8 @@ static struct obstack irp;
/* Declare vector types for various data structures: */
-DEF_VEC_P(alt_state_t);
-DEF_VEC_ALLOC_P(alt_state_t, heap);
-DEF_VEC_P(ainsn_t);
-DEF_VEC_ALLOC_P(ainsn_t, heap);
-DEF_VEC_P(state_t);
-DEF_VEC_ALLOC_P(state_t, heap);
-DEF_VEC_P(decl_t);
-DEF_VEC_ALLOC_P(decl_t, heap);
-DEF_VEC_P(reserv_sets_t);
-DEF_VEC_ALLOC_P(reserv_sets_t, heap);
-
-DEF_VEC_I(vect_el_t);
-DEF_VEC_ALLOC_I(vect_el_t, heap);
-typedef VEC(vect_el_t, heap) *vla_hwint_t;
+
+typedef vec<vect_el_t> vla_hwint_t;
/* Forward declarations of functions used before their definitions, only. */
static regexp_t gen_regexp_sequence (const char *);
@@ -1150,7 +1138,7 @@ check_name (const char * name, pos_t pos ATTRIBUTE_UNUSED)
/* Pointers to all declarations during IR generation are stored in the
following. */
-static VEC(decl_t, heap) *decls;
+static vec<decl_t> decls;
/* Given a pointer to a (char *) and a separator, return an alloc'ed
string containing the next separated element, taking parentheses
@@ -1278,7 +1266,7 @@ gen_cpu_unit (rtx def)
DECL_UNIT (decl)->query_p = 0;
DECL_UNIT (decl)->min_occ_cycle_num = -1;
DECL_UNIT (decl)->in_set_p = 0;
- VEC_safe_push (decl_t, heap, decls, decl);
+ decls.safe_push (decl);
}
}
@@ -1306,7 +1294,7 @@ gen_query_cpu_unit (rtx def)
DECL_UNIT (decl)->name = check_name (str_cpu_units [i], decl->pos);
DECL_UNIT (decl)->automaton_name = XSTR (def, 1);
DECL_UNIT (decl)->query_p = 1;
- VEC_safe_push (decl_t, heap, decls, decl);
+ decls.safe_push (decl);
}
}
@@ -1341,7 +1329,7 @@ gen_bypass (rtx def)
DECL_BYPASS (decl)->out_pattern = out_patterns[i];
DECL_BYPASS (decl)->in_pattern = in_patterns[j];
DECL_BYPASS (decl)->bypass_guard_name = XSTR (def, 3);
- VEC_safe_push (decl_t, heap, decls, decl);
+ decls.safe_push (decl);
}
}
@@ -1380,7 +1368,7 @@ gen_excl_set (rtx def)
else
DECL_EXCL (decl)->names [i]
= second_str_cpu_units [i - first_vect_length];
- VEC_safe_push (decl_t, heap, decls, decl);
+ decls.safe_push (decl);
}
/* Process a PRESENCE_SET, a FINAL_PRESENCE_SET, an ABSENCE_SET,
@@ -1449,7 +1437,7 @@ gen_presence_absence_set (rtx def, int presence_p, int final_p)
DECL_ABSENCE (decl)->patterns_num = patterns_length;
DECL_ABSENCE (decl)->final_p = final_p;
}
- VEC_safe_push (decl_t, heap, decls, decl);
+ decls.safe_push (decl);
}
/* Process a PRESENCE_SET.
@@ -1518,7 +1506,7 @@ gen_automaton (rtx def)
decl->mode = dm_automaton;
decl->pos = 0;
DECL_AUTOMATON (decl)->name = check_name (str_automata [i], decl->pos);
- VEC_safe_push (decl_t, heap, decls, decl);
+ decls.safe_push (decl);
}
}
@@ -1723,7 +1711,7 @@ gen_reserv (rtx def)
decl->pos = 0;
DECL_RESERV (decl)->name = check_name (XSTR (def, 0), decl->pos);
DECL_RESERV (decl)->regexp = gen_regexp (XSTR (def, 1));
- VEC_safe_push (decl_t, heap, decls, decl);
+ decls.safe_push (decl);
}
/* Process a DEFINE_INSN_RESERVATION.
@@ -1744,7 +1732,7 @@ gen_insn_reserv (rtx def)
DECL_INSN_RESERV (decl)->default_latency = XINT (def, 1);
DECL_INSN_RESERV (decl)->condexp = XEXP (def, 2);
DECL_INSN_RESERV (decl)->regexp = gen_regexp (XSTR (def, 3));
- VEC_safe_push (decl_t, heap, decls, decl);
+ decls.safe_push (decl);
}
@@ -3349,7 +3337,7 @@ static alt_state_t
uniq_sort_alt_states (alt_state_t alt_states_list)
{
alt_state_t curr_alt_state;
- VEC(alt_state_t, heap) *alt_states;
+ vec<alt_state_t> alt_states;
size_t i;
size_t prev_unique_state_ind;
alt_state_t result;
@@ -3359,33 +3347,31 @@ uniq_sort_alt_states (alt_state_t alt_states_list)
if (alt_states_list->next_alt_state == 0)
return alt_states_list;
- alt_states = VEC_alloc (alt_state_t, heap, 150);
+ alt_states.create (150);
for (curr_alt_state = alt_states_list;
curr_alt_state != NULL;
curr_alt_state = curr_alt_state->next_alt_state)
- VEC_safe_push (alt_state_t, heap, alt_states, curr_alt_state);
+ alt_states.safe_push (curr_alt_state);
- VEC_qsort (alt_state_t, alt_states, alt_state_cmp);
+ alt_states.qsort (alt_state_cmp);
prev_unique_state_ind = 0;
- for (i = 1; i < VEC_length (alt_state_t, alt_states); i++)
- if (VEC_index (alt_state_t, alt_states, prev_unique_state_ind)->state
- != VEC_index (alt_state_t, alt_states, i)->state)
+ for (i = 1; i < alt_states.length (); i++)
+ if (alt_states[prev_unique_state_ind]->state != alt_states[i]->state)
{
prev_unique_state_ind++;
- VEC_replace (alt_state_t, alt_states, prev_unique_state_ind,
- VEC_index (alt_state_t, alt_states, i));
+ alt_states[prev_unique_state_ind] = alt_states[i];
}
- VEC_truncate (alt_state_t, alt_states, prev_unique_state_ind + 1);
+ alt_states.truncate (prev_unique_state_ind + 1);
- for (i = 1; i < VEC_length (alt_state_t, alt_states); i++)
- VEC_index (alt_state_t, alt_states, i-1)->next_sorted_alt_state
- = VEC_index (alt_state_t, alt_states, i);
- VEC_last (alt_state_t, alt_states)->next_sorted_alt_state = 0;
+ for (i = 1; i < alt_states.length (); i++)
+ alt_states[i-1]->next_sorted_alt_state
+ = alt_states[i];
+ alt_states.last ()->next_sorted_alt_state = 0;
- result = VEC_index (alt_state_t, alt_states, 0);
+ result = alt_states[0];
- VEC_free (alt_state_t, heap, alt_states);
+ alt_states.release ();
return result;
}
@@ -5030,7 +5016,7 @@ transform_insn_regexps (void)
static int annotation_message_reported_p;
/* The vector contains all decls which are automata. */
-static VEC(decl_t, heap) *automaton_decls;
+static vec<decl_t> automaton_decls;
/* The following structure describes usage of a unit in a reservation. */
struct unit_usage
@@ -5044,8 +5030,6 @@ struct unit_usage
};
typedef struct unit_usage *unit_usage_t;
-DEF_VEC_P(unit_usage_t);
-DEF_VEC_ALLOC_P(unit_usage_t, heap);
/* Obstack for unit_usage structures. */
static struct obstack unit_usages;
@@ -5056,7 +5040,7 @@ static struct obstack unit_usages;
alternative with given number are referred through element with
index equals to the cycle * number of all alternatives in the
regexp + the alternative number. */
-static VEC(unit_usage_t, heap) *cycle_alt_unit_usages;
+static vec<unit_usage_t> cycle_alt_unit_usages;
/* The following function creates the structure unit_usage for UNIT on
CYCLE in REGEXP alternative with ALT_NUM. The structure is made
@@ -5075,13 +5059,12 @@ store_alt_unit_usage (regexp_t regexp, regexp_t unit, int cycle,
unit_decl = REGEXP_UNIT (unit)->unit_decl;
length = (cycle + 1) * REGEXP_ONEOF (regexp)->regexps_num;
- while (VEC_length (unit_usage_t, cycle_alt_unit_usages) < length)
- VEC_safe_push (unit_usage_t, heap, cycle_alt_unit_usages,
- (unit_usage_t) NULL);
+ while (cycle_alt_unit_usages.length () < length)
+ cycle_alt_unit_usages.safe_push (NULL);
index = cycle * REGEXP_ONEOF (regexp)->regexps_num + alt_num;
prev = NULL;
- for (curr = VEC_index (unit_usage_t, cycle_alt_unit_usages, index);
+ for (curr = cycle_alt_unit_usages[index];
curr != NULL;
prev = curr, curr = curr->next)
if (curr->unit_decl >= unit_decl)
@@ -5095,7 +5078,7 @@ store_alt_unit_usage (regexp_t regexp, regexp_t unit, int cycle,
unit_decl->last_distribution_check_cycle = -1; /* undefined */
unit_usage_ptr->next = curr;
if (prev == NULL)
- VEC_replace (unit_usage_t, cycle_alt_unit_usages, index, unit_usage_ptr);
+ cycle_alt_unit_usages[index] = unit_usage_ptr;
else
prev->next = unit_usage_ptr;
}
@@ -5124,11 +5107,11 @@ equal_alternatives_p (int alt1, int alt2, int n_alts,
unit_usage_t list1, list2;
for (i = 0;
- i < (int) VEC_length (unit_usage_t, cycle_alt_unit_usages);
+ i < (int) cycle_alt_unit_usages.length ();
i += n_alts)
{
- for (list1 = VEC_index (unit_usage_t, cycle_alt_unit_usages, i + alt1),
- list2 = VEC_index (unit_usage_t, cycle_alt_unit_usages, i + alt2);;
+ for (list1 = cycle_alt_unit_usages[i + alt1],
+ list2 = cycle_alt_unit_usages[i + alt2];;
list1 = list1->next, list2 = list2->next)
{
while (list1 != NULL
@@ -5151,8 +5134,6 @@ equal_alternatives_p (int alt1, int alt2, int n_alts,
return true;
}
-DEF_VEC_I(int);
-DEF_VEC_ALLOC_I(int, heap);
/* The function processes given REGEXP to find units with the wrong
distribution. */
@@ -5164,13 +5145,13 @@ check_regexp_units_distribution (const char *insn_reserv_name,
bool annotation_reservation_message_reported_p;
regexp_t seq, allof, unit;
struct unit_usage *unit_usage_ptr;
- VEC(int, heap) *marked;
+ vec<int> marked;
if (regexp == NULL || regexp->mode != rm_oneof)
return;
/* Store all unit usages in the regexp: */
obstack_init (&unit_usages);
- cycle_alt_unit_usages = VEC_alloc (unit_usage_t, heap, 10);
+ cycle_alt_unit_usages.create (10);
for (i = REGEXP_ONEOF (regexp)->regexps_num - 1; i >= 0; i--)
{
@@ -5238,21 +5219,21 @@ check_regexp_units_distribution (const char *insn_reserv_name,
}
}
/* Check distribution: */
- for (i = 0; i < (int) VEC_length (unit_usage_t, cycle_alt_unit_usages); i++)
- for (unit_usage_ptr = VEC_index (unit_usage_t, cycle_alt_unit_usages, i);
+ for (i = 0; i < (int) cycle_alt_unit_usages.length (); i++)
+ for (unit_usage_ptr = cycle_alt_unit_usages[i];
unit_usage_ptr != NULL;
unit_usage_ptr = unit_usage_ptr->next)
unit_usage_ptr->unit_decl->last_distribution_check_cycle = -1;
n_alts = REGEXP_ONEOF (regexp)->regexps_num;
- marked = VEC_alloc (int, heap, n_alts);
+ marked.create (n_alts);
for (i = 0; i < n_alts; i++)
- VEC_safe_push (int, heap, marked, 0);
+ marked.safe_push (0);
annotation_reservation_message_reported_p = false;
- for (i = 0; i < (int) VEC_length (unit_usage_t, cycle_alt_unit_usages); i++)
+ for (i = 0; i < (int) cycle_alt_unit_usages.length (); i++)
{
cycle = i / n_alts;
start = cycle * n_alts;
- for (unit_usage_ptr = VEC_index (unit_usage_t, cycle_alt_unit_usages, i);
+ for (unit_usage_ptr = cycle_alt_unit_usages[i];
unit_usage_ptr != NULL;
unit_usage_ptr = unit_usage_ptr->next)
{
@@ -5260,40 +5241,35 @@ check_regexp_units_distribution (const char *insn_reserv_name,
continue;
unit_usage_ptr->unit_decl->last_distribution_check_cycle = cycle;
for (alt = 0; alt < n_alts; alt++)
- if (! unit_present_on_list_p (VEC_index (unit_usage_t,
- cycle_alt_unit_usages,
- start + alt),
+ if (! unit_present_on_list_p (cycle_alt_unit_usages[start + alt],
unit_usage_ptr->unit_decl))
break;
if (alt >= n_alts)
continue;
- memset (VEC_address (int, marked), 0, n_alts * sizeof (int));
+ memset (marked.address (), 0, n_alts * sizeof (int));
for (alt = 0; alt < n_alts; alt++)
{
- if (! unit_present_on_list_p (VEC_index (unit_usage_t,
- cycle_alt_unit_usages,
- start + alt),
+ if (! unit_present_on_list_p (cycle_alt_unit_usages[start + alt],
unit_usage_ptr->unit_decl))
continue;
for (j = 0;
- j < (int) VEC_length (unit_usage_t, cycle_alt_unit_usages);
+ j < (int) cycle_alt_unit_usages.length ();
j++)
{
alt2 = j % n_alts;
if (! unit_present_on_list_p
- (VEC_index (unit_usage_t, cycle_alt_unit_usages,
- start + alt2),
+ (cycle_alt_unit_usages[start + alt2],
unit_usage_ptr->unit_decl)
&& equal_alternatives_p (alt, alt2, n_alts,
unit_usage_ptr
->unit_decl->automaton_decl))
{
- VEC_replace (int, marked, alt, 1);
- VEC_replace (int, marked, alt2, 1);
+ marked[alt] = 1;
+ marked[alt2] = 1;
}
}
}
- for (alt = 0; alt < n_alts && VEC_index (int, marked, alt); alt++)
+ for (alt = 0; alt < n_alts && marked[alt]; alt++)
;
if (alt < n_alts && 0)
{
@@ -5315,8 +5291,8 @@ check_regexp_units_distribution (const char *insn_reserv_name,
}
}
}
- VEC_free (int, heap, marked);
- VEC_free (unit_usage_t, heap, cycle_alt_unit_usages);
+ marked.release ();
+ cycle_alt_unit_usages.release ();
obstack_free (&unit_usages, NULL);
}
@@ -5330,14 +5306,14 @@ check_unit_distributions_to_automata (void)
if (progress_flag)
fprintf (stderr, "Check unit distributions to automata...");
- automaton_decls = NULL;
+ automaton_decls.create (0);
for (i = 0; i < description->decls_num; i++)
{
decl = description->decls [i];
if (decl->mode == dm_automaton)
- VEC_safe_push (decl_t, heap, automaton_decls, decl);
+ automaton_decls.safe_push (decl);
}
- if (VEC_length (decl_t, automaton_decls) > 1)
+ if (automaton_decls.length () > 1)
{
annotation_message_reported_p = FALSE;
for (i = 0; i < description->decls_num; i++)
@@ -5349,7 +5325,7 @@ check_unit_distributions_to_automata (void)
DECL_INSN_RESERV (decl)->transformed_regexp);
}
}
- VEC_free (decl_t, heap, automaton_decls);
+ automaton_decls.release ();
if (progress_flag)
fprintf (stderr, "done\n");
}
@@ -5509,7 +5485,8 @@ form_ainsn_with_same_reservs (automaton_t automaton)
{
ainsn_t curr_ainsn;
size_t i;
- VEC(ainsn_t, heap) *last_insns = VEC_alloc (ainsn_t, heap, 150);
+ vec<ainsn_t> last_insns;
+ last_insns.create (150);
for (curr_ainsn = automaton->ainsn_list;
curr_ainsn != NULL;
@@ -5521,26 +5498,25 @@ form_ainsn_with_same_reservs (automaton_t automaton)
}
else
{
- for (i = 0; i < VEC_length (ainsn_t, last_insns); i++)
+ for (i = 0; i < last_insns.length (); i++)
if (alt_states_eq
(curr_ainsn->sorted_alt_states,
- VEC_index (ainsn_t, last_insns, i)->sorted_alt_states))
+ last_insns[i]->sorted_alt_states))
break;
curr_ainsn->next_same_reservs_insn = NULL;
- if (i < VEC_length (ainsn_t, last_insns))
+ if (i < last_insns.length ())
{
curr_ainsn->first_insn_with_same_reservs = 0;
- VEC_index (ainsn_t, last_insns, i)->next_same_reservs_insn
- = curr_ainsn;
- VEC_replace (ainsn_t, last_insns, i, curr_ainsn);
+ last_insns[i]->next_same_reservs_insn = curr_ainsn;
+ last_insns[i] = curr_ainsn;
}
else
{
- VEC_safe_push (ainsn_t, heap, last_insns, curr_ainsn);
+ last_insns.safe_push (curr_ainsn);
curr_ainsn->first_insn_with_same_reservs = 1;
}
}
- VEC_free (ainsn_t, heap, last_insns);
+ last_insns.release ();
}
/* Forming unit reservations which can affect creating the automaton
@@ -5582,7 +5558,8 @@ make_automaton (automaton_t automaton)
state_t state;
state_t start_state;
state_t state2;
- VEC(state_t, heap) *state_stack = VEC_alloc(state_t, heap, 150);
+ vec<state_t> state_stack;
+ state_stack.create (150);
int states_n;
reserv_sets_t reservs_matter = form_reservs_matter (automaton);
@@ -5590,11 +5567,11 @@ make_automaton (automaton_t automaton)
start_state = insert_state (get_free_state (1, automaton));
automaton->start_state = start_state;
start_state->it_was_placed_in_stack_for_NDFA_forming = 1;
- VEC_safe_push (state_t, heap, state_stack, start_state);
+ state_stack.safe_push (start_state);
states_n = 1;
- while (VEC_length (state_t, state_stack) != 0)
+ while (state_stack.length () != 0)
{
- state = VEC_pop (state_t, state_stack);
+ state = state_stack.pop ();
for (ainsn = automaton->ainsn_list;
ainsn != NULL;
ainsn = ainsn->next_ainsn)
@@ -5617,7 +5594,7 @@ make_automaton (automaton_t automaton)
{
state2->it_was_placed_in_stack_for_NDFA_forming
= 1;
- VEC_safe_push (state_t, heap, state_stack, state2);
+ state_stack.safe_push (state2);
states_n++;
if (progress_flag && states_n % 100 == 0)
fprintf (stderr, ".");
@@ -5634,14 +5611,14 @@ make_automaton (automaton_t automaton)
if (!state2->it_was_placed_in_stack_for_NDFA_forming)
{
state2->it_was_placed_in_stack_for_NDFA_forming = 1;
- VEC_safe_push (state_t, heap, state_stack, state2);
+ state_stack.safe_push (state2);
states_n++;
if (progress_flag && states_n % 100 == 0)
fprintf (stderr, ".");
}
add_arc (state, state2, automaton->advance_ainsn);
}
- VEC_free (state_t, heap, state_stack);
+ state_stack.release ();
}
/* Form lists of all arcs of STATE marked by the same ainsn. */
@@ -5674,7 +5651,7 @@ form_arcs_marked_by_insn (state_t state)
static int
create_composed_state (state_t original_state, arc_t arcs_marked_by_insn,
- VEC(state_t, heap) **state_stack)
+ vec<state_t> *state_stack)
{
state_t state;
alt_state_t alt_state, curr_alt_state;
@@ -5770,7 +5747,7 @@ create_composed_state (state_t original_state, arc_t arcs_marked_by_insn,
if (!state->it_was_placed_in_stack_for_DFA_forming)
{
state->it_was_placed_in_stack_for_DFA_forming = 1;
- VEC_safe_push (state_t, heap, *state_stack, state);
+ state_stack->safe_push (state);
}
return new_state_p;
}
@@ -5784,20 +5761,20 @@ NDFA_to_DFA (automaton_t automaton)
state_t start_state;
state_t state;
decl_t decl;
- VEC(state_t, heap) *state_stack;
+ vec<state_t> state_stack;
int i;
int states_n;
- state_stack = VEC_alloc (state_t, heap, 0);
+ state_stack.create (0);
/* Create the start state (empty state). */
start_state = automaton->start_state;
start_state->it_was_placed_in_stack_for_DFA_forming = 1;
- VEC_safe_push (state_t, heap, state_stack, start_state);
+ state_stack.safe_push (start_state);
states_n = 1;
- while (VEC_length (state_t, state_stack) != 0)
+ while (state_stack.length () != 0)
{
- state = VEC_pop (state_t, state_stack);
+ state = state_stack.pop ();
form_arcs_marked_by_insn (state);
for (i = 0; i < description->decls_num; i++)
{
@@ -5822,7 +5799,7 @@ NDFA_to_DFA (automaton_t automaton)
if (!state2->it_was_placed_in_stack_for_DFA_forming)
{
state2->it_was_placed_in_stack_for_DFA_forming = 1;
- VEC_safe_push (state_t, heap, state_stack, state2);
+ state_stack.safe_push (state2);
}
add_arc (state, state2, automaton->collapse_ainsn);
}
@@ -5830,7 +5807,7 @@ NDFA_to_DFA (automaton_t automaton)
add_arc (state, state, automaton->collapse_ainsn);
}
}
- VEC_free (state_t, heap, state_stack);
+ state_stack.release ();
}
/* The following variable value is current number (1, 2, ...) of passing
@@ -5872,14 +5849,14 @@ initiate_pass_states (void)
/* The following vla is used for storing pointers to all achieved
states. */
-static VEC(state_t, heap) *all_achieved_states;
+static vec<state_t> all_achieved_states;
/* This function is called by function pass_states to add an achieved
STATE. */
static void
add_achieved_state (state_t state)
{
- VEC_safe_push (state_t, heap, all_achieved_states, state);
+ all_achieved_states.safe_push (state);
}
/* The function sets up equivalence numbers of insns which mark all
@@ -6014,24 +5991,24 @@ compare_states_for_equiv (const void *state_ptr_1,
}
/* The function makes initial partition of STATES on equivalent
- classes and saves it into *CLASSES. This function requires the input
+ classes and saves it into CLASSES. This function requires the input
to be sorted via compare_states_for_equiv(). */
static int
-init_equiv_class (VEC(state_t, heap) *states, VEC (state_t, heap) **classes)
+init_equiv_class (vec<state_t> states, vec<state_t> *classes)
{
size_t i;
state_t prev = 0;
int class_num = 1;
- *classes = VEC_alloc (state_t, heap, 150);
- for (i = 0; i < VEC_length (state_t, states); i++)
+ classes->create (150);
+ for (i = 0; i < states.length (); i++)
{
- state_t state = VEC_index (state_t, states, i);
+ state_t state = states[i];
if (prev)
{
if (compare_states_for_equiv (&prev, &state) != 0)
{
- VEC_safe_push (state_t, heap, *classes, prev);
+ classes->safe_push (prev);
class_num++;
prev = NULL;
}
@@ -6041,17 +6018,17 @@ init_equiv_class (VEC(state_t, heap) *states, VEC (state_t, heap) **classes)
prev = state;
}
if (prev)
- VEC_safe_push (state_t, heap, *classes, prev);
+ classes->safe_push (prev);
return class_num;
}
/* The function copies pointers to equivalent states from vla FROM
into vla TO. */
static void
-copy_equiv_class (VEC(state_t, heap) **to, VEC(state_t, heap) *from)
+copy_equiv_class (vec<state_t> *to, vec<state_t> from)
{
- VEC_free (state_t, heap, *to);
- *to = VEC_copy (state_t, heap, from);
+ to->release ();
+ *to = from.copy ();
}
/* The function processes equivalence class given by its first state,
@@ -6063,7 +6040,7 @@ copy_equiv_class (VEC(state_t, heap) **to, VEC(state_t, heap) *from)
partitioned, the function returns nonzero value. */
static int
partition_equiv_class (state_t first_state, int odd_iteration_flag,
- VEC(state_t, heap) **next_iteration_classes,
+ vec<state_t> *next_iteration_classes,
int *new_equiv_class_num_ptr)
{
state_t new_equiv_class;
@@ -6109,7 +6086,7 @@ partition_equiv_class (state_t first_state, int odd_iteration_flag,
clear_arc_insns_equiv_num (first_state);
}
if (new_equiv_class != NULL)
- VEC_safe_push (state_t, heap, *next_iteration_classes, new_equiv_class);
+ next_iteration_classes->safe_push (new_equiv_class);
first_state = new_equiv_class;
}
return partition_p;
@@ -6117,19 +6094,18 @@ partition_equiv_class (state_t first_state, int odd_iteration_flag,
/* The function finds equivalent states of AUTOMATON. */
static void
-evaluate_equiv_classes (automaton_t automaton,
- VEC(state_t, heap) **equiv_classes)
+evaluate_equiv_classes (automaton_t automaton, vec<state_t> *equiv_classes)
{
int new_equiv_class_num;
int odd_iteration_flag;
int finish_flag;
- VEC (state_t, heap) *next_iteration_classes;
+ vec<state_t> next_iteration_classes;
size_t i;
- all_achieved_states = VEC_alloc (state_t, heap, 1500);
+ all_achieved_states.create (1500);
pass_states (automaton, add_achieved_state);
pass_states (automaton, cache_presence);
- VEC_qsort (state_t, all_achieved_states, compare_states_for_equiv);
+ all_achieved_states.qsort (compare_states_for_equiv);
odd_iteration_flag = 0;
new_equiv_class_num = init_equiv_class (all_achieved_states,
@@ -6142,29 +6118,29 @@ evaluate_equiv_classes (automaton_t automaton,
copy_equiv_class (equiv_classes, next_iteration_classes);
/* Transfer equiv numbers for the next iteration. */
- for (i = 0; i < VEC_length (state_t, all_achieved_states); i++)
+ for (i = 0; i < all_achieved_states.length (); i++)
if (odd_iteration_flag)
- VEC_index (state_t, all_achieved_states, i)->equiv_class_num_2
- = VEC_index (state_t, all_achieved_states, i)->equiv_class_num_1;
+ all_achieved_states[i]->equiv_class_num_2
+ = all_achieved_states[i]->equiv_class_num_1;
else
- VEC_index (state_t, all_achieved_states, i)->equiv_class_num_1
- = VEC_index (state_t, all_achieved_states, i)->equiv_class_num_2;
+ all_achieved_states[i]->equiv_class_num_1
+ = all_achieved_states[i]->equiv_class_num_2;
- for (i = 0; i < VEC_length (state_t, *equiv_classes); i++)
- if (partition_equiv_class (VEC_index (state_t, *equiv_classes, i),
+ for (i = 0; i < equiv_classes->length (); i++)
+ if (partition_equiv_class ((*equiv_classes)[i],
odd_iteration_flag,
&next_iteration_classes,
&new_equiv_class_num))
finish_flag = 0;
}
while (!finish_flag);
- VEC_free (state_t, heap, next_iteration_classes);
- VEC_free (state_t, heap, all_achieved_states);
+ next_iteration_classes.release ();
+ all_achieved_states.release ();
}
/* The function merges equivalent states of AUTOMATON. */
static void
-merge_states (automaton_t automaton, VEC(state_t, heap) *equiv_classes)
+merge_states (automaton_t automaton, vec<state_t> equiv_classes)
{
state_t curr_state;
state_t new_state;
@@ -6177,9 +6153,9 @@ merge_states (automaton_t automaton, VEC(state_t, heap) *equiv_classes)
/* Create states corresponding to equivalence classes containing two
or more states. */
- for (i = 0; i < VEC_length (state_t, equiv_classes); i++)
+ for (i = 0; i < equiv_classes.length (); i++)
{
- curr_state = VEC_index (state_t, equiv_classes, i);
+ curr_state = equiv_classes[i];
if (curr_state->next_equiv_class_state != NULL)
{
 /* There is more than one state in the equivalence class. */
@@ -6218,9 +6194,9 @@ merge_states (automaton_t automaton, VEC(state_t, heap) *equiv_classes)
curr_state->equiv_class_state = curr_state;
}
- for (i = 0; i < VEC_length (state_t, equiv_classes); i++)
+ for (i = 0; i < equiv_classes.length (); i++)
{
- curr_state = VEC_index (state_t, equiv_classes, i);
+ curr_state = equiv_classes[i];
if (curr_state->next_equiv_class_state != NULL)
{
first_class_state = curr_state;
@@ -6279,13 +6255,13 @@ set_new_cycle_flags (state_t state)
static void
minimize_DFA (automaton_t automaton)
{
- VEC(state_t, heap) *equiv_classes = 0;
+ vec<state_t> equiv_classes = vec<state_t>();
evaluate_equiv_classes (automaton, &equiv_classes);
merge_states (automaton, equiv_classes);
pass_states (automaton, set_new_cycle_flags);
- VEC_free (state_t, heap, equiv_classes);
+ equiv_classes.release ();
}
/* Values of two variables are counted number of states and arcs in an
@@ -6952,7 +6928,7 @@ static void
output_vect (vla_hwint_t vect)
{
int els_on_line;
- size_t vect_length = VEC_length (vect_el_t, vect);
+ size_t vect_length = vect.length ();
size_t i;
els_on_line = 1;
@@ -6961,7 +6937,7 @@ output_vect (vla_hwint_t vect)
else
for (i = 0; i < vect_length; i++)
{
- fprintf (output_file, "%5ld", (long) VEC_index (vect_el_t, vect, i));
+ fprintf (output_file, "%5ld", (long) vect[i]);
if (els_on_line == 10)
{
els_on_line = 0;
@@ -7225,17 +7201,15 @@ output_translate_vect (automaton_t automaton)
int insn_value;
vla_hwint_t translate_vect;
- translate_vect = VEC_alloc (vect_el_t, heap, description->insns_num);
+ translate_vect.create (description->insns_num);
for (insn_value = 0; insn_value < description->insns_num; insn_value++)
/* Undefined value */
- VEC_quick_push (vect_el_t, translate_vect,
- automaton->insn_equiv_classes_num);
+ translate_vect.quick_push (automaton->insn_equiv_classes_num);
for (ainsn = automaton->ainsn_list; ainsn != NULL; ainsn = ainsn->next_ainsn)
- VEC_replace (vect_el_t, translate_vect,
- ainsn->insn_reserv_decl->insn_num,
- ainsn->insn_equiv_class_num);
+ translate_vect[ainsn->insn_reserv_decl->insn_num] =
+ ainsn->insn_equiv_class_num;
fprintf (output_file,
"/* Vector translating external insn codes to internal ones.*/\n");
@@ -7246,7 +7220,7 @@ output_translate_vect (automaton_t automaton)
fprintf (output_file, "[] ATTRIBUTE_UNUSED = {\n");
output_vect (translate_vect);
fprintf (output_file, "};\n\n");
- VEC_free (vect_el_t, heap, translate_vect);
+ translate_vect.release ();
}
/* The value in a table state x ainsn -> something which represents
@@ -7260,8 +7234,7 @@ comb_vect_p (state_ainsn_table_t tab)
{
if (no_comb_flag)
return false;
- return (2 * VEC_length (vect_el_t, tab->full_vect)
- > 5 * VEC_length (vect_el_t, tab->comb_vect));
+ return (2 * tab->full_vect.length () > 5 * tab->comb_vect.length ());
}
/* The following function creates new table for AUTOMATON. */
@@ -7275,18 +7248,17 @@ create_state_ainsn_table (automaton_t automaton)
tab = XCREATENODE (struct state_ainsn_table);
tab->automaton = automaton;
- tab->comb_vect = VEC_alloc (vect_el_t, heap, 10000);
- tab->check_vect = VEC_alloc (vect_el_t, heap, 10000);
+ tab->comb_vect.create (10000);
+ tab->check_vect.create (10000);
- tab->base_vect = 0;
- VEC_safe_grow (vect_el_t, heap, tab->base_vect,
- automaton->achieved_states_num);
+ tab->base_vect.create (0);
+ tab->base_vect.safe_grow (automaton->achieved_states_num);
full_vect_length = (automaton->insn_equiv_classes_num
* automaton->achieved_states_num);
- tab->full_vect = VEC_alloc (vect_el_t, heap, full_vect_length);
+ tab->full_vect.create (full_vect_length);
for (i = 0; i < full_vect_length; i++)
- VEC_quick_push (vect_el_t, tab->full_vect, undefined_vect_el_value);
+ tab->full_vect.quick_push (undefined_vect_el_value);
tab->min_base_vect_el_value = 0;
tab->max_base_vect_el_value = 0;
@@ -7364,27 +7336,25 @@ add_vect (state_ainsn_table_t tab, int vect_num, vla_hwint_t vect)
int i;
unsigned long vect_mask, comb_vect_mask;
- vect_length = VEC_length (vect_el_t, vect);
+ vect_length = vect.length ();
gcc_assert (vect_length);
- gcc_assert (VEC_last (vect_el_t, vect) != undefined_vect_el_value);
+ gcc_assert (vect.last () != undefined_vect_el_value);
real_vect_length = tab->automaton->insn_equiv_classes_num;
/* Form full vector in the table: */
{
size_t full_base = tab->automaton->insn_equiv_classes_num * vect_num;
- if (VEC_length (vect_el_t, tab->full_vect) < full_base + vect_length)
- VEC_safe_grow (vect_el_t, heap, tab->full_vect,
- full_base + vect_length);
+ if (tab->full_vect.length () < full_base + vect_length)
+ tab->full_vect.safe_grow (full_base + vect_length);
for (i = 0; i < vect_length; i++)
- VEC_replace (vect_el_t, tab->full_vect, full_base + i,
- VEC_index (vect_el_t, vect, i));
+ tab->full_vect[full_base + i] = vect[i];
}
/* The comb_vect min/max values are also used for the full vector, so
compute them now. */
for (vect_index = 0; vect_index < vect_length; vect_index++)
- if (VEC_index (vect_el_t, vect, vect_index) != undefined_vect_el_value)
+ if (vect[vect_index] != undefined_vect_el_value)
{
- vect_el_t x = VEC_index (vect_el_t, vect, vect_index);
+ vect_el_t x = vect[vect_index];
gcc_assert (x >= 0);
if (tab->max_comb_vect_el_value < x)
tab->max_comb_vect_el_value = x;
@@ -7395,14 +7365,13 @@ add_vect (state_ainsn_table_t tab, int vect_num, vla_hwint_t vect)
return;
/* Form comb vector in the table: */
- gcc_assert (VEC_length (vect_el_t, tab->comb_vect)
- == VEC_length (vect_el_t, tab->check_vect));
+ gcc_assert (tab->comb_vect.length () == tab->check_vect.length ());
- comb_vect_els_num = VEC_length (vect_el_t, tab->comb_vect);
+ comb_vect_els_num = tab->comb_vect.length ();
for (first_unempty_vect_index = 0;
first_unempty_vect_index < vect_length;
first_unempty_vect_index++)
- if (VEC_index (vect_el_t, vect, first_unempty_vect_index)
+ if (vect[first_unempty_vect_index]
!= undefined_vect_el_value)
break;
@@ -7419,10 +7388,9 @@ add_vect (state_ainsn_table_t tab, int vect_num, vla_hwint_t vect)
vect_index < vect_length
&& vect_index + comb_vect_index < comb_vect_els_num;
vect_index++)
- if (VEC_index (vect_el_t, vect, vect_index)
+ if (vect[vect_index]
!= undefined_vect_el_value
- && (VEC_index (vect_el_t, tab->comb_vect,
- vect_index + comb_vect_index)
+ && (tab->comb_vect[vect_index + comb_vect_index]
!= undefined_vect_el_value))
break;
if (vect_index >= vect_length
@@ -7439,7 +7407,7 @@ add_vect (state_ainsn_table_t tab, int vect_num, vla_hwint_t vect)
vect_index++)
{
vect_mask = vect_mask << 1;
- if (VEC_index (vect_el_t, vect, vect_index) != undefined_vect_el_value)
+ if (vect[vect_index] != undefined_vect_el_value)
vect_mask |= 1;
}
@@ -7455,7 +7423,7 @@ add_vect (state_ainsn_table_t tab, int vect_num, vla_hwint_t vect)
{
comb_vect_mask <<= 1;
if (vect_index + comb_vect_index < comb_vect_els_num
- && VEC_index (vect_el_t, tab->comb_vect, vect_index + comb_vect_index)
+ && tab->comb_vect[vect_index + comb_vect_index]
!= undefined_vect_el_value)
comb_vect_mask |= 1;
}
@@ -7466,7 +7434,7 @@ add_vect (state_ainsn_table_t tab, int vect_num, vla_hwint_t vect)
comb_vect_index++, i++)
{
comb_vect_mask = (comb_vect_mask << 1) | 1;
- comb_vect_mask ^= (VEC_index (vect_el_t, tab->comb_vect, i)
+ comb_vect_mask ^= (tab->comb_vect[i]
== undefined_vect_el_value);
if ((vect_mask & comb_vect_mask) == 0)
goto found;
@@ -7488,25 +7456,22 @@ add_vect (state_ainsn_table_t tab, int vect_num, vla_hwint_t vect)
no_state_value = tab->automaton->achieved_states_num;
while (additional_els_num > 0)
{
- VEC_safe_push (vect_el_t, heap, tab->comb_vect, vect_el);
- VEC_safe_push (vect_el_t, heap, tab->check_vect, no_state_value);
+ tab->comb_vect.safe_push (vect_el);
+ tab->check_vect.safe_push (no_state_value);
additional_els_num--;
}
- gcc_assert (VEC_length (vect_el_t, tab->comb_vect)
+ gcc_assert (tab->comb_vect.length ()
>= comb_vect_index + real_vect_length);
/* Fill comb and check vectors. */
for (vect_index = 0; vect_index < vect_length; vect_index++)
- if (VEC_index (vect_el_t, vect, vect_index) != undefined_vect_el_value)
+ if (vect[vect_index] != undefined_vect_el_value)
{
- vect_el_t x = VEC_index (vect_el_t, vect, vect_index);
- gcc_assert (VEC_index (vect_el_t, tab->comb_vect,
- comb_vect_index + vect_index)
+ vect_el_t x = vect[vect_index];
+ gcc_assert (tab->comb_vect[comb_vect_index + vect_index]
== undefined_vect_el_value);
gcc_assert (x >= 0);
- VEC_replace (vect_el_t, tab->comb_vect,
- comb_vect_index + vect_index, x);
- VEC_replace (vect_el_t, tab->check_vect,
- comb_vect_index + vect_index, vect_num);
+ tab->comb_vect[comb_vect_index + vect_index] = x;
+ tab->check_vect[comb_vect_index + vect_index] = vect_num;
}
if (tab->max_comb_vect_el_value < undefined_vect_el_value)
tab->max_comb_vect_el_value = undefined_vect_el_value;
@@ -7517,7 +7482,7 @@ add_vect (state_ainsn_table_t tab, int vect_num, vla_hwint_t vect)
if (tab->min_base_vect_el_value > comb_vect_index)
tab->min_base_vect_el_value = comb_vect_index;
- VEC_replace (vect_el_t, tab->base_vect, vect_num, comb_vect_index);
+ tab->base_vect[vect_num] = comb_vect_index;
}
/* Return number of out arcs of STATE. */
@@ -7558,29 +7523,29 @@ compare_transition_els_num (const void *state_ptr_1,
/* The function adds element EL_VALUE to vector VECT for a table state
x AINSN. */
static void
-add_vect_el (vla_hwint_t *vect, ainsn_t ainsn, int el_value)
+add_vect_el (vla_hwint_t &vect, ainsn_t ainsn, int el_value)
{
int equiv_class_num;
int vect_index;
gcc_assert (ainsn);
equiv_class_num = ainsn->insn_equiv_class_num;
- for (vect_index = VEC_length (vect_el_t, *vect);
+ for (vect_index = vect.length ();
vect_index <= equiv_class_num;
vect_index++)
- VEC_safe_push (vect_el_t, heap, *vect, undefined_vect_el_value);
- VEC_replace (vect_el_t, *vect, equiv_class_num, el_value);
+ vect.safe_push (undefined_vect_el_value);
+ vect[equiv_class_num] = el_value;
}
/* This is for forming vector of states of an automaton. */
-static VEC(state_t, heap) *output_states_vect;
+static vec<state_t> output_states_vect;
/* The function is called by function pass_states. The function adds
STATE to `output_states_vect'. */
static void
add_states_vect_el (state_t state)
{
- VEC_safe_push (state_t, heap, output_states_vect, state);
+ output_states_vect.safe_push (state);
}
/* Form and output vectors (comb, check, base or full vector)
@@ -7590,30 +7555,30 @@ output_trans_table (automaton_t automaton)
{
size_t i;
arc_t arc;
- vla_hwint_t transition_vect = 0;
+ vla_hwint_t transition_vect = vla_hwint_t();
undefined_vect_el_value = automaton->achieved_states_num;
automaton->trans_table = create_state_ainsn_table (automaton);
/* Create vect of pointers to states ordered by num of transitions
from the state (state with the maximum num is the first). */
- output_states_vect = 0;
+ output_states_vect.create (0);
pass_states (automaton, add_states_vect_el);
- VEC_qsort (state_t, output_states_vect, compare_transition_els_num);
+ output_states_vect.qsort (compare_transition_els_num);
- for (i = 0; i < VEC_length (state_t, output_states_vect); i++)
+ for (i = 0; i < output_states_vect.length (); i++)
{
- VEC_truncate (vect_el_t, transition_vect, 0);
- for (arc = first_out_arc (VEC_index (state_t, output_states_vect, i));
+ transition_vect.truncate (0);
+ for (arc = first_out_arc (output_states_vect[i]);
arc != NULL;
arc = next_out_arc (arc))
{
gcc_assert (arc->insn);
if (arc->insn->first_ainsn_with_given_equivalence_num)
- add_vect_el (&transition_vect, arc->insn,
+ add_vect_el (transition_vect, arc->insn,
arc->to_state->order_state_num);
}
add_vect (automaton->trans_table,
- VEC_index (state_t, output_states_vect, i)->order_state_num,
+ output_states_vect[i]->order_state_num,
transition_vect);
}
output_state_ainsn_table
@@ -7621,8 +7586,8 @@ output_trans_table (automaton_t automaton)
output_trans_full_vect_name, output_trans_comb_vect_name,
output_trans_check_vect_name, output_trans_base_vect_name);
- VEC_free (state_t, heap, output_states_vect);
- VEC_free (vect_el_t, heap, transition_vect);
+ output_states_vect.release ();
+ transition_vect.release ();
}
/* Form and output vectors representing minimal issue delay table of
@@ -7641,14 +7606,14 @@ output_min_issue_delay_table (automaton_t automaton)
/* Create vect of pointers to states ordered by num of transitions
from the state (state with the maximum num is the first). */
- output_states_vect = 0;
+ output_states_vect.create (0);
pass_states (automaton, add_states_vect_el);
- min_issue_delay_len = (VEC_length (state_t, output_states_vect)
+ min_issue_delay_len = (output_states_vect.length ()
* automaton->insn_equiv_classes_num);
- min_issue_delay_vect = VEC_alloc (vect_el_t, heap, min_issue_delay_len);
+ min_issue_delay_vect.create (min_issue_delay_len);
for (i = 0; i < min_issue_delay_len; i++)
- VEC_quick_push (vect_el_t, min_issue_delay_vect, -1);
+ min_issue_delay_vect.quick_push (-1);
automaton->max_min_delay = 0;
@@ -7658,10 +7623,10 @@ output_min_issue_delay_table (automaton_t automaton)
changed = 0;
- for (state_no = 0; state_no < VEC_length (state_t, output_states_vect);
+ for (state_no = 0; state_no < output_states_vect.length ();
state_no++)
{
- state_t s = VEC_index (state_t, output_states_vect, state_no);
+ state_t s = output_states_vect[state_no];
arc_t arc;
for (arc = first_out_arc (s); arc; arc = next_out_arc (arc))
@@ -7672,10 +7637,9 @@ output_min_issue_delay_table (automaton_t automaton)
* automaton->insn_equiv_classes_num
+ arc->insn->insn_equiv_class_num;
- if (VEC_index (vect_el_t, min_issue_delay_vect, asn))
+ if (min_issue_delay_vect[asn])
{
- VEC_replace (vect_el_t, min_issue_delay_vect, asn,
- (vect_el_t) 0);
+ min_issue_delay_vect[asn] = (vect_el_t) 0;
changed = 1;
}
@@ -7690,8 +7654,8 @@ output_min_issue_delay_table (automaton_t automaton)
n1 = arc->to_state->order_state_num
* automaton->insn_equiv_classes_num
+ k;
- delay0 = VEC_index (vect_el_t, min_issue_delay_vect, n0);
- delay1 = VEC_index (vect_el_t, min_issue_delay_vect, n1);
+ delay0 = min_issue_delay_vect[n0];
+ delay1 = min_issue_delay_vect[n1];
if (delay1 != -1)
{
if (arc->insn->insn_reserv_decl
@@ -7699,7 +7663,7 @@ output_min_issue_delay_table (automaton_t automaton)
delay1++;
if (delay1 < delay0 || delay0 == -1)
{
- VEC_replace (vect_el_t, min_issue_delay_vect, n0, delay1);
+ min_issue_delay_vect[n0] = delay1;
changed = 1;
}
}
@@ -7714,19 +7678,18 @@ output_min_issue_delay_table (automaton_t automaton)
for (ainsn = automaton->ainsn_list; ainsn; ainsn = ainsn->next_ainsn)
if (ainsn->first_ainsn_with_given_equivalence_num)
{
- for (i = 0; i < VEC_length (state_t, output_states_vect); i++)
+ for (i = 0; i < output_states_vect.length (); i++)
{
- state_t s = VEC_index (state_t, output_states_vect, i);
+ state_t s = output_states_vect[i];
size_t np = s->order_state_num
* automaton->insn_equiv_classes_num
+ ainsn->insn_equiv_class_num;
- vect_el_t x = VEC_index (vect_el_t, min_issue_delay_vect, np);
+ vect_el_t x = min_issue_delay_vect[np];
if (automaton->max_min_delay < x)
automaton->max_min_delay = x;
if (x == -1)
- VEC_replace (vect_el_t, min_issue_delay_vect, np,
- (vect_el_t) 0);
+ min_issue_delay_vect[np] = (vect_el_t) 0;
}
}
@@ -7748,26 +7711,25 @@ output_min_issue_delay_table (automaton_t automaton)
automaton->min_issue_delay_table_compression_factor = cfactor;
compressed_min_issue_delay_len = (min_issue_delay_len+cfactor-1) / cfactor;
- compressed_min_issue_delay_vect
- = VEC_alloc (vect_el_t, heap, compressed_min_issue_delay_len);
+ compressed_min_issue_delay_vect.create (compressed_min_issue_delay_len);
for (i = 0; i < compressed_min_issue_delay_len; i++)
- VEC_quick_push (vect_el_t, compressed_min_issue_delay_vect, 0);
+ compressed_min_issue_delay_vect.quick_push (0);
for (i = 0; i < min_issue_delay_len; i++)
{
size_t ci = i / cfactor;
- vect_el_t x = VEC_index (vect_el_t, min_issue_delay_vect, i);
- vect_el_t cx = VEC_index (vect_el_t, compressed_min_issue_delay_vect, ci);
+ vect_el_t x = min_issue_delay_vect[i];
+ vect_el_t cx = compressed_min_issue_delay_vect[ci];
cx |= x << (8 - (i % cfactor + 1) * (8 / cfactor));
- VEC_replace (vect_el_t, compressed_min_issue_delay_vect, ci, cx);
+ compressed_min_issue_delay_vect[ci] = cx;
}
output_vect (compressed_min_issue_delay_vect);
fprintf (output_file, "};\n\n");
- VEC_free (state_t, heap, output_states_vect);
- VEC_free (vect_el_t, heap, min_issue_delay_vect);
- VEC_free (vect_el_t, heap, compressed_min_issue_delay_vect);
+ output_states_vect.release ();
+ min_issue_delay_vect.release ();
+ compressed_min_issue_delay_vect.release ();
}
/* Form and output vector representing the locked states of
@@ -7777,32 +7739,30 @@ output_dead_lock_vect (automaton_t automaton)
{
size_t i;
arc_t arc;
- vla_hwint_t dead_lock_vect = 0;
+ vla_hwint_t dead_lock_vect = vla_hwint_t();
/* Create vect of pointers to states ordered by num of
transitions from the state (state with the maximum num is the
first). */
automaton->locked_states = 0;
- output_states_vect = 0;
+ output_states_vect.create (0);
pass_states (automaton, add_states_vect_el);
- VEC_safe_grow (vect_el_t, heap, dead_lock_vect,
- VEC_length (state_t, output_states_vect));
- for (i = 0; i < VEC_length (state_t, output_states_vect); i++)
+ dead_lock_vect.safe_grow (output_states_vect.length ());
+ for (i = 0; i < output_states_vect.length (); i++)
{
- state_t s = VEC_index (state_t, output_states_vect, i);
+ state_t s = output_states_vect[i];
arc = first_out_arc (s);
gcc_assert (arc);
if (next_out_arc (arc) == NULL
&& (arc->insn->insn_reserv_decl
== DECL_INSN_RESERV (advance_cycle_insn_decl)))
{
- VEC_replace (vect_el_t, dead_lock_vect, s->order_state_num, 1);
+ dead_lock_vect[s->order_state_num] = 1;
automaton->locked_states++;
}
else
- VEC_replace (vect_el_t, dead_lock_vect, s->order_state_num,
- (vect_el_t) 0);
+ dead_lock_vect[s->order_state_num] = (vect_el_t) 0;
}
if (automaton->locked_states == 0)
return;
@@ -7815,8 +7775,8 @@ output_dead_lock_vect (automaton_t automaton)
fprintf (output_file, "[] = {\n");
output_vect (dead_lock_vect);
fprintf (output_file, "};\n\n");
- VEC_free (state_t, heap, output_states_vect);
- VEC_free (vect_el_t, heap, dead_lock_vect);
+ output_states_vect.release ();
+ dead_lock_vect.release ();
}
/* Form and output vector representing reserved units of the states of
@@ -7824,7 +7784,7 @@ output_dead_lock_vect (automaton_t automaton)
static void
output_reserved_units_table (automaton_t automaton)
{
- vla_hwint_t reserved_units_table = 0;
+ vla_hwint_t reserved_units_table = vla_hwint_t();
int state_byte_size;
int reserved_units_size;
size_t n;
@@ -7834,30 +7794,30 @@ output_reserved_units_table (automaton_t automaton)
return;
/* Create vect of pointers to states. */
- output_states_vect = 0;
+ output_states_vect.create (0);
pass_states (automaton, add_states_vect_el);
/* Create vector. */
state_byte_size = (description->query_units_num + 7) / 8;
- reserved_units_size = (VEC_length (state_t, output_states_vect)
+ reserved_units_size = (output_states_vect.length ()
* state_byte_size);
- reserved_units_table = VEC_alloc (vect_el_t, heap, reserved_units_size);
+ reserved_units_table.create (reserved_units_size);
for (i = 0; i < reserved_units_size; i++)
- VEC_quick_push (vect_el_t, reserved_units_table, 0);
- for (n = 0; n < VEC_length (state_t, output_states_vect); n++)
+ reserved_units_table.quick_push (0);
+ for (n = 0; n < output_states_vect.length (); n++)
{
- state_t s = VEC_index (state_t, output_states_vect, n);
+ state_t s = output_states_vect[n];
for (i = 0; i < description->units_num; i++)
if (units_array [i]->query_p
&& first_cycle_unit_presence (s, i))
{
int ri = (s->order_state_num * state_byte_size
+ units_array [i]->query_num / 8);
- vect_el_t x = VEC_index (vect_el_t, reserved_units_table, ri);
+ vect_el_t x = reserved_units_table[ri];
x += 1 << (units_array [i]->query_num % 8);
- VEC_replace (vect_el_t, reserved_units_table, ri, x);
+ reserved_units_table[ri] = x;
}
}
fprintf (output_file, "\n#if %s\n", CPU_UNITS_QUERY_MACRO_NAME);
@@ -7871,8 +7831,8 @@ output_reserved_units_table (automaton_t automaton)
fprintf (output_file, "};\n#endif /* #if %s */\n\n",
CPU_UNITS_QUERY_MACRO_NAME);
- VEC_free (state_t, heap, output_states_vect);
- VEC_free (vect_el_t, heap, reserved_units_table);
+ output_states_vect.release ();
+ reserved_units_table.release ();
}
/* The function outputs all tables representing DFA(s) used for fast
@@ -8988,7 +8948,7 @@ output_automaton_units (automaton_t automaton)
/* The following variable is used for forming array of all possible cpu unit
reservations described by the current DFA state. */
-static VEC(reserv_sets_t, heap) *state_reservs;
+static vec<reserv_sets_t> state_reservs;
/* The function forms `state_reservs' for STATE. */
static void
@@ -9002,7 +8962,7 @@ add_state_reservs (state_t state)
curr_alt_state = curr_alt_state->next_sorted_alt_state)
add_state_reservs (curr_alt_state->state);
else
- VEC_safe_push (reserv_sets_t, heap, state_reservs, state->reservs);
+ state_reservs.safe_push (state->reservs);
}
/* The function outputs readable representation of all out arcs of
@@ -9070,15 +9030,13 @@ remove_state_duplicate_reservs (void)
{
size_t i, j;
- for (i = 1, j = 0; i < VEC_length (reserv_sets_t, state_reservs); i++)
- if (reserv_sets_cmp (VEC_index (reserv_sets_t, state_reservs, j),
- VEC_index (reserv_sets_t, state_reservs, i)))
+ for (i = 1, j = 0; i < state_reservs.length (); i++)
+ if (reserv_sets_cmp (state_reservs[j], state_reservs[i]))
{
j++;
- VEC_replace (reserv_sets_t, state_reservs, j,
- VEC_index (reserv_sets_t, state_reservs, i));
+ state_reservs[j] = state_reservs[i];
}
- VEC_truncate (reserv_sets_t, state_reservs, j + 1);
+ state_reservs.truncate (j + 1);
}
/* The following function output readable representation of DFA(s)
@@ -9090,24 +9048,23 @@ output_state (state_t state)
{
size_t i;
- state_reservs = 0;
+ state_reservs.create (0);
fprintf (output_description_file, " State #%d", state->order_state_num);
fprintf (output_description_file,
state->new_cycle_p ? " (new cycle)\n" : "\n");
add_state_reservs (state);
- VEC_qsort (reserv_sets_t, state_reservs, state_reservs_cmp);
+ state_reservs.qsort (state_reservs_cmp);
remove_state_duplicate_reservs ();
- for (i = 0; i < VEC_length (reserv_sets_t, state_reservs); i++)
+ for (i = 0; i < state_reservs.length (); i++)
{
fprintf (output_description_file, " ");
- output_reserv_sets (output_description_file,
- VEC_index (reserv_sets_t, state_reservs, i));
+ output_reserv_sets (output_description_file, state_reservs[i]);
fprintf (output_description_file, "\n");
}
fprintf (output_description_file, "\n");
output_state_arcs (state);
- VEC_free (reserv_sets_t, heap, state_reservs);
+ state_reservs.release ();
}
/* The following function output readable representation of
@@ -9172,8 +9129,8 @@ output_statistics (FILE *f)
#ifndef NDEBUG
fprintf
(f, "%5ld transition comb vector els, %5ld trans table els: %s\n",
- (long) VEC_length (vect_el_t, automaton->trans_table->comb_vect),
- (long) VEC_length (vect_el_t, automaton->trans_table->full_vect),
+ (long) automaton->trans_table->comb_vect.length (),
+ (long) automaton->trans_table->full_vect.length (),
(comb_vect_p (automaton->trans_table)
? "use comb vect" : "use simple vect"));
fprintf
@@ -9181,9 +9138,9 @@ output_statistics (FILE *f)
(long) states_num * automaton->insn_equiv_classes_num,
automaton->min_issue_delay_table_compression_factor);
transition_comb_vect_els
- += VEC_length (vect_el_t, automaton->trans_table->comb_vect);
+ += automaton->trans_table->comb_vect.length ();
transition_full_vect_els
- += VEC_length (vect_el_t, automaton->trans_table->full_vect);
+ += automaton->trans_table->full_vect.length ();
min_issue_delay_vect_els
+= states_num * automaton->insn_equiv_classes_num;
locked_states
@@ -9393,14 +9350,14 @@ check_automata_insn_issues (void)
/* The following vla is used for storing pointers to all achieved
states. */
-static VEC(state_t, heap) *automaton_states;
+static vec<state_t> automaton_states;
/* This function is called by function pass_states to add an achieved
STATE. */
static void
add_automaton_state (state_t state)
{
- VEC_safe_push (state_t, heap, automaton_states, state);
+ automaton_states.safe_push (state);
}
/* The following function forms list of important automata (whose
@@ -9415,17 +9372,17 @@ form_important_insn_automata_lists (void)
int i;
size_t n;
- automaton_states = 0;
+ automaton_states.create (0);
/* Mark important ainsns. */
for (automaton = description->first_automaton;
automaton != NULL;
automaton = automaton->next_automaton)
{
- VEC_truncate (state_t, automaton_states, 0);
+ automaton_states.truncate (0);
pass_states (automaton, add_automaton_state);
- for (n = 0; n < VEC_length (state_t, automaton_states); n++)
+ for (n = 0; n < automaton_states.length (); n++)
{
- state_t s = VEC_index (state_t, automaton_states, n);
+ state_t s = automaton_states[n];
for (arc = first_out_arc (s);
arc != NULL;
arc = next_out_arc (arc))
@@ -9439,7 +9396,7 @@ form_important_insn_automata_lists (void)
}
}
}
- VEC_free (state_t, heap, automaton_states);
+ automaton_states.release ();
/* Create automata sets for the insns. */
for (i = 0; i < description->decls_num; i++)
@@ -9477,13 +9434,13 @@ expand_automata (void)
description = XCREATENODEVAR (struct description,
sizeof (struct description)
/* Two entries for special insns. */
- + sizeof (decl_t) * (VEC_length (decl_t, decls) + 1));
- description->decls_num = VEC_length (decl_t, decls);
+ + sizeof (decl_t) * (decls.length () + 1));
+ description->decls_num = decls.length ();
description->normal_decls_num = description->decls_num;
description->query_units_num = 0;
for (i = 0; i < description->decls_num; i++)
{
- description->decls [i] = VEC_index (decl_t, decls, i);
+ description->decls [i] = decls[i];
if (description->decls [i]->mode == dm_unit
&& DECL_UNIT (description->decls [i])->query_p)
DECL_UNIT (description->decls [i])->query_num
@@ -9694,7 +9651,7 @@ main (int argc, char **argv)
if (have_error)
return FATAL_EXIT_CODE;
- if (VEC_length (decl_t, decls) > 0)
+ if (decls.length () > 0)
{
expand_automata ();
if (!have_error)
diff --git a/gcc/genextract.c b/gcc/genextract.c
index fb1428687ca..063c2211152 100644
--- a/gcc/genextract.c
+++ b/gcc/genextract.c
@@ -29,7 +29,6 @@ along with GCC; see the file COPYING3. If not see
#include "read-md.h"
#include "gensupport.h"
#include "vec.h"
-#include "vecprim.h"
/* This structure contains all the information needed to describe one
set of extractions methods. Each method may be used by more than
@@ -70,15 +69,13 @@ static struct code_ptr *peepholes;
/* This structure is used by gen_insn and walk_rtx to accumulate the
data that will be used to produce an extractions structure. */
-DEF_VEC_P(locstr);
-DEF_VEC_ALLOC_P(locstr,heap);
struct accum_extract
{
- VEC(locstr,heap) *oplocs;
- VEC(locstr,heap) *duplocs;
- VEC(int,heap) *dupnums;
- VEC(char,heap) *pathstr;
+ vec<locstr> oplocs;
+ vec<locstr> duplocs;
+ vec<int> dupnums;
+ vec<char> pathstr;
};
int line_no;
@@ -95,10 +92,10 @@ gen_insn (rtx insn, int insn_code_number)
struct code_ptr *link;
struct accum_extract acc;
- acc.oplocs = VEC_alloc (locstr,heap, 10);
- acc.duplocs = VEC_alloc (locstr,heap, 10);
- acc.dupnums = VEC_alloc (int,heap, 10);
- acc.pathstr = VEC_alloc (char,heap, 20);
+ acc.oplocs.create (10);
+ acc.duplocs.create (10);
+ acc.dupnums.create (10);
+ acc.pathstr.create (20);
/* Walk the insn's pattern, remembering at all times the path
down to the walking point. */
@@ -108,9 +105,9 @@ gen_insn (rtx insn, int insn_code_number)
else
for (i = XVECLEN (insn, 1) - 1; i >= 0; i--)
{
- VEC_safe_push (char,heap, acc.pathstr, 'a' + i);
+ acc.pathstr.safe_push ('a' + i);
walk_rtx (XVECEXP (insn, 1, i), &acc);
- VEC_pop (char, acc.pathstr);
+ acc.pathstr.pop ();
}
link = XNEW (struct code_ptr);
@@ -118,9 +115,9 @@ gen_insn (rtx insn, int insn_code_number)
/* See if we find something that already had this extraction method. */
- op_count = VEC_length (locstr, acc.oplocs);
- dup_count = VEC_length (locstr, acc.duplocs);
- gcc_assert (dup_count == VEC_length (int, acc.dupnums));
+ op_count = acc.oplocs.length ();
+ dup_count = acc.duplocs.length ();
+ gcc_assert (dup_count == acc.dupnums.length ());
for (p = extractions; p; p = p->next)
{
@@ -130,7 +127,7 @@ gen_insn (rtx insn, int insn_code_number)
for (j = 0; j < op_count; j++)
{
char *a = p->oplocs[j];
- char *b = VEC_index (locstr, acc.oplocs, j);
+ char *b = acc.oplocs[j];
if (a != b && (!a || !b || strcmp (a, b)))
break;
}
@@ -139,8 +136,8 @@ gen_insn (rtx insn, int insn_code_number)
continue;
for (j = 0; j < dup_count; j++)
- if (p->dupnums[j] != VEC_index (int, acc.dupnums, j)
- || strcmp (p->duplocs[j], VEC_index (locstr, acc.duplocs, j)))
+ if (p->dupnums[j] != acc.dupnums[j]
+ || strcmp (p->duplocs[j], acc.duplocs[j]))
break;
if (j != dup_count)
@@ -170,50 +167,50 @@ gen_insn (rtx insn, int insn_code_number)
p->duplocs = p->oplocs + op_count;
p->dupnums = (int *)(p->duplocs + dup_count);
- memcpy(p->oplocs, VEC_address(locstr,acc.oplocs), op_count*sizeof(locstr));
- memcpy(p->duplocs, VEC_address(locstr,acc.duplocs), dup_count*sizeof(locstr));
- memcpy(p->dupnums, VEC_address(int, acc.dupnums), dup_count*sizeof(int));
+ memcpy(p->oplocs, acc.oplocs.address(), op_count*sizeof(locstr));
+ memcpy(p->duplocs, acc.duplocs.address(), dup_count*sizeof(locstr));
+ memcpy(p->dupnums, acc.dupnums.address(), dup_count*sizeof(int));
done:
- VEC_free (locstr,heap, acc.oplocs);
- VEC_free (locstr,heap, acc.duplocs);
- VEC_free (int,heap, acc.dupnums);
- VEC_free (char,heap, acc.pathstr);
+ acc.oplocs.release ();
+ acc.duplocs.release ();
+ acc.dupnums.release ();
+ acc.pathstr.release ();
}
-/* Helper subroutine of walk_rtx: given a VEC(locstr), an index, and a
+/* Helper subroutine of walk_rtx: given a vec<locstr>, an index, and a
string, insert the string at the index, which should either already
exist and be NULL, or not yet exist within the vector. In the latter
case the vector is enlarged as appropriate. */
static void
-VEC_safe_set_locstr (VEC(locstr,heap) **vp, unsigned int ix, char *str)
+VEC_safe_set_locstr (vec<locstr> *vp, unsigned int ix, char *str)
{
- if (ix < VEC_length (locstr, *vp))
+ if (ix < (*vp).length ())
{
- if (VEC_index (locstr, *vp, ix))
+ if ((*vp)[ix])
{
message_with_line (line_no, "repeated operand number %d", ix);
have_error = 1;
}
else
- VEC_replace (locstr, *vp, ix, str);
+ (*vp)[ix] = str;
}
else
{
- while (ix > VEC_length (locstr, *vp))
- VEC_safe_push (locstr, heap, *vp, NULL);
- VEC_safe_push (locstr, heap, *vp, str);
+ while (ix > (*vp).length ())
+ vp->safe_push (NULL);
+ vp->safe_push (str);
}
}
-/* Another helper subroutine of walk_rtx: given a VEC(char), convert it
+/* Another helper subroutine of walk_rtx: given a vec<char>, convert it
to a NUL-terminated string in malloc memory. */
static char *
-VEC_char_to_string (VEC(char,heap) *v)
+VEC_char_to_string (vec<char> v)
{
- size_t n = VEC_length (char, v);
+ size_t n = v.length ();
char *s = XNEWVEC (char, n + 1);
- memcpy (s, VEC_address (char, v), n);
+ memcpy (s, v.address (), n);
s[n] = '\0';
return s;
}
@@ -251,18 +248,17 @@ walk_rtx (rtx x, struct accum_extract *acc)
base = (code == MATCH_OPERATOR ? '0' : 'a');
for (i = XVECLEN (x, 2) - 1; i >= 0; i--)
{
- VEC_safe_push (char,heap, acc->pathstr, base + i);
+ acc->pathstr.safe_push (base + i);
walk_rtx (XVECEXP (x, 2, i), acc);
- VEC_pop (char, acc->pathstr);
+ acc->pathstr.pop ();
}
return;
case MATCH_DUP:
case MATCH_PAR_DUP:
case MATCH_OP_DUP:
- VEC_safe_push (locstr,heap, acc->duplocs,
- VEC_char_to_string (acc->pathstr));
- VEC_safe_push (int,heap, acc->dupnums, XINT (x, 0));
+ acc->duplocs.safe_push (VEC_char_to_string (acc->pathstr));
+ acc->dupnums.safe_push (XINT (x, 0));
if (code == MATCH_DUP)
break;
@@ -270,9 +266,9 @@ walk_rtx (rtx x, struct accum_extract *acc)
base = (code == MATCH_OP_DUP ? '0' : 'a');
for (i = XVECLEN (x, 1) - 1; i >= 0; i--)
{
- VEC_safe_push (char,heap, acc->pathstr, base + i);
+ acc->pathstr.safe_push (base + i);
walk_rtx (XVECEXP (x, 1, i), acc);
- VEC_pop (char, acc->pathstr);
+ acc->pathstr.pop ();
}
return;
@@ -286,18 +282,18 @@ walk_rtx (rtx x, struct accum_extract *acc)
{
if (fmt[i] == 'e' || fmt[i] == 'u')
{
- VEC_safe_push (char,heap, acc->pathstr, '0' + i);
+ acc->pathstr.safe_push ('0' + i);
walk_rtx (XEXP (x, i), acc);
- VEC_pop (char, acc->pathstr);
+ acc->pathstr.pop ();
}
else if (fmt[i] == 'E')
{
int j;
for (j = XVECLEN (x, i) - 1; j >= 0; j--)
{
- VEC_safe_push (char,heap, acc->pathstr, 'a' + j);
+ acc->pathstr.safe_push ('a' + j);
walk_rtx (XVECEXP (x, i, j), acc);
- VEC_pop (char, acc->pathstr);
+ acc->pathstr.pop ();
}
}
}
diff --git a/gcc/gengtype-lex.l b/gcc/gengtype-lex.l
index fd8090606d3..4c316a3e2ea 100644
--- a/gcc/gengtype-lex.l
+++ b/gcc/gengtype-lex.l
@@ -116,7 +116,6 @@ CXX_KEYWORD inline|public:|private:|protected:|template|operator|friend
return IGNORABLE_CXX_KEYWORD;
}
"GTY"/{EOID} { return GTY_TOKEN; }
-"VEC"/{EOID} { return VEC_TOKEN; }
"union"/{EOID} { return UNION; }
"struct"/{EOID} { return STRUCT; }
"class"/{EOID} { return STRUCT; }
@@ -163,7 +162,7 @@ CXX_KEYWORD inline|public:|private:|protected:|template|operator|friend
}
"..." { return ELLIPSIS; }
-[(){},*:<>;=%|+-] { return yytext[0]; }
+[(){},*:<>;=%|+\!\?\.-] { return yytext[0]; }
/* ignore pp-directives */
^{HWS}"#"{HWS}[a-z_]+[^\n]*\n {lexer_line.line++;}
diff --git a/gcc/gengtype-parse.c b/gcc/gengtype-parse.c
index 5737a156f70..0b466615e23 100644
--- a/gcc/gengtype-parse.c
+++ b/gcc/gengtype-parse.c
@@ -76,7 +76,6 @@ static const char *const token_names[] = {
"union",
"struct",
"enum",
- "VEC",
"...",
"ptr_alias",
"nested_ptr",
@@ -245,31 +244,12 @@ require_template_declaration (const char *tmpl_name)
}
-/* typedef_name: either an ID, or VEC(x,y), or a template type
- specification of the form ID<t1,t2,...,tn>.
-
- FIXME cxx-conversion. VEC(x,y) is currently translated to the
- template 'vec_t<x>'. This is to support the transition to C++ and
- avoid re-writing all the 'VEC(x,y)' declarations in the code. This
- needs to be fixed when the branch is merged into trunk. */
+/* typedef_name: either an ID, or a template type
+ specification of the form ID<t1,t2,...,tn>. */
static const char *
typedef_name (void)
{
- if (token () == VEC_TOKEN)
- {
- const char *c1, *r;
- advance ();
- require ('(');
- c1 = require2 (ID, SCALAR);
- require (',');
- require (ID);
- require (')');
- r = concat ("vec_t<", c1, ">", (char *) 0);
- free (CONST_CAST (char *, c1));
- return r;
- }
-
const char *id = require (ID);
if (token () == '<')
return require_template_declaration (id);
@@ -826,7 +806,6 @@ type (options_p *optsp, bool nested)
return create_scalar_type (s);
case ID:
- case VEC_TOKEN:
s = typedef_name ();
return resolve_typedef (s, &lexer_line);
@@ -907,6 +886,7 @@ type (options_p *optsp, bool nested)
fields = NULL;
kind = TYPE_USER_STRUCT;
consume_balanced ('{', '}');
+ return create_user_defined_type (s, &lexer_line);
}
return new_structure (s, kind, &lexer_line, fields, opts);
diff --git a/gcc/gengtype-state.c b/gcc/gengtype-state.c
index e3317ec36a4..8607ceb1118 100644
--- a/gcc/gengtype-state.c
+++ b/gcc/gengtype-state.c
@@ -52,6 +52,7 @@ type_lineloc (const_type_p ty)
case TYPE_UNION:
case TYPE_LANG_STRUCT:
case TYPE_USER_STRUCT:
+ case TYPE_UNDEFINED:
return CONST_CAST (struct fileloc*, &ty->u.s.line);
case TYPE_PARAM_STRUCT:
return CONST_CAST (struct fileloc*, &ty->u.param_struct.line);
@@ -770,6 +771,23 @@ write_state_string_type (type_p current)
fatal ("Unexpected type in write_state_string_type");
}
+/* Write an undefined type. */
+static void
+write_state_undefined_type (type_p current)
+{
+ DBGPRINTF ("undefined type @ %p #%d '%s'", (void *) current,
+ current->state_number, current->u.s.tag);
+ fprintf (state_file, "undefined ");
+ gcc_assert (current->gc_used == GC_UNUSED);
+ write_state_common_type_content (current);
+ if (current->u.s.tag != NULL)
+ write_state_a_string (current->u.s.tag);
+ else
+ fprintf (state_file, "nil");
+
+ write_state_fileloc (type_lineloc (current));
+}
+
/* Common code to write structure like types. */
static void
@@ -963,6 +981,9 @@ write_state_type (type_p current)
{
case TYPE_NONE:
gcc_unreachable ();
+ case TYPE_UNDEFINED:
+ write_state_undefined_type (current);
+ break;
case TYPE_STRUCT:
write_state_struct_type (current);
break;
@@ -1345,6 +1366,40 @@ read_state_lang_bitmap (lang_bitmap *bitmap)
}
+/* Read an undefined type. */
+static void
+read_state_undefined_type (type_p type)
+{
+ struct state_token_st *t0;
+
+ type->kind = TYPE_UNDEFINED;
+ read_state_common_type_content (type);
+ t0 = peek_state_token (0);
+ if (state_token_kind (t0) == STOK_STRING)
+ {
+ if (state_token_is_name (t0, "nil"))
+ {
+ type->u.s.tag = NULL;
+ DBGPRINTF ("read anonymous undefined type @%p #%d",
+ (void *) type, type->state_number);
+ }
+ else
+ {
+ type->u.s.tag = xstrdup (t0->stok_un.stok_string);
+ DBGPRINTF ("read undefined type @%p #%d '%s'",
+ (void *) type, type->state_number, type->u.s.tag);
+ }
+
+ next_state_tokens (1);
+ read_state_fileloc (&(type->u.s.line));
+ }
+ else
+ {
+ fatal_reading_state (t0, "Bad tag in undefined type");
+ }
+}
+
+
/* Read a GTY-ed struct type. */
static void
read_state_struct_type (type_p type)
@@ -1673,6 +1728,12 @@ read_state_type (type_p *current)
next_state_tokens (1);
read_state_string_type (current);
}
+ else if (state_token_is_name (t0, "undefined"))
+ {
+ *current = XCNEW (struct type);
+ next_state_tokens (1);
+ read_state_undefined_type (*current);
+ }
else if (state_token_is_name (t0, "struct"))
{
*current = XCNEW (struct type);
diff --git a/gcc/gengtype.c b/gcc/gengtype.c
index b9fbd96fcbf..b3f73fe924b 100644
--- a/gcc/gengtype.c
+++ b/gcc/gengtype.c
@@ -171,13 +171,15 @@ dbgprint_count_type_at (const char *fil, int lin, const char *msg, type_p t)
int nb_types = 0, nb_scalar = 0, nb_string = 0;
int nb_struct = 0, nb_union = 0, nb_array = 0, nb_pointer = 0;
int nb_lang_struct = 0, nb_param_struct = 0;
- int nb_user_struct = 0;
+ int nb_user_struct = 0, nb_undefined = 0;
type_p p = NULL;
for (p = t; p; p = p->next)
{
nb_types++;
switch (p->kind)
{
+ case TYPE_UNDEFINED:
+ nb_undefined++;
+ break;
case TYPE_SCALAR:
nb_scalar++;
break;
@@ -205,7 +207,7 @@ dbgprint_count_type_at (const char *fil, int lin, const char *msg, type_p t)
case TYPE_PARAM_STRUCT:
nb_param_struct++;
break;
- default:
+ case TYPE_NONE:
gcc_unreachable ();
}
}
@@ -222,6 +224,8 @@ dbgprint_count_type_at (const char *fil, int lin, const char *msg, type_p t)
nb_lang_struct, nb_param_struct);
if (nb_user_struct > 0)
fprintf (stderr, "@@%%@@ %d user_structs\n", nb_user_struct);
+ if (nb_undefined > 0)
+ fprintf (stderr, "@@%%@@ %d undefined types\n", nb_undefined);
fprintf (stderr, "\n");
}
#endif /* ENABLE_CHECKING */
@@ -553,7 +557,7 @@ do_scalar_typedef (const char *s, struct fileloc *pos)
/* Define TYPE_NAME to be a user defined type at location POS. */
-static type_p
+type_p
create_user_defined_type (const char *type_name, struct fileloc *pos)
{
type_p ty = find_structure (type_name, TYPE_USER_STRUCT);
@@ -595,20 +599,58 @@ create_user_defined_type (const char *type_name, struct fileloc *pos)
}
-/* Return the type previously defined for S. Use POS to report errors. */
+/* Given a typedef name S, return its associated type. Return NULL if
+ S is not a registered type name. */
-type_p
-resolve_typedef (const char *s, struct fileloc *pos)
+static type_p
+type_for_name (const char *s)
{
pair_p p;
for (p = typedefs; p != NULL; p = p->next)
if (strcmp (p->name, s) == 0)
return p->type;
+ return NULL;
+}
+
+
+/* Create an undefined type with name S and location POS. Return the
+ newly created type. */
+
+static type_p
+create_undefined_type (const char *s, struct fileloc *pos)
+{
+ type_p ty = find_structure (s, TYPE_UNDEFINED);
+ ty->u.s.line = *pos;
+ ty->u.s.bitmap = get_lang_bitmap (pos->file);
+ do_typedef (s, ty, pos);
+ return ty;
+}
+
+
+/* Return the type previously defined for S. Use POS to report errors. */
- /* If we did not find a typedef registered, assume this is a name
- for a user-defined type which will need to provide its own
- marking functions. */
- return create_user_defined_type (s, pos);
+type_p
+resolve_typedef (const char *s, struct fileloc *pos)
+{
+ bool is_template_instance = (strchr (s, '<') != NULL);
+ type_p p = type_for_name (s);
+
+ /* If we did not find a typedef registered, generate a TYPE_UNDEFINED
+ type for regular type identifiers. If the type identifier S is a
+ template instantiation, however, we treat it as a user defined
+ type.
+
+ FIXME, this is actually a limitation in gengtype. Supporting
+ template types and their instances would require keeping separate
+ track of the basic types definition and its instances. This
+ essentially forces all template classes in GC to be marked
+ GTY((user)). */
+ if (!p)
+ p = (is_template_instance)
+ ? create_user_defined_type (s, pos)
+ : create_undefined_type (s, pos);
+
+ return p;
}
@@ -707,7 +749,7 @@ find_structure (const char *name, enum typekind kind)
type_p s;
bool isunion = (kind == TYPE_UNION);
- gcc_assert (union_or_struct_p (kind));
+ gcc_assert (kind == TYPE_UNDEFINED || union_or_struct_p (kind));
for (s = structures; s != NULL; s = s->next)
if (strcmp (name, s->u.s.tag) == 0 && UNION_P (s) == isunion)
@@ -1397,7 +1439,8 @@ adjust_field_type (type_p t, options_p opt)
}
-static void set_gc_used_type (type_p, enum gc_used_enum, type_p *);
+static void set_gc_used_type (type_p, enum gc_used_enum, type_p *,
+ bool = false);
static void set_gc_used (pair_p);
/* Handle OPT for set_gc_used_type. */
@@ -1427,9 +1470,31 @@ process_gc_options (options_p opt, enum gc_used_enum level, int *maybe_undef,
}
-/* Set the gc_used field of T to LEVEL, and handle the types it references. */
+/* Set the gc_used field of T to LEVEL, and handle the types it references.
+
+ If ALLOW_UNDEFINED_TYPES is true, types of kind TYPE_UNDEFINED
+ are set to GC_UNUSED. Otherwise, an error is emitted for
+ TYPE_UNDEFINED types. This is used to support user-defined
+ template types with non-type arguments.
+
+ For instance, when we parse a template type with enum arguments
+ (e.g. MyType<AnotherType, EnumValue>), the parser creates two
+ artificial fields for 'MyType', one for 'AnotherType', the other
+ one for 'EnumValue'.
+
+ At the time that we parse this type we don't know that 'EnumValue'
+ is really an enum value, so the parser creates a TYPE_UNDEFINED
+ type for it. Since 'EnumValue' is never resolved to a known
+ structure, it will stay with TYPE_UNDEFINED.
+
+ Since 'MyType' is a TYPE_USER_STRUCT, we can simply ignore
+ 'EnumValue'. Generating marking code for it would cause
+ compilation failures since the marking routines assume that
+ 'EnumValue' is a type. */
+
static void
-set_gc_used_type (type_p t, enum gc_used_enum level, type_p param[NUM_PARAM])
+set_gc_used_type (type_p t, enum gc_used_enum level, type_p param[NUM_PARAM],
+ bool allow_undefined_types)
{
if (t->gc_used >= level)
return;
@@ -1445,6 +1510,7 @@ set_gc_used_type (type_p t, enum gc_used_enum level, type_p param[NUM_PARAM])
pair_p f;
int dummy;
type_p dummy2;
+ bool allow_undefined_field_types = (t->kind == TYPE_USER_STRUCT);
process_gc_options (t->u.s.opt, level, &dummy, &dummy, &dummy, &dummy,
&dummy2);
@@ -1472,11 +1538,21 @@ set_gc_used_type (type_p t, enum gc_used_enum level, type_p param[NUM_PARAM])
else if (skip)
; /* target type is not used through this field */
else
- set_gc_used_type (f->type, GC_USED, pass_param ? param : NULL);
+ set_gc_used_type (f->type, GC_USED, pass_param ? param : NULL,
+ allow_undefined_field_types);
}
break;
}
+ case TYPE_UNDEFINED:
+ if (level > GC_UNUSED)
+ {
+ if (!allow_undefined_types)
+ error_at_line (&t->u.s.line, "undefined type `%s'", t->u.s.tag);
+ t->gc_used = GC_UNUSED;
+ }
+ break;
+
case TYPE_POINTER:
set_gc_used_type (t->u.p, GC_POINTED_TO, NULL);
break;
@@ -2397,7 +2473,7 @@ filter_type_name (const char *type_name)
size_t i;
char *s = xstrdup (type_name);
for (i = 0; i < strlen (s); i++)
- if (s[i] == '<' || s[i] == '>' || s[i] == ':')
+ if (s[i] == '<' || s[i] == '>' || s[i] == ':' || s[i] == ',')
s[i] = '_';
return s;
}
@@ -2417,6 +2493,7 @@ output_mangled_typename (outf_p of, const_type_p t)
switch (t->kind)
{
case TYPE_NONE:
+ case TYPE_UNDEFINED:
gcc_unreachable ();
break;
case TYPE_POINTER:
@@ -3042,7 +3119,8 @@ walk_type (type_p t, struct walk_type_data *d)
d->process_field (t, d);
break;
- default:
+ case TYPE_NONE:
+ case TYPE_UNDEFINED:
gcc_unreachable ();
}
}
@@ -3059,6 +3137,7 @@ write_types_process_field (type_p f, const struct walk_type_data *d)
switch (f->kind)
{
case TYPE_NONE:
+ case TYPE_UNDEFINED:
gcc_unreachable ();
case TYPE_POINTER:
oprintf (d->of, "%*s%s (%s%s", d->indent, "",
@@ -3265,7 +3344,6 @@ write_marker_function_name (outf_p of, type_p s, const char *prefix)
gcc_unreachable ();
}
-
/* Write on OF a user-callable routine to act as an entry point for
the marking routine for S, generated by write_func_for_structure.
PREFIX is the prefix to use to distinguish ggc and pch markers. */
@@ -3429,6 +3507,10 @@ write_func_for_structure (type_p orig_s, type_p s, type_p *param,
oprintf (d.of, " *)x_p;\n");
if (chain_next != NULL)
{
+ /* TYPE_USER_STRUCTs should not occur here. These structures
+ are completely handled by user code. */
+ gcc_assert (orig_s->kind != TYPE_USER_STRUCT);
+
oprintf (d.of, " ");
write_type_decl (d.of, s);
oprintf (d.of, " * xlimit = x;\n");
@@ -3760,7 +3842,9 @@ write_types_local_user_process_field (type_p f, const struct walk_type_data *d)
case TYPE_SCALAR:
break;
- default:
+ case TYPE_ARRAY:
+ case TYPE_NONE:
+ case TYPE_UNDEFINED:
gcc_unreachable ();
}
}
@@ -3843,7 +3927,9 @@ write_types_local_process_field (type_p f, const struct walk_type_data *d)
case TYPE_SCALAR:
break;
- default:
+ case TYPE_ARRAY:
+ case TYPE_NONE:
+ case TYPE_UNDEFINED:
gcc_unreachable ();
}
}
@@ -4063,6 +4149,9 @@ contains_scalar_p (type_p t)
return 0;
case TYPE_ARRAY:
return contains_scalar_p (t->u.a.p);
+ case TYPE_USER_STRUCT:
+ /* User-marked structures will typically contain pointers. */
+ return 0;
default:
/* Could also check for structures that have no non-pointer
fields, but there aren't enough of those to worry about. */
@@ -4313,8 +4402,9 @@ write_root (outf_p f, pair_p v, type_p type, const char *name, int has_length,
break;
case TYPE_USER_STRUCT:
- write_root (f, v, type->u.a.p, name, has_length, line, if_marked,
- emit_pch);
+ error_at_line (line, "`%s' must be a pointer type, because it is "
+ "a GC root and its type is marked with GTY((user))",
+ v->name);
break;
case TYPE_POINTER:
@@ -4384,7 +4474,11 @@ write_root (outf_p f, pair_p v, type_p type, const char *name, int has_length,
case TYPE_SCALAR:
break;
- default:
+ case TYPE_NONE:
+ case TYPE_UNDEFINED:
+ case TYPE_UNION:
+ case TYPE_LANG_STRUCT:
+ case TYPE_PARAM_STRUCT:
error_at_line (line, "global `%s' is unimplemented type", name);
}
}
@@ -4880,7 +4974,9 @@ output_typename (outf_p of, const_type_p t)
output_typename (of, t->u.param_struct.stru);
break;
}
- default:
+ case TYPE_NONE:
+ case TYPE_UNDEFINED:
+ case TYPE_ARRAY:
gcc_unreachable ();
}
}
@@ -4941,6 +5037,9 @@ dump_typekind (int indent, enum typekind kind)
case TYPE_STRUCT:
printf ("TYPE_STRUCT");
break;
+ case TYPE_UNDEFINED:
+ printf ("TYPE_UNDEFINED");
+ break;
case TYPE_USER_STRUCT:
printf ("TYPE_USER_STRUCT");
break;
diff --git a/gcc/gengtype.h b/gcc/gengtype.h
index e687e488567..57a67fbfecb 100644
--- a/gcc/gengtype.h
+++ b/gcc/gengtype.h
@@ -134,6 +134,9 @@ extern pair_p variables;
enum typekind {
TYPE_NONE=0, /* Never used, so zeroed memory is invalid. */
+ TYPE_UNDEFINED, /* We have not yet seen a definition for this type.
+ If a type is still undefined when generating code,
+ an error will be generated. */
TYPE_SCALAR, /* Scalar types like char. */
TYPE_STRING, /* The string type. */
TYPE_STRUCT, /* Type for GTY-ed structs. */
@@ -423,6 +426,7 @@ extern type_p resolve_typedef (const char *s, struct fileloc *pos);
extern type_p new_structure (const char *name, enum typekind kind,
struct fileloc *pos, pair_p fields,
options_p o);
+type_p create_user_defined_type (const char *, struct fileloc *);
extern type_p find_structure (const char *s, enum typekind kind);
extern type_p create_scalar_type (const char *name);
extern type_p create_pointer (type_p t);
@@ -457,7 +461,6 @@ enum gty_token
UNION,
STRUCT,
ENUM,
- VEC_TOKEN,
ELLIPSIS,
PTR_ALIAS,
NESTED_PTR,
diff --git a/gcc/genopinit.c b/gcc/genopinit.c
index e0ffc8f286c..077a721b2ea 100644
--- a/gcc/genopinit.c
+++ b/gcc/genopinit.c
@@ -144,10 +144,8 @@ typedef struct pattern_d
unsigned int sort_num;
} pattern;
-DEF_VEC_O(pattern);
-DEF_VEC_ALLOC_O(pattern, heap);
-static VEC(pattern, heap) *patterns;
+static vec<pattern> patterns;
static bool
match_pattern (pattern *p, const char *name, const char *pat)
@@ -265,7 +263,7 @@ gen_insn (rtx insn)
{
p.op = optabs[pindex].op;
p.sort_num = (p.op << 16) | (p.m2 << 8) | p.m1;
- VEC_safe_push (pattern, heap, patterns, p);
+ patterns.safe_push (p);
return;
}
}
@@ -359,7 +357,7 @@ main (int argc, char **argv)
}
/* Sort the collected patterns. */
- qsort (VEC_address (pattern, patterns), VEC_length (pattern, patterns),
+ qsort (patterns.address (), patterns.length (),
sizeof (pattern), pattern_cmp);
/* Now that we've handled the "extra" patterns, eliminate them from
@@ -399,7 +397,7 @@ main (int argc, char **argv)
fprintf (h_file, "#define NUM_NORMLIB_OPTABS %u\n",
last_kind[3] - last_kind[2]);
fprintf (h_file, "#define NUM_OPTAB_PATTERNS %u\n",
- (unsigned) VEC_length (pattern, patterns));
+ (unsigned) patterns.length ());
fprintf (s_file,
"#include \"config.h\"\n"
@@ -420,13 +418,13 @@ main (int argc, char **argv)
fprintf (s_file,
"static const struct optab_pat pats[NUM_OPTAB_PATTERNS] = {\n");
- for (i = 0; VEC_iterate (pattern, patterns, i, p); ++i)
+ for (i = 0; patterns.iterate (i, &p); ++i)
fprintf (s_file, " { %#08x, CODE_FOR_%s },\n", p->sort_num, p->name);
fprintf (s_file, "};\n\n");
fprintf (s_file, "void\ninit_all_optabs (void)\n{\n");
fprintf (s_file, " bool *ena = this_target_optabs->pat_enable;\n");
- for (i = 0; VEC_iterate (pattern, patterns, i, p); ++i)
+ for (i = 0; patterns.iterate (i, &p); ++i)
fprintf (s_file, " ena[%u] = HAVE_%s;\n", i, p->name);
fprintf (s_file, "}\n\n");
diff --git a/gcc/ggc-common.c b/gcc/ggc-common.c
index 6a997238629..a006909ef93 100644
--- a/gcc/ggc-common.c
+++ b/gcc/ggc-common.c
@@ -79,9 +79,7 @@ ggc_htab_delete (void **slot, void *info)
tables, for instance from some plugins; this vector is on the heap
since it is used by GGC internally. */
typedef const struct ggc_root_tab *const_ggc_root_tab_t;
-DEF_VEC_P(const_ggc_root_tab_t);
-DEF_VEC_ALLOC_P(const_ggc_root_tab_t, heap);
-static VEC(const_ggc_root_tab_t, heap) *extra_root_vec;
+static vec<const_ggc_root_tab_t> extra_root_vec;
/* Dynamically register a new GGC root table RT. This is useful for
plugins. */
@@ -90,7 +88,7 @@ void
ggc_register_root_tab (const struct ggc_root_tab* rt)
{
if (rt)
- VEC_safe_push (const_ggc_root_tab_t, heap, extra_root_vec, rt);
+ extra_root_vec.safe_push (rt);
}
/* This extra vector of dynamically registered cache_tab-s is used by
@@ -98,9 +96,7 @@ ggc_register_root_tab (const struct ggc_root_tab* rt)
tables, for instance from some plugins; this vector is on the heap
since it is used by GGC internally. */
typedef const struct ggc_cache_tab *const_ggc_cache_tab_t;
-DEF_VEC_P(const_ggc_cache_tab_t);
-DEF_VEC_ALLOC_P(const_ggc_cache_tab_t, heap);
-static VEC(const_ggc_cache_tab_t, heap) *extra_cache_vec;
+static vec<const_ggc_cache_tab_t> extra_cache_vec;
/* Dynamically register a new GGC cache table CT. This is useful for
plugins. */
@@ -109,7 +105,7 @@ void
ggc_register_cache_tab (const struct ggc_cache_tab* ct)
{
if (ct)
- VEC_safe_push (const_ggc_cache_tab_t, heap, extra_cache_vec, ct);
+ extra_cache_vec.safe_push (ct);
}
/* Scan a hash table that has objects which are to be deleted if they are not
@@ -160,7 +156,7 @@ ggc_mark_roots (void)
for (rt = gt_ggc_rtab; *rt; rt++)
ggc_mark_root_tab (*rt);
- FOR_EACH_VEC_ELT (const_ggc_root_tab_t, extra_root_vec, i, rtp)
+ FOR_EACH_VEC_ELT (extra_root_vec, i, rtp)
ggc_mark_root_tab (rtp);
if (ggc_protect_identifiers)
@@ -171,7 +167,7 @@ ggc_mark_roots (void)
for (ct = gt_ggc_cache_rtab; *ct; ct++)
ggc_scan_cache_tab (*ct);
- FOR_EACH_VEC_ELT (const_ggc_cache_tab_t, extra_cache_vec, i, ctp)
+ FOR_EACH_VEC_ELT (extra_cache_vec, i, ctp)
ggc_scan_cache_tab (ctp);
if (! ggc_protect_identifiers)
diff --git a/gcc/ggc.h b/gcc/ggc.h
index 5f25a580b18..37bbbd19a8b 100644
--- a/gcc/ggc.h
+++ b/gcc/ggc.h
@@ -50,7 +50,7 @@ typedef void (*gt_handle_reorder) (void *, void *, gt_pointer_operator,
/* Used by the gt_pch_n_* routines. Register an object in the hash table. */
extern int gt_pch_note_object (void *, void *, gt_note_pointers,
- enum gt_types_enum);
+ enum gt_types_enum = gt_types_enum_last);
/* Used by the gt_pch_n_* routines. Register that an object has a reorder
function. */
diff --git a/gcc/gimple-low.c b/gcc/gimple-low.c
index 8557c83c3c5..e26d91ef4b4 100644
--- a/gcc/gimple-low.c
+++ b/gcc/gimple-low.c
@@ -55,8 +55,6 @@ struct return_statements_t
};
typedef struct return_statements_t return_statements_t;
-DEF_VEC_O(return_statements_t);
-DEF_VEC_ALLOC_O(return_statements_t,heap);
struct lower_data
{
@@ -65,7 +63,7 @@ struct lower_data
/* A vector of label and return statements to be moved to the end
of the function. */
- VEC(return_statements_t,heap) *return_statements;
+ vec<return_statements_t> return_statements;
/* True if the current statement cannot fall through. */
bool cannot_fallthru;
@@ -105,7 +103,7 @@ lower_function_body (void)
BLOCK_SUBBLOCKS (data.block) = NULL_TREE;
BLOCK_CHAIN (data.block) = NULL_TREE;
TREE_ASM_WRITTEN (data.block) = 1;
- data.return_statements = VEC_alloc (return_statements_t, heap, 8);
+ data.return_statements.create (8);
bind = gimple_seq_first_stmt (body);
lowered_body = NULL;
@@ -119,9 +117,8 @@ lower_function_body (void)
If we've already got one in the return_statements vector, we don't
need to do anything special. Otherwise build one by hand. */
if (gimple_seq_may_fallthru (lowered_body)
- && (VEC_empty (return_statements_t, data.return_statements)
- || gimple_return_retval (VEC_last (return_statements_t,
- data.return_statements).stmt) != NULL))
+ && (data.return_statements.is_empty ()
+ || gimple_return_retval (data.return_statements.last().stmt) != NULL))
{
x = gimple_build_return (NULL);
gimple_set_location (x, cfun->function_end_locus);
@@ -131,18 +128,9 @@ lower_function_body (void)
/* If we lowered any return statements, emit the representative
at the end of the function. */
- while (!VEC_empty (return_statements_t, data.return_statements))
+ while (!data.return_statements.is_empty ())
{
- return_statements_t t;
-
- /* Unfortunately, we can't use VEC_pop because it returns void for
- objects. */
- t = VEC_last (return_statements_t, data.return_statements);
- VEC_truncate (return_statements_t,
- data.return_statements,
- VEC_length (return_statements_t,
- data.return_statements) - 1);
-
+ return_statements_t t = data.return_statements.pop ();
x = gimple_build_label (t.label);
gsi_insert_after (&i, x, GSI_CONTINUE_LINKING);
gsi_insert_after (&i, t.stmt, GSI_CONTINUE_LINKING);
@@ -185,7 +173,7 @@ lower_function_body (void)
= blocks_nreverse (BLOCK_SUBBLOCKS (data.block));
clear_block_marks (data.block);
- VEC_free(return_statements_t, heap, data.return_statements);
+ data.return_statements.release ();
return 0;
}
@@ -841,10 +829,10 @@ lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
return_statements_t tmp_rs;
/* Match this up with an existing return statement that's been created. */
- for (i = VEC_length (return_statements_t, data->return_statements) - 1;
+ for (i = data->return_statements.length () - 1;
i >= 0; i--)
{
- tmp_rs = VEC_index (return_statements_t, data->return_statements, i);
+ tmp_rs = data->return_statements[i];
if (gimple_return_retval (stmt) == gimple_return_retval (tmp_rs.stmt))
{
@@ -860,7 +848,7 @@ lower_gimple_return (gimple_stmt_iterator *gsi, struct lower_data *data)
/* Not found. Create a new label and record the return statement. */
tmp_rs.label = create_artificial_label (cfun->function_end_locus);
tmp_rs.stmt = stmt;
- VEC_safe_push (return_statements_t, heap, data->return_statements, tmp_rs);
+ data->return_statements.safe_push (tmp_rs);
/* Generate a goto statement and remove the return statement. */
found:
diff --git a/gcc/gimple-ssa-strength-reduction.c b/gcc/gimple-ssa-strength-reduction.c
index ad02589faee..8e2a24758dd 100644
--- a/gcc/gimple-ssa-strength-reduction.c
+++ b/gcc/gimple-ssa-strength-reduction.c
@@ -273,9 +273,7 @@ typedef struct incr_info_d incr_info, *incr_info_t;
/* Candidates are maintained in a vector. If candidate X dominates
candidate Y, then X appears before Y in the vector; but the
converse does not necessarily hold. */
-DEF_VEC_P (slsr_cand_t);
-DEF_VEC_ALLOC_P (slsr_cand_t, heap);
-static VEC (slsr_cand_t, heap) *cand_vec;
+static vec<slsr_cand_t> cand_vec;
enum cost_consts
{
@@ -310,7 +308,7 @@ static bool address_arithmetic_p;
static slsr_cand_t
lookup_cand (cand_idx idx)
{
- return VEC_index (slsr_cand_t, cand_vec, idx - 1);
+ return cand_vec[idx - 1];
}
/* Callback to produce a hash value for a candidate chain header. */
@@ -429,14 +427,14 @@ alloc_cand_and_find_basis (enum cand_kind kind, gimple gs, tree base,
c->index = index;
c->cand_type = ctype;
c->kind = kind;
- c->cand_num = VEC_length (slsr_cand_t, cand_vec) + 1;
+ c->cand_num = cand_vec.length () + 1;
c->next_interp = 0;
c->dependent = 0;
c->sibling = 0;
c->def_phi = NULL;
c->dead_savings = savings;
- VEC_safe_push (slsr_cand_t, heap, cand_vec, c);
+ cand_vec.safe_push (c);
c->basis = find_basis_for_candidate (c);
record_potential_basis (c);
@@ -1431,7 +1429,7 @@ dump_cand_vec (void)
fprintf (dump_file, "\nStrength reduction candidate vector:\n\n");
- FOR_EACH_VEC_ELT (slsr_cand_t, cand_vec, i, c)
+ FOR_EACH_VEC_ELT (cand_vec, i, c)
dump_candidate (c);
}
@@ -2538,7 +2536,7 @@ analyze_candidates_and_replace (void)
dependent is the root of a tree of related statements.
Analyze each tree to determine a subset of those
statements that can be replaced with maximum benefit. */
- FOR_EACH_VEC_ELT (slsr_cand_t, cand_vec, i, c)
+ FOR_EACH_VEC_ELT (cand_vec, i, c)
{
slsr_cand_t first_dep;
@@ -2621,7 +2619,7 @@ execute_strength_reduction (void)
gcc_obstack_init (&cand_obstack);
/* Allocate the candidate vector. */
- cand_vec = VEC_alloc (slsr_cand_t, heap, 128);
+ cand_vec.create (128);
/* Allocate the mapping from statements to candidate indices. */
stmt_cand_map = pointer_map_create ();
@@ -2665,7 +2663,7 @@ execute_strength_reduction (void)
htab_delete (base_cand_map);
obstack_free (&chain_obstack, NULL);
pointer_map_destroy (stmt_cand_map);
- VEC_free (slsr_cand_t, heap, cand_vec);
+ cand_vec.release ();
obstack_free (&cand_obstack, NULL);
return 0;
diff --git a/gcc/gimple-streamer-in.c b/gcc/gimple-streamer-in.c
index 0ad0fb1acfa..6736defcec5 100644
--- a/gcc/gimple-streamer-in.c
+++ b/gcc/gimple-streamer-in.c
@@ -42,7 +42,7 @@ input_phi (struct lto_input_block *ib, basic_block bb, struct data_in *data_in,
gimple result;
ix = streamer_read_uhwi (ib);
- phi_result = VEC_index (tree, SSANAMES (fn), ix);
+ phi_result = (*SSANAMES (fn))[ix];
len = EDGE_COUNT (bb->preds);
result = create_phi_node (phi_result, bb);
diff --git a/gcc/gimple.c b/gcc/gimple.c
index 481a4d9e477..5a53e0082c0 100644
--- a/gcc/gimple.c
+++ b/gcc/gimple.c
@@ -222,14 +222,14 @@ gimple_build_call_1 (tree fn, unsigned nargs)
specified in vector ARGS. */
gimple
-gimple_build_call_vec (tree fn, VEC(tree, heap) *args)
+gimple_build_call_vec (tree fn, vec<tree> args)
{
unsigned i;
- unsigned nargs = VEC_length (tree, args);
+ unsigned nargs = args.length ();
gimple call = gimple_build_call_1 (fn, nargs);
for (i = 0; i < nargs; i++)
- gimple_call_set_arg (call, i, VEC_index (tree, args, i));
+ gimple_call_set_arg (call, i, args[i]);
return call;
}
@@ -317,15 +317,15 @@ gimple_build_call_internal (enum internal_fn fn, unsigned nargs, ...)
specified in vector ARGS. */
gimple
-gimple_build_call_internal_vec (enum internal_fn fn, VEC(tree, heap) *args)
+gimple_build_call_internal_vec (enum internal_fn fn, vec<tree> args)
{
unsigned i, nargs;
gimple call;
- nargs = VEC_length (tree, args);
+ nargs = args.length ();
call = gimple_build_call_internal_1 (fn, nargs);
for (i = 0; i < nargs; i++)
- gimple_call_set_arg (call, i, VEC_index (tree, args, i));
+ gimple_call_set_arg (call, i, args[i]);
return call;
}
@@ -660,30 +660,30 @@ gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
LABELS is a vector of destination labels. */
gimple
-gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
- VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
- VEC(tree,gc)* labels)
+gimple_build_asm_vec (const char *string, vec<tree, va_gc> *inputs,
+ vec<tree, va_gc> *outputs, vec<tree, va_gc> *clobbers,
+ vec<tree, va_gc> *labels)
{
gimple p;
unsigned i;
p = gimple_build_asm_1 (string,
- VEC_length (tree, inputs),
- VEC_length (tree, outputs),
- VEC_length (tree, clobbers),
- VEC_length (tree, labels));
+ vec_safe_length (inputs),
+ vec_safe_length (outputs),
+ vec_safe_length (clobbers),
+ vec_safe_length (labels));
- for (i = 0; i < VEC_length (tree, inputs); i++)
- gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
+ for (i = 0; i < vec_safe_length (inputs); i++)
+ gimple_asm_set_input_op (p, i, (*inputs)[i]);
- for (i = 0; i < VEC_length (tree, outputs); i++)
- gimple_asm_set_output_op (p, i, VEC_index (tree, outputs, i));
+ for (i = 0; i < vec_safe_length (outputs); i++)
+ gimple_asm_set_output_op (p, i, (*outputs)[i]);
- for (i = 0; i < VEC_length (tree, clobbers); i++)
- gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
+ for (i = 0; i < vec_safe_length (clobbers); i++)
+ gimple_asm_set_clobber_op (p, i, (*clobbers)[i]);
- for (i = 0; i < VEC_length (tree, labels); i++)
- gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));
+ for (i = 0; i < vec_safe_length (labels); i++)
+ gimple_asm_set_label_op (p, i, (*labels)[i]);
return p;
}
@@ -819,15 +819,15 @@ gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
ARGS is a vector of labels excluding the default. */
gimple
-gimple_build_switch (tree index, tree default_label, VEC(tree, heap) *args)
+gimple_build_switch (tree index, tree default_label, vec<tree> args)
{
- unsigned i, nlabels = VEC_length (tree, args);
+ unsigned i, nlabels = args.length ();
gimple p = gimple_build_switch_nlabels (nlabels, index, default_label);
/* Copy the labels from the vector to the switch statement. */
for (i = 0; i < nlabels; i++)
- gimple_switch_set_label (p, i + 1, VEC_index (tree, args, i));
+ gimple_switch_set_label (p, i + 1, args[i]);
return p;
}
@@ -2038,18 +2038,17 @@ gimple_set_bb (gimple stmt, basic_block bb)
uid = LABEL_DECL_UID (t);
if (uid == -1)
{
- unsigned old_len = VEC_length (basic_block, label_to_block_map);
+ unsigned old_len = vec_safe_length (label_to_block_map);
LABEL_DECL_UID (t) = uid = cfun->cfg->last_label_uid++;
if (old_len <= (unsigned) uid)
{
unsigned new_len = 3 * uid / 2 + 1;
- VEC_safe_grow_cleared (basic_block, gc, label_to_block_map,
- new_len);
+ vec_safe_grow_cleared (label_to_block_map, new_len);
}
}
- VEC_replace (basic_block, label_to_block_map, uid, bb);
+ (*label_to_block_map)[uid] = bb;
}
}
@@ -2974,19 +2973,20 @@ gimple_call_copy_skip_args (gimple stmt, bitmap args_to_skip)
{
int i;
int nargs = gimple_call_num_args (stmt);
- VEC(tree, heap) *vargs = VEC_alloc (tree, heap, nargs);
+ vec<tree> vargs;
+ vargs.create (nargs);
gimple new_stmt;
for (i = 0; i < nargs; i++)
if (!bitmap_bit_p (args_to_skip, i))
- VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
+ vargs.quick_push (gimple_call_arg (stmt, i));
if (gimple_call_internal_p (stmt))
new_stmt = gimple_build_call_internal_vec (gimple_call_internal_fn (stmt),
vargs);
else
new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
- VEC_free (tree, heap, vargs);
+ vargs.release ();
if (gimple_call_lhs (stmt))
gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
diff --git a/gcc/gimple.h b/gcc/gimple.h
index e73fe0d8905..2d6cef4f4d6 100644
--- a/gcc/gimple.h
+++ b/gcc/gimple.h
@@ -24,8 +24,6 @@ along with GCC; see the file COPYING3. If not see
#include "pointer-set.h"
#include "vec.h"
-#include "vecprim.h"
-#include "vecir.h"
#include "ggc.h"
#include "basic-block.h"
#include "tree.h"
@@ -37,9 +35,7 @@ typedef gimple gimple_seq_node;
/* For each block, the PHI nodes that need to be rewritten are stored into
these vectors. */
-typedef VEC(gimple, heap) *gimple_vec;
-DEF_VEC_P (gimple_vec);
-DEF_VEC_ALLOC_P (gimple_vec, heap);
+typedef vec<gimple> gimple_vec;
enum gimple_code {
#define DEFGSCODE(SYM, STRING, STRUCT) SYM,
@@ -753,11 +749,11 @@ gimple gimple_build_debug_source_bind_stat (tree, tree, gimple MEM_STAT_DECL);
#define gimple_build_debug_source_bind(var,val,stmt) \
gimple_build_debug_source_bind_stat ((var), (val), (stmt) MEM_STAT_INFO)
-gimple gimple_build_call_vec (tree, VEC(tree, heap) *);
+gimple gimple_build_call_vec (tree, vec<tree> );
gimple gimple_build_call (tree, unsigned, ...);
gimple gimple_build_call_valist (tree, unsigned, va_list);
gimple gimple_build_call_internal (enum internal_fn, unsigned, ...);
-gimple gimple_build_call_internal_vec (enum internal_fn, VEC(tree, heap) *);
+gimple gimple_build_call_internal_vec (enum internal_fn, vec<tree> );
gimple gimple_build_call_from_tree (tree);
gimple gimplify_assign (tree, tree, gimple_seq *);
gimple gimple_build_cond (enum tree_code, tree, tree, tree, tree);
@@ -765,8 +761,9 @@ gimple gimple_build_label (tree label);
gimple gimple_build_goto (tree dest);
gimple gimple_build_nop (void);
gimple gimple_build_bind (tree, gimple_seq, tree);
-gimple gimple_build_asm_vec (const char *, VEC(tree,gc) *, VEC(tree,gc) *,
- VEC(tree,gc) *, VEC(tree,gc) *);
+gimple gimple_build_asm_vec (const char *, vec<tree, va_gc> *,
+ vec<tree, va_gc> *, vec<tree, va_gc> *,
+ vec<tree, va_gc> *);
gimple gimple_build_catch (tree, gimple_seq);
gimple gimple_build_eh_filter (tree, gimple_seq);
gimple gimple_build_eh_must_not_throw (tree);
@@ -776,7 +773,7 @@ gimple gimple_build_wce (gimple_seq);
gimple gimple_build_resx (int);
gimple gimple_build_eh_dispatch (int);
gimple gimple_build_switch_nlabels (unsigned, tree, tree);
-gimple gimple_build_switch (tree, tree, VEC(tree,heap) *);
+gimple gimple_build_switch (tree, tree, vec<tree> );
gimple gimple_build_omp_parallel (gimple_seq, tree, tree, tree);
gimple gimple_build_omp_task (gimple_seq, tree, tree, tree, tree, tree, tree);
gimple gimple_build_omp_for (gimple_seq, tree, size_t, gimple_seq);
@@ -795,8 +792,8 @@ gimple gimple_build_omp_atomic_store (tree);
gimple gimple_build_transaction (gimple_seq, tree);
gimple gimple_build_predict (enum br_predictor, enum prediction);
enum gimple_statement_structure_enum gss_for_assign (enum tree_code);
-void sort_case_labels (VEC(tree,heap) *);
-void preprocess_case_label_vec_for_gimple (VEC(tree,heap) *, tree, tree *);
+void sort_case_labels (vec<tree> );
+void preprocess_case_label_vec_for_gimple (vec<tree> , tree, tree *);
void gimple_set_body (tree, gimple_seq);
gimple_seq gimple_body (tree);
bool gimple_has_body_p (tree);
@@ -943,13 +940,13 @@ struct gimplify_ctx
{
struct gimplify_ctx *prev_context;
- VEC(gimple,heap) *bind_expr_stack;
+ vec<gimple> bind_expr_stack;
tree temps;
gimple_seq conditional_cleanups;
tree exit_label;
tree return_temp;
- VEC(tree,heap) *case_labels;
+ vec<tree> case_labels;
/* The formal temporary table. Should this be persistent? */
htab_t temp_htab;
@@ -991,7 +988,7 @@ extern void gimplify_and_add (tree, gimple_seq *);
/* Miscellaneous helpers. */
extern void gimple_add_tmp_var (tree);
extern gimple gimple_current_bind_expr (void);
-extern VEC(gimple, heap) *gimple_bind_expr_stack (void);
+extern vec<gimple> gimple_bind_expr_stack (void);
extern tree voidify_wrapper_expr (tree, tree);
extern tree build_and_jump (tree *);
extern tree force_labels_r (tree *, int *, void *);
diff --git a/gcc/gimplify.c b/gcc/gimplify.c
index 0919d9ff76a..8d555f833b9 100644
--- a/gcc/gimplify.c
+++ b/gcc/gimplify.c
@@ -231,9 +231,10 @@ pop_gimplify_context (gimple body)
{
struct gimplify_ctx *c = gimplify_ctxp;
- gcc_assert (c && (c->bind_expr_stack == NULL
- || VEC_empty (gimple, c->bind_expr_stack)));
- VEC_free (gimple, heap, c->bind_expr_stack);
+ gcc_assert (c
+ && (!c->bind_expr_stack.exists ()
+ || c->bind_expr_stack.is_empty ()));
+ c->bind_expr_stack.release ();
gimplify_ctxp = c->prev_context;
if (body)
@@ -250,9 +251,8 @@ pop_gimplify_context (gimple body)
static void
gimple_push_bind_expr (gimple gimple_bind)
{
- if (gimplify_ctxp->bind_expr_stack == NULL)
- gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
- VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
+ gimplify_ctxp->bind_expr_stack.reserve (8);
+ gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
}
/* Pop the first element off the stack of bindings. */
@@ -260,7 +260,7 @@ gimple_push_bind_expr (gimple gimple_bind)
static void
gimple_pop_bind_expr (void)
{
- VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
+ gimplify_ctxp->bind_expr_stack.pop ();
}
/* Return the first element of the stack of bindings. */
@@ -268,12 +268,12 @@ gimple_pop_bind_expr (void)
gimple
gimple_current_bind_expr (void)
{
- return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
+ return gimplify_ctxp->bind_expr_stack.last ();
}
/* Return the stack of bindings created during gimplification. */
-VEC(gimple, heap) *
+vec<gimple>
gimple_bind_expr_stack (void)
{
return gimplify_ctxp->bind_expr_stack;
@@ -1258,7 +1258,8 @@ gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
&& !is_gimple_reg (t)
&& flag_stack_reuse != SR_NONE)
{
- tree clobber = build_constructor (TREE_TYPE (t), NULL);
+ tree clobber = build_constructor (TREE_TYPE (t),
+ NULL);
TREE_THIS_VOLATILE (clobber) = 1;
gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
}
@@ -1569,9 +1570,9 @@ compare_case_labels (const void *p1, const void *p2)
/* Sort the case labels in LABEL_VEC in place in ascending order. */
void
-sort_case_labels (VEC(tree,heap)* label_vec)
+sort_case_labels (vec<tree> label_vec)
{
- VEC_qsort (tree, label_vec, compare_case_labels);
+ label_vec.qsort (compare_case_labels);
}
/* Prepare a vector of case labels to be used in a GIMPLE_SWITCH statement.
@@ -1594,7 +1595,7 @@ sort_case_labels (VEC(tree,heap)* label_vec)
found or not. */
void
-preprocess_case_label_vec_for_gimple (VEC(tree,heap) *labels,
+preprocess_case_label_vec_for_gimple (vec<tree> labels,
tree index_type,
tree *default_casep)
{
@@ -1605,9 +1606,9 @@ preprocess_case_label_vec_for_gimple (VEC(tree,heap) *labels,
i = 0;
min_value = TYPE_MIN_VALUE (index_type);
max_value = TYPE_MAX_VALUE (index_type);
- while (i < VEC_length (tree, labels))
+ while (i < labels.length ())
{
- tree elt = VEC_index (tree, labels, i);
+ tree elt = labels[i];
tree low = CASE_LOW (elt);
tree high = CASE_HIGH (elt);
bool remove_element = FALSE;
@@ -1696,13 +1697,13 @@ preprocess_case_label_vec_for_gimple (VEC(tree,heap) *labels,
}
if (remove_element)
- VEC_ordered_remove (tree, labels, i);
+ labels.ordered_remove (i);
else
i++;
}
len = i;
- if (!VEC_empty (tree, labels))
+ if (!labels.is_empty ())
sort_case_labels (labels);
if (default_casep && !default_case)
@@ -1714,20 +1715,20 @@ preprocess_case_label_vec_for_gimple (VEC(tree,heap) *labels,
if (len
&& TYPE_MIN_VALUE (index_type)
&& TYPE_MAX_VALUE (index_type)
- && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
+ && tree_int_cst_equal (CASE_LOW (labels[0]),
TYPE_MIN_VALUE (index_type)))
{
- tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
+ tree low, high = CASE_HIGH (labels[len - 1]);
if (!high)
- high = CASE_LOW (VEC_index (tree, labels, len - 1));
+ high = CASE_LOW (labels[len - 1]);
if (tree_int_cst_equal (high, TYPE_MAX_VALUE (index_type)))
{
for (i = 1; i < len; i++)
{
- high = CASE_LOW (VEC_index (tree, labels, i));
- low = CASE_HIGH (VEC_index (tree, labels, i - 1));
+ high = CASE_LOW (labels[i]);
+ low = CASE_HIGH (labels[i - 1]);
if (!low)
- low = CASE_LOW (VEC_index (tree, labels, i - 1));
+ low = CASE_LOW (labels[i - 1]);
if ((TREE_INT_CST_LOW (low) + 1
!= TREE_INT_CST_LOW (high))
|| (TREE_INT_CST_HIGH (low)
@@ -1737,7 +1738,7 @@ preprocess_case_label_vec_for_gimple (VEC(tree,heap) *labels,
}
if (i == len)
{
- tree label = CASE_LABEL (VEC_index (tree, labels, 0));
+ tree label = CASE_LABEL (labels[0]);
default_case = build_case_label (NULL_TREE, NULL_TREE,
label);
}
@@ -1769,8 +1770,8 @@ gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
if (SWITCH_BODY (switch_expr))
{
- VEC (tree,heap) *labels;
- VEC (tree,heap) *saved_labels;
+ vec<tree> labels;
+ vec<tree> saved_labels;
tree default_case = NULL_TREE;
gimple gimple_switch;
@@ -1781,7 +1782,7 @@ gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
/* Save old labels, get new ones from body, then restore the old
labels. Save all the things from the switch body to append after. */
saved_labels = gimplify_ctxp->case_labels;
- gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
+ gimplify_ctxp->case_labels.create (8);
gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
labels = gimplify_ctxp->case_labels;
@@ -1805,7 +1806,7 @@ gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
default_case, labels);
gimplify_seq_add_stmt (pre_p, gimple_switch);
gimplify_seq_add_seq (pre_p, switch_body_seq);
- VEC_free(tree, heap, labels);
+ labels.release ();
}
else
gcc_assert (SWITCH_LABELS (switch_expr));
@@ -1825,11 +1826,11 @@ gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
#pragma omp parallel. At least in the C front end, we don't
detect such invalid branches until after gimplification. */
for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
- if (ctxp->case_labels)
+ if (ctxp->case_labels.exists ())
break;
gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
- VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
+ ctxp->case_labels.safe_push (*expr_p);
gimplify_seq_add_stmt (pre_p, gimple_label);
return GS_ALL_DONE;
@@ -2129,7 +2130,7 @@ gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
fallback_t fallback)
{
tree *p;
- VEC(tree,heap) *expr_stack;
+ vec<tree> expr_stack;
enum gimplify_status ret = GS_ALL_DONE, tret;
int i;
location_t loc = EXPR_LOCATION (*expr_p);
@@ -2137,7 +2138,7 @@ gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
/* Create a stack of the subexpressions so later we can walk them in
order from inner to outer. */
- expr_stack = VEC_alloc (tree, heap, 10);
+ expr_stack.create (10);
/* We can handle anything that get_inner_reference can deal with. */
for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
@@ -2157,10 +2158,10 @@ gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
else
break;
- VEC_safe_push (tree, heap, expr_stack, *p);
+ expr_stack.safe_push (*p);
}
- gcc_assert (VEC_length (tree, expr_stack));
+ gcc_assert (expr_stack.length ());
/* Now EXPR_STACK is a stack of pointers to all the refs we've
walked through and P points to the innermost expression.
@@ -2174,9 +2175,9 @@ gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
So we do this in three steps. First we deal with the annotations
for any variables in the components, then we gimplify the base,
then we gimplify any indices, from left to right. */
- for (i = VEC_length (tree, expr_stack) - 1; i >= 0; i--)
+ for (i = expr_stack.length () - 1; i >= 0; i--)
{
- tree t = VEC_index (tree, expr_stack, i);
+ tree t = expr_stack[i];
if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
{
@@ -2269,9 +2270,9 @@ gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
/* And finally, the indices and operands of ARRAY_REF. During this
loop we also remove any useless conversions. */
- for (; VEC_length (tree, expr_stack) > 0; )
+ for (; expr_stack.length () > 0; )
{
- tree t = VEC_pop (tree, expr_stack);
+ tree t = expr_stack.pop ();
if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
{
@@ -2299,7 +2300,7 @@ gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
canonicalize_component_ref (expr_p);
}
- VEC_free (tree, heap, expr_stack);
+ expr_stack.release ();
gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
@@ -3461,7 +3462,7 @@ gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
from = TREE_OPERAND (from, 0);
gcc_assert (TREE_CODE (from) == CONSTRUCTOR
- && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
+ && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
/* Now proceed. */
to = TREE_OPERAND (*expr_p, 0);
@@ -3572,9 +3573,9 @@ gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
{
unsigned HOST_WIDE_INT ix;
constructor_elt *ce;
- VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
+ vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
- FOR_EACH_VEC_ELT (constructor_elt, v, ix, ce)
+ FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
return;
@@ -3634,7 +3635,7 @@ gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
Note that we never have to deal with SAVE_EXPRs here, because this has
already been taken care of for us, in gimplify_init_ctor_preeval(). */
-static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
+static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
gimple_seq *, bool);
static void
@@ -3720,7 +3721,7 @@ zero_sized_type (const_tree type)
zeroed first. */
static void
-gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
+gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
gimple_seq *pre_p, bool cleared)
{
tree array_elt_type = NULL;
@@ -3876,12 +3877,12 @@ static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
tree ctor = orig_ctor;
- VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
- unsigned int idx, num = VEC_length (constructor_elt, elts);
+ vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
+ unsigned int idx, num = vec_safe_length (elts);
for (idx = 0; idx < num; idx++)
{
- tree value = VEC_index (constructor_elt, elts, idx).value;
+ tree value = (*elts)[idx].value;
tree newval = value;
if (TREE_CODE (value) == CONSTRUCTOR)
newval = optimize_compound_literals_in_ctor (value);
@@ -3903,10 +3904,10 @@ optimize_compound_literals_in_ctor (tree orig_ctor)
if (ctor == orig_ctor)
{
ctor = copy_node (orig_ctor);
- CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
+ CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
elts = CONSTRUCTOR_ELTS (ctor);
}
- VEC_index (constructor_elt, elts, idx).value = newval;
+ (*elts)[idx].value = newval;
}
return ctor;
}
@@ -3930,7 +3931,7 @@ gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
{
tree object, ctor, type;
enum gimplify_status ret;
- VEC(constructor_elt,gc) *elts;
+ vec<constructor_elt, va_gc> *elts;
gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
@@ -3963,7 +3964,7 @@ gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
/* Aggregate types must lower constructors to initialization of
individual elements. The exception is that a CONSTRUCTOR node
with no elements indicates zero-initialization of the whole. */
- if (VEC_empty (constructor_elt, elts))
+ if (vec_safe_is_empty (elts))
{
if (notify_temp_creation)
return GS_OK;
@@ -4099,7 +4100,7 @@ gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
if (TREE_THIS_VOLATILE (object)
&& !TREE_ADDRESSABLE (type)
&& num_nonzero_elements > 0
- && VEC_length (constructor_elt, elts) > 1)
+ && vec_safe_length (elts) > 1)
{
tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
TREE_OPERAND (*expr_p, 0) = temp;
@@ -4156,9 +4157,9 @@ gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
return GS_OK;
/* Extract the real and imaginary parts out of the ctor. */
- gcc_assert (VEC_length (constructor_elt, elts) == 2);
- r = VEC_index (constructor_elt, elts, 0).value;
- i = VEC_index (constructor_elt, elts, 1).value;
+ gcc_assert (elts->length () == 2);
+ r = (*elts)[0].value;
+ i = (*elts)[1].value;
if (r == NULL || i == NULL)
{
tree zero = build_zero_cst (TREE_TYPE (type));
@@ -4230,7 +4231,7 @@ gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
/* Vector types use CONSTRUCTOR all the way through gimple
compilation as a general initializer. */
- FOR_EACH_VEC_ELT (constructor_elt, elts, ix, ce)
+ FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
{
enum gimplify_status tret;
tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
@@ -5223,17 +5224,20 @@ gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
bool allows_mem, allows_reg, is_inout;
enum gimplify_status ret, tret;
gimple stmt;
- VEC(tree, gc) *inputs;
- VEC(tree, gc) *outputs;
- VEC(tree, gc) *clobbers;
- VEC(tree, gc) *labels;
+ vec<tree, va_gc> *inputs;
+ vec<tree, va_gc> *outputs;
+ vec<tree, va_gc> *clobbers;
+ vec<tree, va_gc> *labels;
tree link_next;
expr = *expr_p;
noutputs = list_length (ASM_OUTPUTS (expr));
oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
- inputs = outputs = clobbers = labels = NULL;
+ inputs = NULL;
+ outputs = NULL;
+ clobbers = NULL;
+ labels = NULL;
ret = GS_ALL_DONE;
link_next = NULL_TREE;
@@ -5271,7 +5275,7 @@ gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
ret = tret;
}
- VEC_safe_push (tree, gc, outputs, link);
+ vec_safe_push (outputs, link);
TREE_CHAIN (link) = NULL_TREE;
if (is_inout)
@@ -5418,14 +5422,14 @@ gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
}
TREE_CHAIN (link) = NULL_TREE;
- VEC_safe_push (tree, gc, inputs, link);
+ vec_safe_push (inputs, link);
}
for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
- VEC_safe_push (tree, gc, clobbers, link);
+ vec_safe_push (clobbers, link);
for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
- VEC_safe_push (tree, gc, labels, link);
+ vec_safe_push (labels, link);
/* Do not add ASMs with errors to the gimple IL stream. */
if (ret != GS_ERROR)
@@ -5658,7 +5662,8 @@ gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
&& needs_to_live_in_memory (temp)
&& flag_stack_reuse == SR_ALL)
{
- tree clobber = build_constructor (TREE_TYPE (temp), NULL);
+ tree clobber = build_constructor (TREE_TYPE (temp),
+ NULL);
TREE_THIS_VOLATILE (clobber) = true;
clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
if (cleanup)
@@ -8254,38 +8259,36 @@ gimplify_body (tree fndecl, bool do_parms)
}
typedef char *char_p; /* For DEF_VEC_P. */
-DEF_VEC_P(char_p);
-DEF_VEC_ALLOC_P(char_p,heap);
/* Return whether we should exclude FNDECL from instrumentation. */
static bool
flag_instrument_functions_exclude_p (tree fndecl)
{
- VEC(char_p,heap) *vec;
+ vec<char_p> *v;
- vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_functions;
- if (VEC_length (char_p, vec) > 0)
+ v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
+ if (v && v->length () > 0)
{
const char *name;
int i;
char *s;
name = lang_hooks.decl_printable_name (fndecl, 0);
- FOR_EACH_VEC_ELT (char_p, vec, i, s)
+ FOR_EACH_VEC_ELT (*v, i, s)
if (strstr (name, s) != NULL)
return true;
}
- vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_files;
- if (VEC_length (char_p, vec) > 0)
+ v = (vec<char_p> *) flag_instrument_functions_exclude_files;
+ if (v && v->length () > 0)
{
const char *name;
int i;
char *s;
name = DECL_SOURCE_FILE (fndecl);
- FOR_EACH_VEC_ELT (char_p, vec, i, s)
+ FOR_EACH_VEC_ELT (*v, i, s)
if (strstr (name, s) != NULL)
return true;
}
diff --git a/gcc/go/ChangeLog b/gcc/go/ChangeLog
index f9bb9103b48..9a902475148 100644
--- a/gcc/go/ChangeLog
+++ b/gcc/go/ChangeLog
@@ -1,3 +1,9 @@
+2012-11-17 Diego Novillo <dnovillo@google.com>
+
+ Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
+
+ * go-lang.c: Use new vec API in vec.h.
+
2012-11-16 Ian Lance Taylor <iant@google.com>
* Make-lang.in (gccgo$(exeext)): Add + at start of command.
diff --git a/gcc/go/go-lang.c b/gcc/go/go-lang.c
index f5229bbc451..8e7b8ba09d4 100644
--- a/gcc/go/go-lang.c
+++ b/gcc/go/go-lang.c
@@ -157,16 +157,14 @@ go_langhook_init_options_struct (struct gcc_options *opts)
opts->x_flag_non_call_exceptions = 1;
}
-/* Infrastructure for a VEC of char * pointers. */
+/* Infrastructure for a vector of char * pointers. */
typedef const char *go_char_p;
-DEF_VEC_P(go_char_p);
-DEF_VEC_ALLOC_P(go_char_p, heap);
/* The list of directories to search after all the Go specific
directories have been searched. */
-static VEC(go_char_p, heap) *go_search_dirs;
+static vec<go_char_p> go_search_dirs;
/* Handle Go specific options. Return 0 if we didn't do anything. */
@@ -222,7 +220,7 @@ go_langhook_handle_option (
/* Search ARG too, but only after we've searched to Go
specific directories for all -L arguments. */
- VEC_safe_push (go_char_p, heap, go_search_dirs, arg);
+ go_search_dirs.safe_push (arg);
}
break;
@@ -264,10 +262,9 @@ go_langhook_post_options (const char **pfilename ATTRIBUTE_UNUSED)
gcc_assert (num_in_fnames > 0);
- FOR_EACH_VEC_ELT (go_char_p, go_search_dirs, ix, dir)
+ FOR_EACH_VEC_ELT (go_search_dirs, ix, dir)
go_add_search_path (dir);
- VEC_free (go_char_p, heap, go_search_dirs);
- go_search_dirs = NULL;
+ go_search_dirs.release ();
if (flag_excess_precision_cmdline == EXCESS_PRECISION_DEFAULT)
flag_excess_precision_cmdline = EXCESS_PRECISION_STANDARD;
diff --git a/gcc/godump.c b/gcc/godump.c
index ab1edc620f9..22995fe5d91 100644
--- a/gcc/godump.c
+++ b/gcc/godump.c
@@ -56,7 +56,7 @@ static FILE *go_dump_file;
/* A queue of decls to output. */
-static GTY(()) VEC(tree,gc) *queue;
+static GTY(()) vec<tree, va_gc> *queue;
/* A hash table of macros we have seen. */
@@ -480,7 +480,7 @@ go_decl (tree decl)
|| DECL_IS_BUILTIN (decl)
|| DECL_NAME (decl) == NULL_TREE)
return;
- VEC_safe_push (tree, gc, queue, decl);
+ vec_safe_push (queue, decl);
}
/* A function decl. */
@@ -515,7 +515,7 @@ go_type_decl (tree decl, int local)
|| TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) != IDENTIFIER_NODE)
&& TREE_CODE (TREE_TYPE (decl)) != ENUMERAL_TYPE)
return;
- VEC_safe_push (tree, gc, queue, decl);
+ vec_safe_push (queue, decl);
}
/* A container for the data we pass around when generating information
@@ -1194,7 +1194,7 @@ go_finish (const char *filename)
keyword_hash_init (&container);
- FOR_EACH_VEC_ELT (tree, queue, ix, decl)
+ FOR_EACH_VEC_SAFE_ELT (queue, ix, decl)
{
switch (TREE_CODE (decl))
{
@@ -1228,7 +1228,7 @@ go_finish (const char *filename)
htab_delete (container.keyword_hash);
obstack_free (&container.type_obstack, NULL);
- queue = NULL;
+ vec_free (queue);
if (fclose (go_dump_file) != 0)
error ("could not close Go dump file: %m");
diff --git a/gcc/graph.c b/gcc/graph.c
index f9319f46b31..847aac2dc1a 100644
--- a/gcc/graph.c
+++ b/gcc/graph.c
@@ -238,7 +238,7 @@ print_rtl_graph_with_bb (const char *base, rtx rtx_first)
char *buf = XALLOCAVEC (char, namelen + extlen);
FILE *fp;
- if (basic_block_info == NULL)
+ if (!basic_block_info)
return;
memcpy (buf, base, namelen);
diff --git a/gcc/graphds.c b/gcc/graphds.c
index 4ee71dff904..1614b15265c 100644
--- a/gcc/graphds.c
+++ b/gcc/graphds.c
@@ -24,7 +24,6 @@ along with GCC; see the file COPYING3. If not see
#include "obstack.h"
#include "bitmap.h"
#include "vec.h"
-#include "vecprim.h"
#include "graphds.h"
/* Dumps graph G into F. */
@@ -187,7 +186,7 @@ dfs_next_edge (struct graph_edge *e, bool forward, bitmap subgraph)
of the graph (number of the restarts of DFS). */
int
-graphds_dfs (struct graph *g, int *qs, int nq, VEC (int, heap) **qt,
+graphds_dfs (struct graph *g, int *qs, int nq, vec<int> *qt,
bool forward, bitmap subgraph)
{
int i, tick = 0, v, comp = 0, top;
@@ -236,7 +235,7 @@ graphds_dfs (struct graph *g, int *qs, int nq, VEC (int, heap) **qt,
if (!e)
{
if (qt)
- VEC_safe_push (int, heap, *qt, v);
+ qt->safe_push (v);
g->vertices[v].post = tick++;
if (!top)
@@ -275,7 +274,7 @@ int
graphds_scc (struct graph *g, bitmap subgraph)
{
int *queue = XNEWVEC (int, g->n_vertices);
- VEC (int, heap) *postorder = NULL;
+ vec<int> postorder = vec<int>();
int nq, i, comp;
unsigned v;
bitmap_iterator bi;
@@ -296,14 +295,14 @@ graphds_scc (struct graph *g, bitmap subgraph)
}
graphds_dfs (g, queue, nq, &postorder, false, subgraph);
- gcc_assert (VEC_length (int, postorder) == (unsigned) nq);
+ gcc_assert (postorder.length () == (unsigned) nq);
for (i = 0; i < nq; i++)
- queue[i] = VEC_index (int, postorder, nq - i - 1);
+ queue[i] = postorder[nq - i - 1];
comp = graphds_dfs (g, queue, nq, NULL, true, subgraph);
free (queue);
- VEC_free (int, heap, postorder);
+ postorder.release ();
return comp;
}
@@ -401,7 +400,7 @@ void
graphds_domtree (struct graph *g, int entry,
int *parent, int *son, int *brother)
{
- VEC (int, heap) *postorder = NULL;
+ vec<int> postorder = vec<int>();
int *marks = XCNEWVEC (int, g->n_vertices);
int mark = 1, i, v, idom;
bool changed = true;
@@ -432,8 +431,8 @@ graphds_domtree (struct graph *g, int entry,
brother[i] = -1;
}
graphds_dfs (g, &entry, 1, &postorder, true, NULL);
- gcc_assert (VEC_length (int, postorder) == (unsigned) g->n_vertices);
- gcc_assert (VEC_index (int, postorder, g->n_vertices - 1) == entry);
+ gcc_assert (postorder.length () == (unsigned) g->n_vertices);
+ gcc_assert (postorder[g->n_vertices - 1] == entry);
while (changed)
{
@@ -441,7 +440,7 @@ graphds_domtree (struct graph *g, int entry,
for (i = g->n_vertices - 2; i >= 0; i--)
{
- v = VEC_index (int, postorder, i);
+ v = postorder[i];
idom = -1;
for (e = g->vertices[v].pred; e; e = e->pred_next)
{
@@ -461,7 +460,7 @@ graphds_domtree (struct graph *g, int entry,
}
free (marks);
- VEC_free (int, heap, postorder);
+ postorder.release ();
for (i = 0; i < g->n_vertices; i++)
if (parent[i] != -1)
diff --git a/gcc/graphds.h b/gcc/graphds.h
index bc35479461f..a2afc29c406 100644
--- a/gcc/graphds.h
+++ b/gcc/graphds.h
@@ -55,7 +55,7 @@ void dump_graph (FILE *, struct graph *);
struct graph_edge *add_edge (struct graph *, int, int);
void identify_vertices (struct graph *, int, int);
int graphds_dfs (struct graph *, int *, int,
- VEC (int, heap) **, bool, bitmap);
+ vec<int> *, bool, bitmap);
int graphds_scc (struct graph *, bitmap);
void graphds_domtree (struct graph *, int, int *, int *, int *);
typedef void (*graphds_edge_callback) (struct graph *, struct graph_edge *);
diff --git a/gcc/graphite-blocking.c b/gcc/graphite-blocking.c
index a9ed9f96480..61e6c76af48 100644
--- a/gcc/graphite-blocking.c
+++ b/gcc/graphite-blocking.c
@@ -175,7 +175,7 @@ lst_do_strip_mine_loop (lst_p lst, int depth, int stride)
{
int res = 0;
- FOR_EACH_VEC_ELT (lst_p, LST_SEQ (lst), i, l)
+ FOR_EACH_VEC_ELT (LST_SEQ (lst), i, l)
res += lst_do_strip_mine_loop (l, depth, stride);
return res;
@@ -217,7 +217,7 @@ lst_do_strip_mine (lst_p lst, int stride)
|| !LST_LOOP_P (lst))
return false;
- FOR_EACH_VEC_ELT (lst_p, LST_SEQ (lst), i, l)
+ FOR_EACH_VEC_ELT (LST_SEQ (lst), i, l)
res += lst_do_strip_mine (l, stride);
depth = lst_depth (lst);
diff --git a/gcc/graphite-clast-to-gimple.c b/gcc/graphite-clast-to-gimple.c
index 90ca682f9c6..43cddd54030 100644
--- a/gcc/graphite-clast-to-gimple.c
+++ b/gcc/graphite-clast-to-gimple.c
@@ -287,7 +287,7 @@ eq_clast_name_indexes (const void *e1, const void *e2)
parameter in PARAMS. */
typedef struct ivs_params {
- VEC (tree, heap) *params, **newivs;
+ vec<tree> params, *newivs;
htab_t newivs_index, params_index;
sese region;
} *ivs_params_p;
@@ -300,19 +300,19 @@ clast_name_to_gcc (struct clast_name *name, ivs_params_p ip)
{
int index;
- if (ip->params && ip->params_index)
+ if (ip->params.exists () && ip->params_index)
{
index = clast_name_to_index (name, ip->params_index);
if (index >= 0)
- return VEC_index (tree, ip->params, index);
+ return ip->params[index];
}
- gcc_assert (*(ip->newivs) && ip->newivs_index);
+ gcc_assert (ip->newivs && ip->newivs_index);
index = clast_name_to_index (name, ip->newivs_index);
gcc_assert (index >= 0);
- return VEC_index (tree, *(ip->newivs), index);
+ return (*ip->newivs)[index];
}
/* Returns the maximal precision type for expressions TYPE1 and TYPE2. */
@@ -699,12 +699,12 @@ type_for_clast_name (struct clast_name *name, ivs_params_p ip, mpz_t bound_one,
{
bool found = false;
- if (ip->params && ip->params_index)
+ if (ip->params.exists () && ip->params_index)
found = clast_name_to_lb_ub (name, ip->params_index, bound_one, bound_two);
if (!found)
{
- gcc_assert (*(ip->newivs) && ip->newivs_index);
+ gcc_assert (ip->newivs && ip->newivs_index);
found = clast_name_to_lb_ub (name, ip->newivs_index, bound_one,
bound_two);
gcc_assert (found);
@@ -953,10 +953,10 @@ graphite_create_new_loop (edge entry_edge, struct clast_for *stmt,
mpz_init (up);
compute_bounds_for_loop (stmt, low, up);
save_clast_name_index (ip->newivs_index, stmt->iterator,
- VEC_length (tree, *(ip->newivs)), level, low, up);
+ (*ip->newivs).length (), level, low, up);
mpz_clear (low);
mpz_clear (up);
- VEC_safe_push (tree, heap, *(ip->newivs), iv);
+ (*ip->newivs).safe_push (iv);
return loop;
}
@@ -964,7 +964,7 @@ graphite_create_new_loop (edge entry_edge, struct clast_for *stmt,
induction variables of the loops around GBB in SESE. */
static void
-build_iv_mapping (VEC (tree, heap) *iv_map, struct clast_user_stmt *user_stmt,
+build_iv_mapping (vec<tree> iv_map, struct clast_user_stmt *user_stmt,
ivs_params_p ip)
{
struct clast_stmt *t;
@@ -985,7 +985,7 @@ build_iv_mapping (VEC (tree, heap) *iv_map, struct clast_user_stmt *user_stmt,
tree new_name = clast_to_gcc_expression (type, expr, ip);
loop_p old_loop = gbb_loop_at_index (gbb, ip->region, depth);
- VEC_replace (tree, iv_map, old_loop->num, new_name);
+ iv_map[old_loop->num] = new_name;
}
mpz_clear (bound_one);
@@ -1045,7 +1045,7 @@ find_pbb_via_hash (htab_t bb_pbb_mapping, basic_block bb)
scop_p
get_loop_body_pbbs (loop_p loop, htab_t bb_pbb_mapping,
- VEC (poly_bb_p, heap) **pbbs)
+ vec<poly_bb_p> *pbbs)
{
unsigned i;
basic_block *bbs = get_loop_body_in_dom_order (loop);
@@ -1059,7 +1059,7 @@ get_loop_body_pbbs (loop_p loop, htab_t bb_pbb_mapping,
continue;
scop = PBB_SCOP (pbb);
- VEC_safe_push (poly_bb_p, heap, *pbbs, pbb);
+ (*pbbs).safe_push (pbb);
}
free (bbs);
@@ -1080,20 +1080,20 @@ translate_clast_user (struct clast_user_stmt *stmt, edge next_e,
basic_block new_bb;
poly_bb_p pbb = (poly_bb_p) stmt->statement->usr;
gimple_bb_p gbb = PBB_BLACK_BOX (pbb);
- VEC (tree, heap) *iv_map;
+ vec<tree> iv_map;
if (GBB_BB (gbb) == ENTRY_BLOCK_PTR)
return next_e;
nb_loops = number_of_loops ();
- iv_map = VEC_alloc (tree, heap, nb_loops);
+ iv_map.create (nb_loops);
for (i = 0; i < nb_loops; i++)
- VEC_quick_push (tree, iv_map, NULL_TREE);
+ iv_map.quick_push (NULL_TREE);
build_iv_mapping (iv_map, stmt, ip);
next_e = copy_bb_and_scalar_dependences (GBB_BB (gbb), ip->region,
next_e, iv_map, &gloog_error);
- VEC_free (tree, heap, iv_map);
+ iv_map.release ();
new_bb = next_e->src;
mark_bb_with_pbb (pbb, new_bb, bb_pbb_mapping);
@@ -1226,9 +1226,9 @@ translate_clast_assignment (struct clast_assignment *stmt, edge next_e,
}
save_clast_name_index (ip->newivs_index, stmt->LHS,
- VEC_length (tree, *(ip->newivs)), level,
+ (*ip->newivs).length (), level,
bound_one, bound_two);
- VEC_safe_push (tree, heap, *(ip->newivs), new_name);
+ (*ip->newivs).safe_push (new_name);
mpz_clear (bound_one);
mpz_clear (bound_two);
@@ -1309,7 +1309,7 @@ add_names_to_union_domain (scop_p scop, CloogUnionDomain *union_domain,
sese region = SCOP_REGION (scop);
int i;
int nb_iterators = scop_max_loop_depth (scop);
- int nb_parameters = VEC_length (tree, SESE_PARAMS (region));
+ int nb_parameters = SESE_PARAMS (region).length ();
mpz_t bound_one, bound_two;
mpz_init (bound_one);
@@ -1317,7 +1317,7 @@ add_names_to_union_domain (scop_p scop, CloogUnionDomain *union_domain,
for (i = 0; i < nb_parameters; i++)
{
- tree param = VEC_index (tree, SESE_PARAMS (region), i);
+ tree param = SESE_PARAMS (region)[i];
const char *name = get_name (param);
int len;
char *parameter;
@@ -1439,7 +1439,7 @@ build_cloog_union_domain (scop_p scop, int nb_scattering_dims)
CloogUnionDomain *union_domain =
cloog_union_domain_alloc (scop_nb_params (scop));
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
{
CloogDomain *domain;
CloogScattering *scattering;
@@ -1536,7 +1536,7 @@ int get_max_scattering_dimensions (scop_p scop)
poly_bb_p pbb;
int scattering_dims = 0;
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
{
int pbb_scatt_dims = isl_map_dim (pbb->transformed, isl_dim_out);
if (pbb_scatt_dims > scattering_dims)
@@ -1631,7 +1631,8 @@ debug_generated_program (scop_p scop)
bool
gloog (scop_p scop, htab_t bb_pbb_mapping)
{
- VEC (tree, heap) *newivs = VEC_alloc (tree, heap, 10);
+ vec<tree> newivs;
+ newivs.create (10);
loop_p context_loop;
sese region = SCOP_REGION (scop);
ifsese if_region = NULL;
@@ -1691,7 +1692,7 @@ gloog (scop_p scop, htab_t bb_pbb_mapping)
htab_delete (newivs_index);
htab_delete (params_index);
- VEC_free (tree, heap, newivs);
+ newivs.release ();
cloog_clast_free (clast);
timevar_pop (TV_GRAPHITE_CODE_GEN);
diff --git a/gcc/graphite-dependences.c b/gcc/graphite-dependences.c
index 3d7a96f5363..947eb40199b 100644
--- a/gcc/graphite-dependences.c
+++ b/gcc/graphite-dependences.c
@@ -71,7 +71,7 @@ add_pdr_constraints (poly_dr_p pdr, poly_bb_p pbb)
/* Returns all the memory reads in SCOP. */
static isl_union_map *
-scop_get_reads (scop_p scop, VEC (poly_bb_p, heap) *pbbs)
+scop_get_reads (scop_p scop, vec<poly_bb_p> pbbs)
{
int i, j;
poly_bb_p pbb;
@@ -79,9 +79,9 @@ scop_get_reads (scop_p scop, VEC (poly_bb_p, heap) *pbbs)
isl_space *space = isl_set_get_space (scop->context);
isl_union_map *res = isl_union_map_empty (space);
- FOR_EACH_VEC_ELT (poly_bb_p, pbbs, i, pbb)
+ FOR_EACH_VEC_ELT (pbbs, i, pbb)
{
- FOR_EACH_VEC_ELT (poly_dr_p, PBB_DRS (pbb), j, pdr)
+ FOR_EACH_VEC_ELT (PBB_DRS (pbb), j, pdr)
if (pdr_read_p (pdr))
res = isl_union_map_add_map (res, add_pdr_constraints (pdr, pbb));
}
@@ -92,7 +92,7 @@ scop_get_reads (scop_p scop, VEC (poly_bb_p, heap) *pbbs)
/* Returns all the memory must writes in SCOP. */
static isl_union_map *
-scop_get_must_writes (scop_p scop, VEC (poly_bb_p, heap) *pbbs)
+scop_get_must_writes (scop_p scop, vec<poly_bb_p> pbbs)
{
int i, j;
poly_bb_p pbb;
@@ -100,9 +100,9 @@ scop_get_must_writes (scop_p scop, VEC (poly_bb_p, heap) *pbbs)
isl_space *space = isl_set_get_space (scop->context);
isl_union_map *res = isl_union_map_empty (space);
- FOR_EACH_VEC_ELT (poly_bb_p, pbbs, i, pbb)
+ FOR_EACH_VEC_ELT (pbbs, i, pbb)
{
- FOR_EACH_VEC_ELT (poly_dr_p, PBB_DRS (pbb), j, pdr)
+ FOR_EACH_VEC_ELT (PBB_DRS (pbb), j, pdr)
if (pdr_write_p (pdr))
res = isl_union_map_add_map (res, add_pdr_constraints (pdr, pbb));
}
@@ -113,7 +113,7 @@ scop_get_must_writes (scop_p scop, VEC (poly_bb_p, heap) *pbbs)
/* Returns all the memory may writes in SCOP. */
static isl_union_map *
-scop_get_may_writes (scop_p scop, VEC (poly_bb_p, heap) *pbbs)
+scop_get_may_writes (scop_p scop, vec<poly_bb_p> pbbs)
{
int i, j;
poly_bb_p pbb;
@@ -121,9 +121,9 @@ scop_get_may_writes (scop_p scop, VEC (poly_bb_p, heap) *pbbs)
isl_space *space = isl_set_get_space (scop->context);
isl_union_map *res = isl_union_map_empty (space);
- FOR_EACH_VEC_ELT (poly_bb_p, pbbs, i, pbb)
+ FOR_EACH_VEC_ELT (pbbs, i, pbb)
{
- FOR_EACH_VEC_ELT (poly_dr_p, PBB_DRS (pbb), j, pdr)
+ FOR_EACH_VEC_ELT (PBB_DRS (pbb), j, pdr)
if (pdr_may_write_p (pdr))
res = isl_union_map_add_map (res, add_pdr_constraints (pdr, pbb));
}
@@ -134,14 +134,14 @@ scop_get_may_writes (scop_p scop, VEC (poly_bb_p, heap) *pbbs)
/* Returns all the original schedules in SCOP. */
static isl_union_map *
-scop_get_original_schedule (scop_p scop, VEC (poly_bb_p, heap) *pbbs)
+scop_get_original_schedule (scop_p scop, vec<poly_bb_p> pbbs)
{
int i;
poly_bb_p pbb;
isl_space *space = isl_set_get_space (scop->context);
isl_union_map *res = isl_union_map_empty (space);
- FOR_EACH_VEC_ELT (poly_bb_p, pbbs, i, pbb)
+ FOR_EACH_VEC_ELT (pbbs, i, pbb)
{
res = isl_union_map_add_map
(res, constrain_domain (isl_map_copy (pbb->schedule),
@@ -154,14 +154,14 @@ scop_get_original_schedule (scop_p scop, VEC (poly_bb_p, heap) *pbbs)
/* Returns all the transformed schedules in SCOP. */
static isl_union_map *
-scop_get_transformed_schedule (scop_p scop, VEC (poly_bb_p, heap) *pbbs)
+scop_get_transformed_schedule (scop_p scop, vec<poly_bb_p> pbbs)
{
int i;
poly_bb_p pbb;
isl_space *space = isl_set_get_space (scop->context);
isl_union_map *res = isl_union_map_empty (space);
- FOR_EACH_VEC_ELT (poly_bb_p, pbbs, i, pbb)
+ FOR_EACH_VEC_ELT (pbbs, i, pbb)
{
res = isl_union_map_add_map
(res, constrain_domain (isl_map_copy (pbb->transformed),
@@ -334,7 +334,7 @@ carries_deps (__isl_keep isl_union_map *schedule,
static void
subtract_commutative_associative_deps (scop_p scop,
- VEC (poly_bb_p, heap) *pbbs,
+ vec<poly_bb_p> pbbs,
isl_union_map *original,
isl_union_map **must_raw,
isl_union_map **may_raw,
@@ -354,7 +354,7 @@ subtract_commutative_associative_deps (scop_p scop,
poly_dr_p pdr;
isl_space *space = isl_set_get_space (scop->context);
- FOR_EACH_VEC_ELT (poly_bb_p, pbbs, i, pbb)
+ FOR_EACH_VEC_ELT (pbbs, i, pbb)
if (PBB_IS_REDUCTION (pbb))
{
int res;
@@ -376,16 +376,16 @@ subtract_commutative_associative_deps (scop_p scop,
isl_union_map *x_must_waw_no_source;
isl_union_map *x_may_waw_no_source;
- FOR_EACH_VEC_ELT (poly_dr_p, PBB_DRS (pbb), j, pdr)
+ FOR_EACH_VEC_ELT (PBB_DRS (pbb), j, pdr)
if (pdr_read_p (pdr))
r = isl_union_map_add_map (r, add_pdr_constraints (pdr, pbb));
- FOR_EACH_VEC_ELT (poly_dr_p, PBB_DRS (pbb), j, pdr)
+ FOR_EACH_VEC_ELT (PBB_DRS (pbb), j, pdr)
if (pdr_write_p (pdr))
must_w = isl_union_map_add_map (must_w,
add_pdr_constraints (pdr, pbb));
- FOR_EACH_VEC_ELT (poly_dr_p, PBB_DRS (pbb), j, pdr)
+ FOR_EACH_VEC_ELT (PBB_DRS (pbb), j, pdr)
if (pdr_may_write_p (pdr))
may_w = isl_union_map_add_map (may_w,
add_pdr_constraints (pdr, pbb));
@@ -444,7 +444,7 @@ subtract_commutative_associative_deps (scop_p scop,
writes in PBBS. */
void
-compute_deps (scop_p scop, VEC (poly_bb_p, heap) *pbbs,
+compute_deps (scop_p scop, vec<poly_bb_p> pbbs,
isl_union_map **must_raw,
isl_union_map **may_raw,
isl_union_map **must_raw_no_source,
@@ -543,7 +543,7 @@ graphite_legal_transform (scop_p scop)
the body of the loop. */
static bool
-loop_level_carries_dependences (scop_p scop, VEC (poly_bb_p, heap) *body,
+loop_level_carries_dependences (scop_p scop, vec<poly_bb_p> body,
int depth)
{
isl_union_map *transform = scop_get_transformed_schedule (scop, body);
@@ -583,12 +583,13 @@ loop_is_parallel_p (loop_p loop, htab_t bb_pbb_mapping, int depth)
{
bool dependences;
scop_p scop;
- VEC (poly_bb_p, heap) *body = VEC_alloc (poly_bb_p, heap, 3);
+ vec<poly_bb_p> body;
+ body.create (3);
timevar_push (TV_GRAPHITE_DATA_DEPS);
scop = get_loop_body_pbbs (loop, bb_pbb_mapping, &body);
dependences = loop_level_carries_dependences (scop, body, depth);
- VEC_free (poly_bb_p, heap, body);
+ body.release ();
timevar_pop (TV_GRAPHITE_DATA_DEPS);
return !dependences;
diff --git a/gcc/graphite-interchange.c b/gcc/graphite-interchange.c
index 0a8a3bae58a..eb1a8b5bc29 100644
--- a/gcc/graphite-interchange.c
+++ b/gcc/graphite-interchange.c
@@ -266,11 +266,11 @@ memory_strides_in_loop_1 (lst_p loop, graphite_dim_t depth, mpz_t strides)
mpz_init (s);
mpz_init (n);
- FOR_EACH_VEC_ELT (lst_p, LST_SEQ (loop), j, l)
+ FOR_EACH_VEC_ELT (LST_SEQ (loop), j, l)
if (LST_LOOP_P (l))
memory_strides_in_loop_1 (l, depth, strides);
else
- FOR_EACH_VEC_ELT (poly_dr_p, PBB_DRS (LST_PBB (l)), i, pdr)
+ FOR_EACH_VEC_ELT (PBB_DRS (LST_PBB (l)), i, pdr)
{
pdr_stride_in_loop (s, depth, pdr);
mpz_set_si (n, PDR_NB_REFS (pdr));
@@ -441,7 +441,7 @@ lst_apply_interchange (lst_p lst, int depth1, int depth2)
int i;
lst_p l;
- FOR_EACH_VEC_ELT (lst_p, LST_SEQ (lst), i, l)
+ FOR_EACH_VEC_ELT (LST_SEQ (lst), i, l)
lst_apply_interchange (l, depth1, depth2);
}
else
@@ -460,8 +460,8 @@ lst_perfectly_nested_p (lst_p loop1, lst_p loop2)
if (!LST_LOOP_P (loop1))
return false;
- return VEC_length (lst_p, LST_SEQ (loop1)) == 1
- && lst_perfectly_nested_p (VEC_index (lst_p, LST_SEQ (loop1), 0), loop2);
+ return LST_SEQ (loop1).length () == 1
+ && lst_perfectly_nested_p (LST_SEQ (loop1)[0], loop2);
}
/* Transform the loop nest between LOOP1 and LOOP2 into a perfect
@@ -581,13 +581,13 @@ lst_interchange_select_inner (scop_p scop, lst_p outer_father, int outer,
gcc_assert (outer_father
&& LST_LOOP_P (outer_father)
- && LST_LOOP_P (VEC_index (lst_p, LST_SEQ (outer_father), outer))
+ && LST_LOOP_P (LST_SEQ (outer_father)[outer])
&& inner_father
&& LST_LOOP_P (inner_father));
- loop1 = VEC_index (lst_p, LST_SEQ (outer_father), outer);
+ loop1 = LST_SEQ (outer_father)[outer];
- FOR_EACH_VEC_ELT (lst_p, LST_SEQ (inner_father), inner, loop2)
+ FOR_EACH_VEC_ELT (LST_SEQ (inner_father), inner, loop2)
if (LST_LOOP_P (loop2)
&& (lst_try_interchange_loops (scop, loop1, loop2)
|| lst_interchange_select_inner (scop, outer_father, outer, loop2)))
@@ -618,12 +618,12 @@ lst_interchange_select_outer (scop_p scop, lst_p loop, int outer)
while (lst_interchange_select_inner (scop, father, outer, loop))
{
res++;
- loop = VEC_index (lst_p, LST_SEQ (father), outer);
+ loop = LST_SEQ (father)[outer];
}
}
if (LST_LOOP_P (loop))
- FOR_EACH_VEC_ELT (lst_p, LST_SEQ (loop), i, l)
+ FOR_EACH_VEC_ELT (LST_SEQ (loop), i, l)
if (LST_LOOP_P (l))
res += lst_interchange_select_outer (scop, l, i);
diff --git a/gcc/graphite-optimize-isl.c b/gcc/graphite-optimize-isl.c
index 9c17bed6118..5e15a7c148f 100644
--- a/gcc/graphite-optimize-isl.c
+++ b/gcc/graphite-optimize-isl.c
@@ -51,7 +51,7 @@ scop_get_domains (scop_p scop ATTRIBUTE_UNUSED)
isl_space *space = isl_set_get_space (scop->context);
isl_union_set *res = isl_union_set_empty (space);
- FOR_EACH_VEC_ELT (poly_bb_p, scop->bbs, i, pbb)
+ FOR_EACH_VEC_ELT (scop->bbs, i, pbb)
res = isl_union_set_add_set (res, isl_set_copy (pbb->domain));
return res;
@@ -414,7 +414,7 @@ apply_schedule_map_to_scop (scop_p scop, isl_union_map *schedule_map)
int i;
poly_bb_p pbb;
- FOR_EACH_VEC_ELT (poly_bb_p, scop->bbs, i, pbb)
+ FOR_EACH_VEC_ELT (scop->bbs, i, pbb)
{
isl_set *domain = isl_set_copy (pbb->domain);
isl_union_map *stmtBand;
diff --git a/gcc/graphite-poly.c b/gcc/graphite-poly.c
index b66dd507cbf..21bab940f3e 100644
--- a/gcc/graphite-poly.c
+++ b/gcc/graphite-poly.c
@@ -72,7 +72,7 @@ scop_max_loop_depth (scop_p scop)
poly_bb_p pbb;
int max_nb_loops = 0;
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
{
int nb_loops = pbb_dim_iter_domain (pbb);
if (max_nb_loops < nb_loops)
@@ -168,7 +168,7 @@ print_scattering_functions (FILE *file, scop_p scop, int verbosity)
int i;
poly_bb_p pbb;
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
print_scattering_function (file, pbb, verbosity);
}
@@ -181,7 +181,7 @@ print_iteration_domains (FILE *file, scop_p scop, int verbosity)
int i;
poly_bb_p pbb;
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
print_iteration_domain (file, pbb, verbosity);
}
@@ -279,7 +279,7 @@ new_poly_dr (poly_bb_p pbb, int dr_base_object_set,
PDR_TYPE (pdr) = type;
PDR_CDR (pdr) = cdr;
PDR_NB_SUBSCRIPTS (pdr) = nb_subscripts;
- VEC_safe_push (poly_dr_p, heap, PBB_DRS (pbb), pdr);
+ PBB_DRS (pbb).safe_push (pdr);
}
/* Free polyhedral data reference PDR. */
@@ -308,7 +308,7 @@ new_poly_bb (scop_p scop, void *black_box)
PBB_TRANSFORMED (pbb) = NULL;
PBB_SAVED (pbb) = NULL;
PBB_ORIGINAL (pbb) = NULL;
- PBB_DRS (pbb) = VEC_alloc (poly_dr_p, heap, 3);
+ PBB_DRS (pbb).create (3);
PBB_IS_REDUCTION (pbb) = false;
GBB_PBB ((gimple_bb_p) black_box) = pbb;
@@ -328,11 +328,11 @@ free_poly_bb (poly_bb_p pbb)
isl_map_free (pbb->transformed);
isl_map_free (pbb->saved);
- if (PBB_DRS (pbb))
- FOR_EACH_VEC_ELT (poly_dr_p, PBB_DRS (pbb), i, pdr)
+ if (PBB_DRS (pbb).exists ())
+ FOR_EACH_VEC_ELT (PBB_DRS (pbb), i, pdr)
free_poly_dr (pdr);
- VEC_free (poly_dr_p, heap, PBB_DRS (pbb));
+ PBB_DRS (pbb).release ();
XDELETE (pbb);
}
@@ -433,7 +433,7 @@ new_scop (void *region)
scop->must_waw_no_source = NULL;
scop->may_waw_no_source = NULL;
scop_set_region (scop, region);
- SCOP_BBS (scop) = VEC_alloc (poly_bb_p, heap, 3);
+ SCOP_BBS (scop).create (3);
SCOP_ORIGINAL_SCHEDULE (scop) = NULL;
SCOP_TRANSFORMED_SCHEDULE (scop) = NULL;
SCOP_SAVED_SCHEDULE (scop) = NULL;
@@ -450,10 +450,10 @@ free_scop (scop_p scop)
int i;
poly_bb_p pbb;
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
free_poly_bb (pbb);
- VEC_free (poly_bb_p, heap, SCOP_BBS (scop));
+ SCOP_BBS (scop).release ();
isl_set_free (scop->context);
isl_union_map_free (scop->must_raw);
@@ -521,18 +521,18 @@ dump_gbb_cases (FILE *file, gimple_bb_p gbb)
{
int i;
gimple stmt;
- VEC (gimple, heap) *cases;
+ vec<gimple> cases;
if (!gbb)
return;
cases = GBB_CONDITION_CASES (gbb);
- if (VEC_empty (gimple, cases))
+ if (cases.is_empty ())
return;
fprintf (file, "# cases bb_%d (\n", GBB_BB (gbb)->index);
- FOR_EACH_VEC_ELT (gimple, cases, i, stmt)
+ FOR_EACH_VEC_ELT (cases, i, stmt)
{
fprintf (file, "# ");
print_gimple_stmt (file, stmt, 0, 0);
@@ -548,18 +548,18 @@ dump_gbb_conditions (FILE *file, gimple_bb_p gbb)
{
int i;
gimple stmt;
- VEC (gimple, heap) *conditions;
+ vec<gimple> conditions;
if (!gbb)
return;
conditions = GBB_CONDITIONS (gbb);
- if (VEC_empty (gimple, conditions))
+ if (conditions.is_empty ())
return;
fprintf (file, "# conditions bb_%d (\n", GBB_BB (gbb)->index);
- FOR_EACH_VEC_ELT (gimple, conditions, i, stmt)
+ FOR_EACH_VEC_ELT (conditions, i, stmt)
{
fprintf (file, "# ");
print_gimple_stmt (file, stmt, 0, 0);
@@ -579,7 +579,7 @@ print_pdrs (FILE *file, poly_bb_p pbb, int verbosity)
int nb_reads = 0;
int nb_writes = 0;
- if (VEC_length (poly_dr_p, PBB_DRS (pbb)) == 0)
+ if (PBB_DRS (pbb).length () == 0)
{
if (verbosity > 0)
fprintf (file, "# Access informations are not provided\n");\
@@ -594,7 +594,7 @@ print_pdrs (FILE *file, poly_bb_p pbb, int verbosity)
fprintf (file, "# Access informations are provided\n");
fprintf (file, "1\n");
- FOR_EACH_VEC_ELT (poly_dr_p, PBB_DRS (pbb), i, pdr)
+ FOR_EACH_VEC_ELT (PBB_DRS (pbb), i, pdr)
if (PDR_TYPE (pdr) == PDR_READ)
nb_reads++;
else
@@ -607,7 +607,7 @@ print_pdrs (FILE *file, poly_bb_p pbb, int verbosity)
fprintf (file, "# Read access informations\n");
fprintf (file, "%d\n", nb_reads);
- FOR_EACH_VEC_ELT (poly_dr_p, PBB_DRS (pbb), i, pdr)
+ FOR_EACH_VEC_ELT (PBB_DRS (pbb), i, pdr)
if (PDR_TYPE (pdr) == PDR_READ)
print_pdr (file, pdr, verbosity);
@@ -621,7 +621,7 @@ print_pdrs (FILE *file, poly_bb_p pbb, int verbosity)
fprintf (file, "# Write access informations\n");
fprintf (file, "%d\n", nb_writes);
- FOR_EACH_VEC_ELT (poly_dr_p, PBB_DRS (pbb), i, pdr)
+ FOR_EACH_VEC_ELT (PBB_DRS (pbb), i, pdr)
if (PDR_TYPE (pdr) != PDR_READ)
print_pdr (file, pdr, verbosity);
@@ -713,7 +713,7 @@ print_scop_params (FILE *file, scop_p scop, int verbosity)
if (verbosity > 1)
fprintf (file, "# parameters (\n");
- if (VEC_length (tree, SESE_PARAMS (SCOP_REGION (scop))))
+ if (SESE_PARAMS (SCOP_REGION (scop)).length ())
{
if (verbosity > 0)
fprintf (file, "# Parameter names are provided\n");
@@ -730,7 +730,7 @@ print_scop_params (FILE *file, scop_p scop, int verbosity)
fprintf (file, "0\n");
}
- FOR_EACH_VEC_ELT (tree, SESE_PARAMS (SCOP_REGION (scop)), i, t)
+ FOR_EACH_VEC_ELT (SESE_PARAMS (SCOP_REGION (scop)), i, t)
{
print_generic_expr (file, t, 0);
fprintf (file, " ");
@@ -815,9 +815,9 @@ print_scop (FILE *file, scop_p scop, int verbosity)
if (verbosity > 0)
fprintf (file, "# Number of statements\n");
- fprintf (file, "%d\n",VEC_length (poly_bb_p, SCOP_BBS (scop)));
+ fprintf (file, "%d\n", SCOP_BBS (scop).length ());
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
print_pbb (file, pbb, verbosity);
if (verbosity > 1)
@@ -854,9 +854,9 @@ print_cloog (FILE *file, scop_p scop, int verbosity)
if (verbosity > 0)
fprintf (file, "# Number of statements\n");
- fprintf (file, "%d\n", VEC_length (poly_bb_p, SCOP_BBS (scop)));
+ fprintf (file, "%d\n", SCOP_BBS (scop).length ());
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
{
if (verbosity > 1)
fprintf (file, "# pbb_%d (\n", pbb_index (pbb));
@@ -882,9 +882,9 @@ print_cloog (FILE *file, scop_p scop, int verbosity)
if (verbosity > 0)
fprintf (file, "# Number of scattering functions\n");
- fprintf (file, "%d\n", VEC_length (poly_bb_p, SCOP_BBS (scop)));
+ fprintf (file, "%d\n", SCOP_BBS (scop).length ());
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
{
if (!(pbb->transformed || pbb->schedule))
continue;
@@ -1059,12 +1059,13 @@ pbb_number_of_iterations_at_time (poly_bb_p pbb,
/* Translates LOOP to LST. */
static lst_p
-loop_to_lst (loop_p loop, VEC (poly_bb_p, heap) *bbs, int *i)
+loop_to_lst (loop_p loop, vec<poly_bb_p> bbs, int *i)
{
poly_bb_p pbb;
- VEC (lst_p, heap) *seq = VEC_alloc (lst_p, heap, 5);
+ vec<lst_p> seq;
+ seq.create (5);
- for (; VEC_iterate (poly_bb_p, bbs, *i, pbb); (*i)++)
+ for (; bbs.iterate (*i, &pbb); (*i)++)
{
lst_p stmt;
basic_block bb = GBB_BB (PBB_BLACK_BOX (pbb));
@@ -1086,7 +1087,7 @@ loop_to_lst (loop_p loop, VEC (poly_bb_p, heap) *bbs, int *i)
return new_lst_loop (seq);
}
- VEC_safe_push (lst_p, heap, seq, stmt);
+ seq.safe_push (stmt);
}
return new_lst_loop (seq);
@@ -1099,13 +1100,14 @@ void
scop_to_lst (scop_p scop)
{
lst_p res;
- int i, n = VEC_length (poly_bb_p, SCOP_BBS (scop));
- VEC (lst_p, heap) *seq = VEC_alloc (lst_p, heap, 5);
+ int i, n = SCOP_BBS (scop).length ();
+ vec<lst_p> seq;
+ seq.create (5);
sese region = SCOP_REGION (scop);
for (i = 0; i < n; i++)
{
- poly_bb_p pbb = VEC_index (poly_bb_p, SCOP_BBS (scop), i);
+ poly_bb_p pbb = SCOP_BBS (scop)[i];
loop_p loop = outermost_loop_in_sese (region, GBB_BB (PBB_BLACK_BOX (pbb)));
if (loop_in_sese_p (loop, region))
@@ -1113,7 +1115,7 @@ scop_to_lst (scop_p scop)
else
res = new_lst_stmt (pbb);
- VEC_safe_push (lst_p, heap, seq, res);
+ seq.safe_push (res);
}
res = new_lst_loop (seq);
@@ -1155,7 +1157,7 @@ print_lst (FILE *file, lst_p lst, int indent)
else
fprintf (file, "#(root");
- FOR_EACH_VEC_ELT (lst_p, LST_SEQ (lst), i, l)
+ FOR_EACH_VEC_ELT (LST_SEQ (lst), i, l)
print_lst (file, l, indent + 2);
fprintf (file, ")");
@@ -1196,7 +1198,7 @@ dot_lst_1 (FILE *file, lst_p lst)
lst_depth (lst),
lst_dewey_number (lst));
- FOR_EACH_VEC_ELT (lst_p, LST_SEQ (lst), i, l)
+ FOR_EACH_VEC_ELT (LST_SEQ (lst), i, l)
dot_lst_1 (file, l);
}
@@ -1277,14 +1279,14 @@ reverse_loop_at_level (poly_bb_p pbb, int depth)
/* Reverse the loop at level DEPTH for all the PBBS. */
isl_union_map *
-reverse_loop_for_pbbs (scop_p scop, VEC (poly_bb_p, heap) *pbbs, int depth)
+reverse_loop_for_pbbs (scop_p scop, vec<poly_bb_p> pbbs, int depth)
{
poly_bb_p pbb;
int i;
isl_space *space = isl_space_from_domain (isl_set_get_space (scop->context));
isl_union_map *res = isl_union_map_empty (space);
- for (i = 0; VEC_iterate (poly_bb_p, pbbs, i, pbb); i++)
+ for (i = 0; pbbs.iterate (i, &pbb); i++)
res = isl_union_map_add_map (res, reverse_loop_at_level (pbb, depth));
return res;
diff --git a/gcc/graphite-poly.h b/gcc/graphite-poly.h
index de1f06ff639..85980eb2e61 100644
--- a/gcc/graphite-poly.h
+++ b/gcc/graphite-poly.h
@@ -23,16 +23,10 @@ along with GCC; see the file COPYING3. If not see
#define GCC_GRAPHITE_POLY_H
typedef struct poly_dr *poly_dr_p;
-DEF_VEC_P(poly_dr_p);
-DEF_VEC_ALLOC_P (poly_dr_p, heap);
typedef struct poly_bb *poly_bb_p;
-DEF_VEC_P(poly_bb_p);
-DEF_VEC_ALLOC_P (poly_bb_p, heap);
typedef struct scop *scop_p;
-DEF_VEC_P(scop_p);
-DEF_VEC_ALLOC_P (scop_p, heap);
typedef unsigned graphite_dim_t;
@@ -341,7 +335,7 @@ struct poly_bb
isl_set *domain;
/* The data references we access. */
- VEC (poly_dr_p, heap) *drs;
+ vec<poly_dr_p> drs;
/* The original scattering. */
poly_scattering_p _original;
@@ -422,7 +416,7 @@ number_of_write_pdrs (poly_bb_p pbb)
int i;
poly_dr_p pdr;
- for (i = 0; VEC_iterate (poly_dr_p, PBB_DRS (pbb), i, pdr); i++)
+ for (i = 0; PBB_DRS (pbb).iterate (i, &pdr); i++)
if (PDR_TYPE (pdr) == PDR_WRITE)
res++;
@@ -672,8 +666,6 @@ psct_add_local_variable (poly_bb_p pbb ATTRIBUTE_UNUSED)
}
typedef struct lst *lst_p;
-DEF_VEC_P(lst_p);
-DEF_VEC_ALLOC_P (lst_p, heap);
/* Loops and Statements Tree. */
struct lst {
@@ -691,7 +683,7 @@ struct lst {
contain a pointer to their polyhedral representation PBB. */
union {
poly_bb_p pbb;
- VEC (lst_p, heap) *seq;
+ vec<lst_p> seq;
} node;
};
@@ -709,7 +701,7 @@ void dot_lst (lst_p);
/* Creates a new LST loop with SEQ. */
static inline lst_p
-new_lst_loop (VEC (lst_p, heap) *seq)
+new_lst_loop (vec<lst_p> seq)
{
lst_p lst = XNEW (struct lst);
int i;
@@ -721,7 +713,7 @@ new_lst_loop (VEC (lst_p, heap) *seq)
mpz_init (LST_LOOP_MEMORY_STRIDES (lst));
mpz_set_si (LST_LOOP_MEMORY_STRIDES (lst), -1);
- for (i = 0; VEC_iterate (lst_p, seq, i, l); i++)
+ for (i = 0; seq.iterate (i, &l); i++)
LST_LOOP_FATHER (l) = lst;
return lst;
@@ -753,11 +745,11 @@ free_lst (lst_p lst)
int i;
lst_p l;
- for (i = 0; VEC_iterate (lst_p, LST_SEQ (lst), i, l); i++)
+ for (i = 0; LST_SEQ (lst).iterate (i, &l); i++)
free_lst (l);
mpz_clear (LST_LOOP_MEMORY_STRIDES (lst));
- VEC_free (lst_p, heap, LST_SEQ (lst));
+ LST_SEQ (lst).release ();
}
free (lst);
@@ -775,10 +767,11 @@ copy_lst (lst_p lst)
{
int i;
lst_p l;
- VEC (lst_p, heap) *seq = VEC_alloc (lst_p, heap, 5);
+ vec<lst_p> seq;
+ seq.create (5);
- for (i = 0; VEC_iterate (lst_p, LST_SEQ (lst), i, l); i++)
- VEC_safe_push (lst_p, heap, seq, copy_lst (l));
+ for (i = 0; LST_SEQ (lst).iterate (i, &l); i++)
+ seq.safe_push (copy_lst (l));
return new_lst_loop (seq);
}
@@ -791,13 +784,14 @@ copy_lst (lst_p lst)
static inline void
lst_add_loop_under_loop (lst_p lst)
{
- VEC (lst_p, heap) *seq = VEC_alloc (lst_p, heap, 1);
+ vec<lst_p> seq;
+ seq.create (1);
lst_p l = new_lst_loop (LST_SEQ (lst));
gcc_assert (LST_LOOP_P (lst));
LST_LOOP_FATHER (l) = lst;
- VEC_quick_push (lst_p, seq, l);
+ seq.quick_push (l);
LST_SEQ (lst) = seq;
}
@@ -832,7 +826,7 @@ lst_dewey_number (lst_p lst)
if (!LST_LOOP_FATHER (lst))
return 0;
- FOR_EACH_VEC_ELT (lst_p, LST_SEQ (LST_LOOP_FATHER (lst)), i, l)
+ FOR_EACH_VEC_ELT (LST_SEQ (LST_LOOP_FATHER (lst)), i, l)
if (l == lst)
return i;
@@ -869,7 +863,7 @@ lst_pred (lst_p lst)
return NULL;
father = LST_LOOP_FATHER (lst);
- return VEC_index (lst_p, LST_SEQ (father), dewey - 1);
+ return LST_SEQ (father)[dewey - 1];
}
/* Returns the successor of LST in the sequence of its loop father.
@@ -887,10 +881,10 @@ lst_succ (lst_p lst)
dewey = lst_dewey_number (lst);
father = LST_LOOP_FATHER (lst);
- if (VEC_length (lst_p, LST_SEQ (father)) == (unsigned) dewey + 1)
+ if (LST_SEQ (father).length () == (unsigned) dewey + 1)
return NULL;
- return VEC_index (lst_p, LST_SEQ (father), dewey + 1);
+ return LST_SEQ (father)[dewey + 1];
}
@@ -908,7 +902,7 @@ lst_find_pbb (lst_p lst, poly_bb_p pbb)
if (!LST_LOOP_P (lst))
return (pbb == LST_PBB (lst)) ? lst : NULL;
- for (i = 0; VEC_iterate (lst_p, LST_SEQ (lst), i, l); i++)
+ for (i = 0; LST_SEQ (lst).iterate (i, &l); i++)
{
lst_p res = lst_find_pbb (l, pbb);
if (res)
@@ -948,7 +942,7 @@ lst_find_first_pbb (lst_p lst)
if (!LST_LOOP_P (lst))
return lst;
- for (i = 0; VEC_iterate (lst_p, LST_SEQ (lst), i, l); i++)
+ for (i = 0; LST_SEQ (lst).iterate (i, &l); i++)
{
lst_p res = lst_find_first_pbb (l);
if (res)
@@ -981,7 +975,7 @@ lst_find_last_pbb (lst_p lst)
if (!LST_LOOP_P (lst))
return lst;
- for (i = 0; VEC_iterate (lst_p, LST_SEQ (lst), i, l); i++)
+ for (i = 0; LST_SEQ (lst).iterate (i, &l); i++)
{
lst_p last = lst_find_last_pbb (l);
@@ -1023,14 +1017,14 @@ static inline lst_p
lst_create_nest (int nb_loops, lst_p lst)
{
lst_p res, loop;
- VEC (lst_p, heap) *seq;
+ vec<lst_p> seq;
if (nb_loops == 0)
return lst;
- seq = VEC_alloc (lst_p, heap, 1);
+ seq.create (1);
loop = lst_create_nest (nb_loops - 1, lst);
- VEC_quick_push (lst_p, seq, loop);
+ seq.quick_push (loop);
res = new_lst_loop (seq);
LST_LOOP_FATHER (loop) = res;
@@ -1047,7 +1041,7 @@ lst_remove_from_sequence (lst_p lst)
gcc_assert (lst && father && dewey >= 0);
- VEC_ordered_remove (lst_p, LST_SEQ (father), dewey);
+ LST_SEQ (father).ordered_remove (dewey);
LST_LOOP_FATHER (lst) = NULL;
}
@@ -1061,12 +1055,12 @@ lst_remove_loop_and_inline_stmts_in_loop_father (lst_p lst)
gcc_assert (lst && father && dewey >= 0);
- VEC_ordered_remove (lst_p, LST_SEQ (father), dewey);
+ LST_SEQ (father).ordered_remove (dewey);
LST_LOOP_FATHER (lst) = NULL;
- FOR_EACH_VEC_ELT (lst_p, LST_SEQ (lst), i, l)
+ FOR_EACH_VEC_ELT (LST_SEQ (lst), i, l)
{
- VEC_safe_insert (lst_p, heap, LST_SEQ (father), dewey + i, l);
+ LST_SEQ (father).safe_insert (dewey + i, l);
LST_LOOP_FATHER (l) = father;
}
}
@@ -1118,7 +1112,7 @@ lst_update_scattering_under (lst_p lst, int level, int dewey)
gcc_assert (lst && level >= 0 && dewey >= 0);
if (LST_LOOP_P (lst))
- for (i = 0; VEC_iterate (lst_p, LST_SEQ (lst), i, l); i++)
+ for (i = 0; LST_SEQ (lst).iterate (i, &l); i++)
lst_update_scattering_under (l, level, dewey);
else
pbb_update_scattering (LST_PBB (lst), level, dewey);
@@ -1144,12 +1138,12 @@ lst_update_scattering (lst_p lst)
gcc_assert (lst && father && dewey >= 0 && level >= 0);
- for (i = dewey; VEC_iterate (lst_p, LST_SEQ (father), i, l); i++)
+ for (i = dewey; LST_SEQ (father).iterate (i, &l); i++)
lst_update_scattering_under (l, level, i);
}
if (LST_LOOP_P (lst))
- for (i = 0; VEC_iterate (lst_p, LST_SEQ (lst), i, l); i++)
+ for (i = 0; LST_SEQ (lst).iterate (i, &l); i++)
lst_update_scattering (l);
}
@@ -1171,8 +1165,7 @@ lst_insert_in_sequence (lst_p lst1, lst_p lst2, bool before)
gcc_assert (lst2 && father && dewey >= 0);
- VEC_safe_insert (lst_p, heap, LST_SEQ (father), before ? dewey : dewey + 1,
- lst1);
+ LST_SEQ (father).safe_insert (before ? dewey : dewey + 1, lst1);
LST_LOOP_FATHER (lst1) = father;
}
@@ -1190,7 +1183,7 @@ lst_replace (lst_p lst1, lst_p lst2)
father = LST_LOOP_FATHER (lst1);
dewey = lst_dewey_number (lst1);
LST_LOOP_FATHER (lst2) = father;
- VEC_replace (lst_p, LST_SEQ (father), dewey, lst2);
+ LST_SEQ (father)[dewey] = lst2;
}
/* Returns a copy of ROOT where LST has been replaced by a copy of the
@@ -1201,7 +1194,7 @@ lst_substitute_3 (lst_p root, lst_p lst, lst_p a, lst_p b, lst_p c)
{
int i;
lst_p l;
- VEC (lst_p, heap) *seq;
+ vec<lst_p> seq;
if (!root)
return NULL;
@@ -1211,19 +1204,19 @@ lst_substitute_3 (lst_p root, lst_p lst, lst_p a, lst_p b, lst_p c)
if (!LST_LOOP_P (root))
return new_lst_stmt (LST_PBB (root));
- seq = VEC_alloc (lst_p, heap, 5);
+ seq.create (5);
- for (i = 0; VEC_iterate (lst_p, LST_SEQ (root), i, l); i++)
+ for (i = 0; LST_SEQ (root).iterate (i, &l); i++)
if (l != lst)
- VEC_safe_push (lst_p, heap, seq, lst_substitute_3 (l, lst, a, b, c));
+ seq.safe_push (lst_substitute_3 (l, lst, a, b, c));
else
{
if (!lst_empty_p (a))
- VEC_safe_push (lst_p, heap, seq, copy_lst (a));
+ seq.safe_push (copy_lst (a));
if (!lst_empty_p (b))
- VEC_safe_push (lst_p, heap, seq, copy_lst (b));
+ seq.safe_push (copy_lst (b));
if (!lst_empty_p (c))
- VEC_safe_push (lst_p, heap, seq, copy_lst (c));
+ seq.safe_push (copy_lst (c));
}
return new_lst_loop (seq);
@@ -1258,14 +1251,14 @@ lst_remove_all_before_including_pbb (lst_p loop, poly_bb_p pbb, bool before)
if (!loop || !LST_LOOP_P (loop))
return before;
- for (i = 0; VEC_iterate (lst_p, LST_SEQ (loop), i, l);)
+ for (i = 0; LST_SEQ (loop).iterate (i, &l);)
if (LST_LOOP_P (l))
{
before = lst_remove_all_before_including_pbb (l, pbb, before);
- if (VEC_length (lst_p, LST_SEQ (l)) == 0)
+ if (LST_SEQ (l).length () == 0)
{
- VEC_ordered_remove (lst_p, LST_SEQ (loop), i);
+ LST_SEQ (loop).ordered_remove (i);
free_lst (l);
}
else
@@ -1278,13 +1271,13 @@ lst_remove_all_before_including_pbb (lst_p loop, poly_bb_p pbb, bool before)
if (LST_PBB (l) == pbb)
before = false;
- VEC_ordered_remove (lst_p, LST_SEQ (loop), i);
+ LST_SEQ (loop).ordered_remove (i);
free_lst (l);
}
else if (LST_PBB (l) == pbb)
{
before = true;
- VEC_ordered_remove (lst_p, LST_SEQ (loop), i);
+ LST_SEQ (loop).ordered_remove (i);
free_lst (l);
}
else
@@ -1307,14 +1300,14 @@ lst_remove_all_before_excluding_pbb (lst_p loop, poly_bb_p pbb, bool before)
if (!loop || !LST_LOOP_P (loop))
return before;
- for (i = 0; VEC_iterate (lst_p, LST_SEQ (loop), i, l);)
+ for (i = 0; LST_SEQ (loop).iterate (i, &l);)
if (LST_LOOP_P (l))
{
before = lst_remove_all_before_excluding_pbb (l, pbb, before);
- if (VEC_length (lst_p, LST_SEQ (l)) == 0)
+ if (LST_SEQ (l).length () == 0)
{
- VEC_ordered_remove (lst_p, LST_SEQ (loop), i);
+ LST_SEQ (loop).ordered_remove (i);
free_lst (l);
continue;
}
@@ -1325,7 +1318,7 @@ lst_remove_all_before_excluding_pbb (lst_p loop, poly_bb_p pbb, bool before)
{
if (before && LST_PBB (l) != pbb)
{
- VEC_ordered_remove (lst_p, LST_SEQ (loop), i);
+ LST_SEQ (loop).ordered_remove (i);
free_lst (l);
continue;
}
@@ -1352,7 +1345,7 @@ struct scop
/* All the basic blocks in this scop that contain memory references
and that will be represented as statements in the polyhedral
representation. */
- VEC (poly_bb_p, heap) *bbs;
+ vec<poly_bb_p> bbs;
/* Original, transformed and saved schedules. */
lst_p original_schedule, transformed_schedule, saved_schedule;
@@ -1403,7 +1396,7 @@ struct scop
extern scop_p new_scop (void *);
extern void free_scop (scop_p);
-extern void free_scops (VEC (scop_p, heap) *);
+extern void free_scops (vec<scop_p> );
extern void print_generated_program (FILE *, scop_p);
extern void debug_generated_program (scop_p);
extern void print_scattering_function (FILE *, poly_bb_p, int);
@@ -1511,7 +1504,7 @@ store_scattering (scop_p scop)
int i;
poly_bb_p pbb;
- for (i = 0; VEC_iterate (poly_bb_p, SCOP_BBS (scop), i, pbb); i++)
+ for (i = 0; SCOP_BBS (scop).iterate (i, &pbb); i++)
store_scattering_pbb (pbb);
store_lst_schedule (scop);
@@ -1536,7 +1529,7 @@ restore_scattering (scop_p scop)
int i;
poly_bb_p pbb;
- for (i = 0; VEC_iterate (poly_bb_p, SCOP_BBS (scop), i, pbb); i++)
+ for (i = 0; SCOP_BBS (scop).iterate (i, &pbb); i++)
restore_scattering_pbb (pbb);
restore_lst_schedule (scop);
@@ -1545,14 +1538,14 @@ restore_scattering (scop_p scop)
bool graphite_legal_transform (scop_p);
poly_bb_p find_pbb_via_hash (htab_t, basic_block);
bool loop_is_parallel_p (loop_p, htab_t, int);
-scop_p get_loop_body_pbbs (loop_p, htab_t, VEC (poly_bb_p, heap) **);
+scop_p get_loop_body_pbbs (loop_p, htab_t, vec<poly_bb_p> *);
isl_map *reverse_loop_at_level (poly_bb_p, int);
-isl_union_map *reverse_loop_for_pbbs (scop_p, VEC (poly_bb_p, heap) *, int);
+isl_union_map *reverse_loop_for_pbbs (scop_p, vec<poly_bb_p> , int);
__isl_give isl_union_map *extend_schedule (__isl_take isl_union_map *);
void
-compute_deps (scop_p scop, VEC (poly_bb_p, heap) *pbbs,
+compute_deps (scop_p scop, vec<poly_bb_p> pbbs,
isl_union_map **must_raw,
isl_union_map **may_raw,
isl_union_map **must_raw_no_source,
diff --git a/gcc/graphite-scop-detection.c b/gcc/graphite-scop-detection.c
index 0ea9e6a473d..c6413589e0d 100644
--- a/gcc/graphite-scop-detection.c
+++ b/gcc/graphite-scop-detection.c
@@ -66,7 +66,7 @@ typedef enum gbb_type {
static gbb_type
get_bb_type (basic_block bb, struct loop *last_loop)
{
- VEC (basic_block, heap) *dom;
+ vec<basic_block> dom;
int nb_dom;
struct loop *loop = bb->loop_father;
@@ -82,8 +82,8 @@ get_bb_type (basic_block bb, struct loop *last_loop)
}
dom = get_dominated_by (CDI_DOMINATORS, bb);
- nb_dom = VEC_length (basic_block, dom);
- VEC_free (basic_block, heap, dom);
+ nb_dom = dom.length ();
+ dom.release ();
if (nb_dom == 0)
return GBB_LAST;
@@ -129,23 +129,20 @@ typedef struct sd_region_p
basic_block exit;
} sd_region;
-DEF_VEC_O(sd_region);
-DEF_VEC_ALLOC_O(sd_region, heap);
/* Moves the scops from SOURCE to TARGET and clean up SOURCE. */
static void
-move_sd_regions (VEC (sd_region, heap) **source,
- VEC (sd_region, heap) **target)
+move_sd_regions (vec<sd_region> *source, vec<sd_region> *target)
{
sd_region *s;
int i;
- FOR_EACH_VEC_ELT (sd_region, *source, i, s)
- VEC_safe_push (sd_region, heap, *target, *s);
+ FOR_EACH_VEC_ELT (*source, i, s)
+ target->safe_push (*s);
- VEC_free (sd_region, heap, *source);
+ source->release ();
}
/* Something like "n * m" is not allowed. */
@@ -270,7 +267,7 @@ stmt_has_simple_data_refs_p (loop_p outermost_loop ATTRIBUTE_UNUSED,
unsigned i;
int j;
bool res = true;
- VEC (data_reference_p, heap) *drs = NULL;
+ vec<data_reference_p> drs = vec<data_reference_p>();
loop_p outer;
for (outer = loop_containing_stmt (stmt); outer; outer = loop_outer (outer))
@@ -279,7 +276,7 @@ stmt_has_simple_data_refs_p (loop_p outermost_loop ATTRIBUTE_UNUSED,
loop_containing_stmt (stmt),
stmt, &drs);
- FOR_EACH_VEC_ELT (data_reference_p, drs, j, dr)
+ FOR_EACH_VEC_ELT (drs, j, dr)
for (i = 0; i < DR_NUM_DIMENSIONS (dr); i++)
if (!graphite_can_represent_scev (DR_ACCESS_FN (dr, i)))
{
@@ -288,7 +285,7 @@ stmt_has_simple_data_refs_p (loop_p outermost_loop ATTRIBUTE_UNUSED,
}
free_data_refs (drs);
- drs = NULL;
+ drs.create (0);
}
done:
@@ -426,14 +423,14 @@ struct scopdet_info
};
static struct scopdet_info build_scops_1 (basic_block, loop_p,
- VEC (sd_region, heap) **, loop_p);
+ vec<sd_region> *, loop_p);
/* Calculates BB infos. If bb is difficult we add valid SCoPs dominated by BB
to SCOPS. TYPE is the gbb_type of BB. */
static struct scopdet_info
scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
- VEC (sd_region, heap) **scops, gbb_type type)
+ vec<sd_region> *scops, gbb_type type)
{
loop_p loop = bb->loop_father;
struct scopdet_info result;
@@ -468,7 +465,8 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
case GBB_LOOP_SING_EXIT_HEADER:
{
- VEC (sd_region, heap) *regions = VEC_alloc (sd_region, heap, 3);
+ vec<sd_region> regions;
+ regions.create (3);
struct scopdet_info sinfo;
edge exit_e = single_exit (loop);
@@ -485,8 +483,8 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
{
outermost_loop = loop;
- VEC_free (sd_region, heap, regions);
- regions = VEC_alloc (sd_region, heap, 3);
+ regions.release ();
+ regions.create (3);
sinfo = scopdet_basic_block_info (bb, outermost_loop, scops, type);
@@ -500,8 +498,8 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
sd_region open_scop;
open_scop.entry = bb;
open_scop.exit = exit_e->dest;
- VEC_safe_push (sd_region, heap, *scops, open_scop);
- VEC_free (sd_region, heap, regions);
+ scops->safe_push (open_scop);
+ regions.release ();
}
}
else
@@ -523,7 +521,7 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
if (result.difficult)
move_sd_regions (&regions, scops);
else
- VEC_free (sd_region, heap, regions);
+ regions.release ();
}
break;
@@ -533,8 +531,9 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
{
/* XXX: For now we just do not join loops with multiple exits. If the
exits lead to the same bb it may be possible to join the loop. */
- VEC (sd_region, heap) *regions = VEC_alloc (sd_region, heap, 3);
- VEC (edge, heap) *exits = get_loop_exit_edges (loop);
+ vec<sd_region> regions;
+ regions.create (3);
+ vec<edge> exits = get_loop_exit_edges (loop);
edge e;
int i;
build_scops_1 (bb, loop, &regions, loop);
@@ -549,7 +548,7 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
- The exit destinations are dominated by another bb inside
the loop.
- The loop dominates bbs, that are not exit destinations. */
- FOR_EACH_VEC_ELT (edge, exits, i, e)
+ FOR_EACH_VEC_ELT (exits, i, e)
if (e->src->loop_father == loop
&& dominated_by_p (CDI_DOMINATORS, e->dest, e->src))
{
@@ -571,14 +570,15 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
result.difficult = true;
result.exits = false;
move_sd_regions (&regions, scops);
- VEC_free (edge, heap, exits);
+ exits.release ();
break;
}
case GBB_COND_HEADER:
{
- VEC (sd_region, heap) *regions = VEC_alloc (sd_region, heap, 3);
+ vec<sd_region> regions;
+ regions.create (3);
struct scopdet_info sinfo;
- VEC (basic_block, heap) *dominated;
+ vec<basic_block> dominated;
int i;
basic_block dom_bb;
basic_block last_exit = NULL;
@@ -587,7 +587,7 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
/* First check the successors of BB, and check if it is
possible to join the different branches. */
- FOR_EACH_VEC_ELT (edge, bb->succs, i, e)
+ FOR_EACH_VEC_SAFE_ELT (bb->succs, i, e)
{
/* Ignore loop exits. They will be handled after the loop
body. */
@@ -666,14 +666,14 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
result.exit = last_exit;
- VEC_free (sd_region, heap, regions);
+ regions.release ();
break;
}
/* Scan remaining bbs dominated by BB. */
dominated = get_dominated_by (CDI_DOMINATORS, bb);
- FOR_EACH_VEC_ELT (basic_block, dominated, i, dom_bb)
+ FOR_EACH_VEC_ELT (dominated, i, dom_bb)
{
/* Ignore loop exits: they will be handled after the loop body. */
if (loop_depth (find_common_loop (loop, dom_bb->loop_father))
@@ -698,7 +698,7 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
result.exit = NULL;
}
- VEC_free (basic_block, heap, dominated);
+ dominated.release ();
result.next = NULL;
move_sd_regions (&regions, scops);
@@ -723,7 +723,7 @@ scopdet_basic_block_info (basic_block bb, loop_p outermost_loop,
static struct scopdet_info
build_scops_1 (basic_block current, loop_p outermost_loop,
- VEC (sd_region, heap) **scops, loop_p loop)
+ vec<sd_region> *scops, loop_p loop)
{
bool in_scop = false;
sd_region open_scop;
@@ -756,7 +756,7 @@ build_scops_1 (basic_block current, loop_p outermost_loop,
else if (in_scop && (sinfo.exits || sinfo.difficult))
{
open_scop.exit = current;
- VEC_safe_push (sd_region, heap, *scops, open_scop);
+ scops->safe_push (open_scop);
in_scop = false;
}
@@ -771,7 +771,7 @@ build_scops_1 (basic_block current, loop_p outermost_loop,
{
open_scop.exit = sinfo.exit;
gcc_assert (open_scop.exit);
- VEC_safe_push (sd_region, heap, *scops, open_scop);
+ scops->safe_push (open_scop);
}
result.exit = sinfo.exit;
@@ -976,14 +976,14 @@ create_single_exit_edge (sd_region *region)
See comment in "create_single_exit_edge". */
static void
-unmark_exit_edges (VEC (sd_region, heap) *regions)
+unmark_exit_edges (vec<sd_region> regions)
{
int i;
sd_region *s;
edge e;
edge_iterator ei;
- FOR_EACH_VEC_ELT (sd_region, regions, i, s)
+ FOR_EACH_VEC_ELT (regions, i, s)
FOR_EACH_EDGE (e, ei, s->exit->preds)
e->aux = NULL;
}
@@ -993,14 +993,14 @@ unmark_exit_edges (VEC (sd_region, heap) *regions)
See comment in "create_single_exit_edge". */
static void
-mark_exit_edges (VEC (sd_region, heap) *regions)
+mark_exit_edges (vec<sd_region> regions)
{
int i;
sd_region *s;
edge e;
edge_iterator ei;
- FOR_EACH_VEC_ELT (sd_region, regions, i, s)
+ FOR_EACH_VEC_ELT (regions, i, s)
FOR_EACH_EDGE (e, ei, s->exit->preds)
if (bb_in_sd_region (e->src, s))
e->aux = s;
@@ -1009,17 +1009,17 @@ mark_exit_edges (VEC (sd_region, heap) *regions)
/* Create for all scop regions a single entry and a single exit edge. */
static void
-create_sese_edges (VEC (sd_region, heap) *regions)
+create_sese_edges (vec<sd_region> regions)
{
int i;
sd_region *s;
- FOR_EACH_VEC_ELT (sd_region, regions, i, s)
+ FOR_EACH_VEC_ELT (regions, i, s)
create_single_entry_edge (s);
mark_exit_edges (regions);
- FOR_EACH_VEC_ELT (sd_region, regions, i, s)
+ FOR_EACH_VEC_ELT (regions, i, s)
/* Don't handle multiple edges exiting the function. */
if (!find_single_exit_edge (s)
&& s->exit != EXIT_BLOCK_PTR)
@@ -1039,13 +1039,13 @@ create_sese_edges (VEC (sd_region, heap) *regions)
/* Create graphite SCoPs from an array of scop detection REGIONS. */
static void
-build_graphite_scops (VEC (sd_region, heap) *regions,
- VEC (scop_p, heap) **scops)
+build_graphite_scops (vec<sd_region> regions,
+ vec<scop_p> *scops)
{
int i;
sd_region *s;
- FOR_EACH_VEC_ELT (sd_region, regions, i, s)
+ FOR_EACH_VEC_ELT (regions, i, s)
{
edge entry = find_single_entry_edge (s);
edge exit = find_single_exit_edge (s);
@@ -1055,7 +1055,7 @@ build_graphite_scops (VEC (sd_region, heap) *regions,
continue;
scop = new_scop (new_sese (entry, exit));
- VEC_safe_push (scop_p, heap, *scops, scop);
+ scops->safe_push (scop);
/* Are there overlapping SCoPs? */
#ifdef ENABLE_CHECKING
@@ -1063,7 +1063,7 @@ build_graphite_scops (VEC (sd_region, heap) *regions,
int j;
sd_region *s2;
- FOR_EACH_VEC_ELT (sd_region, regions, j, s2)
+ FOR_EACH_VEC_ELT (regions, j, s2)
if (s != s2)
gcc_assert (!bb_in_sd_region (s->entry, s2));
}
@@ -1147,12 +1147,12 @@ print_graphite_scop_statistics (FILE* file, scop_p scop)
/* Print statistics for SCOPS to FILE. */
static void
-print_graphite_statistics (FILE* file, VEC (scop_p, heap) *scops)
+print_graphite_statistics (FILE* file, vec<scop_p> scops)
{
int i;
scop_p scop;
- FOR_EACH_VEC_ELT (scop_p, scops, i, scop)
+ FOR_EACH_VEC_ELT (scops, i, scop)
print_graphite_scop_statistics (file, scop);
}
@@ -1177,21 +1177,22 @@ print_graphite_statistics (FILE* file, VEC (scop_p, heap) *scops)
SCoP frontiers. */
static void
-limit_scops (VEC (scop_p, heap) **scops)
+limit_scops (vec<scop_p> *scops)
{
- VEC (sd_region, heap) *regions = VEC_alloc (sd_region, heap, 3);
+ vec<sd_region> regions;
+ regions.create (3);
int i;
scop_p scop;
- FOR_EACH_VEC_ELT (scop_p, *scops, i, scop)
+ FOR_EACH_VEC_ELT (*scops, i, scop)
{
int j;
loop_p loop;
sese region = SCOP_REGION (scop);
build_sese_loop_nests (region);
- FOR_EACH_VEC_ELT (loop_p, SESE_LOOP_NEST (region), j, loop)
+ FOR_EACH_VEC_ELT (SESE_LOOP_NEST (region), j, loop)
if (!loop_in_sese_p (loop_outer (loop), region)
&& single_exit (loop))
{
@@ -1205,16 +1206,16 @@ limit_scops (VEC (scop_p, heap) **scops)
&& contains_only_close_phi_nodes (open_scop.exit))
open_scop.exit = single_succ_edge (open_scop.exit)->dest;
- VEC_safe_push (sd_region, heap, regions, open_scop);
+ regions.safe_push (open_scop);
}
}
free_scops (*scops);
- *scops = VEC_alloc (scop_p, heap, 3);
+ scops->create (3);
create_sese_edges (regions);
build_graphite_scops (regions, scops);
- VEC_free (sd_region, heap, regions);
+ regions.release ();
}
/* Returns true when P1 and P2 are close phis with the same
@@ -1390,10 +1391,11 @@ canonicalize_loop_closed_ssa_form (void)
them to SCOPS. */
void
-build_scops (VEC (scop_p, heap) **scops)
+build_scops (vec<scop_p> *scops)
{
struct loop *loop = current_loops->tree_root;
- VEC (sd_region, heap) *regions = VEC_alloc (sd_region, heap, 3);
+ vec<sd_region> regions;
+ regions.create (3);
canonicalize_loop_closed_ssa_form ();
build_scops_1 (single_succ (ENTRY_BLOCK_PTR), ENTRY_BLOCK_PTR->loop_father,
@@ -1405,11 +1407,11 @@ build_scops (VEC (scop_p, heap) **scops)
print_graphite_statistics (dump_file, *scops);
limit_scops (scops);
- VEC_free (sd_region, heap, regions);
+ regions.release ();
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "\nnumber of SCoPs: %d\n",
- VEC_length (scop_p, *scops));
+ scops ? scops->length () : 0);
}
/* Pretty print to FILE all the SCoPs in DOT format and mark them with
@@ -1423,7 +1425,7 @@ build_scops (VEC (scop_p, heap) **scops)
exit nodes of the SCOP. These are not part of SCoP. */
static void
-dot_all_scops_1 (FILE *file, VEC (scop_p, heap) *scops)
+dot_all_scops_1 (FILE *file, vec<scop_p> scops)
{
basic_block bb;
edge e;
@@ -1450,7 +1452,7 @@ dot_all_scops_1 (FILE *file, VEC (scop_p, heap) *scops)
fprintf (file, "CELLSPACING=\"0\">\n");
/* Select color for SCoP. */
- FOR_EACH_VEC_ELT (scop_p, scops, i, scop)
+ FOR_EACH_VEC_ELT (scops, i, scop)
{
sese region = SCOP_REGION (scop);
if (bb_in_sese_p (bb, region)
@@ -1560,7 +1562,7 @@ dot_all_scops_1 (FILE *file, VEC (scop_p, heap) *scops)
/* Display all SCoPs using dotty. */
DEBUG_FUNCTION void
-dot_all_scops (VEC (scop_p, heap) *scops)
+dot_all_scops (vec<scop_p> scops)
{
/* When debugging, enable the following code. This cannot be used
in production compilers because it calls "system". */
@@ -1583,10 +1585,10 @@ dot_all_scops (VEC (scop_p, heap) *scops)
DEBUG_FUNCTION void
dot_scop (scop_p scop)
{
- VEC (scop_p, heap) *scops = NULL;
+ vec<scop_p> scops = vec<scop_p>();
if (scop)
- VEC_safe_push (scop_p, heap, scops, scop);
+ scops.safe_push (scop);
/* When debugging, enable the following code. This cannot be used
in production compilers because it calls "system". */
@@ -1604,7 +1606,7 @@ dot_scop (scop_p scop)
dot_all_scops_1 (stderr, scops);
#endif
- VEC_free (scop_p, heap, scops);
+ scops.release ();
}
#endif
diff --git a/gcc/graphite-scop-detection.h b/gcc/graphite-scop-detection.h
index 44c2223588d..f888464073a 100644
--- a/gcc/graphite-scop-detection.h
+++ b/gcc/graphite-scop-detection.h
@@ -20,6 +20,6 @@ along with GCC; see the file COPYING3. If not see
<http://www.gnu.org/licenses/>. */
-extern void build_scops (VEC (scop_p, heap) **);
-extern void dot_all_scops (VEC (scop_p, heap) *);
+extern void build_scops (vec<scop_p> *);
+extern void dot_all_scops (vec<scop_p> );
extern void dot_scop (scop_p);
diff --git a/gcc/graphite-sese-to-poly.c b/gcc/graphite-sese-to-poly.c
index 1c048702fbc..4e95f78bbfa 100644
--- a/gcc/graphite-sese-to-poly.c
+++ b/gcc/graphite-sese-to-poly.c
@@ -180,7 +180,7 @@ reduction_phi_p (sese region, gimple_stmt_iterator *psi)
/* Store the GRAPHITE representation of BB. */
static gimple_bb_p
-new_gimple_bb (basic_block bb, VEC (data_reference_p, heap) *drs)
+new_gimple_bb (basic_block bb, vec<data_reference_p> drs)
{
struct gimple_bb *gbb;
@@ -188,19 +188,19 @@ new_gimple_bb (basic_block bb, VEC (data_reference_p, heap) *drs)
bb->aux = gbb;
GBB_BB (gbb) = bb;
GBB_DATA_REFS (gbb) = drs;
- GBB_CONDITIONS (gbb) = NULL;
- GBB_CONDITION_CASES (gbb) = NULL;
+ GBB_CONDITIONS (gbb).create (0);
+ GBB_CONDITION_CASES (gbb).create (0);
return gbb;
}
static void
-free_data_refs_aux (VEC (data_reference_p, heap) *datarefs)
+free_data_refs_aux (vec<data_reference_p> datarefs)
{
unsigned int i;
struct data_reference *dr;
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
if (dr->aux)
{
base_alias_pair *bap = (base_alias_pair *)(dr->aux);
@@ -219,8 +219,8 @@ free_gimple_bb (struct gimple_bb *gbb)
free_data_refs_aux (GBB_DATA_REFS (gbb));
free_data_refs (GBB_DATA_REFS (gbb));
- VEC_free (gimple, heap, GBB_CONDITIONS (gbb));
- VEC_free (gimple, heap, GBB_CONDITION_CASES (gbb));
+ GBB_CONDITIONS (gbb).release ();
+ GBB_CONDITION_CASES (gbb).release ();
GBB_BB (gbb)->aux = 0;
XDELETE (gbb);
}
@@ -233,26 +233,26 @@ remove_gbbs_in_scop (scop_p scop)
int i;
poly_bb_p pbb;
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
free_gimple_bb (PBB_BLACK_BOX (pbb));
}
/* Deletes all scops in SCOPS. */
void
-free_scops (VEC (scop_p, heap) *scops)
+free_scops (vec<scop_p> scops)
{
int i;
scop_p scop;
- FOR_EACH_VEC_ELT (scop_p, scops, i, scop)
+ FOR_EACH_VEC_ELT (scops, i, scop)
{
remove_gbbs_in_scop (scop);
free_sese (SCOP_REGION (scop));
free_scop (scop);
}
- VEC_free (scop_p, heap, scops);
+ scops.release ();
}
/* Same as outermost_loop_in_sese, returns the outermost loop
@@ -287,7 +287,8 @@ outermost_loop_in_sese_1 (sese region, basic_block bb)
static gimple_bb_p
try_generate_gimple_bb (scop_p scop, basic_block bb)
{
- VEC (data_reference_p, heap) *drs = VEC_alloc (data_reference_p, heap, 5);
+ vec<data_reference_p> drs;
+ drs.create (5);
sese region = SCOP_REGION (scop);
loop_p nest = outermost_loop_in_sese_1 (region, bb);
gimple_stmt_iterator gsi;
@@ -351,9 +352,9 @@ compare_bb_depths (const void *p1, const void *p2)
a deepest loop level. */
static void
-graphite_sort_dominated_info (VEC (basic_block, heap) *dom)
+graphite_sort_dominated_info (vec<basic_block> dom)
{
- VEC_qsort (basic_block, dom, compare_bb_depths);
+ dom.qsort (compare_bb_depths);
}
/* Recursive helper function for build_scops_bbs. */
@@ -362,7 +363,7 @@ static void
build_scop_bbs_1 (scop_p scop, sbitmap visited, basic_block bb)
{
sese region = SCOP_REGION (scop);
- VEC (basic_block, heap) *dom;
+ vec<basic_block> dom;
poly_bb_p pbb;
if (bitmap_bit_p (visited, bb->index)
@@ -370,31 +371,31 @@ build_scop_bbs_1 (scop_p scop, sbitmap visited, basic_block bb)
return;
pbb = new_poly_bb (scop, try_generate_gimple_bb (scop, bb));
- VEC_safe_push (poly_bb_p, heap, SCOP_BBS (scop), pbb);
+ SCOP_BBS (scop).safe_push (pbb);
bitmap_set_bit (visited, bb->index);
dom = get_dominated_by (CDI_DOMINATORS, bb);
- if (dom == NULL)
+ if (!dom.exists ())
return;
graphite_sort_dominated_info (dom);
- while (!VEC_empty (basic_block, dom))
+ while (!dom.is_empty ())
{
int i;
basic_block dom_bb;
- FOR_EACH_VEC_ELT (basic_block, dom, i, dom_bb)
+ FOR_EACH_VEC_ELT (dom, i, dom_bb)
if (all_non_dominated_preds_marked_p (dom_bb, visited))
{
build_scop_bbs_1 (scop, visited, dom_bb);
- VEC_unordered_remove (basic_block, dom, i);
+ dom.unordered_remove (i);
break;
}
}
- VEC_free (basic_block, heap, dom);
+ dom.release ();
}
/* Gather the basic blocks belonging to the SCOP. */
@@ -558,7 +559,7 @@ build_scop_scattering (scop_p scop)
incremented before copying. */
static_sched = isl_aff_add_coefficient_si (static_sched, isl_dim_in, 0, -1);
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
{
gimple_bb_p gbb = PBB_BLACK_BOX (pbb);
int prefix;
@@ -735,7 +736,7 @@ parameter_index_in_region_1 (tree name, sese region)
gcc_assert (TREE_CODE (name) == SSA_NAME);
- FOR_EACH_VEC_ELT (tree, SESE_PARAMS (region), i, p)
+ FOR_EACH_VEC_ELT (SESE_PARAMS (region), i, p)
if (p == name)
return i;
@@ -759,8 +760,8 @@ parameter_index_in_region (tree name, sese region)
gcc_assert (SESE_ADD_PARAMS (region));
- i = VEC_length (tree, SESE_PARAMS (region));
- VEC_safe_push (tree, heap, SESE_PARAMS (region), name);
+ i = SESE_PARAMS (region).length ();
+ SESE_PARAMS (region).safe_push (name);
return i;
}
@@ -899,12 +900,12 @@ find_params_in_bb (sese region, gimple_bb_p gbb)
loop_p loop = GBB_BB (gbb)->loop_father;
/* Find parameters in the access functions of data references. */
- FOR_EACH_VEC_ELT (data_reference_p, GBB_DATA_REFS (gbb), i, dr)
+ FOR_EACH_VEC_ELT (GBB_DATA_REFS (gbb), i, dr)
for (j = 0; j < DR_NUM_DIMENSIONS (dr); j++)
scan_tree_for_params (region, DR_ACCESS_FN (dr, j));
/* Find parameters in conditional statements. */
- FOR_EACH_VEC_ELT (gimple, GBB_CONDITIONS (gbb), i, stmt)
+ FOR_EACH_VEC_ELT (GBB_CONDITIONS (gbb), i, stmt)
{
tree lhs = scalar_evolution_in_region (region, loop,
gimple_cond_lhs (stmt));
@@ -929,7 +930,7 @@ find_scop_parameters (scop_p scop)
int nbp;
/* Find the parameters used in the loop bounds. */
- FOR_EACH_VEC_ELT (loop_p, SESE_LOOP_NEST (region), i, loop)
+ FOR_EACH_VEC_ELT (SESE_LOOP_NEST (region), i, loop)
{
tree nb_iters = number_of_latch_executions (loop);
@@ -941,7 +942,7 @@ find_scop_parameters (scop_p scop)
}
/* Find the parameters used in data accesses. */
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
find_params_in_bb (region, PBB_BLACK_BOX (pbb));
nbp = sese_nb_params (region);
@@ -952,7 +953,7 @@ find_scop_parameters (scop_p scop)
tree e;
isl_space *space = isl_space_set_alloc (scop->ctx, nbp, 0);
- FOR_EACH_VEC_ELT (tree, SESE_PARAMS (region), i, e)
+ FOR_EACH_VEC_ELT (SESE_PARAMS (region), i, e)
space = isl_space_set_dim_id (space, isl_dim_param, i,
isl_id_for_ssa_name (scop, e));
@@ -1149,10 +1150,10 @@ add_conditions_to_domain (poly_bb_p pbb)
gimple stmt;
gimple_bb_p gbb = PBB_BLACK_BOX (pbb);
- if (VEC_empty (gimple, GBB_CONDITIONS (gbb)))
+ if (GBB_CONDITIONS (gbb).is_empty ())
return;
- FOR_EACH_VEC_ELT (gimple, GBB_CONDITIONS (gbb), i, stmt)
+ FOR_EACH_VEC_ELT (GBB_CONDITIONS (gbb), i, stmt)
switch (gimple_code (stmt))
{
case GIMPLE_COND:
@@ -1160,7 +1161,7 @@ add_conditions_to_domain (poly_bb_p pbb)
enum tree_code code = gimple_cond_code (stmt);
/* The conditions for ELSE-branches are inverted. */
- if (!VEC_index (gimple, GBB_CONDITION_CASES (gbb), i))
+ if (!GBB_CONDITION_CASES (gbb)[i])
code = invert_tree_comparison (code, false);
add_condition_to_pbb (pbb, stmt, code);
@@ -1185,7 +1186,7 @@ add_conditions_to_constraints (scop_p scop)
int i;
poly_bb_p pbb;
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
add_conditions_to_domain (pbb);
}
@@ -1193,7 +1194,7 @@ add_conditions_to_constraints (scop_p scop)
struct bsc
{
- VEC (gimple, heap) **conditions, **cases;
+ vec<gimple> *conditions, *cases;
sese region;
};
@@ -1230,8 +1231,8 @@ build_sese_conditions_before (struct dom_walk_data *dw_data,
basic_block bb)
{
struct bsc *data = (struct bsc *) dw_data->global_data;
- VEC (gimple, heap) **conditions = data->conditions;
- VEC (gimple, heap) **cases = data->cases;
+ vec<gimple> *conditions = data->conditions;
+ vec<gimple> *cases = data->cases;
gimple_bb_p gbb;
gimple stmt;
@@ -1244,20 +1245,20 @@ build_sese_conditions_before (struct dom_walk_data *dw_data,
{
edge e = single_pred_edge (bb);
- VEC_safe_push (gimple, heap, *conditions, stmt);
+ conditions->safe_push (stmt);
if (e->flags & EDGE_TRUE_VALUE)
- VEC_safe_push (gimple, heap, *cases, stmt);
+ cases->safe_push (stmt);
else
- VEC_safe_push (gimple, heap, *cases, NULL);
+ cases->safe_push (NULL);
}
gbb = gbb_from_bb (bb);
if (gbb)
{
- GBB_CONDITIONS (gbb) = VEC_copy (gimple, heap, *conditions);
- GBB_CONDITION_CASES (gbb) = VEC_copy (gimple, heap, *cases);
+ GBB_CONDITIONS (gbb) = conditions->copy ();
+ GBB_CONDITION_CASES (gbb) = cases->copy ();
}
}
@@ -1269,16 +1270,16 @@ build_sese_conditions_after (struct dom_walk_data *dw_data,
basic_block bb)
{
struct bsc *data = (struct bsc *) dw_data->global_data;
- VEC (gimple, heap) **conditions = data->conditions;
- VEC (gimple, heap) **cases = data->cases;
+ vec<gimple> *conditions = data->conditions;
+ vec<gimple> *cases = data->cases;
if (!bb_in_sese_p (bb, data->region))
return;
if (single_pred_cond_non_loop_exit (bb))
{
- VEC_pop (gimple, *conditions);
- VEC_pop (gimple, *cases);
+ conditions->pop ();
+ cases->pop ();
}
}
@@ -1288,8 +1289,10 @@ static void
build_sese_conditions (sese region)
{
struct dom_walk_data walk_data;
- VEC (gimple, heap) *conditions = VEC_alloc (gimple, heap, 3);
- VEC (gimple, heap) *cases = VEC_alloc (gimple, heap, 3);
+ vec<gimple> conditions;
+ conditions.create (3);
+ vec<gimple> cases;
+ cases.create (3);
struct bsc data;
data.conditions = &conditions;
@@ -1307,8 +1310,8 @@ build_sese_conditions (sese region)
walk_dominator_tree (&walk_data, SESE_ENTRY_BB (region));
fini_walk_dominator_tree (&walk_data);
- VEC_free (gimple, heap, conditions);
- VEC_free (gimple, heap, cases);
+ conditions.release ();
+ cases.release ();
}
/* Add constraints on the possible values of parameter P from the type
@@ -1317,7 +1320,7 @@ build_sese_conditions (sese region)
static void
add_param_constraints (scop_p scop, graphite_dim_t p)
{
- tree parameter = VEC_index (tree, SESE_PARAMS (SCOP_REGION (scop)), p);
+ tree parameter = SESE_PARAMS (SCOP_REGION (scop))[p];
tree type = TREE_TYPE (parameter);
tree lb = NULL_TREE;
tree ub = NULL_TREE;
@@ -1402,12 +1405,12 @@ build_scop_iteration_domain (scop_p scop)
int nb_loops = number_of_loops ();
isl_set **doms = XCNEWVEC (isl_set *, nb_loops);
- FOR_EACH_VEC_ELT (loop_p, SESE_LOOP_NEST (region), i, loop)
+ FOR_EACH_VEC_ELT (SESE_LOOP_NEST (region), i, loop)
if (!loop_in_sese_p (loop_outer (loop), region))
build_loop_iteration_domains (scop, loop, 0,
isl_set_copy (scop->context), doms);
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
{
loop = pbb_loop (pbb);
@@ -1616,9 +1619,9 @@ build_poly_dr (data_reference_p dr, poly_bb_p pbb)
static inline bool
write_alias_graph_to_ascii_dimacs (FILE *file, char *comment,
- VEC (data_reference_p, heap) *drs)
+ vec<data_reference_p> drs)
{
- int num_vertex = VEC_length (data_reference_p, drs);
+ int num_vertex = drs.length ();
int edge_num = 0;
data_reference_p dr1, dr2;
int i, j;
@@ -1626,8 +1629,8 @@ write_alias_graph_to_ascii_dimacs (FILE *file, char *comment,
if (num_vertex == 0)
return true;
- FOR_EACH_VEC_ELT (data_reference_p, drs, i, dr1)
- for (j = i + 1; VEC_iterate (data_reference_p, drs, j, dr2); j++)
+ FOR_EACH_VEC_ELT (drs, i, dr1)
+ for (j = i + 1; drs.iterate (j, &dr2); j++)
if (dr_may_alias_p (dr1, dr2, true))
edge_num++;
@@ -1638,8 +1641,8 @@ write_alias_graph_to_ascii_dimacs (FILE *file, char *comment,
fprintf (file, "p edge %d %d\n", num_vertex, edge_num);
- FOR_EACH_VEC_ELT (data_reference_p, drs, i, dr1)
- for (j = i + 1; VEC_iterate (data_reference_p, drs, j, dr2); j++)
+ FOR_EACH_VEC_ELT (drs, i, dr1)
+ for (j = i + 1; drs.iterate (j, &dr2); j++)
if (dr_may_alias_p (dr1, dr2, true))
fprintf (file, "e %d %d\n", i + 1, j + 1);
@@ -1650,9 +1653,9 @@ write_alias_graph_to_ascii_dimacs (FILE *file, char *comment,
static inline bool
write_alias_graph_to_ascii_dot (FILE *file, char *comment,
- VEC (data_reference_p, heap) *drs)
+ vec<data_reference_p> drs)
{
- int num_vertex = VEC_length (data_reference_p, drs);
+ int num_vertex = drs.length ();
data_reference_p dr1, dr2;
int i, j;
@@ -1665,11 +1668,11 @@ write_alias_graph_to_ascii_dot (FILE *file, char *comment,
fprintf (file, "c %s\n", comment);
/* First print all the vertices. */
- FOR_EACH_VEC_ELT (data_reference_p, drs, i, dr1)
+ FOR_EACH_VEC_ELT (drs, i, dr1)
fprintf (file, "n%d;\n", i);
- FOR_EACH_VEC_ELT (data_reference_p, drs, i, dr1)
- for (j = i + 1; VEC_iterate (data_reference_p, drs, j, dr2); j++)
+ FOR_EACH_VEC_ELT (drs, i, dr1)
+ for (j = i + 1; drs.iterate (j, &dr2); j++)
if (dr_may_alias_p (dr1, dr2, true))
fprintf (file, "n%d n%d\n", i, j);
@@ -1680,9 +1683,9 @@ write_alias_graph_to_ascii_dot (FILE *file, char *comment,
static inline bool
write_alias_graph_to_ascii_ecc (FILE *file, char *comment,
- VEC (data_reference_p, heap) *drs)
+ vec<data_reference_p> drs)
{
- int num_vertex = VEC_length (data_reference_p, drs);
+ int num_vertex = drs.length ();
data_reference_p dr1, dr2;
int i, j;
@@ -1694,8 +1697,8 @@ write_alias_graph_to_ascii_ecc (FILE *file, char *comment,
if (comment)
fprintf (file, "c %s\n", comment);
- FOR_EACH_VEC_ELT (data_reference_p, drs, i, dr1)
- for (j = i + 1; VEC_iterate (data_reference_p, drs, j, dr2); j++)
+ FOR_EACH_VEC_ELT (drs, i, dr1)
+ for (j = i + 1; drs.iterate (j, &dr2); j++)
if (dr_may_alias_p (dr1, dr2, true))
fprintf (file, "%d %d\n", i, j);
@@ -1716,9 +1719,9 @@ dr_same_base_object_p (const struct data_reference *dr1,
true (1) if the above test is true, and false (0) otherwise. */
static int
-build_alias_set_optimal_p (VEC (data_reference_p, heap) *drs)
+build_alias_set_optimal_p (vec<data_reference_p> drs)
{
- int num_vertices = VEC_length (data_reference_p, drs);
+ int num_vertices = drs.length ();
struct graph *g = new_graph (num_vertices);
data_reference_p dr1, dr2;
int i, j;
@@ -1730,8 +1733,8 @@ build_alias_set_optimal_p (VEC (data_reference_p, heap) *drs)
int this_component_is_clique;
int all_components_are_cliques = 1;
- FOR_EACH_VEC_ELT (data_reference_p, drs, i, dr1)
- for (j = i+1; VEC_iterate (data_reference_p, drs, j, dr2); j++)
+ FOR_EACH_VEC_ELT (drs, i, dr1)
+ for (j = i+1; drs.iterate (j, &dr2); j++)
if (dr_may_alias_p (dr1, dr2, true))
{
add_edge (g, i, j);
@@ -1747,7 +1750,7 @@ build_alias_set_optimal_p (VEC (data_reference_p, heap) *drs)
NULL, true, NULL);
for (i = 0; i < g->n_vertices; i++)
{
- data_reference_p dr = VEC_index (data_reference_p, drs, i);
+ data_reference_p dr = drs[i];
base_alias_pair *bap;
gcc_assert (dr->aux);
@@ -1802,16 +1805,16 @@ build_alias_set_optimal_p (VEC (data_reference_p, heap) *drs)
/* Group each data reference in DRS with its base object set num. */
static void
-build_base_obj_set_for_drs (VEC (data_reference_p, heap) *drs)
+build_base_obj_set_for_drs (vec<data_reference_p> drs)
{
- int num_vertex = VEC_length (data_reference_p, drs);
+ int num_vertex = drs.length ();
struct graph *g = new_graph (num_vertex);
data_reference_p dr1, dr2;
int i, j;
int *queue;
- FOR_EACH_VEC_ELT (data_reference_p, drs, i, dr1)
- for (j = i + 1; VEC_iterate (data_reference_p, drs, j, dr2); j++)
+ FOR_EACH_VEC_ELT (drs, i, dr1)
+ for (j = i + 1; drs.iterate (j, &dr2); j++)
if (dr_same_base_object_p (dr1, dr2))
{
add_edge (g, i, j);
@@ -1826,7 +1829,7 @@ build_base_obj_set_for_drs (VEC (data_reference_p, heap) *drs)
for (i = 0; i < g->n_vertices; i++)
{
- data_reference_p dr = VEC_index (data_reference_p, drs, i);
+ data_reference_p dr = drs[i];
base_alias_pair *bap;
gcc_assert (dr->aux);
@@ -1846,16 +1849,16 @@ build_pbb_drs (poly_bb_p pbb)
{
int j;
data_reference_p dr;
- VEC (data_reference_p, heap) *gbb_drs = GBB_DATA_REFS (PBB_BLACK_BOX (pbb));
+ vec<data_reference_p> gbb_drs = GBB_DATA_REFS (PBB_BLACK_BOX (pbb));
- FOR_EACH_VEC_ELT (data_reference_p, gbb_drs, j, dr)
+ FOR_EACH_VEC_ELT (gbb_drs, j, dr)
build_poly_dr (dr, pbb);
}
/* Dump to file the alias graphs for the data references in DRS. */
static void
-dump_alias_graphs (VEC (data_reference_p, heap) *drs)
+dump_alias_graphs (vec<data_reference_p> drs)
{
char comment[100];
FILE *file_dimacs, *file_ecc, *file_dot;
@@ -1896,25 +1899,25 @@ build_scop_drs (scop_p scop)
int i, j;
poly_bb_p pbb;
data_reference_p dr;
- VEC (data_reference_p, heap) *drs = VEC_alloc (data_reference_p, heap, 3);
+ vec<data_reference_p> drs;
+ drs.create (3);
/* Remove all the PBBs that do not have data references: these basic
blocks are not handled in the polyhedral representation. */
- for (i = 0; VEC_iterate (poly_bb_p, SCOP_BBS (scop), i, pbb); i++)
- if (VEC_empty (data_reference_p, GBB_DATA_REFS (PBB_BLACK_BOX (pbb))))
+ for (i = 0; SCOP_BBS (scop).iterate (i, &pbb); i++)
+ if (GBB_DATA_REFS (PBB_BLACK_BOX (pbb)).is_empty ())
{
free_gimple_bb (PBB_BLACK_BOX (pbb));
free_poly_bb (pbb);
- VEC_ordered_remove (poly_bb_p, SCOP_BBS (scop), i);
+ SCOP_BBS (scop).ordered_remove (i);
i--;
}
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
- for (j = 0; VEC_iterate (data_reference_p,
- GBB_DATA_REFS (PBB_BLACK_BOX (pbb)), j, dr); j++)
- VEC_safe_push (data_reference_p, heap, drs, dr);
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
+ for (j = 0; GBB_DATA_REFS (PBB_BLACK_BOX (pbb)).iterate (j, &dr); j++)
+ drs.safe_push (dr);
- FOR_EACH_VEC_ELT (data_reference_p, drs, i, dr)
+ FOR_EACH_VEC_ELT (drs, i, dr)
dr->aux = XNEW (base_alias_pair);
if (!build_alias_set_optimal_p (drs))
@@ -1930,9 +1933,9 @@ build_scop_drs (scop_p scop)
if (0)
dump_alias_graphs (drs);
- VEC_free (data_reference_p, heap, drs);
+ drs.release ();
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
build_pbb_drs (pbb);
}
@@ -1956,7 +1959,7 @@ gsi_for_phi_node (gimple stmt)
GBB_DATA_REFS vector of BB. */
static void
-analyze_drs_in_stmts (scop_p scop, basic_block bb, VEC (gimple, heap) *stmts)
+analyze_drs_in_stmts (scop_p scop, basic_block bb, vec<gimple> stmts)
{
loop_p nest;
gimple_bb_p gbb;
@@ -1970,7 +1973,7 @@ analyze_drs_in_stmts (scop_p scop, basic_block bb, VEC (gimple, heap) *stmts)
nest = outermost_loop_in_sese_1 (region, bb);
gbb = gbb_from_bb (bb);
- FOR_EACH_VEC_ELT (gimple, stmts, i, stmt)
+ FOR_EACH_VEC_ELT (stmts, i, stmt)
{
loop_p loop;
@@ -1995,15 +1998,16 @@ insert_stmts (scop_p scop, gimple stmt, gimple_seq stmts,
gimple_stmt_iterator insert_gsi)
{
gimple_stmt_iterator gsi;
- VEC (gimple, heap) *x = VEC_alloc (gimple, heap, 3);
+ vec<gimple> x;
+ x.create (3);
gimple_seq_add_stmt (&stmts, stmt);
for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
- VEC_safe_push (gimple, heap, x, gsi_stmt (gsi));
+ x.safe_push (gsi_stmt (gsi));
gsi_insert_seq_before (&insert_gsi, stmts, GSI_SAME_STMT);
analyze_drs_in_stmts (scop, gsi_bb (insert_gsi), x);
- VEC_free (gimple, heap, x);
+ x.release ();
}
/* Insert the assignment "RES := EXPR" just after AFTER_STMT. */
@@ -2015,11 +2019,12 @@ insert_out_of_ssa_copy (scop_p scop, tree res, tree expr, gimple after_stmt)
gimple_stmt_iterator gsi;
tree var = force_gimple_operand (expr, &stmts, true, NULL_TREE);
gimple stmt = gimple_build_assign (res, var);
- VEC (gimple, heap) *x = VEC_alloc (gimple, heap, 3);
+ vec<gimple> x;
+ x.create (3);
gimple_seq_add_stmt (&stmts, stmt);
for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
- VEC_safe_push (gimple, heap, x, gsi_stmt (gsi));
+ x.safe_push (gsi_stmt (gsi));
if (gimple_code (after_stmt) == GIMPLE_PHI)
{
@@ -2033,7 +2038,7 @@ insert_out_of_ssa_copy (scop_p scop, tree res, tree expr, gimple after_stmt)
}
analyze_drs_in_stmts (scop, gimple_bb (after_stmt), x);
- VEC_free (gimple, heap, x);
+ x.release ();
}
/* Creates a poly_bb_p for basic_block BB from the existing PBB. */
@@ -2041,23 +2046,24 @@ insert_out_of_ssa_copy (scop_p scop, tree res, tree expr, gimple after_stmt)
static void
new_pbb_from_pbb (scop_p scop, poly_bb_p pbb, basic_block bb)
{
- VEC (data_reference_p, heap) *drs = VEC_alloc (data_reference_p, heap, 3);
+ vec<data_reference_p> drs;
+ drs.create (3);
gimple_bb_p gbb = PBB_BLACK_BOX (pbb);
gimple_bb_p gbb1 = new_gimple_bb (bb, drs);
poly_bb_p pbb1 = new_poly_bb (scop, gbb1);
- int index, n = VEC_length (poly_bb_p, SCOP_BBS (scop));
+ int index, n = SCOP_BBS (scop).length ();
/* The INDEX of PBB in SCOP_BBS. */
for (index = 0; index < n; index++)
- if (VEC_index (poly_bb_p, SCOP_BBS (scop), index) == pbb)
+ if (SCOP_BBS (scop)[index] == pbb)
break;
pbb1->domain = isl_set_copy (pbb->domain);
GBB_PBB (gbb1) = pbb1;
- GBB_CONDITIONS (gbb1) = VEC_copy (gimple, heap, GBB_CONDITIONS (gbb));
- GBB_CONDITION_CASES (gbb1) = VEC_copy (gimple, heap, GBB_CONDITION_CASES (gbb));
- VEC_safe_insert (poly_bb_p, heap, SCOP_BBS (scop), index + 1, pbb1);
+ GBB_CONDITIONS (gbb1) = GBB_CONDITIONS (gbb).copy ();
+ GBB_CONDITION_CASES (gbb1) = GBB_CONDITION_CASES (gbb).copy ();
+ SCOP_BBS (scop).safe_insert (index + 1, pbb1);
}
/* Insert on edge E the assignment "RES := EXPR". */
@@ -2070,11 +2076,12 @@ insert_out_of_ssa_copy_on_edge (scop_p scop, edge e, tree res, tree expr)
tree var = force_gimple_operand (expr, &stmts, true, NULL_TREE);
gimple stmt = gimple_build_assign (res, var);
basic_block bb;
- VEC (gimple, heap) *x = VEC_alloc (gimple, heap, 3);
+ vec<gimple> x;
+ x.create (3);
gimple_seq_add_stmt (&stmts, stmt);
for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
- VEC_safe_push (gimple, heap, x, gsi_stmt (gsi));
+ x.safe_push (gsi_stmt (gsi));
gsi_insert_seq_on_edge (e, stmts);
gsi_commit_edge_inserts ();
@@ -2087,7 +2094,7 @@ insert_out_of_ssa_copy_on_edge (scop_p scop, edge e, tree res, tree expr)
new_pbb_from_pbb (scop, pbb_from_bb (e->src), bb);
analyze_drs_in_stmts (scop, bb, x);
- VEC_free (gimple, heap, x);
+ x.release ();
}
/* Creates a zero dimension array of the same type as VAR. */
@@ -2531,7 +2538,7 @@ nb_pbbs_in_loops (scop_p scop)
poly_bb_p pbb;
int res = 0;
- FOR_EACH_VEC_ELT (poly_bb_p, SCOP_BBS (scop), i, pbb)
+ FOR_EACH_VEC_ELT (SCOP_BBS (scop), i, pbb)
if (loop_in_sese_p (gbb_loop (PBB_BLACK_BOX (pbb)), SCOP_REGION (scop)))
res++;
@@ -2599,15 +2606,15 @@ split_reduction_stmt (scop_p scop, gimple stmt)
/* A part of the data references will end in a different basic block
after the split: move the DRs from the original GBB to the newly
created GBB1. */
- FOR_EACH_VEC_ELT (data_reference_p, GBB_DATA_REFS (gbb), i, dr)
+ FOR_EACH_VEC_ELT (GBB_DATA_REFS (gbb), i, dr)
{
basic_block bb1 = gimple_bb (DR_STMT (dr));
if (bb1 != bb)
{
gimple_bb_p gbb1 = gbb_from_bb (bb1);
- VEC_safe_push (data_reference_p, heap, GBB_DATA_REFS (gbb1), dr);
- VEC_ordered_remove (data_reference_p, GBB_DATA_REFS (gbb), i);
+ GBB_DATA_REFS (gbb1).safe_push (dr);
+ GBB_DATA_REFS (gbb).ordered_remove (i);
i--;
}
}
@@ -2689,16 +2696,16 @@ follow_ssa_with_commutative_ops (tree arg, tree lhs)
static gimple
detect_commutative_reduction_arg (tree lhs, gimple stmt, tree arg,
- VEC (gimple, heap) **in,
- VEC (gimple, heap) **out)
+ vec<gimple> *in,
+ vec<gimple> *out)
{
gimple phi = follow_ssa_with_commutative_ops (arg, lhs);
if (!phi)
return NULL;
- VEC_safe_push (gimple, heap, *in, stmt);
- VEC_safe_push (gimple, heap, *out, stmt);
+ in->safe_push (stmt);
+ out->safe_push (stmt);
return phi;
}
@@ -2706,8 +2713,8 @@ detect_commutative_reduction_arg (tree lhs, gimple stmt, tree arg,
STMT. Return the phi node of the reduction cycle, or NULL. */
static gimple
-detect_commutative_reduction_assign (gimple stmt, VEC (gimple, heap) **in,
- VEC (gimple, heap) **out)
+detect_commutative_reduction_assign (gimple stmt, vec<gimple> *in,
+ vec<gimple> *out)
{
tree lhs = gimple_assign_lhs (stmt);
@@ -2819,8 +2826,8 @@ used_outside_reduction (tree def, gimple loop_phi)
node of the reduction cycle, or NULL. */
static gimple
-detect_commutative_reduction (scop_p scop, gimple stmt, VEC (gimple, heap) **in,
- VEC (gimple, heap) **out)
+detect_commutative_reduction (scop_p scop, gimple stmt, vec<gimple> *in,
+ vec<gimple> *out)
{
if (scalar_close_phi_node_p (stmt))
{
@@ -2848,8 +2855,8 @@ detect_commutative_reduction (scop_p scop, gimple stmt, VEC (gimple, heap) **in,
|| !has_single_use (gimple_phi_result (phi))))
return NULL;
- VEC_safe_push (gimple, heap, *in, loop_phi);
- VEC_safe_push (gimple, heap, *out, close_phi);
+ in->safe_push (loop_phi);
+ out->safe_push (close_phi);
return phi;
}
@@ -2888,7 +2895,8 @@ remove_phi (gimple phi)
tree def;
use_operand_p use_p;
gimple_stmt_iterator gsi;
- VEC (gimple, heap) *update = VEC_alloc (gimple, heap, 3);
+ vec<gimple> update;
+ update.create (3);
unsigned int i;
gimple stmt;
@@ -2900,14 +2908,14 @@ remove_phi (gimple phi)
if (is_gimple_debug (stmt))
{
gimple_debug_bind_reset_value (stmt);
- VEC_safe_push (gimple, heap, update, stmt);
+ update.safe_push (stmt);
}
}
- FOR_EACH_VEC_ELT (gimple, update, i, stmt)
+ FOR_EACH_VEC_ELT (update, i, stmt)
update_stmt (stmt);
- VEC_free (gimple, heap, update);
+ update.release ();
gsi = gsi_for_phi_node (phi);
remove_phi_node (&gsi, false);
@@ -2999,16 +3007,16 @@ close_phi_written_to_memory (gimple close_phi)
static void
translate_scalar_reduction_to_array (scop_p scop,
- VEC (gimple, heap) *in,
- VEC (gimple, heap) *out)
+ vec<gimple> in,
+ vec<gimple> out)
{
gimple loop_phi;
- unsigned int i = VEC_length (gimple, out) - 1;
- tree red = close_phi_written_to_memory (VEC_index (gimple, out, i));
+ unsigned int i = out.length () - 1;
+ tree red = close_phi_written_to_memory (out[i]);
- FOR_EACH_VEC_ELT (gimple, in, i, loop_phi)
+ FOR_EACH_VEC_ELT (in, i, loop_phi)
{
- gimple close_phi = VEC_index (gimple, out, i);
+ gimple close_phi = out[i];
if (i == 0)
{
@@ -3022,12 +3030,11 @@ translate_scalar_reduction_to_array (scop_p scop,
red = create_zero_dim_array
(gimple_assign_lhs (stmt), "Commutative_Associative_Reduction");
- translate_scalar_reduction_to_array_for_stmt
- (scop, red, stmt, VEC_index (gimple, in, 1));
+ translate_scalar_reduction_to_array_for_stmt (scop, red, stmt, in[1]);
continue;
}
- if (i == VEC_length (gimple, in) - 1)
+ if (i == in.length () - 1)
{
insert_out_of_ssa_copy (scop, gimple_phi_result (close_phi),
unshare_expr (red), close_phi);
@@ -3049,16 +3056,18 @@ rewrite_commutative_reductions_out_of_ssa_close_phi (scop_p scop,
gimple close_phi)
{
bool res;
- VEC (gimple, heap) *in = VEC_alloc (gimple, heap, 10);
- VEC (gimple, heap) *out = VEC_alloc (gimple, heap, 10);
+ vec<gimple> in;
+ in.create (10);
+ vec<gimple> out;
+ out.create (10);
detect_commutative_reduction (scop, close_phi, &in, &out);
- res = VEC_length (gimple, in) > 1;
+ res = in.length () > 1;
if (res)
translate_scalar_reduction_to_array (scop, in, out);
- VEC_free (gimple, heap, in);
- VEC_free (gimple, heap, out);
+ in.release ();
+ out.release ();
return res;
}
diff --git a/gcc/graphite.c b/gcc/graphite.c
index 0eb1ca191d5..beb94a773a9 100644
--- a/gcc/graphite.c
+++ b/gcc/graphite.c
@@ -183,13 +183,13 @@ print_graphite_scop_statistics (FILE* file, scop_p scop)
/* Print statistics for SCOPS to FILE. */
static void
-print_graphite_statistics (FILE* file, VEC (scop_p, heap) *scops)
+print_graphite_statistics (FILE* file, vec<scop_p> scops)
{
int i;
scop_p scop;
- FOR_EACH_VEC_ELT (scop_p, scops, i, scop)
+ FOR_EACH_VEC_ELT (scops, i, scop)
print_graphite_scop_statistics (file, scop);
}
@@ -255,7 +255,7 @@ graphite_transform_loops (void)
int i;
scop_p scop;
bool need_cfg_cleanup_p = false;
- VEC (scop_p, heap) *scops = NULL;
+ vec<scop_p> scops = vec<scop_p>();
htab_t bb_pbb_mapping;
isl_ctx *ctx;
@@ -280,7 +280,7 @@ graphite_transform_loops (void)
bb_pbb_mapping = htab_create (10, bb_pbb_map_hash, eq_bb_pbb_map, free);
- FOR_EACH_VEC_ELT (scop_p, scops, i, scop)
+ FOR_EACH_VEC_ELT (scops, i, scop)
if (dbg_cnt (graphite_scop))
{
scop->ctx = ctx;
diff --git a/gcc/haifa-sched.c b/gcc/haifa-sched.c
index fa80f246b42..48e15f8875b 100644
--- a/gcc/haifa-sched.c
+++ b/gcc/haifa-sched.c
@@ -143,7 +143,6 @@ along with GCC; see the file COPYING3. If not see
#include "target.h"
#include "common/common-target.h"
#include "params.h"
-#include "vecprim.h"
#include "dbgcnt.h"
#include "cfgloop.h"
#include "ira.h"
@@ -355,7 +354,7 @@ int cycle_issued_insns;
/* This records the actual schedule. It is built up during the main phase
of schedule_block, and afterwards used to reorder the insns in the RTL. */
-static VEC(rtx, heap) *scheduled_insns;
+static vec<rtx> scheduled_insns;
static int may_trap_exp (const_rtx, int);
@@ -384,13 +383,13 @@ const struct common_sched_info_def haifa_common_sched_info =
};
/* Mapping from instruction UID to its Logical UID. */
-VEC (int, heap) *sched_luids = NULL;
+vec<int> sched_luids = vec<int>();
/* Next LUID to assign to an instruction. */
int sched_max_luid = 1;
/* Haifa Instruction Data. */
-VEC (haifa_insn_data_def, heap) *h_i_d = NULL;
+vec<haifa_insn_data_def> h_i_d = vec<haifa_insn_data_def>();
void (* sched_init_only_bb) (basic_block, basic_block);
@@ -863,7 +862,7 @@ static void dump_new_block_header (int, basic_block, rtx, rtx);
static void restore_bb_notes (basic_block);
static void fix_jump_move (rtx);
static void move_block_after_check (rtx);
-static void move_succs (VEC(edge,gc) **, basic_block);
+static void move_succs (vec<edge, va_gc> **, basic_block);
static void sched_remove_insn (rtx);
static void clear_priorities (rtx, rtx_vec_t *);
static void calc_priorities (rtx_vec_t);
@@ -1142,15 +1141,13 @@ update_insn_after_change (rtx insn)
INSN_TICK (insn) = INVALID_TICK;
}
-DEF_VEC_P(dep_t);
-DEF_VEC_ALLOC_P(dep_t, heap);
/* Two VECs, one to hold dependencies for which pattern replacements
need to be applied or restored at the start of the next cycle, and
another to hold an integer that is either one, to apply the
corresponding replacement, or zero to restore it. */
-static VEC(dep_t, heap) *next_cycle_replace_deps;
-static VEC(int, heap) *next_cycle_apply;
+static vec<dep_t> next_cycle_replace_deps;
+static vec<int> next_cycle_apply;
static void apply_replacement (dep_t, bool);
static void restore_pattern (dep_t, bool);
@@ -1272,7 +1269,7 @@ recompute_todo_spec (rtx next, bool for_backtrack)
REG_DEP_CONTROL; if the condition register isn't modified after it,
we know that it still has the right value. */
if (QUEUE_INDEX (pro) == QUEUE_SCHEDULED)
- FOR_EACH_VEC_ELT_REVERSE (rtx, scheduled_insns, i, prev)
+ FOR_EACH_VEC_ELT_REVERSE (scheduled_insns, i, prev)
{
HARD_REG_SET t;
@@ -1863,7 +1860,7 @@ struct model_pressure_group {
/* Index POINT gives the instruction at point POINT of the model schedule.
This array doesn't change during main scheduling. */
-static VEC (rtx, heap) *model_schedule;
+static vec<rtx> model_schedule;
/* The list of instructions in the model worklist, sorted in order of
decreasing priority. */
@@ -1907,7 +1904,7 @@ static unsigned int model_next_priority;
/* The instruction at point POINT of the model schedule. */
#define MODEL_INSN(POINT) \
- (VEC_index (rtx, model_schedule, POINT))
+ (model_schedule[POINT])
/* Return INSN's index in the model schedule, or model_num_insns if it
@@ -3255,8 +3252,8 @@ model_add_to_schedule (rtx insn)
gcc_assert (QUEUE_INDEX (insn) == QUEUE_NOWHERE);
QUEUE_INDEX (insn) = QUEUE_SCHEDULED;
- point = VEC_length (rtx, model_schedule);
- VEC_quick_push (rtx, model_schedule, insn);
+ point = model_schedule.length ();
+ model_schedule.quick_push (insn);
INSN_MODEL_INDEX (insn) = point + 1;
}
@@ -3612,7 +3609,7 @@ model_reset_queue_indices (void)
unsigned int i;
rtx insn;
- FOR_EACH_VEC_ELT (rtx, model_schedule, i, insn)
+ FOR_EACH_VEC_ELT (model_schedule, i, insn)
QUEUE_INDEX (insn) = MODEL_INSN_INFO (insn)->old_queue;
}
@@ -3643,7 +3640,7 @@ model_start_schedule (void)
basic_block bb;
model_next_priority = 1;
- model_schedule = VEC_alloc (rtx, heap, sched_max_luid);
+ model_schedule.create (sched_max_luid);
model_insns = XCNEWVEC (struct model_insn_info, sched_max_luid);
bb = BLOCK_FOR_INSN (NEXT_INSN (current_sched_info->prev_head));
@@ -3653,7 +3650,7 @@ model_start_schedule (void)
model_init_pressure_group (&model_before_pressure);
while (model_worklist)
model_choose_insn ();
- gcc_assert (model_num_insns == (int) VEC_length (rtx, model_schedule));
+ gcc_assert (model_num_insns == (int) model_schedule.length ());
if (sched_verbose >= 2)
fprintf (sched_dump, "\n");
@@ -3682,7 +3679,7 @@ static void
model_end_schedule (void)
{
model_finalize_pressure_group (&model_before_pressure);
- VEC_free (rtx, heap, model_schedule);
+ model_schedule.release ();
}
/* A structure that holds local state for the loop in schedule_block. */
@@ -4027,13 +4024,13 @@ struct haifa_saved_data
/* Describe pattern replacements that occurred since this backtrack point
was queued. */
- VEC (dep_t, heap) *replacement_deps;
- VEC (int, heap) *replace_apply;
+ vec<dep_t> replacement_deps;
+ vec<int> replace_apply;
/* A copy of the next-cycle replacement vectors at the time of the backtrack
point. */
- VEC (dep_t, heap) *next_cycle_deps;
- VEC (int, heap) *next_cycle_apply;
+ vec<dep_t> next_cycle_deps;
+ vec<int> next_cycle_apply;
};
/* A record, in reverse order, of all scheduled insns which have delay slots
@@ -4090,10 +4087,10 @@ save_backtrack_point (struct delay_pair *pair,
save->sched_block = sched_block;
- save->replacement_deps = NULL;
- save->replace_apply = NULL;
- save->next_cycle_deps = VEC_copy (dep_t, heap, next_cycle_replace_deps);
- save->next_cycle_apply = VEC_copy (int, heap, next_cycle_apply);
+ save->replacement_deps.create (0);
+ save->replace_apply.create (0);
+ save->next_cycle_deps = next_cycle_replace_deps.copy ();
+ save->next_cycle_apply = next_cycle_apply.copy ();
if (current_sched_info->save_state)
save->fe_saved_data = (*current_sched_info->save_state) ();
@@ -4169,18 +4166,18 @@ toggle_cancelled_flags (bool set)
static void
undo_replacements_for_backtrack (struct haifa_saved_data *save)
{
- while (!VEC_empty (dep_t, save->replacement_deps))
+ while (!save->replacement_deps.is_empty ())
{
- dep_t dep = VEC_pop (dep_t, save->replacement_deps);
- int apply_p = VEC_pop (int, save->replace_apply);
+ dep_t dep = save->replacement_deps.pop ();
+ int apply_p = save->replace_apply.pop ();
if (apply_p)
restore_pattern (dep, true);
else
apply_replacement (dep, true);
}
- VEC_free (dep_t, heap, save->replacement_deps);
- VEC_free (int, heap, save->replace_apply);
+ save->replacement_deps.release ();
+ save->replace_apply.release ();
}
/* Pop entries from the SCHEDULED_INSNS vector up to and including INSN.
@@ -4190,9 +4187,7 @@ undo_replacements_for_backtrack (struct haifa_saved_data *save)
static void
unschedule_insns_until (rtx insn)
{
- VEC (rtx, heap) *recompute_vec;
-
- recompute_vec = VEC_alloc (rtx, heap, 0);
+ vec<rtx> recompute_vec = vec<rtx>();
/* Make two passes over the insns to be unscheduled. First, we clear out
dependencies and other trivial bookkeeping. */
@@ -4202,7 +4197,7 @@ unschedule_insns_until (rtx insn)
sd_iterator_def sd_it;
dep_t dep;
- last = VEC_pop (rtx, scheduled_insns);
+ last = scheduled_insns.pop ();
/* This will be changed by restore_backtrack_point if the insn is in
any queue. */
@@ -4221,7 +4216,7 @@ unschedule_insns_until (rtx insn)
if (!MUST_RECOMPUTE_SPEC_P (con))
{
MUST_RECOMPUTE_SPEC_P (con) = 1;
- VEC_safe_push (rtx, heap, recompute_vec, con);
+ recompute_vec.safe_push (con);
}
}
@@ -4234,11 +4229,11 @@ unschedule_insns_until (rtx insn)
popped the scheduled_insns vector up to the point where we
restart scheduling, as recompute_todo_spec requires it to be
up-to-date. */
- while (!VEC_empty (rtx, recompute_vec))
+ while (!recompute_vec.is_empty ())
{
rtx con;
- con = VEC_pop (rtx, recompute_vec);
+ con = recompute_vec.pop ();
MUST_RECOMPUTE_SPEC_P (con) = 0;
if (!sd_lists_empty_p (con, SD_LIST_HARD_BACK))
{
@@ -4250,7 +4245,7 @@ unschedule_insns_until (rtx insn)
else if (QUEUE_INDEX (con) != QUEUE_SCHEDULED)
TODO_SPEC (con) = recompute_todo_spec (con, true);
}
- VEC_free (rtx, heap, recompute_vec);
+ recompute_vec.release ();
}
/* Restore scheduler state from the topmost entry on the backtracking queue.
@@ -4353,9 +4348,9 @@ restore_last_backtrack_point (struct sched_block_state *psched_block)
mark_backtrack_feeds (save->delay_pair->i2, 0);
- gcc_assert (VEC_empty (dep_t, next_cycle_replace_deps));
- next_cycle_replace_deps = VEC_copy (dep_t, heap, save->next_cycle_deps);
- next_cycle_apply = VEC_copy (int, heap, save->next_cycle_apply);
+ gcc_assert (next_cycle_replace_deps.is_empty ());
+ next_cycle_replace_deps = save->next_cycle_deps.copy ();
+ next_cycle_apply = save->next_cycle_apply.copy ();
free (save);
@@ -4390,8 +4385,8 @@ free_topmost_backtrack_point (bool reset_tick)
}
else
{
- VEC_free (dep_t, heap, save->replacement_deps);
- VEC_free (int, heap, save->replace_apply);
+ save->replacement_deps.release ();
+ save->replace_apply.release ();
}
if (targetm.sched.free_sched_context)
@@ -4425,8 +4420,8 @@ apply_replacement (dep_t dep, bool immediately)
struct dep_replacement *desc = DEP_REPLACE (dep);
if (!immediately && targetm.sched.exposed_pipeline && reload_completed)
{
- VEC_safe_push (dep_t, heap, next_cycle_replace_deps, dep);
- VEC_safe_push (int, heap, next_cycle_apply, 1);
+ next_cycle_replace_deps.safe_push (dep);
+ next_cycle_apply.safe_push (1);
}
else
{
@@ -4448,8 +4443,8 @@ apply_replacement (dep_t dep, bool immediately)
if (backtrack_queue != NULL)
{
- VEC_safe_push (dep_t, heap, backtrack_queue->replacement_deps, dep);
- VEC_safe_push (int, heap, backtrack_queue->replace_apply, 1);
+ backtrack_queue->replacement_deps.safe_push (dep);
+ backtrack_queue->replace_apply.safe_push (1);
}
}
}
@@ -4471,8 +4466,8 @@ restore_pattern (dep_t dep, bool immediately)
if (!immediately && targetm.sched.exposed_pipeline && reload_completed)
{
- VEC_safe_push (dep_t, heap, next_cycle_replace_deps, dep);
- VEC_safe_push (int, heap, next_cycle_apply, 0);
+ next_cycle_replace_deps.safe_push (dep);
+ next_cycle_apply.safe_push (0);
return;
}
@@ -4499,8 +4494,8 @@ restore_pattern (dep_t dep, bool immediately)
update_insn_after_change (desc->insn);
if (backtrack_queue != NULL)
{
- VEC_safe_push (dep_t, heap, backtrack_queue->replacement_deps, dep);
- VEC_safe_push (int, heap, backtrack_queue->replace_apply, 0);
+ backtrack_queue->replacement_deps.safe_push (dep);
+ backtrack_queue->replace_apply.safe_push (0);
}
}
INSN_TICK (next) = tick;
@@ -4520,16 +4515,16 @@ perform_replacements_new_cycle (void)
{
int i;
dep_t dep;
- FOR_EACH_VEC_ELT (dep_t, next_cycle_replace_deps, i, dep)
+ FOR_EACH_VEC_ELT (next_cycle_replace_deps, i, dep)
{
- int apply_p = VEC_index (int, next_cycle_apply, i);
+ int apply_p = next_cycle_apply[i];
if (apply_p)
apply_replacement (dep, true);
else
restore_pattern (dep, true);
}
- VEC_truncate (dep_t, next_cycle_replace_deps, 0);
- VEC_truncate (int, next_cycle_apply, 0);
+ next_cycle_replace_deps.truncate (0);
+ next_cycle_apply.truncate (0);
}
/* Compute INSN_TICK_ESTIMATE for INSN. PROCESSED is a bitmap of
@@ -4617,7 +4612,7 @@ resolve_dependencies (rtx insn)
if (QUEUE_INDEX (insn) >= 0)
queue_remove (insn);
- VEC_safe_push (rtx, heap, scheduled_insns, insn);
+ scheduled_insns.safe_push (insn);
/* Update dependent instructions. */
for (sd_it = sd_iterator_start (insn, SD_LIST_FORW);
@@ -4797,7 +4792,7 @@ undo_all_replacements (void)
rtx insn;
int i;
- FOR_EACH_VEC_ELT (rtx, scheduled_insns, i, insn)
+ FOR_EACH_VEC_ELT (scheduled_insns, i, insn)
{
sd_iterator_def sd_it;
dep_t dep;
@@ -4930,14 +4925,14 @@ ok_for_early_queue_removal (rtx insn)
{
rtx prev_insn;
int n_cycles;
- int i = VEC_length (rtx, scheduled_insns);
+ int i = scheduled_insns.length ();
for (n_cycles = flag_sched_stalled_insns_dep; n_cycles; n_cycles--)
{
while (i-- > 0)
{
int cost;
- prev_insn = VEC_index (rtx, scheduled_insns, i);
+ prev_insn = scheduled_insns[i];
if (!NOTE_P (prev_insn))
{
@@ -5623,7 +5618,7 @@ commit_schedule (rtx prev_head, rtx tail, basic_block *target_bb)
last_scheduled_insn = prev_head;
for (i = 0;
- VEC_iterate (rtx, scheduled_insns, i, insn);
+ scheduled_insns.iterate (i, &insn);
i++)
{
if (control_flow_insn_p (last_scheduled_insn)
@@ -5652,7 +5647,7 @@ commit_schedule (rtx prev_head, rtx tail, basic_block *target_bb)
last_scheduled_insn = insn;
}
- VEC_truncate (rtx, scheduled_insns, 0);
+ scheduled_insns.truncate (0);
}
/* Examine all insns on the ready list and queue those which can't be
@@ -5971,7 +5966,7 @@ schedule_block (basic_block *target_bb, state_t init_state)
advance = 0;
- gcc_assert (VEC_length (rtx, scheduled_insns) == 0);
+ gcc_assert (scheduled_insns.length () == 0);
sort_p = TRUE;
must_backtrack = false;
modulo_insns_scheduled = 0;
@@ -6082,7 +6077,7 @@ schedule_block (basic_block *target_bb, state_t init_state)
rtx insn = ready_remove_first (&ready);
gcc_assert (DEBUG_INSN_P (insn));
(*current_sched_info->begin_schedule_ready) (insn);
- VEC_safe_push (rtx, heap, scheduled_insns, insn);
+ scheduled_insns.safe_push (insn);
last_scheduled_insn = insn;
advance = schedule_insn (insn);
gcc_assert (advance == 0);
@@ -6233,7 +6228,7 @@ schedule_block (basic_block *target_bb, state_t init_state)
/* Update counters, etc in the scheduler's front end. */
(*current_sched_info->begin_schedule_ready) (insn);
- VEC_safe_push (rtx, heap, scheduled_insns, insn);
+ scheduled_insns.safe_push (insn);
gcc_assert (NONDEBUG_INSN_P (insn));
last_nondebug_scheduled_insn = last_scheduled_insn = insn;
@@ -6433,7 +6428,7 @@ schedule_block (basic_block *target_bb, state_t init_state)
else
last_scheduled_insn = tail;
- VEC_truncate (rtx, scheduled_insns, 0);
+ scheduled_insns.truncate (0);
if (!current_sched_info->queue_must_finish_empty
|| haifa_recovery_bb_recently_added_p)
@@ -6661,7 +6656,7 @@ haifa_sched_init (void)
setup_sched_dump ();
sched_init ();
- scheduled_insns = VEC_alloc (rtx, heap, 0);
+ scheduled_insns.create (0);
if (spec_info != NULL)
{
@@ -6672,19 +6667,20 @@ haifa_sched_init (void)
/* Initialize luids, dependency caches, target and h_i_d for the
whole function. */
{
- bb_vec_t bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
+ bb_vec_t bbs;
+ bbs.create (n_basic_blocks);
basic_block bb;
sched_init_bbs ();
FOR_EACH_BB (bb)
- VEC_quick_push (basic_block, bbs, bb);
+ bbs.quick_push (bb);
sched_init_luids (bbs);
sched_deps_init (true);
sched_extend_target ();
haifa_init_h_i_d (bbs);
- VEC_free (basic_block, heap, bbs);
+ bbs.release ();
}
sched_init_only_bb = haifa_init_only_bb;
@@ -6728,7 +6724,7 @@ haifa_sched_finish (void)
c, nr_be_in_control);
}
- VEC_free (rtx, heap, scheduled_insns);
+ scheduled_insns.release ();
/* Finalize h_i_d, dependency caches, and luids for the whole
function. Target will be finalized in md_global_finish (). */
@@ -7093,7 +7089,7 @@ sched_extend_ready_list (int new_sched_ready_n_insns)
{
i = 0;
sched_ready_n_insns = 0;
- VEC_reserve (rtx, heap, scheduled_insns, new_sched_ready_n_insns);
+ scheduled_insns.reserve (new_sched_ready_n_insns);
}
else
i = sched_ready_n_insns + 1;
@@ -7290,7 +7286,7 @@ add_to_speculative_block (rtx insn)
sd_iterator_next (&sd_it);
}
- priorities_roots = NULL;
+ priorities_roots.create (0);
clear_priorities (insn, &priorities_roots);
while (1)
@@ -7384,7 +7380,7 @@ add_to_speculative_block (rtx insn)
}
calc_priorities (priorities_roots);
- VEC_free (rtx, heap, priorities_roots);
+ priorities_roots.release ();
}
/* Extends and fills with zeros (only the new part) array pointed to by P. */
@@ -7494,8 +7490,8 @@ init_before_recovery (basic_block *before_recovery_ptr)
/* Add new blocks to the root loop. */
if (current_loops != NULL)
{
- add_bb_to_loop (single, VEC_index (loop_p, current_loops->larray, 0));
- add_bb_to_loop (empty, VEC_index (loop_p, current_loops->larray, 0));
+ add_bb_to_loop (single, (*current_loops->larray)[0]);
+ add_bb_to_loop (empty, (*current_loops->larray)[0]);
}
single->count = last->count;
@@ -7881,11 +7877,11 @@ create_check_block_twin (rtx insn, bool mutate_p)
/* Fix priorities. If MUTATE_P is nonzero, this is not necessary,
because it'll be done later in add_to_speculative_block. */
{
- rtx_vec_t priorities_roots = NULL;
+ rtx_vec_t priorities_roots = rtx_vec_t();
clear_priorities (twin, &priorities_roots);
calc_priorities (priorities_roots);
- VEC_free (rtx, heap, priorities_roots);
+ priorities_roots.release ();
}
}
@@ -8153,7 +8149,7 @@ static void
move_block_after_check (rtx jump)
{
basic_block bb, jump_bb, jump_bb_next;
- VEC(edge,gc) *t;
+ vec<edge, va_gc> *t;
bb = BLOCK_FOR_INSN (PREV_INSN (jump));
jump_bb = BLOCK_FOR_INSN (jump);
@@ -8183,7 +8179,7 @@ move_block_after_check (rtx jump)
This functions attaches edge vector pointed to by SUCCSP to
block TO. */
static void
-move_succs (VEC(edge,gc) **succsp, basic_block to)
+move_succs (vec<edge, va_gc> **succsp, basic_block to)
{
edge e;
edge_iterator ei;
@@ -8240,7 +8236,7 @@ clear_priorities (rtx insn, rtx_vec_t *roots_ptr)
}
if (insn_is_root_p)
- VEC_safe_push (rtx, heap, *roots_ptr, insn);
+ roots_ptr->safe_push (insn);
}
/* Recompute priorities of instructions, whose priorities might have been
@@ -8252,7 +8248,7 @@ calc_priorities (rtx_vec_t roots)
int i;
rtx insn;
- FOR_EACH_VEC_ELT (rtx, roots, i, insn)
+ FOR_EACH_VEC_ELT (roots, i, insn)
priority (insn);
}
@@ -8287,7 +8283,7 @@ sched_extend_luids (void)
{
int new_luids_max_uid = get_max_uid () + 1;
- VEC_safe_grow_cleared (int, heap, sched_luids, new_luids_max_uid);
+ sched_luids.safe_grow_cleared (new_luids_max_uid);
}
/* Initialize LUID for INSN. */
@@ -8318,7 +8314,7 @@ sched_init_luids (bb_vec_t bbs)
basic_block bb;
sched_extend_luids ();
- FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
+ FOR_EACH_VEC_ELT (bbs, i, bb)
{
rtx insn;
@@ -8331,7 +8327,7 @@ sched_init_luids (bb_vec_t bbs)
void
sched_finish_luids (void)
{
- VEC_free (int, heap, sched_luids);
+ sched_luids.release ();
sched_max_luid = 1;
}
@@ -8355,13 +8351,11 @@ sched_extend_target (void)
static void
extend_h_i_d (void)
{
- int reserve = (get_max_uid () + 1
- - VEC_length (haifa_insn_data_def, h_i_d));
+ int reserve = (get_max_uid () + 1 - h_i_d.length ());
if (reserve > 0
- && ! VEC_space (haifa_insn_data_def, h_i_d, reserve))
+ && ! h_i_d.space (reserve))
{
- VEC_safe_grow_cleared (haifa_insn_data_def, heap, h_i_d,
- 3 * get_max_uid () / 2);
+ h_i_d.safe_grow_cleared (3 * get_max_uid () / 2);
sched_extend_target ();
}
}
@@ -8390,7 +8384,7 @@ haifa_init_h_i_d (bb_vec_t bbs)
basic_block bb;
extend_h_i_d ();
- FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
+ FOR_EACH_VEC_ELT (bbs, i, bb)
{
rtx insn;
@@ -8407,7 +8401,7 @@ haifa_finish_h_i_d (void)
haifa_insn_data_t data;
struct reg_use_data *use, *next;
- FOR_EACH_VEC_ELT (haifa_insn_data_def, h_i_d, i, data)
+ FOR_EACH_VEC_ELT (h_i_d, i, data)
{
free (data->max_reg_pressure);
free (data->reg_pressure);
@@ -8417,7 +8411,7 @@ haifa_finish_h_i_d (void)
free (use);
}
}
- VEC_free (haifa_insn_data_def, heap, h_i_d);
+ h_i_d.release ();
}
/* Init data for the new insn INSN. */
@@ -8491,7 +8485,7 @@ sched_emit_insn (rtx pat)
current_sched_info->add_remove_insn (insn, 0);
(*current_sched_info->begin_schedule_ready) (insn);
- VEC_safe_push (rtx, heap, scheduled_insns, insn);
+ scheduled_insns.safe_push (insn);
last_scheduled_insn = insn;
return insn;
diff --git a/gcc/hw-doloop.c b/gcc/hw-doloop.c
index cd9b3f28b73..c4681ad7524 100644
--- a/gcc/hw-doloop.c
+++ b/gcc/hw-doloop.c
@@ -58,12 +58,12 @@ dump_hwloops (hwloop_info loops)
loop->depth, REGNO (loop->iter_reg));
fprintf (dump_file, " blocks: [ ");
- for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, b); ix++)
+ for (ix = 0; loop->blocks.iterate (ix, &b); ix++)
fprintf (dump_file, "%d ", b->index);
fprintf (dump_file, "] ");
fprintf (dump_file, " inner loops: [ ");
- for (ix = 0; VEC_iterate (hwloop_info, loop->loops, ix, i); ix++)
+ for (ix = 0; loop->loops.iterate (ix, &i); ix++)
fprintf (dump_file, "%d ", i->loop_no);
fprintf (dump_file, "]\n");
}
@@ -92,7 +92,7 @@ scan_loop (hwloop_info loop)
REGNO (loop->iter_reg)))
loop->iter_reg_used_outside = true;
- for (ix = 0; VEC_iterate (basic_block, loop->blocks, ix, bb); ix++)
+ for (ix = 0; loop->blocks.iterate (ix, &bb); ix++)
{
rtx insn;
edge e;
@@ -211,11 +211,11 @@ add_forwarder_blocks (hwloop_info loop)
fprintf (dump_file,
";; Adding forwarder block %d to loop %d and retrying\n",
e->src->index, loop->loop_no);
- VEC_safe_push (basic_block, heap, loop->blocks, e->src);
+ loop->blocks.safe_push (e->src);
bitmap_set_bit (loop->block_bitmap, e->src->index);
FOR_EACH_EDGE (e2, ei2, e->src->preds)
- VEC_safe_push (edge, gc, loop->incoming, e2);
- VEC_unordered_remove (edge, loop->incoming, ei.index);
+ vec_safe_push (loop->incoming, e2);
+ loop->incoming->unordered_remove (ei.index);
return true;
}
}
@@ -237,12 +237,12 @@ discover_loop (hwloop_info loop, basic_block tail_bb, rtx tail_insn, rtx reg)
bool found_tail;
unsigned dwork = 0;
basic_block bb;
- VEC (basic_block,heap) *works;
+ vec<basic_block> works;
loop->tail = tail_bb;
loop->loop_end = tail_insn;
loop->iter_reg = reg;
- loop->incoming = VEC_alloc (edge, gc, 2);
+ vec_alloc (loop->incoming, 2);
loop->start_label = JUMP_LABEL (tail_insn);
if (EDGE_COUNT (tail_bb->succs) != 2)
@@ -253,11 +253,11 @@ discover_loop (hwloop_info loop, basic_block tail_bb, rtx tail_insn, rtx reg)
loop->head = BRANCH_EDGE (tail_bb)->dest;
loop->successor = FALLTHRU_EDGE (tail_bb)->dest;
- works = VEC_alloc (basic_block, heap, 20);
- VEC_safe_push (basic_block, heap, works, loop->head);
+ works.create (20);
+ works.safe_push (loop->head);
found_tail = false;
- for (dwork = 0; VEC_iterate (basic_block, works, dwork, bb); dwork++)
+ for (dwork = 0; works.iterate (dwork, &bb); dwork++)
{
edge e;
edge_iterator ei;
@@ -277,7 +277,7 @@ discover_loop (hwloop_info loop, basic_block tail_bb, rtx tail_insn, rtx reg)
/* We've not seen this block before. Add it to the loop's
list and then add each successor to the work list. */
- VEC_safe_push (basic_block, heap, loop->blocks, bb);
+ loop->blocks.safe_push (bb);
bitmap_set_bit (loop->block_bitmap, bb->index);
if (bb == tail_bb)
@@ -289,7 +289,7 @@ discover_loop (hwloop_info loop, basic_block tail_bb, rtx tail_insn, rtx reg)
basic_block succ = EDGE_SUCC (bb, ei.index)->dest;
if (REGNO_REG_SET_P (df_get_live_in (succ),
REGNO (loop->iter_reg)))
- VEC_safe_push (basic_block, heap, works, succ);
+ works.safe_push (succ);
}
}
}
@@ -300,7 +300,7 @@ discover_loop (hwloop_info loop, basic_block tail_bb, rtx tail_insn, rtx reg)
/* Find the predecessor, and make sure nothing else jumps into this loop. */
if (!loop->bad)
{
- FOR_EACH_VEC_ELT (basic_block, loop->blocks, dwork, bb)
+ FOR_EACH_VEC_ELT (loop->blocks, dwork, bb)
{
edge e;
edge_iterator ei;
@@ -314,7 +314,7 @@ discover_loop (hwloop_info loop, basic_block tail_bb, rtx tail_insn, rtx reg)
fprintf (dump_file, ";; Loop %d: incoming edge %d -> %d\n",
loop->loop_no, pred->index,
e->dest->index);
- VEC_safe_push (edge, gc, loop->incoming, e);
+ vec_safe_push (loop->incoming, e);
}
}
}
@@ -341,7 +341,7 @@ discover_loop (hwloop_info loop, basic_block tail_bb, rtx tail_insn, rtx reg)
}
}
- VEC_free (basic_block, heap, works);
+ works.release ();
}
/* Analyze the structure of the loops in the current function. Use
@@ -405,7 +405,7 @@ discover_loops (bitmap_obstack *loop_stack, struct hw_doloop_hooks *hooks)
loop->next = loops;
loops = loop;
loop->loop_no = nloops++;
- loop->blocks = VEC_alloc (basic_block, heap, 20);
+ loop->blocks.create (20);
loop->block_bitmap = BITMAP_ALLOC (loop_stack);
if (dump_file)
@@ -437,10 +437,10 @@ discover_loops (bitmap_obstack *loop_stack, struct hw_doloop_hooks *hooks)
continue;
if (!bitmap_intersect_compl_p (other->block_bitmap,
loop->block_bitmap))
- VEC_safe_push (hwloop_info, heap, loop->loops, other);
+ loop->loops.safe_push (other);
else if (!bitmap_intersect_compl_p (loop->block_bitmap,
other->block_bitmap))
- VEC_safe_push (hwloop_info, heap, other->loops, loop);
+ other->loops.safe_push (loop);
else
{
if (dump_file)
@@ -466,8 +466,8 @@ free_loops (hwloop_info loops)
{
hwloop_info loop = loops;
loops = loop->next;
- VEC_free (hwloop_info, heap, loop->loops);
- VEC_free (basic_block, heap, loop->blocks);
+ loop->loops.release ();
+ loop->blocks.release ();
BITMAP_FREE (loop->block_bitmap);
XDELETE (loop);
}
@@ -580,7 +580,7 @@ optimize_loop (hwloop_info loop, struct hw_doloop_hooks *hooks)
a depth-first search here and never visit a loop more than once.
Recursion depth is effectively limited by the number of available
hardware registers. */
- for (ix = 0; VEC_iterate (hwloop_info, loop->loops, ix, inner); ix++)
+ for (ix = 0; loop->loops.iterate (ix, &inner); ix++)
{
optimize_loop (inner, hooks);
diff --git a/gcc/hw-doloop.h b/gcc/hw-doloop.h
index 006b67953b2..59695405c48 100644
--- a/gcc/hw-doloop.h
+++ b/gcc/hw-doloop.h
@@ -20,8 +20,6 @@ along with GCC; see the file COPYING3. If not see
/* We need to keep a vector of loops */
typedef struct hwloop_info_d *hwloop_info;
-DEF_VEC_P (hwloop_info);
-DEF_VEC_ALLOC_P (hwloop_info,heap);
/* Information about a loop we have found (or are in the process of
finding). */
@@ -35,17 +33,17 @@ struct GTY (()) hwloop_info_d
/* Vector of blocks only within the loop, including those within
inner loops. */
- VEC (basic_block, heap) *blocks;
+ vec<basic_block> blocks;
/* Same information in a bitmap. */
bitmap block_bitmap;
/* Vector of inner loops within this loop. Includes loops of every
nesting level. */
- VEC (hwloop_info, heap) *loops;
+ vec<hwloop_info> loops;
/* All edges that jump into the loop. */
- VEC(edge, gc) *incoming;
+ vec<edge, va_gc> *incoming;
/* The ports currently using this infrastructure can typically
handle two cases: all incoming edges have the same destination
diff --git a/gcc/ifcvt.c b/gcc/ifcvt.c
index 2365e74a41a..f9ddfd6b8d3 100644
--- a/gcc/ifcvt.c
+++ b/gcc/ifcvt.c
@@ -44,7 +44,6 @@
#include "df.h"
#include "vec.h"
#include "pointer-set.h"
-#include "vecprim.h"
#include "dbgcnt.h"
#ifndef HAVE_conditional_move
@@ -2695,7 +2694,7 @@ noce_process_if_block (struct noce_if_info *if_info)
static int
check_cond_move_block (basic_block bb,
struct pointer_map_t *vals,
- VEC (rtx, heap) **regs,
+ vec<rtx> *regs,
rtx cond)
{
rtx insn;
@@ -2760,7 +2759,7 @@ check_cond_move_block (basic_block bb,
slot = pointer_map_insert (vals, (void *) dest);
*slot = (void *) src;
- VEC_safe_push (rtx, heap, *regs, dest);
+ regs->safe_push (dest);
}
return TRUE;
@@ -2851,8 +2850,8 @@ cond_move_process_if_block (struct noce_if_info *if_info)
int c;
struct pointer_map_t *then_vals;
struct pointer_map_t *else_vals;
- VEC (rtx, heap) *then_regs = NULL;
- VEC (rtx, heap) *else_regs = NULL;
+ vec<rtx> then_regs = vec<rtx>();
+ vec<rtx> else_regs = vec<rtx>();
unsigned int i;
int success_p = FALSE;
@@ -2874,7 +2873,7 @@ cond_move_process_if_block (struct noce_if_info *if_info)
source register does not change after the assignment. Also count
the number of registers set in only one of the blocks. */
c = 0;
- FOR_EACH_VEC_ELT (rtx, then_regs, i, reg)
+ FOR_EACH_VEC_ELT (then_regs, i, reg)
{
void **then_slot = pointer_map_contains (then_vals, reg);
void **else_slot = pointer_map_contains (else_vals, reg);
@@ -2893,7 +2892,7 @@ cond_move_process_if_block (struct noce_if_info *if_info)
}
/* Finish off c for MAX_CONDITIONAL_EXECUTE. */
- FOR_EACH_VEC_ELT (rtx, else_regs, i, reg)
+ FOR_EACH_VEC_ELT (else_regs, i, reg)
{
gcc_checking_assert (pointer_map_contains (else_vals, reg));
if (!pointer_map_contains (then_vals, reg))
@@ -2957,8 +2956,8 @@ cond_move_process_if_block (struct noce_if_info *if_info)
done:
pointer_map_destroy (then_vals);
pointer_map_destroy (else_vals);
- VEC_free (rtx, heap, then_regs);
- VEC_free (rtx, heap, else_regs);
+ then_regs.release ();
+ else_regs.release ();
return success_p;
}
diff --git a/gcc/insn-addr.h b/gcc/insn-addr.h
index 0cf292cb10d..1ef9a93749a 100644
--- a/gcc/insn-addr.h
+++ b/gcc/insn-addr.h
@@ -20,24 +20,22 @@ along with GCC; see the file COPYING3. If not see
#ifndef GCC_INSN_ADDR_H
#define GCC_INSN_ADDR_H
-#include "vecprim.h"
-
-extern VEC(int,heap) *insn_addresses_;
+extern vec<int> insn_addresses_;
extern int insn_current_address;
-#define INSN_ADDRESSES(id) (*&(VEC_address (int, insn_addresses_) [id]))
+#define INSN_ADDRESSES(id) (insn_addresses_[id])
#define INSN_ADDRESSES_ALLOC(size) \
do \
{ \
- insn_addresses_ = VEC_alloc (int, heap, size); \
- VEC_safe_grow (int, heap, insn_addresses_, size); \
- memset (VEC_address (int, insn_addresses_), \
+ insn_addresses_.create (size); \
+ insn_addresses_.safe_grow_cleared (size); \
+ memset (insn_addresses_.address (), \
0, sizeof (int) * size); \
} \
while (0)
-#define INSN_ADDRESSES_FREE() (VEC_free (int, heap, insn_addresses_))
-#define INSN_ADDRESSES_SET_P() (insn_addresses_ != 0)
-#define INSN_ADDRESSES_SIZE() (VEC_length (int, insn_addresses_))
+#define INSN_ADDRESSES_FREE() (insn_addresses_.release ())
+#define INSN_ADDRESSES_SET_P() (insn_addresses_.exists ())
+#define INSN_ADDRESSES_SIZE() (insn_addresses_.length ())
static inline void
insn_addresses_new (rtx insn, int insn_addr)
@@ -50,8 +48,8 @@ insn_addresses_new (rtx insn, int insn_addr)
if (size <= insn_uid)
{
int *p;
- VEC_safe_grow (int, heap, insn_addresses_, insn_uid + 1);
- p = VEC_address (int, insn_addresses_);
+ insn_addresses_.safe_grow (insn_uid + 1);
+ p = insn_addresses_.address ();
memset (&p[size],
0, sizeof (int) * (insn_uid + 1 - size));
}
diff --git a/gcc/ipa-cp.c b/gcc/ipa-cp.c
index 82b0b53ce25..57382b5a338 100644
--- a/gcc/ipa-cp.c
+++ b/gcc/ipa-cp.c
@@ -830,7 +830,7 @@ ipa_value_from_jfunc (struct ipa_node_params *info, struct ipa_jump_func *jfunc)
idx = ipa_get_jf_ancestor_formal_id (jfunc);
if (info->ipcp_orig_node)
- input = VEC_index (tree, info->known_vals, idx);
+ input = info->known_vals[idx];
else
{
struct ipcp_lattice *lat;
@@ -1390,7 +1390,7 @@ propagate_aggs_accross_jump_function (struct cgraph_edge *cs,
if (set_check_aggs_by_ref (dest_plats, jfunc->agg.by_ref))
return true;
- FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jfunc->agg.items, i, item)
+ FOR_EACH_VEC_ELT (*jfunc->agg.items, i, item)
{
HOST_WIDE_INT val_size;
@@ -1483,9 +1483,9 @@ propagate_constants_accross_call (struct cgraph_edge *cs)
tree
ipa_get_indirect_edge_target (struct cgraph_edge *ie,
- VEC (tree, heap) *known_vals,
- VEC (tree, heap) *known_binfos,
- VEC (ipa_agg_jump_function_p, heap) *known_aggs)
+ vec<tree> known_vals,
+ vec<tree> known_binfos,
+ vec<ipa_agg_jump_function_p> known_aggs)
{
int param_index = ie->indirect_info->param_index;
HOST_WIDE_INT token, anc_offset;
@@ -1501,12 +1501,11 @@ ipa_get_indirect_edge_target (struct cgraph_edge *ie,
if (ie->indirect_info->agg_contents)
{
- if (VEC_length (ipa_agg_jump_function_p, known_aggs)
+ if (known_aggs.length ()
> (unsigned int) param_index)
{
struct ipa_agg_jump_function *agg;
- agg = VEC_index (ipa_agg_jump_function_p, known_aggs,
- param_index);
+ agg = known_aggs[param_index];
t = ipa_find_agg_cst_for_param (agg, ie->indirect_info->offset,
ie->indirect_info->by_ref);
}
@@ -1514,8 +1513,8 @@ ipa_get_indirect_edge_target (struct cgraph_edge *ie,
t = NULL;
}
else
- t = (VEC_length (tree, known_vals) > (unsigned int) param_index
- ? VEC_index (tree, known_vals, param_index) : NULL);
+ t = (known_vals.length () > (unsigned int) param_index
+ ? known_vals[param_index] : NULL);
if (t &&
TREE_CODE (t) == ADDR_EXPR
@@ -1530,10 +1529,9 @@ ipa_get_indirect_edge_target (struct cgraph_edge *ie,
anc_offset = ie->indirect_info->offset;
otr_type = ie->indirect_info->otr_type;
- t = VEC_index (tree, known_vals, param_index);
- if (!t && known_binfos
- && VEC_length (tree, known_binfos) > (unsigned int) param_index)
- t = VEC_index (tree, known_binfos, param_index);
+ t = known_vals[param_index];
+ if (!t && known_binfos.length () > (unsigned int) param_index)
+ t = known_binfos[param_index];
if (!t)
return NULL_TREE;
@@ -1564,8 +1562,8 @@ ipa_get_indirect_edge_target (struct cgraph_edge *ie,
static int
devirtualization_time_bonus (struct cgraph_node *node,
- VEC (tree, heap) *known_csts,
- VEC (tree, heap) *known_binfos)
+ vec<tree> known_csts,
+ vec<tree> known_binfos)
{
struct cgraph_edge *ie;
int res = 0;
@@ -1577,7 +1575,7 @@ devirtualization_time_bonus (struct cgraph_node *node,
tree target;
target = ipa_get_indirect_edge_target (ie, known_csts, known_binfos,
- NULL);
+ vec<ipa_agg_jump_function_p>());
if (!target)
continue;
@@ -1664,10 +1662,10 @@ good_cloning_opportunity_p (struct cgraph_node *node, int time_benefit,
/* Return all context independent values from aggregate lattices in PLATS in a
vector. Return NULL if there are none. */
-static VEC (ipa_agg_jf_item_t, gc) *
+static vec<ipa_agg_jf_item_t, va_gc> *
context_independent_aggregate_values (struct ipcp_param_lattices *plats)
{
- VEC (ipa_agg_jf_item_t, gc) *res = NULL;
+ vec<ipa_agg_jf_item_t, va_gc> *res = NULL;
if (plats->aggs_bottom
|| plats->aggs_contain_variable
@@ -1682,7 +1680,7 @@ context_independent_aggregate_values (struct ipcp_param_lattices *plats)
struct ipa_agg_jf_item item;
item.offset = aglat->offset;
item.value = aglat->values->value;
- VEC_safe_push (ipa_agg_jf_item_t, gc, res, item);
+ vec_safe_push (res, item);
}
return res;
}
@@ -1694,22 +1692,22 @@ context_independent_aggregate_values (struct ipcp_param_lattices *plats)
static bool
gather_context_independent_values (struct ipa_node_params *info,
- VEC (tree, heap) **known_csts,
- VEC (tree, heap) **known_binfos,
- VEC (ipa_agg_jump_function_t, heap) **known_aggs,
+ vec<tree> *known_csts,
+ vec<tree> *known_binfos,
+ vec<ipa_agg_jump_function_t> *known_aggs,
int *removable_params_cost)
{
int i, count = ipa_get_param_count (info);
bool ret = false;
- *known_csts = NULL;
- *known_binfos = NULL;
- VEC_safe_grow_cleared (tree, heap, *known_csts, count);
- VEC_safe_grow_cleared (tree, heap, *known_binfos, count);
+ known_csts->create (0);
+ known_binfos->create (0);
+ known_csts->safe_grow_cleared (count);
+ known_binfos->safe_grow_cleared (count);
if (known_aggs)
{
- *known_aggs = NULL;
- VEC_safe_grow_cleared (ipa_agg_jump_function_t, heap, *known_aggs, count);
+ known_aggs->create (0);
+ known_aggs->safe_grow_cleared (count);
}
if (removable_params_cost)
@@ -1725,7 +1723,7 @@ gather_context_independent_values (struct ipa_node_params *info,
struct ipcp_value *val = lat->values;
if (TREE_CODE (val->value) != TREE_BINFO)
{
- VEC_replace (tree, *known_csts, i, val->value);
+ (*known_csts)[i] = val->value;
if (removable_params_cost)
*removable_params_cost
+= estimate_move_cost (TREE_TYPE (val->value));
@@ -1733,7 +1731,7 @@ gather_context_independent_values (struct ipa_node_params *info,
}
else if (plats->virt_call)
{
- VEC_replace (tree, *known_binfos, i, val->value);
+ (*known_binfos)[i] = val->value;
ret = true;
}
else if (removable_params_cost
@@ -1748,11 +1746,11 @@ gather_context_independent_values (struct ipa_node_params *info,
if (known_aggs)
{
- VEC (ipa_agg_jf_item_t, gc) *agg_items;
+ vec<ipa_agg_jf_item_t, va_gc> *agg_items;
struct ipa_agg_jump_function *ajf;
agg_items = context_independent_aggregate_values (plats);
- ajf = &VEC_index (ipa_agg_jump_function_t, *known_aggs, i);
+ ajf = &(*known_aggs)[i];
ajf->items = agg_items;
ajf->by_ref = plats->aggs_by_ref;
ret |= agg_items != NULL;
@@ -1769,17 +1767,16 @@ gather_context_independent_values (struct ipa_node_params *info,
I'd like to discuss how to change it first and this demonstrates the
issue. */
-static VEC (ipa_agg_jump_function_p, heap) *
-agg_jmp_p_vec_for_t_vec (VEC (ipa_agg_jump_function_t, heap) *known_aggs)
+static vec<ipa_agg_jump_function_p>
+agg_jmp_p_vec_for_t_vec (vec<ipa_agg_jump_function_t> known_aggs)
{
- VEC (ipa_agg_jump_function_p, heap) *ret;
+ vec<ipa_agg_jump_function_p> ret;
struct ipa_agg_jump_function *ajf;
int i;
- ret = VEC_alloc (ipa_agg_jump_function_p, heap,
- VEC_length (ipa_agg_jump_function_t, known_aggs));
- FOR_EACH_VEC_ELT (ipa_agg_jump_function_t, known_aggs, i, ajf)
- VEC_quick_push (ipa_agg_jump_function_p, ret, ajf);
+ ret.create (known_aggs.length ());
+ FOR_EACH_VEC_ELT (known_aggs, i, ajf)
+ ret.quick_push (ajf);
return ret;
}
@@ -1791,9 +1788,9 @@ estimate_local_effects (struct cgraph_node *node)
{
struct ipa_node_params *info = IPA_NODE_REF (node);
int i, count = ipa_get_param_count (info);
- VEC (tree, heap) *known_csts, *known_binfos;
- VEC (ipa_agg_jump_function_t, heap) *known_aggs;
- VEC (ipa_agg_jump_function_p, heap) *known_aggs_ptrs;
+ vec<tree> known_csts, known_binfos;
+ vec<ipa_agg_jump_function_t> known_aggs;
+ vec<ipa_agg_jump_function_p> known_aggs_ptrs;
bool always_const;
int base_time = inline_summary (node)->time;
int removable_params_cost;
@@ -1868,8 +1865,8 @@ estimate_local_effects (struct cgraph_node *node)
if (lat->bottom
|| !lat->values
- || VEC_index (tree, known_csts, i)
- || VEC_index (tree, known_binfos, i))
+ || known_csts[i]
+ || known_binfos[i])
continue;
for (val = lat->values; val; val = val->next)
@@ -1879,14 +1876,14 @@ estimate_local_effects (struct cgraph_node *node)
if (TREE_CODE (val->value) != TREE_BINFO)
{
- VEC_replace (tree, known_csts, i, val->value);
- VEC_replace (tree, known_binfos, i, NULL_TREE);
+ known_csts[i] = val->value;
+ known_binfos[i] = NULL_TREE;
emc = estimate_move_cost (TREE_TYPE (val->value));
}
else if (plats->virt_call)
{
- VEC_replace (tree, known_csts, i, NULL_TREE);
- VEC_replace (tree, known_binfos, i, val->value);
+ known_csts[i] = NULL_TREE;
+ known_binfos[i] = val->value;
emc = 0;
}
else
@@ -1921,8 +1918,8 @@ estimate_local_effects (struct cgraph_node *node)
val->local_time_benefit = time_benefit;
val->local_size_cost = size;
}
- VEC_replace (tree, known_binfos, i, NULL_TREE);
- VEC_replace (tree, known_csts, i, NULL_TREE);
+ known_binfos[i] = NULL_TREE;
+ known_csts[i] = NULL_TREE;
}
for (i = 0; i < count ; i++)
@@ -1934,7 +1931,7 @@ estimate_local_effects (struct cgraph_node *node)
if (plats->aggs_bottom || !plats->aggs)
continue;
- ajf = &VEC_index (ipa_agg_jump_function_t, known_aggs, i);
+ ajf = &known_aggs[i];
for (aglat = plats->aggs; aglat; aglat = aglat->next)
{
struct ipcp_value *val;
@@ -1952,7 +1949,7 @@ estimate_local_effects (struct cgraph_node *node)
item.offset = aglat->offset;
item.value = val->value;
- VEC_safe_push (ipa_agg_jf_item_t, gc, ajf->items, item);
+ vec_safe_push (ajf->items, item);
estimate_ipcp_clone_size_and_time (node, known_csts, known_binfos,
known_aggs_ptrs, &size, &time,
@@ -1978,22 +1975,18 @@ estimate_local_effects (struct cgraph_node *node)
val->local_time_benefit = time_benefit;
val->local_size_cost = size;
- VEC_pop (ipa_agg_jf_item_t, ajf->items);
+ ajf->items->pop ();
}
}
}
for (i = 0; i < count ; i++)
- {
- VEC_free (ipa_agg_jf_item_t, gc,
- VEC_index (ipa_agg_jump_function_t, known_aggs, i).items);
- VEC_index (ipa_agg_jump_function_t, known_aggs, i).items = NULL;
- }
+ vec_free (known_aggs[i].items);
- VEC_free (tree, heap, known_csts);
- VEC_free (tree, heap, known_binfos);
- VEC_free (ipa_agg_jump_function_t, heap, known_aggs);
- VEC_free (ipa_agg_jump_function_p, heap, known_aggs_ptrs);
+ known_csts.release ();
+ known_binfos.release ();
+ known_aggs.release ();
+ known_aggs_ptrs.release ();
}
@@ -2245,7 +2238,7 @@ ipcp_propagate_stage (struct topo_info *topo)
static void
ipcp_discover_new_direct_edges (struct cgraph_node *node,
- VEC (tree, heap) *known_vals)
+ vec<tree> known_vals)
{
struct cgraph_edge *ie, *next_ie;
bool found = false;
@@ -2255,7 +2248,9 @@ ipcp_discover_new_direct_edges (struct cgraph_node *node,
tree target;
next_ie = ie->next_callee;
- target = ipa_get_indirect_edge_target (ie, known_vals, NULL, NULL);
+ target = ipa_get_indirect_edge_target (ie, known_vals,
+ vec<tree>(),
+ vec<ipa_agg_jump_function_p>());
if (target)
{
ipa_make_edge_direct_to_target (ie, target);
@@ -2270,15 +2265,14 @@ ipcp_discover_new_direct_edges (struct cgraph_node *node,
/* Vector of pointers which for linked lists of clones of an original crgaph
edge. */
-static VEC (cgraph_edge_p, heap) *next_edge_clone;
+static vec<cgraph_edge_p> next_edge_clone;
static inline void
grow_next_edge_clone_vector (void)
{
- if (VEC_length (cgraph_edge_p, next_edge_clone)
+ if (next_edge_clone.length ()
<= (unsigned) cgraph_edge_max_uid)
- VEC_safe_grow_cleared (cgraph_edge_p, heap, next_edge_clone,
- cgraph_edge_max_uid + 1);
+ next_edge_clone.safe_grow_cleared (cgraph_edge_max_uid + 1);
}
/* Edge duplication hook to grow the appropriate linked list in
@@ -2289,9 +2283,8 @@ ipcp_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
__attribute__((unused)) void *data)
{
grow_next_edge_clone_vector ();
- VEC_replace (cgraph_edge_p, next_edge_clone, dst->uid,
- VEC_index (cgraph_edge_p, next_edge_clone, src->uid));
- VEC_replace (cgraph_edge_p, next_edge_clone, src->uid, dst);
+ next_edge_clone[dst->uid] = next_edge_clone[src->uid];
+ next_edge_clone[src->uid] = dst;
}
/* See if NODE is a clone with a known aggregate value at a given OFFSET of a
@@ -2332,7 +2325,7 @@ cgraph_edge_brings_value_p (struct cgraph_edge *cs,
{
tree t;
if (src->offset == -1)
- t = VEC_index (tree, caller_info->known_vals, src->index);
+ t = caller_info->known_vals[src->index];
else
t = get_clone_agg_value (cs->caller, src->offset, src->index);
return (t != NULL_TREE
@@ -2366,7 +2359,7 @@ cgraph_edge_brings_value_p (struct cgraph_edge *cs,
static inline struct cgraph_edge *
get_next_cgraph_edge_clone (struct cgraph_edge *cs)
{
- return VEC_index (cgraph_edge_p, next_edge_clone, cs->uid);
+ return next_edge_clone[cs->uid];
}
/* Given VAL, iterate over all its sources and if they still hold, add their
@@ -2407,20 +2400,20 @@ get_info_about_necessary_edges (struct ipcp_value *val, int *freq_sum,
/* Return a vector of incoming edges that do bring value VAL. It is assumed
their number is known and equal to CALLER_COUNT. */
-static VEC (cgraph_edge_p,heap) *
+static vec<cgraph_edge_p>
gather_edges_for_value (struct ipcp_value *val, int caller_count)
{
struct ipcp_value_source *src;
- VEC (cgraph_edge_p,heap) *ret;
+ vec<cgraph_edge_p> ret;
- ret = VEC_alloc (cgraph_edge_p, heap, caller_count);
+ ret.create (caller_count);
for (src = val->sources; src; src = src->next)
{
struct cgraph_edge *cs = src->cs;
while (cs)
{
if (cgraph_edge_brings_value_p (cs, src))
- VEC_quick_push (cgraph_edge_p, ret, cs);
+ ret.quick_push (cs);
cs = get_next_cgraph_edge_clone (cs);
}
}
@@ -2604,12 +2597,12 @@ update_specialized_profile (struct cgraph_node *new_node,
static struct cgraph_node *
create_specialized_node (struct cgraph_node *node,
- VEC (tree, heap) *known_vals,
+ vec<tree> known_vals,
struct ipa_agg_replacement_value *aggvals,
- VEC (cgraph_edge_p,heap) *callers)
+ vec<cgraph_edge_p> callers)
{
struct ipa_node_params *new_info, *info = IPA_NODE_REF (node);
- VEC (ipa_replace_map_p,gc)* replace_trees = NULL;
+ vec<ipa_replace_map_p, va_gc> *replace_trees = NULL;
struct cgraph_node *new_node;
int i, count = ipa_get_param_count (info);
bitmap args_to_skip;
@@ -2621,7 +2614,7 @@ create_specialized_node (struct cgraph_node *node,
args_to_skip = BITMAP_GGC_ALLOC ();
for (i = 0; i < count; i++)
{
- tree t = VEC_index (tree, known_vals, i);
+ tree t = known_vals[i];
if ((t && TREE_CODE (t) != TREE_BINFO)
|| !ipa_is_param_used (info, i))
@@ -2637,14 +2630,14 @@ create_specialized_node (struct cgraph_node *node,
for (i = 0; i < count ; i++)
{
- tree t = VEC_index (tree, known_vals, i);
+ tree t = known_vals[i];
if (t && TREE_CODE (t) != TREE_BINFO)
{
struct ipa_replace_map *replace_map;
replace_map = get_replacement_map (t, ipa_get_param (info, i));
if (replace_map)
- VEC_safe_push (ipa_replace_map_p, gc, replace_trees, replace_map);
+ vec_safe_push (replace_trees, replace_map);
}
}
@@ -2658,9 +2651,8 @@ create_specialized_node (struct cgraph_node *node,
if (aggvals)
ipa_dump_agg_replacement_values (dump_file, aggvals);
}
- gcc_checking_assert (ipa_node_params_vector
- && (VEC_length (ipa_node_params_t,
- ipa_node_params_vector)
+ gcc_checking_assert (ipa_node_params_vector.exists ()
+ && (ipa_node_params_vector.length ()
> (unsigned) cgraph_max_uid));
update_profiling_info (node, new_node);
new_info = IPA_NODE_REF (new_node);
@@ -2669,7 +2661,7 @@ create_specialized_node (struct cgraph_node *node,
ipcp_discover_new_direct_edges (new_node, known_vals);
- VEC_free (cgraph_edge_p, heap, callers);
+ callers.release ();
return new_node;
}
@@ -2679,8 +2671,8 @@ create_specialized_node (struct cgraph_node *node,
static void
find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
- VEC (tree, heap) *known_vals,
- VEC (cgraph_edge_p,heap) *callers)
+ vec<tree> known_vals,
+ vec<cgraph_edge_p> callers)
{
struct ipa_node_params *info = IPA_NODE_REF (node);
int i, count = ipa_get_param_count (info);
@@ -2691,11 +2683,10 @@ find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
tree newval = NULL_TREE;
int j;
- if (ipa_get_scalar_lat (info, i)->bottom
- || VEC_index (tree, known_vals, i))
+ if (ipa_get_scalar_lat (info, i)->bottom || known_vals[i])
continue;
- FOR_EACH_VEC_ELT (cgraph_edge_p, callers, j, cs)
+ FOR_EACH_VEC_ELT (callers, j, cs)
{
struct ipa_jump_func *jump_func;
tree t;
@@ -2729,7 +2720,7 @@ find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
fprintf (dump_file, "\n");
}
- VEC_replace (tree, known_vals, i, newval);
+ known_vals[i] = newval;
}
}
}
@@ -2737,13 +2728,13 @@ find_more_scalar_values_for_callers_subset (struct cgraph_node *node,
/* Go through PLATS and create a vector of values consisting of values and
offsets (minus OFFSET) of lattices that contain only a single value. */
-static VEC (ipa_agg_jf_item_t, heap) *
+static vec<ipa_agg_jf_item_t>
copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
{
- VEC (ipa_agg_jf_item_t, heap) *res = NULL;
+ vec<ipa_agg_jf_item_t> res = vec<ipa_agg_jf_item_t>();
if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
- return NULL;
+ return vec<ipa_agg_jf_item>();
for (struct ipcp_agg_lattice *aglat = plats->aggs; aglat; aglat = aglat->next)
if (ipa_lat_is_single_const (aglat))
@@ -2751,7 +2742,7 @@ copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
struct ipa_agg_jf_item ti;
ti.offset = aglat->offset - offset;
ti.value = aglat->values->value;
- VEC_safe_push (ipa_agg_jf_item_t, heap, res, ti);
+ res.safe_push (ti);
}
return res;
}
@@ -2761,7 +2752,7 @@ copy_plats_to_inter (struct ipcp_param_lattices *plats, HOST_WIDE_INT offset)
static void
intersect_with_plats (struct ipcp_param_lattices *plats,
- VEC (ipa_agg_jf_item_t, heap) **inter,
+ vec<ipa_agg_jf_item_t> *inter,
HOST_WIDE_INT offset)
{
struct ipcp_agg_lattice *aglat;
@@ -2770,13 +2761,12 @@ intersect_with_plats (struct ipcp_param_lattices *plats,
if (!plats->aggs || plats->aggs_contain_variable || plats->aggs_bottom)
{
- VEC_free (ipa_agg_jf_item_t, heap, *inter);
- *inter = NULL;
+ inter->release ();
return;
}
aglat = plats->aggs;
- FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, *inter, k, item)
+ FOR_EACH_VEC_ELT (*inter, k, item)
{
bool found = false;
if (!item->value)
@@ -2802,11 +2792,11 @@ intersect_with_plats (struct ipcp_param_lattices *plats,
/* Copy agggregate replacement values of NODE (which is an IPA-CP clone) to the
vector result while subtracting OFFSET from the individual value offsets. */
-static VEC (ipa_agg_jf_item_t, heap) *
+static vec<ipa_agg_jf_item_t>
agg_replacements_to_vector (struct cgraph_node *node, HOST_WIDE_INT offset)
{
struct ipa_agg_replacement_value *av;
- VEC (ipa_agg_jf_item_t, heap) *res = NULL;
+ vec<ipa_agg_jf_item_t> res = vec<ipa_agg_jf_item_t>();
for (av = ipa_get_agg_replacements_for_node (node); av; av = av->next)
{
@@ -2814,7 +2804,7 @@ agg_replacements_to_vector (struct cgraph_node *node, HOST_WIDE_INT offset)
gcc_checking_assert (av->value);
item.offset = av->offset - offset;
item.value = av->value;
- VEC_safe_push (ipa_agg_jf_item_t, heap, res, item);
+ res.safe_push (item);
}
return res;
@@ -2826,7 +2816,7 @@ agg_replacements_to_vector (struct cgraph_node *node, HOST_WIDE_INT offset)
static void
intersect_with_agg_replacements (struct cgraph_node *node, int index,
- VEC (ipa_agg_jf_item_t, heap) **inter,
+ vec<ipa_agg_jf_item_t> *inter,
HOST_WIDE_INT offset)
{
struct ipa_agg_replacement_value *srcvals;
@@ -2836,12 +2826,11 @@ intersect_with_agg_replacements (struct cgraph_node *node, int index,
srcvals = ipa_get_agg_replacements_for_node (node);
if (!srcvals)
{
- VEC_free (ipa_agg_jf_item_t, heap, *inter);
- *inter = NULL;
+ inter->release ();
return;
}
- FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, *inter, i, item)
+ FOR_EACH_VEC_ELT (*inter, i, item)
{
struct ipa_agg_replacement_value *av;
bool found = false;
@@ -2868,14 +2857,14 @@ intersect_with_agg_replacements (struct cgraph_node *node, int index,
static struct ipa_agg_replacement_value *
find_aggregate_values_for_callers_subset (struct cgraph_node *node,
- VEC (cgraph_edge_p,heap) *callers)
+ vec<cgraph_edge_p> callers)
{
struct ipa_node_params *info = IPA_NODE_REF (node);
struct ipa_agg_replacement_value *res = NULL;
struct cgraph_edge *cs;
int i, j, count = ipa_get_param_count (info);
- FOR_EACH_VEC_ELT (cgraph_edge_p, callers, j, cs)
+ FOR_EACH_VEC_ELT (callers, j, cs)
{
int c = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
if (c < count)
@@ -2885,7 +2874,7 @@ find_aggregate_values_for_callers_subset (struct cgraph_node *node,
for (i = 0; i < count ; i++)
{
struct cgraph_edge *cs;
- VEC (ipa_agg_jf_item_t, heap) *inter = NULL;
+ vec<ipa_agg_jf_item_t> inter = vec<ipa_agg_jf_item_t>();
struct ipa_agg_jf_item *item;
int j;
@@ -2894,7 +2883,7 @@ find_aggregate_values_for_callers_subset (struct cgraph_node *node,
if (ipa_get_parm_lattices (info, i)->aggs_bottom)
continue;
- FOR_EACH_VEC_ELT (cgraph_edge_p, callers, j, cs)
+ FOR_EACH_VEC_ELT (callers, j, cs)
{
struct ipa_jump_func *jfunc;
jfunc = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
@@ -2912,7 +2901,7 @@ find_aggregate_values_for_callers_subset (struct cgraph_node *node,
src_idx);
if (agg_pass_through_permissible_p (orig_plats, jfunc))
{
- if (!inter)
+ if (!inter.exists ())
inter = agg_replacements_to_vector (cs->caller, 0);
else
intersect_with_agg_replacements (cs->caller, src_idx,
@@ -2928,7 +2917,7 @@ find_aggregate_values_for_callers_subset (struct cgraph_node *node,
/* Currently we do not produce clobber aggregate jump
functions, adjust when we do. */
gcc_checking_assert (!jfunc->agg.items);
- if (!inter)
+ if (!inter.exists ())
inter = copy_plats_to_inter (src_plats, 0);
else
intersect_with_plats (src_plats, &inter, 0);
@@ -2945,7 +2934,7 @@ find_aggregate_values_for_callers_subset (struct cgraph_node *node,
if (info->ipcp_orig_node)
{
- if (!inter)
+ if (!inter.exists ())
inter = agg_replacements_to_vector (cs->caller, delta);
else
intersect_with_agg_replacements (cs->caller, i, &inter,
@@ -2957,7 +2946,7 @@ find_aggregate_values_for_callers_subset (struct cgraph_node *node,
/* Currently we do not produce clobber aggregate jump
functions, adjust when we do. */
gcc_checking_assert (!src_plats->aggs || !jfunc->agg.items);
- if (!inter)
+ if (!inter.exists ())
inter = copy_plats_to_inter (src_plats, delta);
else
intersect_with_plats (src_plats, &inter, delta);
@@ -2967,10 +2956,11 @@ find_aggregate_values_for_callers_subset (struct cgraph_node *node,
{
int k;
- if (!inter)
- inter = VEC_copy (ipa_agg_jf_item, heap, jfunc->agg.items);
+ if (!inter.exists ())
+ for (unsigned i = 0; i < jfunc->agg.items->length (); i++)
+ inter.safe_push ((*jfunc->agg.items)[i]);
else
- FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, inter, k, item)
+ FOR_EACH_VEC_ELT (inter, k, item)
{
int l = 0;
bool found = false;;
@@ -2978,12 +2968,10 @@ find_aggregate_values_for_callers_subset (struct cgraph_node *node,
if (!item->value)
continue;
- while ((unsigned) l < VEC_length (ipa_agg_jf_item_t,
- jfunc->agg.items))
+ while ((unsigned) l < jfunc->agg.items->length ())
{
struct ipa_agg_jf_item *ti;
- ti = &VEC_index (ipa_agg_jf_item_t,
- jfunc->agg.items, l);
+ ti = &(*jfunc->agg.items)[l];
if (ti->offset > item->offset)
break;
if (ti->offset == item->offset)
@@ -3003,11 +2991,11 @@ find_aggregate_values_for_callers_subset (struct cgraph_node *node,
else
goto next_param;
- if (!inter)
+ if (!inter.exists ())
goto next_param;
}
- FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, inter, j, item)
+ FOR_EACH_VEC_ELT (inter, j, item)
{
struct ipa_agg_replacement_value *v;
@@ -3023,8 +3011,8 @@ find_aggregate_values_for_callers_subset (struct cgraph_node *node,
}
next_param:
- if (inter)
- VEC_free (ipa_agg_jf_item, heap, inter);
+ if (inter.exists ())
+ inter.release ();
}
return res;
}
@@ -3032,16 +3020,15 @@ find_aggregate_values_for_callers_subset (struct cgraph_node *node,
/* Turn KNOWN_AGGS into a list of aggreate replacement values. */
static struct ipa_agg_replacement_value *
-known_aggs_to_agg_replacement_list (VEC (ipa_agg_jump_function_t,
- heap) *known_aggs)
+known_aggs_to_agg_replacement_list (vec<ipa_agg_jump_function_t> known_aggs)
{
struct ipa_agg_replacement_value *res = NULL;
struct ipa_agg_jump_function *aggjf;
struct ipa_agg_jf_item *item;
int i, j;
- FOR_EACH_VEC_ELT (ipa_agg_jump_function_t, known_aggs, i, aggjf)
- FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, aggjf->items, j, item)
+ FOR_EACH_VEC_ELT (known_aggs, i, aggjf)
+ FOR_EACH_VEC_SAFE_ELT (aggjf->items, j, item)
{
struct ipa_agg_replacement_value *v;
v = ggc_alloc_ipa_agg_replacement_value ();
@@ -3074,7 +3061,7 @@ cgraph_edge_brings_all_scalars_for_node (struct cgraph_edge *cs,
struct ipa_jump_func *jump_func;
tree val, t;
- val = VEC_index (tree, dest_info->known_vals, i);
+ val = dest_info->known_vals[i];
if (!val)
continue;
@@ -3180,15 +3167,15 @@ perhaps_add_new_callers (struct cgraph_node *node, struct ipcp_value *val)
/* Copy KNOWN_BINFOS to KNOWN_VALS. */
static void
-move_binfos_to_values (VEC (tree, heap) *known_vals,
- VEC (tree, heap) *known_binfos)
+move_binfos_to_values (vec<tree> known_vals,
+ vec<tree> known_binfos)
{
tree t;
int i;
- for (i = 0; VEC_iterate (tree, known_binfos, i, t); i++)
+ for (i = 0; known_binfos.iterate (i, &t); i++)
if (t)
- VEC_replace (tree, known_vals, i, t);
+ known_vals[i] = t;
}
/* Return true if there is a replacement equivalent to VALUE, INDEX and OFFSET
@@ -3216,14 +3203,14 @@ ipcp_val_in_agg_replacements_p (struct ipa_agg_replacement_value *aggvals,
static bool
decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
- struct ipcp_value *val, VEC (tree, heap) *known_csts,
- VEC (tree, heap) *known_binfos)
+ struct ipcp_value *val, vec<tree> known_csts,
+ vec<tree> known_binfos)
{
struct ipa_agg_replacement_value *aggvals;
int freq_sum, caller_count;
gcov_type count_sum;
- VEC (cgraph_edge_p, heap) *callers;
- VEC (tree, heap) *kv;
+ vec<cgraph_edge_p> callers;
+ vec<tree> kv;
if (val->spec_node)
{
@@ -3270,10 +3257,10 @@ decide_about_value (struct cgraph_node *node, int index, HOST_WIDE_INT offset,
cgraph_node_name (node), node->uid);
callers = gather_edges_for_value (val, caller_count);
- kv = VEC_copy (tree, heap, known_csts);
+ kv = known_csts.copy ();
move_binfos_to_values (kv, known_binfos);
if (offset == -1)
- VEC_replace (tree, kv, index, val->value);
+ kv[index] = val->value;
find_more_scalar_values_for_callers_subset (node, kv, callers);
aggvals = find_aggregate_values_for_callers_subset (node, callers);
gcc_checking_assert (offset == -1
@@ -3295,8 +3282,8 @@ decide_whether_version_node (struct cgraph_node *node)
{
struct ipa_node_params *info = IPA_NODE_REF (node);
int i, count = ipa_get_param_count (info);
- VEC (tree, heap) *known_csts, *known_binfos;
- VEC (ipa_agg_jump_function_t, heap) *known_aggs = NULL;
+ vec<tree> known_csts, known_binfos;
+ vec<ipa_agg_jump_function_t> known_aggs = vec<ipa_agg_jump_function_t>();
bool ret = false;
if (count == 0)
@@ -3317,8 +3304,8 @@ decide_whether_version_node (struct cgraph_node *node)
struct ipcp_value *val;
if (!lat->bottom
- && !VEC_index (tree, known_csts, i)
- && !VEC_index (tree, known_binfos, i))
+ && !known_csts[i]
+ && !known_binfos[i])
for (val = lat->values; val; val = val->next)
ret |= decide_about_value (node, i, -1, val, known_csts,
known_binfos);
@@ -3342,7 +3329,7 @@ decide_whether_version_node (struct cgraph_node *node)
if (info->clone_for_all_contexts)
{
- VEC (cgraph_edge_p, heap) *callers;
+ vec<cgraph_edge_p> callers;
if (dump_file)
fprintf (dump_file, " - Creating a specialized node of %s/%i "
@@ -3359,9 +3346,9 @@ decide_whether_version_node (struct cgraph_node *node)
ret = true;
}
else
- VEC_free (tree, heap, known_csts);
+ known_csts.release ();
- VEC_free (tree, heap, known_binfos);
+ known_binfos.release ();
return ret;
}
@@ -3507,7 +3494,7 @@ ipcp_driver (void)
/* Free all IPCP structures. */
free_toporder_info (&topo);
- VEC_free (cgraph_edge_p, heap, next_edge_clone);
+ next_edge_clone.release ();
cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
ipa_free_all_structures_after_ipa_cp ();
if (dump_file)
diff --git a/gcc/ipa-inline-analysis.c b/gcc/ipa-inline-analysis.c
index f7b3af13cf6..61fb48dccb0 100644
--- a/gcc/ipa-inline-analysis.c
+++ b/gcc/ipa-inline-analysis.c
@@ -132,12 +132,12 @@ static void inline_edge_duplication_hook (struct cgraph_edge *,
/* VECtor holding inline summaries.
In GGC memory because conditions might point to constant trees. */
-VEC(inline_summary_t,gc) *inline_summary_vec;
-VEC(inline_edge_summary_t,heap) *inline_edge_summary_vec;
+vec<inline_summary_t, va_gc> *inline_summary_vec;
+vec<inline_edge_summary_t> inline_edge_summary_vec;
/* Cached node/edge growths. */
-VEC(int,heap) *node_growth_cache;
-VEC(edge_growth_cache_entry,heap) *edge_growth_cache;
+vec<int> node_growth_cache;
+vec<edge_growth_cache_entry> edge_growth_cache;
/* Edge predicates goes here. */
static alloc_pool edge_predicate_pool;
@@ -247,7 +247,7 @@ add_condition (struct inline_summary *summary, int operand_num,
}
gcc_checking_assert (operand_num >= 0);
- for (i = 0; VEC_iterate (condition, summary->conds, i, c); i++)
+ for (i = 0; vec_safe_iterate (summary->conds, i, &c); i++)
{
if (c->operand_num == operand_num
&& c->code == code
@@ -266,7 +266,7 @@ add_condition (struct inline_summary *summary, int operand_num,
new_cond.agg_contents = agg_contents;
new_cond.by_ref = by_ref;
new_cond.offset = offset;
- VEC_safe_push (condition, gc, summary->conds, new_cond);
+ vec_safe_push (summary->conds, new_cond);
return single_cond_predicate (i + predicate_first_dynamic_condition);
}
@@ -333,9 +333,7 @@ add_clause (conditions conditions, struct predicate *p, clause_t clause)
condition *cc1;
if (!(clause & (1 << c1)))
continue;
- cc1 = &VEC_index (condition,
- conditions,
- c1 - predicate_first_dynamic_condition);
+ cc1 = &(*conditions)[c1 - predicate_first_dynamic_condition];
/* We have no way to represent !CHANGED and !IS_NOT_CONSTANT
and thus there is no point for looking for them. */
if (cc1->code == CHANGED
@@ -344,12 +342,8 @@ add_clause (conditions conditions, struct predicate *p, clause_t clause)
for (c2 = c1 + 1; c2 <= NUM_CONDITIONS; c2++)
if (clause & (1 << c2))
{
- condition *cc1 = &VEC_index (condition,
- conditions,
- c1 - predicate_first_dynamic_condition);
- condition *cc2 = &VEC_index (condition,
- conditions,
- c2 - predicate_first_dynamic_condition);
+ condition *cc1 = &(*conditions)[c1 - predicate_first_dynamic_condition];
+ condition *cc2 = &(*conditions)[c2 - predicate_first_dynamic_condition];
if (cc1->operand_num == cc2->operand_num
&& cc1->val == cc2->val
&& cc2->code != IS_NOT_CONSTANT
@@ -483,7 +477,7 @@ evaluate_predicate (struct predicate *p, clause_t possible_truths)
static int
predicate_probability (conditions conds,
struct predicate *p, clause_t possible_truths,
- VEC (inline_param_summary_t, heap) *inline_param_summary)
+ vec<inline_param_summary_t> inline_param_summary)
{
int i;
int combined_prob = REG_BR_PROB_BASE;
@@ -507,24 +501,19 @@ predicate_probability (conditions conds,
{
int this_prob = 0;
int i2;
- if (!inline_param_summary)
+ if (!inline_param_summary.exists ())
return REG_BR_PROB_BASE;
for (i2 = 0; i2 < NUM_CONDITIONS; i2++)
if ((p->clause[i] & possible_truths) & (1 << i2))
{
if (i2 >= predicate_first_dynamic_condition)
{
- condition *c = &VEC_index
- (condition, conds,
- i2 - predicate_first_dynamic_condition);
+ condition *c = &(*conds)[i2 - predicate_first_dynamic_condition];
if (c->code == CHANGED
&& (c->operand_num
- < (int) VEC_length (inline_param_summary_t,
- inline_param_summary)))
+ < (int) inline_param_summary.length ()))
{
- int iprob = VEC_index (inline_param_summary_t,
- inline_param_summary,
- c->operand_num).change_prob;
+ int iprob = inline_param_summary[c->operand_num].change_prob;
this_prob = MAX (this_prob, iprob);
}
else
@@ -554,8 +543,7 @@ dump_condition (FILE *f, conditions conditions, int cond)
fprintf (f, "not inlined");
else
{
- c = &VEC_index (condition, conditions,
- cond - predicate_first_dynamic_condition);
+ c = &(*conditions)[cond - predicate_first_dynamic_condition];
fprintf (f, "op%i", c->operand_num);
if (c->agg_contents)
fprintf (f, "[%soffset: " HOST_WIDE_INT_PRINT_DEC "]",
@@ -691,7 +679,7 @@ account_size_time (struct inline_summary *summary, int size, int time,
time = MAX_TIME * INLINE_TIME_SCALE;
gcc_assert (time >= 0);
- for (i = 0; VEC_iterate (size_time_entry, summary->entry, i, e); i++)
+ for (i = 0; vec_safe_iterate (summary->entry, i, &e); i++)
if (predicates_equal_p (&e->predicate, pred))
{
found = true;
@@ -701,7 +689,7 @@ account_size_time (struct inline_summary *summary, int size, int time,
{
i = 0;
found = true;
- e = &VEC_index (size_time_entry, summary->entry, 0);
+ e = &(*summary->entry)[0];
gcc_assert (!e->predicate.clause[0]);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "\t\tReached limit on number of entries, ignoring the predicate.");
@@ -720,7 +708,7 @@ account_size_time (struct inline_summary *summary, int size, int time,
new_entry.size = size;
new_entry.time = time;
new_entry.predicate = *pred;
- VEC_safe_push (size_time_entry, gc, summary->entry, new_entry);
+ vec_safe_push (summary->entry, new_entry);
}
else
{
@@ -782,15 +770,15 @@ set_hint_predicate (struct predicate **p, struct predicate new_predicate)
static clause_t
evaluate_conditions_for_known_args (struct cgraph_node *node,
bool inline_p,
- VEC (tree, heap) *known_vals,
- VEC (ipa_agg_jump_function_p, heap) *known_aggs)
+ vec<tree> known_vals,
+ vec<ipa_agg_jump_function_p> known_aggs)
{
clause_t clause = inline_p ? 0 : 1 << predicate_not_inlined_condition;
struct inline_summary *info = inline_summary (node);
int i;
struct condition *c;
- for (i = 0; VEC_iterate (condition, info->conds, i, c); i++)
+ for (i = 0; vec_safe_iterate (info->conds, i, &c); i++)
{
tree val;
tree res;
@@ -799,11 +787,9 @@ evaluate_conditions_for_known_args (struct cgraph_node *node,
(especially for K&R style programs). So bound check here (we assume
known_aggs vector, if non-NULL, has the same length as
known_vals). */
- gcc_checking_assert (!known_aggs
- || (VEC_length (tree, known_vals)
- == VEC_length (ipa_agg_jump_function_p,
- known_aggs)));
- if (c->operand_num >= (int) VEC_length (tree, known_vals))
+ gcc_checking_assert (!known_aggs.exists ()
+ || (known_vals.length () == known_aggs.length ()));
+ if (c->operand_num >= (int) known_vals.length ())
{
clause |= 1 << (i + predicate_first_dynamic_condition);
continue;
@@ -815,14 +801,13 @@ evaluate_conditions_for_known_args (struct cgraph_node *node,
if (c->code == CHANGED
&& !c->by_ref
- && (VEC_index (tree, known_vals, c->operand_num)
+ && (known_vals[c->operand_num]
== error_mark_node))
continue;
- if (known_aggs)
+ if (known_aggs.exists ())
{
- agg = VEC_index (ipa_agg_jump_function_p, known_aggs,
- c->operand_num);
+ agg = known_aggs[c->operand_num];
val = ipa_find_agg_cst_for_param (agg, c->offset, c->by_ref);
}
else
@@ -830,7 +815,7 @@ evaluate_conditions_for_known_args (struct cgraph_node *node,
}
else
{
- val = VEC_index (tree, known_vals, c->operand_num);
+ val = known_vals[c->operand_num];
if (val == error_mark_node && c->code != CHANGED)
val = NULL_TREE;
}
@@ -856,26 +841,28 @@ evaluate_conditions_for_known_args (struct cgraph_node *node,
static void
evaluate_properties_for_edge (struct cgraph_edge *e, bool inline_p,
- clause_t *clause_ptr,
- VEC (tree, heap) **known_vals_ptr,
- VEC (tree, heap) **known_binfos_ptr,
- VEC (ipa_agg_jump_function_p, heap) **known_aggs_ptr)
+ clause_t *clause_ptr,
+ vec<tree> *known_vals_ptr,
+ vec<tree> *known_binfos_ptr,
+ vec<ipa_agg_jump_function_p> *known_aggs_ptr)
{
struct cgraph_node *callee = cgraph_function_or_thunk_node (e->callee, NULL);
struct inline_summary *info = inline_summary (callee);
- VEC (tree, heap) *known_vals = NULL;
- VEC (ipa_agg_jump_function_p, heap) *known_aggs = NULL;
+ vec<tree> known_vals = vec<tree>();
+ vec<ipa_agg_jump_function_p> known_aggs
+ = vec<ipa_agg_jump_function_p>();
if (clause_ptr)
*clause_ptr = inline_p ? 0 : 1 << predicate_not_inlined_condition;
if (known_vals_ptr)
- *known_vals_ptr = NULL;
+ known_vals_ptr->create (0);
if (known_binfos_ptr)
- *known_binfos_ptr = NULL;
+ known_binfos_ptr->create (0);
- if (ipa_node_params_vector
+ if (ipa_node_params_vector.exists ()
&& !e->call_stmt_cannot_inline_p
- && ((clause_ptr && info->conds) || known_vals_ptr || known_binfos_ptr))
+ && ((clause_ptr && info->conds)
+ || known_vals_ptr || known_binfos_ptr))
{
struct ipa_node_params *parms_info;
struct ipa_edge_args *args = IPA_EDGE_REF (e);
@@ -888,12 +875,11 @@ evaluate_properties_for_edge (struct cgraph_edge *e, bool inline_p,
parms_info = IPA_NODE_REF (e->caller);
if (count && (info->conds || known_vals_ptr))
- VEC_safe_grow_cleared (tree, heap, known_vals, count);
+ known_vals.safe_grow_cleared (count);
if (count && (info->conds || known_aggs_ptr))
- VEC_safe_grow_cleared (ipa_agg_jump_function_p, heap, known_aggs,
- count);
+ known_aggs.safe_grow_cleared (count);
if (count && known_binfos_ptr)
- VEC_safe_grow_cleared (tree, heap, *known_binfos_ptr, count);
+ known_binfos_ptr->safe_grow_cleared (count);
for (i = 0; i < count; i++)
{
@@ -901,20 +887,17 @@ evaluate_properties_for_edge (struct cgraph_edge *e, bool inline_p,
tree cst = ipa_value_from_jfunc (parms_info, jf);
if (cst)
{
- if (known_vals && TREE_CODE (cst) != TREE_BINFO)
- VEC_replace (tree, known_vals, i, cst);
+ if (known_vals.exists () && TREE_CODE (cst) != TREE_BINFO)
+ known_vals[i] = cst;
else if (known_binfos_ptr != NULL && TREE_CODE (cst) == TREE_BINFO)
- VEC_replace (tree, *known_binfos_ptr, i, cst);
+ (*known_binfos_ptr)[i] = cst;
}
- else if (inline_p
- && !VEC_index (inline_param_summary_t,
- es->param,
- i).change_prob)
- VEC_replace (tree, known_vals, i, error_mark_node);
+ else if (inline_p && !es->param[i].change_prob)
+ known_vals[i] = error_mark_node;
/* TODO: When IPA-CP starts propagating and merging aggregate jump
functions, use its knowledge of the caller too, just like the
scalar case above. */
- VEC_replace (ipa_agg_jump_function_p, known_aggs, i, &jf->agg);
+ known_aggs[i] = &jf->agg;
}
}
@@ -925,12 +908,12 @@ evaluate_properties_for_edge (struct cgraph_edge *e, bool inline_p,
if (known_vals_ptr)
*known_vals_ptr = known_vals;
else
- VEC_free (tree, heap, known_vals);
+ known_vals.release ();
if (known_aggs_ptr)
*known_aggs_ptr = known_aggs;
else
- VEC_free (ipa_agg_jump_function_p, heap, known_aggs);
+ known_aggs.release ();
}
@@ -952,14 +935,10 @@ inline_summary_alloc (void)
edge_duplication_hook_holder =
cgraph_add_edge_duplication_hook (&inline_edge_duplication_hook, NULL);
- if (VEC_length (inline_summary_t, inline_summary_vec)
- <= (unsigned) cgraph_max_uid)
- VEC_safe_grow_cleared (inline_summary_t, gc,
- inline_summary_vec, cgraph_max_uid + 1);
- if (VEC_length (inline_edge_summary_t, inline_edge_summary_vec)
- <= (unsigned) cgraph_edge_max_uid)
- VEC_safe_grow_cleared (inline_edge_summary_t, heap,
- inline_edge_summary_vec, cgraph_edge_max_uid + 1);
+ if (vec_safe_length (inline_summary_vec) <= (unsigned) cgraph_max_uid)
+ vec_safe_grow_cleared (inline_summary_vec, cgraph_max_uid + 1);
+ if (inline_edge_summary_vec.length () <= (unsigned) cgraph_edge_max_uid)
+ inline_edge_summary_vec.safe_grow_cleared (cgraph_edge_max_uid + 1);
if (!edge_predicate_pool)
edge_predicate_pool = create_alloc_pool ("edge predicates",
sizeof (struct predicate),
@@ -972,8 +951,7 @@ inline_summary_alloc (void)
static void
reset_inline_edge_summary (struct cgraph_edge *e)
{
- if (e->uid
- < (int)VEC_length (inline_edge_summary_t, inline_edge_summary_vec))
+ if (e->uid < (int)inline_edge_summary_vec.length ())
{
struct inline_edge_summary *es = inline_edge_summary (e);
@@ -981,7 +959,7 @@ reset_inline_edge_summary (struct cgraph_edge *e)
if (es->predicate)
pool_free (edge_predicate_pool, es->predicate);
es->predicate = NULL;
- VEC_free (inline_param_summary_t, heap, es->param);
+ es->param.release ();
}
}
@@ -1017,8 +995,8 @@ reset_inline_summary (struct cgraph_node *node)
pool_free (edge_predicate_pool, info->array_index);
info->array_index = NULL;
}
- VEC_free (condition, gc, info->conds);
- VEC_free (size_time_entry,gc, info->entry);
+ vec_free (info->conds);
+ vec_free (info->entry);
for (e = node->callees; e; e = e->next_callee)
reset_inline_edge_summary (e);
for (e = node->indirect_calls; e; e = e->next_callee)
@@ -1031,8 +1009,7 @@ static void
inline_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
struct inline_summary *info;
- if (VEC_length (inline_summary_t, inline_summary_vec)
- <= (unsigned)node->uid)
+ if (vec_safe_length (inline_summary_vec) <= (unsigned)node->uid)
return;
info = inline_summary (node);
reset_inline_summary (node);
@@ -1099,16 +1076,17 @@ inline_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
sizeof (struct inline_summary));
/* TODO: as an optimization, we may avoid copying conditions
that are known to be false or true. */
- info->conds = VEC_copy (condition, gc, info->conds);
+ info->conds = vec_safe_copy (info->conds);
/* When there are any replacements in the function body, see if we can figure
out that something was optimized out. */
- if (ipa_node_params_vector && dst->clone.tree_map)
+ if (ipa_node_params_vector.exists ()
+ && dst->clone.tree_map)
{
- VEC(size_time_entry,gc) *entry = info->entry;
+ vec<size_time_entry, va_gc> *entry = info->entry;
/* Use SRC parm info since it may not be copied yet. */
struct ipa_node_params *parms_info = IPA_NODE_REF (src);
- VEC (tree, heap) *known_vals = NULL;
+ vec<tree> known_vals = vec<tree>();
int count = ipa_get_param_count (parms_info);
int i,j;
clause_t possible_truths;
@@ -1119,28 +1097,27 @@ inline_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
struct cgraph_edge *edge;
info->entry = 0;
- VEC_safe_grow_cleared (tree, heap, known_vals, count);
+ known_vals.safe_grow_cleared (count);
for (i = 0; i < count; i++)
{
tree t = ipa_get_param (parms_info, i);
struct ipa_replace_map *r;
- for (j = 0;
- VEC_iterate (ipa_replace_map_p, dst->clone.tree_map, j, r);
- j++)
+ for (j = 0; vec_safe_iterate (dst->clone.tree_map, j, &r); j++)
{
if (r->old_tree == t
&& r->replace_p
&& !r->ref_p)
{
- VEC_replace (tree, known_vals, i, r->new_tree);
+ known_vals[i] = r->new_tree;
break;
}
}
}
possible_truths = evaluate_conditions_for_known_args (dst, false,
- known_vals, NULL);
- VEC_free (tree, heap, known_vals);
+ known_vals,
+ vec<ipa_agg_jump_function_p>());
+ known_vals.release ();
account_size_time (info, 0, 0, &true_pred);
@@ -1149,7 +1126,7 @@ inline_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
to be false.
TODO: as on optimization, we can also eliminate conditions known
to be true. */
- for (i = 0; VEC_iterate (size_time_entry, entry, i, e); i++)
+ for (i = 0; vec_safe_iterate (entry, i, &e); i++)
{
struct predicate new_predicate;
new_predicate = remap_predicate_after_duplication (&e->predicate,
@@ -1224,7 +1201,7 @@ inline_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
}
else
{
- info->entry = VEC_copy (size_time_entry, gc, info->entry);
+ info->entry = vec_safe_copy (info->entry);
if (info->loop_iterations)
{
predicate p = *info->loop_iterations;
@@ -1263,7 +1240,7 @@ inline_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
sizeof (struct inline_edge_summary));
info->predicate = NULL;
edge_set_predicate (dst, srcinfo->predicate);
- info->param = VEC_copy (inline_param_summary_t, heap, srcinfo->param);
+ info->param = srcinfo->param.copy ();
}
@@ -1272,7 +1249,7 @@ inline_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
static void
inline_edge_removal_hook (struct cgraph_edge *edge, void *data ATTRIBUTE_UNUSED)
{
- if (edge_growth_cache)
+ if (edge_growth_cache.exists ())
reset_edge_growth_cache (edge);
reset_inline_edge_summary (edge);
}
@@ -1284,10 +1261,9 @@ void
initialize_growth_caches (void)
{
if (cgraph_edge_max_uid)
- VEC_safe_grow_cleared (edge_growth_cache_entry, heap, edge_growth_cache,
- cgraph_edge_max_uid);
+ edge_growth_cache.safe_grow_cleared (cgraph_edge_max_uid);
if (cgraph_max_uid)
- VEC_safe_grow_cleared (int, heap, node_growth_cache, cgraph_max_uid);
+ node_growth_cache.safe_grow_cleared (cgraph_max_uid);
}
@@ -1296,10 +1272,8 @@ initialize_growth_caches (void)
void
free_growth_caches (void)
{
- VEC_free (edge_growth_cache_entry, heap, edge_growth_cache);
- edge_growth_cache = 0;
- VEC_free (int, heap, node_growth_cache);
- node_growth_cache = 0;
+ edge_growth_cache.release ();
+ node_growth_cache.release ();
}
@@ -1337,12 +1311,10 @@ dump_inline_edge_summary (FILE * f, int indent, struct cgraph_node *node,
}
else
fprintf (f, "\n");
- if (es->param)
- for (i = 0; i < (int)VEC_length (inline_param_summary_t, es->param);
- i++)
+ if (es->param.exists ())
+ for (i = 0; i < (int)es->param.length (); i++)
{
- int prob = VEC_index (inline_param_summary_t,
- es->param, i).change_prob;
+ int prob = es->param[i].change_prob;
if (!prob)
fprintf (f, "%*s op%i is compile time invariant\n",
@@ -1413,9 +1385,7 @@ dump_inline_summary (FILE * f, struct cgraph_node *node)
if (s->scc_no)
fprintf (f, " In SCC: %i\n",
(int) s->scc_no);
- for (i = 0;
- VEC_iterate (size_time_entry, s->entry, i, e);
- i++)
+ for (i = 0; vec_safe_iterate (s->entry, i, &e); i++)
{
fprintf (f, " size:%f, time:%f, predicate:",
(double) e->size / INLINE_SIZE_SCALE,
@@ -1922,8 +1892,6 @@ compute_bb_predicates (struct cgraph_node *node,
/* We keep info about constantness of SSA names. */
typedef struct predicate predicate_t;
-DEF_VEC_O (predicate_t);
-DEF_VEC_ALLOC_O (predicate_t, heap);
/* Return predicate specifying when the STMT might have result that is not
a compile time constant. */
@@ -1931,7 +1899,7 @@ static struct predicate
will_be_nonconstant_expr_predicate (struct ipa_node_params *info,
struct inline_summary *summary,
tree expr,
- VEC (predicate_t, heap) *nonconstant_names)
+ vec<predicate_t> nonconstant_names)
{
tree parm;
int index;
@@ -1946,8 +1914,7 @@ will_be_nonconstant_expr_predicate (struct ipa_node_params *info,
if (is_gimple_min_invariant (expr))
return false_predicate ();
if (TREE_CODE (expr) == SSA_NAME)
- return VEC_index (predicate_t, nonconstant_names,
- SSA_NAME_VERSION (expr));
+ return nonconstant_names[SSA_NAME_VERSION (expr)];
if (BINARY_CLASS_P (expr)
|| COMPARISON_CLASS_P (expr))
{
@@ -1997,7 +1964,7 @@ static struct predicate
will_be_nonconstant_predicate (struct ipa_node_params *info,
struct inline_summary *summary,
gimple stmt,
- VEC (predicate_t, heap) *nonconstant_names)
+ vec<predicate_t> nonconstant_names)
{
struct predicate p = true_predicate ();
ssa_op_iter iter;
@@ -2047,8 +2014,7 @@ will_be_nonconstant_predicate (struct ipa_node_params *info,
return p;
/* If we know when operand is constant,
we still can say something useful. */
- if (!true_predicate_p (&VEC_index (predicate_t, nonconstant_names,
- SSA_NAME_VERSION (use))))
+ if (!true_predicate_p (&nonconstant_names[SSA_NAME_VERSION (use)]))
continue;
return p;
}
@@ -2071,14 +2037,13 @@ will_be_nonconstant_predicate (struct ipa_node_params *info,
continue;
}
else
- p = VEC_index (predicate_t, nonconstant_names,
- SSA_NAME_VERSION (use));
+ p = nonconstant_names[SSA_NAME_VERSION (use)];
op_non_const = or_predicates (summary->conds, &p, &op_non_const);
}
if (gimple_code (stmt) == GIMPLE_ASSIGN
&& TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME)
- VEC_replace (predicate_t, nonconstant_names,
- SSA_NAME_VERSION (gimple_assign_lhs (stmt)), op_non_const);
+ nonconstant_names[SSA_NAME_VERSION (gimple_assign_lhs (stmt))]
+ = op_non_const;
return op_non_const;
}
@@ -2200,7 +2165,7 @@ static bool
phi_result_unknown_predicate (struct ipa_node_params *info,
struct inline_summary *summary, basic_block bb,
struct predicate *p,
- VEC (predicate_t, heap) *nonconstant_names)
+ vec<predicate_t> nonconstant_names)
{
edge e;
edge_iterator ei;
@@ -2259,7 +2224,7 @@ phi_result_unknown_predicate (struct ipa_node_params *info,
static void
predicate_for_phi_result (struct inline_summary *summary, gimple phi,
struct predicate *p,
- VEC (predicate_t, heap) *nonconstant_names)
+ vec<predicate_t> nonconstant_names)
{
unsigned i;
@@ -2270,8 +2235,7 @@ predicate_for_phi_result (struct inline_summary *summary, gimple phi,
{
gcc_assert (TREE_CODE (arg) == SSA_NAME);
*p = or_predicates (summary->conds, p,
- &VEC_index (predicate_t, nonconstant_names,
- SSA_NAME_VERSION (arg)));
+ &nonconstant_names[SSA_NAME_VERSION (arg)]);
if (true_predicate_p (p))
return;
}
@@ -2282,15 +2246,14 @@ predicate_for_phi_result (struct inline_summary *summary, gimple phi,
fprintf (dump_file, "\t\tphi predicate: ");
dump_predicate (dump_file, summary->conds, p);
}
- VEC_replace (predicate_t, nonconstant_names,
- SSA_NAME_VERSION (gimple_phi_result (phi)), *p);
+ nonconstant_names[SSA_NAME_VERSION (gimple_phi_result (phi))] = *p;
}
/* Return predicate specifying when array index in access OP becomes non-constant. */
static struct predicate
array_index_predicate (struct inline_summary *info,
- VEC (predicate_t, heap) *nonconstant_names, tree op)
+ vec<predicate_t> nonconstant_names, tree op)
{
struct predicate p = false_predicate ();
while (handled_component_p (op))
@@ -2300,8 +2263,8 @@ array_index_predicate (struct inline_summary *info,
{
if (TREE_CODE (TREE_OPERAND (op, 1)) == SSA_NAME)
p = or_predicates (info->conds, &p,
- &VEC_index (predicate_t, nonconstant_names,
- SSA_NAME_VERSION (TREE_OPERAND (op, 1))));
+ &nonconstant_names[
+ SSA_NAME_VERSION (TREE_OPERAND (op, 1))]);
}
op = TREE_OPERAND (op, 0);
}
@@ -2327,24 +2290,23 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
struct inline_summary *info = inline_summary (node);
struct predicate bb_predicate;
struct ipa_node_params *parms_info = NULL;
- VEC (predicate_t, heap) *nonconstant_names = NULL;
+ vec<predicate_t> nonconstant_names = vec<predicate_t>();
int nblocks, n;
int *order;
predicate array_index = true_predicate ();
- info->conds = 0;
- info->entry = 0;
+ info->conds = NULL;
+ info->entry = NULL;
if (optimize && !early)
{
calculate_dominance_info (CDI_DOMINATORS);
loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);
- if (ipa_node_params_vector)
+ if (ipa_node_params_vector.exists ())
{
parms_info = IPA_NODE_REF (node);
- VEC_safe_grow_cleared (predicate_t, heap, nonconstant_names,
- VEC_length (tree, SSANAMES (my_function)));
+ nonconstant_names.safe_grow_cleared(SSANAMES (my_function)->length());
}
}
@@ -2388,7 +2350,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
dump_predicate (dump_file, info->conds, &bb_predicate);
}
- if (parms_info && nonconstant_names)
+ if (parms_info && nonconstant_names.exists ())
{
struct predicate phi_predicate;
bool first_phi = true;
@@ -2427,7 +2389,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
((double)freq)/CGRAPH_FREQ_BASE, this_size, this_time);
}
- if (gimple_assign_load_p (stmt) && nonconstant_names)
+ if (gimple_assign_load_p (stmt) && nonconstant_names.exists ())
{
struct predicate this_array_index;
this_array_index = array_index_predicate (info, nonconstant_names,
@@ -2435,7 +2397,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
if (!false_predicate_p (&this_array_index))
array_index = and_predicates (info->conds, &array_index, &this_array_index);
}
- if (gimple_store_p (stmt) && nonconstant_names)
+ if (gimple_store_p (stmt) && nonconstant_names.exists ())
{
struct predicate this_array_index;
this_array_index = array_index_predicate (info, nonconstant_names,
@@ -2453,30 +2415,27 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
/* Special case: results of BUILT_IN_CONSTANT_P will be always
resolved as constant. We however don't want to optimize
out the cgraph edges. */
- if (nonconstant_names
+ if (nonconstant_names.exists ()
&& gimple_call_builtin_p (stmt, BUILT_IN_CONSTANT_P)
&& gimple_call_lhs (stmt)
&& TREE_CODE (gimple_call_lhs (stmt)) == SSA_NAME)
{
struct predicate false_p = false_predicate ();
- VEC_replace (predicate_t, nonconstant_names,
- SSA_NAME_VERSION (gimple_call_lhs (stmt)),
- false_p);
+ nonconstant_names[SSA_NAME_VERSION (gimple_call_lhs (stmt))]
+ = false_p;
}
- if (ipa_node_params_vector)
+ if (ipa_node_params_vector.exists ())
{
int count = gimple_call_num_args (stmt);
int i;
if (count)
- VEC_safe_grow_cleared (inline_param_summary_t, heap,
- es->param, count);
+ es->param.safe_grow_cleared (count);
for (i = 0; i < count; i++)
{
int prob = param_change_prob (stmt, i);
gcc_assert (prob >= 0 && prob <= REG_BR_PROB_BASE);
- VEC_index (inline_param_summary_t,
- es->param, i).change_prob = prob;
+ es->param[i].change_prob = prob;
}
}
@@ -2547,7 +2506,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
time = MAX_TIME;
free (order);
- if (!early && nonconstant_names)
+ if (!early && nonconstant_names.exists ())
{
struct loop *loop;
loop_iterator li;
@@ -2559,7 +2518,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
scev_initialize ();
FOR_EACH_LOOP (li, loop, 0)
{
- VEC (edge, heap) *exits;
+ vec<edge> exits;
edge ex;
unsigned int j, i;
struct tree_niter_desc niter_desc;
@@ -2567,7 +2526,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
bb_predicate = *(struct predicate *)loop->header->aux;
exits = get_loop_exit_edges (loop);
- FOR_EACH_VEC_ELT (edge, exits, j, ex)
+ FOR_EACH_VEC_ELT (exits, j, ex)
if (number_of_iterations_exit (loop, ex, &niter_desc, false)
&& !is_gimple_min_invariant (niter_desc.niter))
{
@@ -2584,7 +2543,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
independent predicate. */
loop_iterations = and_predicates (info->conds, &loop_iterations, &will_be_nonconstant);
}
- VEC_free (edge, heap, exits);
+ exits.release ();
for (i = 0; i < loop->num_nodes; i++)
{
@@ -2642,7 +2601,7 @@ estimate_function_body_sizes (struct cgraph_node *node, bool early)
}
inline_summary (node)->self_time = time;
inline_summary (node)->self_size = size;
- VEC_free (predicate_t, heap, nonconstant_names);
+ nonconstant_names.release ();
if (optimize && !early)
{
loop_optimizer_finalize ();
@@ -2779,15 +2738,15 @@ struct gimple_opt_pass pass_inline_parameters =
static bool
estimate_edge_devirt_benefit (struct cgraph_edge *ie,
int *size, int *time,
- VEC (tree, heap) *known_vals,
- VEC (tree, heap) *known_binfos,
- VEC (ipa_agg_jump_function_p, heap) *known_aggs)
+ vec<tree> known_vals,
+ vec<tree> known_binfos,
+ vec<ipa_agg_jump_function_p> known_aggs)
{
tree target;
struct cgraph_node *callee;
struct inline_summary *isummary;
- if (!known_vals && !known_binfos)
+ if (!known_vals.exists () && !known_binfos.exists ())
return false;
if (!flag_indirect_inlining)
return false;
@@ -2815,9 +2774,9 @@ estimate_edge_devirt_benefit (struct cgraph_edge *ie,
static inline void
estimate_edge_size_and_time (struct cgraph_edge *e, int *size, int *time,
int prob,
- VEC (tree, heap) *known_vals,
- VEC (tree, heap) *known_binfos,
- VEC (ipa_agg_jump_function_p, heap) *known_aggs,
+ vec<tree> known_vals,
+ vec<tree> known_binfos,
+ vec<ipa_agg_jump_function_p> known_aggs,
inline_hints *hints)
{
@@ -2847,9 +2806,9 @@ static void
estimate_calls_size_and_time (struct cgraph_node *node, int *size, int *time,
inline_hints *hints,
clause_t possible_truths,
- VEC (tree, heap) *known_vals,
- VEC (tree, heap) *known_binfos,
- VEC (ipa_agg_jump_function_p, heap) *known_aggs)
+ vec<tree> known_vals,
+ vec<tree> known_binfos,
+ vec<ipa_agg_jump_function_p> known_aggs)
{
struct cgraph_edge *e;
for (e = node->callees; e; e = e->next_callee)
@@ -2889,13 +2848,13 @@ estimate_calls_size_and_time (struct cgraph_node *node, int *size, int *time,
static void
estimate_node_size_and_time (struct cgraph_node *node,
clause_t possible_truths,
- VEC (tree, heap) *known_vals,
- VEC (tree, heap) *known_binfos,
- VEC (ipa_agg_jump_function_p, heap) *known_aggs,
+ vec<tree> known_vals,
+ vec<tree> known_binfos,
+ vec<ipa_agg_jump_function_p> known_aggs,
int *ret_size, int *ret_time,
inline_hints *ret_hints,
- VEC (inline_param_summary_t, heap)
- *inline_param_summary)
+ vec<inline_param_summary_t>
+ inline_param_summary)
{
struct inline_summary *info = inline_summary (node);
size_time_entry *e;
@@ -2915,7 +2874,7 @@ estimate_node_size_and_time (struct cgraph_node *node,
for (i = predicate_not_inlined_condition;
i < (predicate_first_dynamic_condition
- + (int)VEC_length (condition, info->conds)); i++)
+ + (int)vec_safe_length (info->conds)); i++)
if (!(possible_truths & (1 << i)))
{
if (found)
@@ -2925,13 +2884,13 @@ estimate_node_size_and_time (struct cgraph_node *node,
}
}
- for (i = 0; VEC_iterate (size_time_entry, info->entry, i, e); i++)
+ for (i = 0; vec_safe_iterate (info->entry, i, &e); i++)
if (evaluate_predicate (&e->predicate, possible_truths))
{
size += e->size;
gcc_checking_assert (e->time >= 0);
gcc_checking_assert (time >= 0);
- if (!inline_param_summary)
+ if (!inline_param_summary.exists ())
time += e->time;
else
{
@@ -2992,9 +2951,9 @@ estimate_node_size_and_time (struct cgraph_node *node,
void
estimate_ipcp_clone_size_and_time (struct cgraph_node *node,
- VEC (tree, heap) *known_vals,
- VEC (tree, heap) *known_binfos,
- VEC (ipa_agg_jump_function_p, heap) *known_aggs,
+ vec<tree> known_vals,
+ vec<tree> known_binfos,
+ vec<ipa_agg_jump_function_p> known_aggs,
int *ret_size, int *ret_time,
inline_hints *hints)
{
@@ -3003,7 +2962,8 @@ estimate_ipcp_clone_size_and_time (struct cgraph_node *node,
clause = evaluate_conditions_for_known_args (node, false, known_vals,
known_aggs);
estimate_node_size_and_time (node, clause, known_vals, known_binfos,
- known_aggs, ret_size, ret_time, hints, NULL);
+ known_aggs, ret_size, ret_time, hints,
+ vec<inline_param_summary_t>());
}
/* Translate all conditions from callee representation into caller
@@ -3023,8 +2983,8 @@ static struct predicate
remap_predicate (struct inline_summary *info,
struct inline_summary *callee_info,
struct predicate *p,
- VEC (int, heap) *operand_map,
- VEC (int, heap) *offset_map,
+ vec<int> operand_map,
+ vec<int> offset_map,
clause_t possible_truths,
struct predicate *toplev_predicate)
{
@@ -3053,26 +3013,25 @@ remap_predicate (struct inline_summary *info,
{
struct condition *c;
- c = &VEC_index (condition, callee_info->conds,
- cond - predicate_first_dynamic_condition);
+ c = &(*callee_info->conds)[cond
+ - predicate_first_dynamic_condition];
/* See if we can remap condition operand to caller's operand.
Otherwise give up. */
- if (!operand_map
- || (int)VEC_length (int, operand_map) <= c->operand_num
- || VEC_index (int, operand_map, c->operand_num) == -1
+ if (!operand_map.exists ()
+ || (int)operand_map.length () <= c->operand_num
+ || operand_map[c->operand_num] == -1
/* TODO: For non-aggregate conditions, adding an offset is
basically an arithmetic jump function processing which
we should support in future. */
|| ((!c->agg_contents || !c->by_ref)
- && VEC_index (int, offset_map, c->operand_num) > 0)
+ && offset_map[c->operand_num] > 0)
|| (c->agg_contents && c->by_ref
- && VEC_index (int, offset_map, c->operand_num) < 0))
+ && offset_map[c->operand_num] < 0))
cond_predicate = true_predicate ();
else
{
struct agg_position_info ap;
- HOST_WIDE_INT offset_delta = VEC_index (int, offset_map,
- c->operand_num);
+ HOST_WIDE_INT offset_delta = offset_map[c->operand_num];
if (offset_delta < 0)
{
gcc_checking_assert (!c->agg_contents || !c->by_ref);
@@ -3085,9 +3044,7 @@ remap_predicate (struct inline_summary *info,
ap.agg_contents = c->agg_contents;
ap.by_ref = c->by_ref;
cond_predicate = add_condition (info,
- VEC_index (int,
- operand_map,
- c->operand_num),
+ operand_map[c->operand_num],
&ap, c->code, c->val);
}
}
@@ -3148,7 +3105,7 @@ static void
remap_edge_change_prob (struct cgraph_edge *inlined_edge,
struct cgraph_edge *edge)
{
- if (ipa_node_params_vector)
+ if (ipa_node_params_vector.exists ())
{
int i;
struct ipa_edge_args *args = IPA_EDGE_REF (edge);
@@ -3161,23 +3118,18 @@ remap_edge_change_prob (struct cgraph_edge *inlined_edge,
struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, i);
if (jfunc->type == IPA_JF_PASS_THROUGH
&& (ipa_get_jf_pass_through_formal_id (jfunc)
- < (int) VEC_length (inline_param_summary_t,
- inlined_es->param)))
+ < (int) inlined_es->param.length ()))
{
int jf_formal_id = ipa_get_jf_pass_through_formal_id (jfunc);
- int prob1 = VEC_index (inline_param_summary_t,
- es->param, i).change_prob;
- int prob2 = VEC_index
- (inline_param_summary_t,
- inlined_es->param, jf_formal_id).change_prob;
+ int prob1 = es->param[i].change_prob;
+ int prob2 = inlined_es->param[jf_formal_id].change_prob;
int prob = ((prob1 * prob2 + REG_BR_PROB_BASE / 2)
/ REG_BR_PROB_BASE);
if (prob1 && prob2 && !prob)
prob = 1;
- VEC_index (inline_param_summary_t,
- es->param, i).change_prob = prob;
+ es->param[i].change_prob = prob;
}
}
}
@@ -3195,8 +3147,8 @@ remap_edge_summaries (struct cgraph_edge *inlined_edge,
struct cgraph_node *node,
struct inline_summary *info,
struct inline_summary *callee_info,
- VEC (int, heap) *operand_map,
- VEC (int, heap) *offset_map,
+ vec<int> operand_map,
+ vec<int> offset_map,
clause_t possible_truths,
struct predicate *toplev_predicate)
{
@@ -3266,8 +3218,8 @@ static void
remap_hint_predicate (struct inline_summary *info,
struct inline_summary *callee_info,
struct predicate **hint,
- VEC (int, heap) *operand_map,
- VEC (int, heap) *offset_map,
+ vec<int> operand_map,
+ vec<int> offset_map,
clause_t possible_truths,
struct predicate *toplev_predicate)
{
@@ -3303,8 +3255,8 @@ inline_merge_summary (struct cgraph_edge *edge)
struct inline_summary *info = inline_summary (to);
clause_t clause = 0; /* not_inline is known to be false. */
size_time_entry *e;
- VEC (int, heap) *operand_map = NULL;
- VEC (int, heap) *offset_map = NULL;
+ vec<int> operand_map = vec<int>();
+ vec<int> offset_map = vec<int>();
int i;
struct predicate toplev_predicate;
struct predicate true_p = true_predicate ();
@@ -3315,7 +3267,7 @@ inline_merge_summary (struct cgraph_edge *edge)
else
toplev_predicate = true_predicate ();
- if (ipa_node_params_vector && callee_info->conds)
+ if (ipa_node_params_vector.exists () && callee_info->conds)
{
struct ipa_edge_args *args = IPA_EDGE_REF (edge);
int count = ipa_get_cs_argument_count (args);
@@ -3324,8 +3276,8 @@ inline_merge_summary (struct cgraph_edge *edge)
evaluate_properties_for_edge (edge, true, &clause, NULL, NULL, NULL);
if (count)
{
- VEC_safe_grow_cleared (int, heap, operand_map, count);
- VEC_safe_grow_cleared (int, heap, offset_map, count);
+ operand_map.safe_grow_cleared (count);
+ offset_map.safe_grow_cleared (count);
}
for (i = 0; i < count; i++)
{
@@ -3338,7 +3290,7 @@ inline_merge_summary (struct cgraph_edge *edge)
if (ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
map = ipa_get_jf_pass_through_formal_id (jfunc);
if (!ipa_get_jf_pass_through_agg_preserved (jfunc))
- VEC_replace (int, offset_map, i, -1);
+ offset_map[i] = -1;
}
else if (jfunc->type == IPA_JF_ANCESTOR)
{
@@ -3348,14 +3300,14 @@ inline_merge_summary (struct cgraph_edge *edge)
map = ipa_get_jf_ancestor_formal_id (jfunc);
if (!ipa_get_jf_ancestor_agg_preserved (jfunc))
offset = -1;
- VEC_replace (int, offset_map, i, offset);
+ offset_map[i] = offset;
}
}
- VEC_replace (int, operand_map, i, map);
+ operand_map[i] = map;
gcc_assert (map < ipa_get_param_count (IPA_NODE_REF (to)));
}
}
- for (i = 0; VEC_iterate (size_time_entry, callee_info->entry, i, e); i++)
+ for (i = 0; vec_safe_iterate (callee_info->entry, i, &e); i++)
{
struct predicate p = remap_predicate (info, callee_info,
&e->predicate, operand_map,
@@ -3401,9 +3353,9 @@ inline_merge_summary (struct cgraph_edge *edge)
/* We do not maintain predicates of inlined edges, free it. */
edge_set_predicate (edge, &true_p);
/* Similarly remove param summaries. */
- VEC_free (inline_param_summary_t, heap, es->param);
- VEC_free (int, heap, operand_map);
- VEC_free (int, heap, offset_map);
+ es->param.release ();
+ operand_map.release ();
+ offset_map.release ();
}
/* For performance reasons inline_merge_summary is not updating overall size
@@ -3418,7 +3370,7 @@ inline_update_overall_summary (struct cgraph_node *node)
info->size = 0;
info->time = 0;
- for (i = 0; VEC_iterate (size_time_entry, info->entry, i, e); i++)
+ for (i = 0; vec_safe_iterate (info->entry, i, &e); i++)
{
info->size += e->size, info->time += e->time;
if (info->time > MAX_TIME * INLINE_TIME_SCALE)
@@ -3426,7 +3378,9 @@ inline_update_overall_summary (struct cgraph_node *node)
}
estimate_calls_size_and_time (node, &info->size, &info->time, NULL,
~(clause_t)(1 << predicate_false_condition),
- NULL, NULL, NULL);
+ vec<tree>(),
+ vec<tree>(),
+ vec<ipa_agg_jump_function_p>());
info->time = (info->time + INLINE_TIME_SCALE / 2) / INLINE_TIME_SCALE;
info->size = (info->size + INLINE_SIZE_SCALE / 2) / INLINE_SIZE_SCALE;
}
@@ -3466,9 +3420,9 @@ do_estimate_edge_time (struct cgraph_edge *edge)
inline_hints hints;
struct cgraph_node *callee;
clause_t clause;
- VEC (tree, heap) *known_vals;
- VEC (tree, heap) *known_binfos;
- VEC (ipa_agg_jump_function_p, heap) *known_aggs;
+ vec<tree> known_vals;
+ vec<tree> known_binfos;
+ vec<ipa_agg_jump_function_p> known_aggs;
struct inline_edge_summary *es = inline_edge_summary (edge);
callee = cgraph_function_or_thunk_node (edge->callee, NULL);
@@ -3479,27 +3433,22 @@ do_estimate_edge_time (struct cgraph_edge *edge)
&known_aggs);
estimate_node_size_and_time (callee, clause, known_vals, known_binfos,
known_aggs, &size, &time, &hints, es->param);
- VEC_free (tree, heap, known_vals);
- VEC_free (tree, heap, known_binfos);
- VEC_free (ipa_agg_jump_function_p, heap, known_aggs);
+ known_vals.release ();
+ known_binfos.release ();
+ known_aggs.release ();
gcc_checking_assert (size >= 0);
gcc_checking_assert (time >= 0);
/* When caching, update the cache entry. */
- if (edge_growth_cache)
+ if (edge_growth_cache.exists ())
{
- if ((int)VEC_length (edge_growth_cache_entry, edge_growth_cache)
- <= edge->uid)
- VEC_safe_grow_cleared (edge_growth_cache_entry, heap, edge_growth_cache,
- cgraph_edge_max_uid);
- VEC_index (edge_growth_cache_entry, edge_growth_cache, edge->uid).time
- = time + (time >= 0);
+ if ((int)edge_growth_cache.length () <= edge->uid)
+ edge_growth_cache.safe_grow_cleared (cgraph_edge_max_uid);
+ edge_growth_cache[edge->uid].time = time + (time >= 0);
- VEC_index (edge_growth_cache_entry, edge_growth_cache, edge->uid).size
- = size + (size >= 0);
+ edge_growth_cache[edge->uid].size = size + (size >= 0);
hints |= simple_edge_hints (edge);
- VEC_index (edge_growth_cache_entry, edge_growth_cache, edge->uid).hints
- = hints + 1;
+ edge_growth_cache[edge->uid].hints = hints + 1;
}
return time;
}
@@ -3514,18 +3463,16 @@ do_estimate_edge_size (struct cgraph_edge *edge)
int size;
struct cgraph_node *callee;
clause_t clause;
- VEC (tree, heap) *known_vals;
- VEC (tree, heap) *known_binfos;
- VEC (ipa_agg_jump_function_p, heap) *known_aggs;
+ vec<tree> known_vals;
+ vec<tree> known_binfos;
+ vec<ipa_agg_jump_function_p> known_aggs;
/* When we do caching, use do_estimate_edge_time to populate the entry. */
- if (edge_growth_cache)
+ if (edge_growth_cache.exists ())
{
do_estimate_edge_time (edge);
- size = VEC_index (edge_growth_cache_entry,
- edge_growth_cache,
- edge->uid).size;
+ size = edge_growth_cache[edge->uid].size;
gcc_checking_assert (size);
return size - (size > 0);
}
@@ -3538,10 +3485,11 @@ do_estimate_edge_size (struct cgraph_edge *edge)
&clause, &known_vals, &known_binfos,
&known_aggs);
estimate_node_size_and_time (callee, clause, known_vals, known_binfos,
- known_aggs, &size, NULL, NULL, NULL);
- VEC_free (tree, heap, known_vals);
- VEC_free (tree, heap, known_binfos);
- VEC_free (ipa_agg_jump_function_p, heap, known_aggs);
+ known_aggs, &size, NULL, NULL,
+ vec<inline_param_summary_t>());
+ known_vals.release ();
+ known_binfos.release ();
+ known_aggs.release ();
return size;
}
@@ -3555,18 +3503,16 @@ do_estimate_edge_hints (struct cgraph_edge *edge)
inline_hints hints;
struct cgraph_node *callee;
clause_t clause;
- VEC (tree, heap) *known_vals;
- VEC (tree, heap) *known_binfos;
- VEC (ipa_agg_jump_function_p, heap) *known_aggs;
+ vec<tree> known_vals;
+ vec<tree> known_binfos;
+ vec<ipa_agg_jump_function_p> known_aggs;
/* When we do caching, use do_estimate_edge_time to populate the entry. */
- if (edge_growth_cache)
+ if (edge_growth_cache.exists ())
{
do_estimate_edge_time (edge);
- hints = VEC_index (edge_growth_cache_entry,
- edge_growth_cache,
- edge->uid).hints;
+ hints = edge_growth_cache[edge->uid].hints;
gcc_checking_assert (hints);
return hints - 1;
}
@@ -3579,10 +3525,11 @@ do_estimate_edge_hints (struct cgraph_edge *edge)
&clause, &known_vals, &known_binfos,
&known_aggs);
estimate_node_size_and_time (callee, clause, known_vals, known_binfos,
- known_aggs, NULL, NULL, &hints, NULL);
- VEC_free (tree, heap, known_vals);
- VEC_free (tree, heap, known_binfos);
- VEC_free (ipa_agg_jump_function_p, heap, known_aggs);
+ known_aggs, NULL, NULL, &hints,
+ vec<inline_param_summary_t>());
+ known_vals.release ();
+ known_binfos.release ();
+ known_aggs.release ();
hints |= simple_edge_hints (edge);
return hints;
}
@@ -3687,12 +3634,11 @@ do_estimate_growth (struct cgraph_node *node)
+ 50) / 100;
}
- if (node_growth_cache)
+ if (node_growth_cache.exists ())
{
- if ((int)VEC_length (int, node_growth_cache) <= node->uid)
- VEC_safe_grow_cleared (int, heap, node_growth_cache, cgraph_max_uid);
- VEC_replace (int, node_growth_cache, node->uid,
- d.growth + (d.growth >= 0));
+ if ((int)node_growth_cache.length () <= node->uid)
+ node_growth_cache.safe_grow_cleared (cgraph_max_uid);
+ node_growth_cache[node->uid] = d.growth + (d.growth >= 0);
}
return d.growth;
}
@@ -3800,9 +3746,9 @@ read_inline_edge_summary (struct lto_input_block *ib, struct cgraph_edge *e)
length = streamer_read_uhwi (ib);
if (length)
{
- VEC_safe_grow_cleared (inline_param_summary_t, heap, es->param, length);
+ es->param.safe_grow_cleared (length);
for (i = 0; i < length; i++)
- VEC_index (inline_param_summary_t, es->param, i).change_prob
+ es->param[i].change_prob
= streamer_read_uhwi (ib);
}
}
@@ -3829,7 +3775,8 @@ inline_read_section (struct lto_file_decl_data *file_data, const char *data,
data_in =
lto_data_in_create (file_data, (const char *) data + string_offset,
- header->string_size, NULL);
+ header->string_size,
+ vec<ld_plugin_symbol_resolution_t>());
f_count = streamer_read_uhwi (&ib);
for (i = 0; i < f_count; i++)
{
@@ -3867,7 +3814,7 @@ inline_read_section (struct lto_file_decl_data *file_data, const char *data,
c.by_ref = bp_unpack_value (&bp, 1);
if (c.agg_contents)
c.offset = streamer_read_uhwi (&ib);
- VEC_safe_push (condition, gc, info->conds, c);
+ vec_safe_push (info->conds, c);
}
count2 = streamer_read_uhwi (&ib);
gcc_assert (!info->entry);
@@ -3879,7 +3826,7 @@ inline_read_section (struct lto_file_decl_data *file_data, const char *data,
e.time = streamer_read_uhwi (&ib);
e.predicate = read_predicate (&ib);
- VEC_safe_push (size_time_entry, gc, info->entry, e);
+ vec_safe_push (info->entry, e);
}
p = read_predicate (&ib);
@@ -3966,10 +3913,9 @@ write_inline_edge_summary (struct output_block *ob, struct cgraph_edge *e)
streamer_write_uhwi (ob, es->call_stmt_time);
streamer_write_uhwi (ob, es->loop_depth);
write_predicate (ob, es->predicate);
- streamer_write_uhwi (ob, VEC_length (inline_param_summary_t, es->param));
- for (i = 0; i < (int)VEC_length (inline_param_summary_t, es->param); i++)
- streamer_write_uhwi (ob, VEC_index (inline_param_summary_t,
- es->param, i).change_prob);
+ streamer_write_uhwi (ob, es->param.length ());
+ for (i = 0; i < (int)es->param.length (); i++)
+ streamer_write_uhwi (ob, es->param[i].change_prob);
}
@@ -4015,8 +3961,8 @@ inline_write_summary (void)
bp = bitpack_create (ob->main_stream);
bp_pack_value (&bp, info->inlinable, 1);
streamer_write_bitpack (&bp);
- streamer_write_uhwi (ob, VEC_length (condition, info->conds));
- for (i = 0; VEC_iterate (condition, info->conds, i, c); i++)
+ streamer_write_uhwi (ob, vec_safe_length (info->conds));
+ for (i = 0; vec_safe_iterate (info->conds, i, &c); i++)
{
streamer_write_uhwi (ob, c->operand_num);
streamer_write_uhwi (ob, c->code);
@@ -4028,10 +3974,8 @@ inline_write_summary (void)
if (c->agg_contents)
streamer_write_uhwi (ob, c->offset);
}
- streamer_write_uhwi (ob, VEC_length (size_time_entry, info->entry));
- for (i = 0;
- VEC_iterate (size_time_entry, info->entry, i, e);
- i++)
+ streamer_write_uhwi (ob, vec_safe_length (info->entry));
+ for (i = 0; vec_safe_iterate (info->entry, i, &e); i++)
{
streamer_write_uhwi (ob, e->size);
streamer_write_uhwi (ob, e->time);
@@ -4061,7 +4005,7 @@ void
inline_free_summary (void)
{
struct cgraph_node *node;
- if (inline_edge_summary_vec == NULL)
+ if (!inline_edge_summary_vec.exists ())
return;
FOR_EACH_DEFINED_FUNCTION (node)
reset_inline_summary (node);
@@ -4080,10 +4024,8 @@ inline_free_summary (void)
if (edge_duplication_hook_holder)
cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
edge_duplication_hook_holder = NULL;
- VEC_free (inline_summary_t, gc, inline_summary_vec);
- inline_summary_vec = NULL;
- VEC_free (inline_edge_summary_t, heap, inline_edge_summary_vec);
- inline_edge_summary_vec = NULL;
+ vec_free (inline_summary_vec);
+ inline_edge_summary_vec.release ();
if (edge_predicate_pool)
free_alloc_pool (edge_predicate_pool);
edge_predicate_pool = 0;
diff --git a/gcc/ipa-inline-transform.c b/gcc/ipa-inline-transform.c
index 766757ef6e7..19b2c855c75 100644
--- a/gcc/ipa-inline-transform.c
+++ b/gcc/ipa-inline-transform.c
@@ -171,7 +171,8 @@ clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
struct cgraph_node *n;
n = cgraph_clone_node (e->callee, e->callee->symbol.decl,
e->count, e->frequency,
- update_original, NULL, true);
+ update_original, vec<cgraph_edge_p>(),
+ true);
cgraph_redirect_edge_callee (e, n);
}
}
@@ -202,7 +203,7 @@ clone_inlined_nodes (struct cgraph_edge *e, bool duplicate,
bool
inline_call (struct cgraph_edge *e, bool update_original,
- VEC (cgraph_edge_p, heap) **new_edges,
+ vec<cgraph_edge_p> *new_edges,
int *overall_size, bool update_overall_summary)
{
int old_size = 0, new_size = 0;
@@ -351,7 +352,8 @@ save_inline_function_body (struct cgraph_node *node)
/* Copy the OLD_VERSION_NODE function tree to the new version. */
tree_function_versioning (node->symbol.decl, first_clone->symbol.decl,
- NULL, true, NULL, false, NULL, NULL);
+ NULL, true, NULL, false,
+ NULL, NULL);
/* The function will be short lived and removed after we inline all the clones,
but make it internal so we won't confuse ourself. */
@@ -359,9 +361,7 @@ save_inline_function_body (struct cgraph_node *node)
DECL_COMDAT_GROUP (first_clone->symbol.decl) = NULL_TREE;
TREE_PUBLIC (first_clone->symbol.decl) = 0;
DECL_COMDAT (first_clone->symbol.decl) = 0;
- VEC_free (ipa_opt_pass, heap,
- first_clone->ipa_transforms_to_apply);
- first_clone->ipa_transforms_to_apply = NULL;
+ first_clone->ipa_transforms_to_apply.release ();
/* When doing recursive inlining, the clone may become unnecessary.
This is possible i.e. in the case when the recursive function is proved to be
diff --git a/gcc/ipa-inline.c b/gcc/ipa-inline.c
index dd25ae33908..14e825609b2 100644
--- a/gcc/ipa-inline.c
+++ b/gcc/ipa-inline.c
@@ -1223,7 +1223,7 @@ lookup_recursive_calls (struct cgraph_node *node, struct cgraph_node *where,
static bool
recursive_inlining (struct cgraph_edge *edge,
- VEC (cgraph_edge_p, heap) **new_edges)
+ vec<cgraph_edge_p> *new_edges)
{
int limit = PARAM_VALUE (PARAM_MAX_INLINE_INSNS_RECURSIVE_AUTO);
fibheap_t heap;
@@ -1313,7 +1313,8 @@ recursive_inlining (struct cgraph_edge *edge,
/* We need original clone to copy around. */
master_clone = cgraph_clone_node (node, node->symbol.decl,
node->count, CGRAPH_FREQ_BASE,
- false, NULL, true);
+ false, vec<cgraph_edge_p>(),
+ true);
for (e = master_clone->callees; e; e = e->next_callee)
if (!e->inline_failed)
clone_inlined_nodes (e, true, false, NULL);
@@ -1373,11 +1374,11 @@ compute_max_insns (int insns)
/* Compute badness of all edges in NEW_EDGES and add them to the HEAP. */
static void
-add_new_edges_to_heap (fibheap_t heap, VEC (cgraph_edge_p, heap) *new_edges)
+add_new_edges_to_heap (fibheap_t heap, vec<cgraph_edge_p> new_edges)
{
- while (VEC_length (cgraph_edge_p, new_edges) > 0)
+ while (new_edges.length () > 0)
{
- struct cgraph_edge *edge = VEC_pop (cgraph_edge_p, new_edges);
+ struct cgraph_edge *edge = new_edges.pop ();
gcc_assert (!edge->aux);
if (edge->inline_failed
@@ -1402,12 +1403,12 @@ inline_small_functions (void)
fibheap_t edge_heap = fibheap_new ();
bitmap updated_nodes = BITMAP_ALLOC (NULL);
int min_size, max_size;
- VEC (cgraph_edge_p, heap) *new_indirect_edges = NULL;
+ vec<cgraph_edge_p> new_indirect_edges = vec<cgraph_edge_p>();
int initial_size = 0;
struct cgraph_node **order = XCNEWVEC (struct cgraph_node *, cgraph_n_nodes);
if (flag_indirect_inlining)
- new_indirect_edges = VEC_alloc (cgraph_edge_p, heap, 8);
+ new_indirect_edges.create (8);
/* Compute overall unit size and other global parameters used by badness
metrics. */
@@ -1654,8 +1655,7 @@ inline_small_functions (void)
}
free_growth_caches ();
- if (new_indirect_edges)
- VEC_free (cgraph_edge_p, heap, new_indirect_edges);
+ new_indirect_edges.release ();
fibheap_delete (edge_heap);
if (dump_file)
fprintf (dump_file,
@@ -1994,7 +1994,7 @@ early_inliner (void)
it. This may confuse ourself when early inliner decide to inline call to
function clone, because function clones don't have parameter list in
ipa-prop matching their signature. */
- if (ipa_node_params_vector)
+ if (ipa_node_params_vector.exists ())
return 0;
#ifdef ENABLE_CHECKING
diff --git a/gcc/ipa-inline.h b/gcc/ipa-inline.h
index 1e9658c3b96..729ea5698d7 100644
--- a/gcc/ipa-inline.h
+++ b/gcc/ipa-inline.h
@@ -73,10 +73,8 @@ enum inline_hints_vals {
};
typedef int inline_hints;
-DEF_VEC_O (condition);
-DEF_VEC_ALLOC_O (condition, gc);
-typedef VEC(condition,gc) *conditions;
+typedef vec<condition, va_gc> *conditions;
/* Representation of predicates i.e. formulas using conditions defined
above. Predicates are simple logical formulas in conjunctive-disjunctive
@@ -108,8 +106,6 @@ typedef struct GTY(()) size_time_entry
int size;
int time;
} size_time_entry;
-DEF_VEC_O (size_time_entry);
-DEF_VEC_ALLOC_O (size_time_entry, gc);
/* Function inlining information. */
struct GTY(()) inline_summary
@@ -141,7 +137,7 @@ struct GTY(()) inline_summary
/* Conditional size/time information. The summaries are being
merged during inlining. */
conditions conds;
- VEC(size_time_entry,gc) *entry;
+ vec<size_time_entry, va_gc> *entry;
/* Predicate on when some loop in the function becomes to have known
bounds. */
@@ -163,9 +159,7 @@ struct GTY(()) inline_summary
typedef struct inline_summary inline_summary_t;
-DEF_VEC_O(inline_summary_t);
-DEF_VEC_ALLOC_O(inline_summary_t,gc);
-extern GTY(()) VEC(inline_summary_t,gc) *inline_summary_vec;
+extern GTY(()) vec<inline_summary_t, va_gc> *inline_summary_vec;
/* Information kept about parameter of call site. */
struct inline_param_summary
@@ -180,8 +174,6 @@ struct inline_param_summary
int change_prob;
};
typedef struct inline_param_summary inline_param_summary_t;
-DEF_VEC_O(inline_param_summary_t);
-DEF_VEC_ALLOC_O(inline_param_summary_t,heap);
/* Information kept about callgraph edges. */
struct inline_edge_summary
@@ -195,24 +187,20 @@ struct inline_edge_summary
/* Array indexed by parameters.
0 means that parameter change all the time, REG_BR_PROB_BASE means
that parameter is constant. */
- VEC (inline_param_summary_t, heap) *param;
+ vec<inline_param_summary_t> param;
};
typedef struct inline_edge_summary inline_edge_summary_t;
-DEF_VEC_O(inline_edge_summary_t);
-DEF_VEC_ALLOC_O(inline_edge_summary_t,heap);
-extern VEC(inline_edge_summary_t,heap) *inline_edge_summary_vec;
+extern vec<inline_edge_summary_t> inline_edge_summary_vec;
typedef struct edge_growth_cache_entry
{
int time, size;
inline_hints hints;
} edge_growth_cache_entry;
-DEF_VEC_O(edge_growth_cache_entry);
-DEF_VEC_ALLOC_O(edge_growth_cache_entry,heap);
-extern VEC(int,heap) *node_growth_cache;
-extern VEC(edge_growth_cache_entry,heap) *edge_growth_cache;
+extern vec<int> node_growth_cache;
+extern vec<edge_growth_cache_entry> edge_growth_cache;
/* In ipa-inline-analysis.c */
void debug_inline_summary (struct cgraph_node *);
@@ -227,8 +215,8 @@ void initialize_inline_failed (struct cgraph_edge *);
int estimate_time_after_inlining (struct cgraph_node *, struct cgraph_edge *);
int estimate_size_after_inlining (struct cgraph_node *, struct cgraph_edge *);
void estimate_ipcp_clone_size_and_time (struct cgraph_node *,
- VEC (tree, heap) *, VEC (tree, heap) *,
- VEC (ipa_agg_jump_function_p, heap) *,
+ vec<tree>, vec<tree>,
+ vec<ipa_agg_jump_function_p>,
int *, int *, inline_hints *);
int do_estimate_growth (struct cgraph_node *);
void inline_merge_summary (struct cgraph_edge *edge);
@@ -241,7 +229,7 @@ void free_growth_caches (void);
void compute_inline_parameters (struct cgraph_node *, bool);
/* In ipa-inline-transform.c */
-bool inline_call (struct cgraph_edge *, bool, VEC (cgraph_edge_p, heap) **, int *, bool);
+bool inline_call (struct cgraph_edge *, bool, vec<cgraph_edge_p> *, int *, bool);
unsigned int inline_transform (struct cgraph_node *);
void clone_inlined_nodes (struct cgraph_edge *e, bool, bool, int *);
@@ -251,14 +239,13 @@ extern int nfunctions_inlined;
static inline struct inline_summary *
inline_summary (struct cgraph_node *node)
{
- return &VEC_index (inline_summary_t, inline_summary_vec, node->uid);
+ return &(*inline_summary_vec)[node->uid];
}
static inline struct inline_edge_summary *
inline_edge_summary (struct cgraph_edge *edge)
{
- return &VEC_index (inline_edge_summary_t,
- inline_edge_summary_vec, edge->uid);
+ return &inline_edge_summary_vec[edge->uid];
}
/* Return estimated unit growth after inlning all calls to NODE.
@@ -270,8 +257,8 @@ static inline int
estimate_growth (struct cgraph_node *node)
{
int ret;
- if ((int)VEC_length (int, node_growth_cache) <= node->uid
- || !(ret = VEC_index (int, node_growth_cache, node->uid)))
+ if ((int)node_growth_cache.length () <= node->uid
+ || !(ret = node_growth_cache[node->uid]))
return do_estimate_growth (node);
return ret - (ret > 0);
}
@@ -283,10 +270,8 @@ static inline int
estimate_edge_size (struct cgraph_edge *edge)
{
int ret;
- if ((int)VEC_length (edge_growth_cache_entry, edge_growth_cache) <= edge->uid
- || !(ret = VEC_index (edge_growth_cache_entry,
- edge_growth_cache,
- edge->uid).size))
+ if ((int)edge_growth_cache.length () <= edge->uid
+ || !(ret = edge_growth_cache[edge->uid].size))
return do_estimate_edge_size (edge);
return ret - (ret > 0);
}
@@ -310,10 +295,8 @@ static inline int
estimate_edge_time (struct cgraph_edge *edge)
{
int ret;
- if ((int)VEC_length (edge_growth_cache_entry, edge_growth_cache) <= edge->uid
- || !(ret = VEC_index (edge_growth_cache_entry,
- edge_growth_cache,
- edge->uid).time))
+ if ((int)edge_growth_cache.length () <= edge->uid
+ || !(ret = edge_growth_cache[edge->uid].time))
return do_estimate_edge_time (edge);
return ret - (ret > 0);
}
@@ -326,10 +309,8 @@ static inline inline_hints
estimate_edge_hints (struct cgraph_edge *edge)
{
inline_hints ret;
- if ((int)VEC_length (edge_growth_cache_entry, edge_growth_cache) <= edge->uid
- || !(ret = VEC_index (edge_growth_cache_entry,
- edge_growth_cache,
- edge->uid).hints))
+ if ((int)edge_growth_cache.length () <= edge->uid
+ || !(ret = edge_growth_cache[edge->uid].hints))
return do_estimate_edge_hints (edge);
return ret - 1;
}
@@ -340,8 +321,8 @@ estimate_edge_hints (struct cgraph_edge *edge)
static inline void
reset_node_growth_cache (struct cgraph_node *node)
{
- if ((int)VEC_length (int, node_growth_cache) > node->uid)
- VEC_replace (int, node_growth_cache, node->uid, 0);
+ if ((int)node_growth_cache.length () > node->uid)
+ node_growth_cache[node->uid] = 0;
}
/* Reset cached value for EDGE. */
@@ -349,9 +330,9 @@ reset_node_growth_cache (struct cgraph_node *node)
static inline void
reset_edge_growth_cache (struct cgraph_edge *edge)
{
- if ((int)VEC_length (edge_growth_cache_entry, edge_growth_cache) > edge->uid)
+ if ((int)edge_growth_cache.length () > edge->uid)
{
struct edge_growth_cache_entry zero = {0, 0, 0};
- VEC_replace (edge_growth_cache_entry, edge_growth_cache, edge->uid, zero);
+ edge_growth_cache[edge->uid] = zero;
}
}
diff --git a/gcc/ipa-prop.c b/gcc/ipa-prop.c
index b78dc65dbc7..3150bd6db15 100644
--- a/gcc/ipa-prop.c
+++ b/gcc/ipa-prop.c
@@ -50,11 +50,11 @@ struct param_analysis_info
};
/* Vector where the parameter infos are actually stored. */
-VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
+vec<ipa_node_params_t> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes. */
-VEC (ipa_agg_replacement_value_p, gc) *ipa_node_agg_replacements;
+vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the parameter infos are actually stored. */
-VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
+vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
@@ -67,14 +67,13 @@ static struct cgraph_node_hook_list *function_insertion_hook_holder;
to INFO. */
static int
-ipa_get_param_decl_index_1 (VEC (ipa_param_descriptor_t, heap) *descriptors,
- tree ptree)
+ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
{
int i, count;
- count = VEC_length (ipa_param_descriptor_t, descriptors);
+ count = descriptors.length ();
for (i = 0; i < count; i++)
- if (VEC_index (ipa_param_descriptor_t, descriptors, i).decl == ptree)
+ if (descriptors[i].decl == ptree)
return i;
return -1;
@@ -94,7 +93,7 @@ ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
static void
ipa_populate_param_decls (struct cgraph_node *node,
- VEC (ipa_param_descriptor_t, heap) *descriptors)
+ vec<ipa_param_descriptor_t> &descriptors)
{
tree fndecl;
tree fnargs;
@@ -106,7 +105,7 @@ ipa_populate_param_decls (struct cgraph_node *node,
param_num = 0;
for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
{
- VEC_index (ipa_param_descriptor_t, descriptors, param_num).decl = parm;
+ descriptors[param_num].decl = parm;
param_num++;
}
}
@@ -134,15 +133,14 @@ ipa_initialize_node_params (struct cgraph_node *node)
{
struct ipa_node_params *info = IPA_NODE_REF (node);
- if (!info->descriptors)
+ if (!info->descriptors.exists ())
{
int param_count;
param_count = count_formal_params (node->symbol.decl);
if (param_count)
{
- VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
- info->descriptors, param_count);
+ info->descriptors.safe_grow_cleared (param_count);
ipa_populate_param_decls (node, info->descriptors);
}
}
@@ -226,8 +224,7 @@ ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
fprintf (f, " Aggregate passed by %s:\n",
jump_func->agg.by_ref ? "reference" : "value");
- FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items,
- j, item)
+ FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
{
fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
item->offset);
@@ -672,7 +669,7 @@ parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
modified. Otherwise return -1. */
static int
-load_from_unmodified_param (VEC (ipa_param_descriptor_t, heap) *descriptors,
+load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
struct param_analysis_info *parms_ainfo,
gimple stmt)
{
@@ -760,7 +757,7 @@ parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
reference respectively. */
static bool
-ipa_load_from_parm_agg_1 (VEC (ipa_param_descriptor_t, heap) *descriptors,
+ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
struct param_analysis_info *parms_ainfo, gimple stmt,
tree op, int *index_p, HOST_WIDE_INT *offset_p,
bool *by_ref_p)
@@ -1381,7 +1378,7 @@ determine_known_aggregate_parts (gimple call, tree arg,
if (const_count)
{
jfunc->agg.by_ref = by_ref;
- jfunc->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, const_count);
+ vec_alloc (jfunc->agg.items, const_count);
while (list)
{
if (list->constant)
@@ -1389,7 +1386,7 @@ determine_known_aggregate_parts (gimple call, tree arg,
struct ipa_agg_jf_item item;
item.offset = list->offset - arg_offset;
item.value = prune_expression_for_jf (list->constant);
- VEC_quick_push (ipa_agg_jf_item_t, jfunc->agg.items, item);
+ jfunc->agg.items->quick_push (item);
}
list = list->next;
}
@@ -1411,7 +1408,7 @@ ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
if (arg_num == 0 || args->jump_functions)
return;
- VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, arg_num);
+ vec_safe_grow_cleared (args->jump_functions, arg_num);
for (n = 0; n < arg_num; n++)
{
@@ -2050,9 +2047,9 @@ update_jump_functions_after_inlining (struct cgraph_edge *cs,
replace with merging when we do. */
gcc_assert (!dst->agg.items);
- dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc, src->agg.items);
+ dst->agg.items = vec_safe_copy (src->agg.items);
dst->agg.by_ref = src->agg.by_ref;
- FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, dst->agg.items, j, item)
+ FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
item->offset -= dst->value.ancestor.offset;
}
@@ -2100,8 +2097,7 @@ update_jump_functions_after_inlining (struct cgraph_edge *cs,
gcc_assert (!dst->agg.items);
dst->agg.by_ref = src->agg.by_ref;
- dst->agg.items = VEC_copy (ipa_agg_jf_item_t, gc,
- src->agg.items);
+ dst->agg.items = vec_safe_copy (src->agg.items);
}
if (!agg_p)
@@ -2177,7 +2173,7 @@ ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
if (by_ref != agg->by_ref)
return NULL;
- FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, agg->items, i, item)
+ FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
if (item->offset == offset)
{
/* Currently we do not have clobber values, return NULL for them once
@@ -2256,7 +2252,7 @@ try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
struct cgraph_node *node,
- VEC (cgraph_edge_p, heap) **new_edges)
+ vec<cgraph_edge_p> *new_edges)
{
struct ipa_edge_args *top;
struct cgraph_edge *ie, *next_ie, *new_direct_edge;
@@ -2326,8 +2322,7 @@ update_indirect_edges_after_inlining (struct cgraph_edge *cs,
new_direct_edge->callee->symbol.decl);
if (new_edges)
{
- VEC_safe_push (cgraph_edge_p, heap, *new_edges,
- new_direct_edge);
+ new_edges->safe_push (new_direct_edge);
top = IPA_EDGE_REF (cs);
res = true;
}
@@ -2348,7 +2343,7 @@ update_indirect_edges_after_inlining (struct cgraph_edge *cs,
static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
struct cgraph_node *node,
- VEC (cgraph_edge_p, heap) **new_edges)
+ vec<cgraph_edge_p> *new_edges)
{
struct cgraph_edge *e;
bool res;
@@ -2374,12 +2369,12 @@ propagate_info_to_inlined_callees (struct cgraph_edge *cs,
bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
- VEC (cgraph_edge_p, heap) **new_edges)
+ vec<cgraph_edge_p> *new_edges)
{
bool changed;
/* Do nothing if the preparation phase has not been carried out yet
(i.e. during early inlining). */
- if (!ipa_node_params_vector)
+ if (!ipa_node_params_vector.exists ())
return false;
gcc_assert (ipa_edge_args_vector);
@@ -2397,9 +2392,7 @@ ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
- if (args->jump_functions)
- ggc_free (args->jump_functions);
-
+ vec_free (args->jump_functions);
memset (args, 0, sizeof (*args));
}
@@ -2411,11 +2404,13 @@ ipa_free_all_edge_args (void)
int i;
struct ipa_edge_args *args;
- FOR_EACH_VEC_ELT (ipa_edge_args_t, ipa_edge_args_vector, i, args)
+ if (!ipa_edge_args_vector)
+ return;
+
+ FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
ipa_free_edge_args_substructures (args);
- VEC_free (ipa_edge_args_t, gc, ipa_edge_args_vector);
- ipa_edge_args_vector = NULL;
+ vec_free (ipa_edge_args_vector);
}
/* Frees all dynamically allocated structures that the param info points
@@ -2424,11 +2419,11 @@ ipa_free_all_edge_args (void)
void
ipa_free_node_params_substructures (struct ipa_node_params *info)
{
- VEC_free (ipa_param_descriptor_t, heap, info->descriptors);
+ info->descriptors.release ();
free (info->lattices);
/* Lattice values and their sources are deallocated with their alocation
pool. */
- VEC_free (tree, heap, info->known_vals);
+ info->known_vals.release ();
memset (info, 0, sizeof (*info));
}
@@ -2440,11 +2435,10 @@ ipa_free_all_node_params (void)
int i;
struct ipa_node_params *info;
- FOR_EACH_VEC_ELT (ipa_node_params_t, ipa_node_params_vector, i, info)
+ FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
ipa_free_node_params_substructures (info);
- VEC_free (ipa_node_params_t, heap, ipa_node_params_vector);
- ipa_node_params_vector = NULL;
+ ipa_node_params_vector.release ();
}
/* Set the aggregate replacements of NODE to be AGGVALS. */
@@ -2453,13 +2447,10 @@ void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
struct ipa_agg_replacement_value *aggvals)
{
- if (VEC_length (ipa_agg_replacement_value_p, ipa_node_agg_replacements)
- <= (unsigned) cgraph_max_uid)
- VEC_safe_grow_cleared (ipa_agg_replacement_value_p, gc,
- ipa_node_agg_replacements, cgraph_max_uid + 1);
+ if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
+ vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
- VEC_replace (ipa_agg_replacement_value_p, ipa_node_agg_replacements,
- node->uid, aggvals);
+ (*ipa_node_agg_replacements)[node->uid] = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed. */
@@ -2468,8 +2459,7 @@ static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
/* During IPA-CP updating we can be called on not-yet analyze clones. */
- if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
- <= (unsigned)cs->uid)
+ if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
return;
ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
@@ -2480,13 +2470,10 @@ static void
ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
/* During IPA-CP updating we can be called on not-yet analyze clones. */
- if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
- > (unsigned)node->uid)
+ if (ipa_node_params_vector.length () > (unsigned)node->uid)
ipa_free_node_params_substructures (IPA_NODE_REF (node));
- if (VEC_length (ipa_agg_replacement_value_p, ipa_node_agg_replacements)
- > (unsigned)node->uid)
- VEC_replace (ipa_agg_replacement_value_p, ipa_node_agg_replacements,
- (unsigned)node->uid, NULL);
+ if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
+ (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
}
/* Hook that is called by cgraph.c when an edge is duplicated. */
@@ -2503,14 +2490,11 @@ ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
old_args = IPA_EDGE_REF (src);
new_args = IPA_EDGE_REF (dst);
- new_args->jump_functions = VEC_copy (ipa_jump_func_t, gc,
- old_args->jump_functions);
+ new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
- for (i = 0; i < VEC_length (ipa_jump_func_t, old_args->jump_functions); i++)
- VEC_index (ipa_jump_func_t, new_args->jump_functions, i).agg.items
- = VEC_copy (ipa_agg_jf_item_t, gc,
- VEC_index (ipa_jump_func_t,
- old_args->jump_functions, i).agg.items);
+ for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
+ (*new_args->jump_functions)[i].agg.items
+ = vec_safe_copy ((*old_args->jump_functions)[i].agg.items);
}
/* Hook that is called by cgraph.c when a node is duplicated. */
@@ -2526,8 +2510,7 @@ ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
old_info = IPA_NODE_REF (src);
new_info = IPA_NODE_REF (dst);
- new_info->descriptors = VEC_copy (ipa_param_descriptor_t, heap,
- old_info->descriptors);
+ new_info->descriptors = old_info->descriptors.copy ();
new_info->lattices = NULL;
new_info->ipcp_orig_node = old_info->ipcp_orig_node;
@@ -2678,17 +2661,17 @@ ipa_print_all_params (FILE * f)
/* Return a heap allocated vector containing formal parameters of FNDECL. */
-VEC(tree, heap) *
+vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
- VEC(tree, heap) *args;
+ vec<tree> args;
int count;
tree parm;
count = count_formal_params (fndecl);
- args = VEC_alloc (tree, heap, count);
+ args.create (count);
for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
- VEC_quick_push (tree, args, parm);
+ args.quick_push (parm);
return args;
}
@@ -2696,19 +2679,19 @@ ipa_get_vector_of_formal_parms (tree fndecl)
/* Return a heap allocated vector containing types of formal parameters of
function type FNTYPE. */
-static inline VEC(tree, heap) *
+static inline vec<tree>
get_vector_of_formal_parm_types (tree fntype)
{
- VEC(tree, heap) *types;
+ vec<tree> types;
int count = 0;
tree t;
for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
count++;
- types = VEC_alloc (tree, heap, count);
+ types.create (count);
for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
- VEC_quick_push (tree, types, TREE_VALUE (t));
+ types.quick_push (TREE_VALUE (t));
return types;
}
@@ -2722,11 +2705,11 @@ void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
const char *synth_parm_prefix)
{
- VEC(tree, heap) *oparms, *otypes;
+ vec<tree> oparms, otypes;
tree orig_type, new_type = NULL;
tree old_arg_types, t, new_arg_types = NULL;
tree parm, *link = &DECL_ARGUMENTS (fndecl);
- int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
+ int i, len = adjustments.length ();
tree new_reversed = NULL;
bool care_for_types, last_parm_void;
@@ -2746,14 +2729,14 @@ ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
== void_type_node);
otypes = get_vector_of_formal_parm_types (orig_type);
if (last_parm_void)
- gcc_assert (VEC_length (tree, oparms) + 1 == VEC_length (tree, otypes));
+ gcc_assert (oparms.length () + 1 == otypes.length ());
else
- gcc_assert (VEC_length (tree, oparms) == VEC_length (tree, otypes));
+ gcc_assert (oparms.length () == otypes.length ());
}
else
{
last_parm_void = false;
- otypes = NULL;
+ otypes.create (0);
}
for (i = 0; i < len; i++)
@@ -2761,15 +2744,14 @@ ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
struct ipa_parm_adjustment *adj;
gcc_assert (link);
- adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
- parm = VEC_index (tree, oparms, adj->base_index);
+ adj = &adjustments[i];
+ parm = oparms[adj->base_index];
adj->base = parm;
if (adj->copy_param)
{
if (care_for_types)
- new_arg_types = tree_cons (NULL_TREE, VEC_index (tree, otypes,
- adj->base_index),
+ new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
new_arg_types);
*link = parm;
link = &DECL_CHAIN (parm);
@@ -2827,8 +2809,8 @@ ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
When we are asked to remove it, we need to build new FUNCTION_TYPE
instead. */
if (TREE_CODE (orig_type) != METHOD_TYPE
- || (VEC_index (ipa_parm_adjustment_t, adjustments, 0).copy_param
- && VEC_index (ipa_parm_adjustment_t, adjustments, 0).base_index == 0))
+ || (adjustments[0].copy_param
+ && adjustments[0].base_index == 0))
{
new_type = build_distinct_type_copy (orig_type);
TYPE_ARG_TYPES (new_type) = new_reversed;
@@ -2866,9 +2848,8 @@ ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
TREE_TYPE (fndecl) = new_type;
DECL_VIRTUAL_P (fndecl) = 0;
- if (otypes)
- VEC_free (tree, heap, otypes);
- VEC_free (tree, heap, oparms);
+ otypes.release ();
+ oparms.release ();
}
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
@@ -2879,15 +2860,15 @@ void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
ipa_parm_adjustment_vec adjustments)
{
- VEC(tree, heap) *vargs;
- VEC(tree, gc) **debug_args = NULL;
+ vec<tree> vargs;
+ vec<tree, va_gc> **debug_args = NULL;
gimple new_stmt;
gimple_stmt_iterator gsi;
tree callee_decl;
int i, len;
- len = VEC_length (ipa_parm_adjustment_t, adjustments);
- vargs = VEC_alloc (tree, heap, len);
+ len = adjustments.length ();
+ vargs.create (len);
callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
gsi = gsi_for_stmt (stmt);
@@ -2895,13 +2876,13 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
{
struct ipa_parm_adjustment *adj;
- adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &adjustments[i];
if (adj->copy_param)
{
tree arg = gimple_call_arg (stmt, adj->base_index);
- VEC_quick_push (tree, vargs, arg);
+ vargs.quick_push (arg);
}
else if (!adj->remove_param)
{
@@ -2996,7 +2977,7 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
adj->by_ref
|| is_gimple_reg_type (adj->type),
NULL, true, GSI_SAME_STMT);
- VEC_quick_push (tree, vargs, expr);
+ vargs.quick_push (expr);
}
if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
{
@@ -3014,10 +2995,10 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
}
if (debug_args == NULL)
debug_args = decl_debug_args_insert (callee_decl);
- for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl); ix += 2)
+ for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
if (ddecl == origin)
{
- ddecl = VEC_index (tree, *debug_args, ix + 1);
+ ddecl = (**debug_args)[ix + 1];
break;
}
if (ddecl == NULL)
@@ -3027,11 +3008,10 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
TREE_TYPE (ddecl) = TREE_TYPE (origin);
DECL_MODE (ddecl) = DECL_MODE (origin);
- VEC_safe_push (tree, gc, *debug_args, origin);
- VEC_safe_push (tree, gc, *debug_args, ddecl);
+ vec_safe_push (*debug_args, origin);
+ vec_safe_push (*debug_args, ddecl);
}
- def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
- stmt);
+ def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
}
}
@@ -3043,7 +3023,7 @@ ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
}
new_stmt = gimple_build_call_vec (callee_decl, vargs);
- VEC_free (tree, heap, vargs);
+ vargs.release ();
if (gimple_call_lhs (stmt))
gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
@@ -3072,13 +3052,13 @@ static bool
index_in_adjustments_multiple_times_p (int base_index,
ipa_parm_adjustment_vec adjustments)
{
- int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
+ int i, len = adjustments.length ();
bool one = false;
for (i = 0; i < len; i++)
{
struct ipa_parm_adjustment *adj;
- adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &adjustments[i];
if (adj->base_index == base_index)
{
@@ -3100,31 +3080,29 @@ ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
ipa_parm_adjustment_vec outer)
{
- int i, outlen = VEC_length (ipa_parm_adjustment_t, outer);
- int inlen = VEC_length (ipa_parm_adjustment_t, inner);
+ int i, outlen = outer.length ();
+ int inlen = inner.length ();
int removals = 0;
ipa_parm_adjustment_vec adjustments, tmp;
- tmp = VEC_alloc (ipa_parm_adjustment_t, heap, inlen);
+ tmp.create (inlen);
for (i = 0; i < inlen; i++)
{
struct ipa_parm_adjustment *n;
- n = &VEC_index (ipa_parm_adjustment_t, inner, i);
+ n = &inner[i];
if (n->remove_param)
removals++;
else
- VEC_quick_push (ipa_parm_adjustment_t, tmp, *n);
+ tmp.quick_push (*n);
}
- adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, outlen + removals);
+ adjustments.create (outlen + removals);
for (i = 0; i < outlen; i++)
{
struct ipa_parm_adjustment r;
- struct ipa_parm_adjustment *out = &VEC_index (ipa_parm_adjustment_t,
- outer, i);
- struct ipa_parm_adjustment *in = &VEC_index (ipa_parm_adjustment_t, tmp,
- out->base_index);
+ struct ipa_parm_adjustment *out = &outer[i];
+ struct ipa_parm_adjustment *in = &tmp[out->base_index];
memset (&r, 0, sizeof (r));
gcc_assert (!in->remove_param);
@@ -3133,7 +3111,7 @@ ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
{
r.remove_param = true;
- VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
+ adjustments.quick_push (r);
}
continue;
}
@@ -3151,19 +3129,18 @@ ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
r.offset = in->offset;
else
r.offset = in->offset + out->offset;
- VEC_quick_push (ipa_parm_adjustment_t, adjustments, r);
+ adjustments.quick_push (r);
}
for (i = 0; i < inlen; i++)
{
- struct ipa_parm_adjustment *n = &VEC_index (ipa_parm_adjustment_t,
- inner, i);
+ struct ipa_parm_adjustment *n = &inner[i];
if (n->remove_param)
- VEC_quick_push (ipa_parm_adjustment_t, adjustments, *n);
+ adjustments.quick_push (*n);
}
- VEC_free (ipa_parm_adjustment_t, heap, tmp);
+ tmp.release ();
return adjustments;
}
@@ -3174,15 +3151,15 @@ void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
tree fndecl)
{
- int i, len = VEC_length (ipa_parm_adjustment_t, adjustments);
+ int i, len = adjustments.length ();
bool first = true;
- VEC(tree, heap) *parms = ipa_get_vector_of_formal_parms (fndecl);
+ vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
fprintf (file, "IPA param adjustments: ");
for (i = 0; i < len; i++)
{
struct ipa_parm_adjustment *adj;
- adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &adjustments[i];
if (!first)
fprintf (file, " ");
@@ -3190,7 +3167,7 @@ ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
first = false;
fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
- print_generic_expr (file, VEC_index (tree, parms, adj->base_index), 0);
+ print_generic_expr (file, parms[adj->base_index], 0);
if (adj->base)
{
fprintf (file, ", base: ");
@@ -3218,7 +3195,7 @@ ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
print_node_brief (file, ", type: ", adj->type, 0);
fprintf (file, "\n");
}
- VEC_free (tree, heap, parms);
+ parms.release ();
}
/* Dump the AV linked list. */
@@ -3281,7 +3258,7 @@ ipa_write_jump_function (struct output_block *ob,
break;
}
- count = VEC_length (ipa_agg_jf_item_t, jump_func->agg.items);
+ count = vec_safe_length (jump_func->agg.items);
streamer_write_uhwi (ob, count);
if (count)
{
@@ -3290,7 +3267,7 @@ ipa_write_jump_function (struct output_block *ob,
streamer_write_bitpack (&bp);
}
- FOR_EACH_VEC_ELT (ipa_agg_jf_item_t, jump_func->agg.items, i, item)
+ FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
{
streamer_write_uhwi (ob, item->offset);
stream_write_tree (ob, item->value, true);
@@ -3339,7 +3316,7 @@ ipa_read_jump_function (struct lto_input_block *ib,
}
count = streamer_read_uhwi (ib);
- jump_func->agg.items = VEC_alloc (ipa_agg_jf_item_t, gc, count);
+ vec_alloc (jump_func->agg.items, count);
if (count)
{
bp = streamer_read_bitpack (ib);
@@ -3350,7 +3327,7 @@ ipa_read_jump_function (struct lto_input_block *ib,
struct ipa_agg_jf_item item;
item.offset = streamer_read_uhwi (ib);
item.value = stream_read_tree (ib, data_in);
- VEC_quick_push (ipa_agg_jf_item_t, jump_func->agg.items, item);
+ jump_func->agg.items->quick_push (item);
}
}
@@ -3472,7 +3449,7 @@ ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
if (!count)
continue;
- VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions, count);
+ vec_safe_grow_cleared (args->jump_functions, count);
for (k = 0; k < ipa_get_cs_argument_count (args); k++)
ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
@@ -3484,8 +3461,7 @@ ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
if (count)
{
- VEC_safe_grow_cleared (ipa_jump_func_t, gc, args->jump_functions,
- count);
+ vec_safe_grow_cleared (args->jump_functions, count);
for (k = 0; k < ipa_get_cs_argument_count (args); k++)
ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
data_in);
@@ -3506,7 +3482,7 @@ ipa_prop_write_jump_functions (void)
lto_symtab_encoder_t encoder;
- if (!ipa_node_params_vector)
+ if (!ipa_node_params_vector.exists ())
return;
ob = create_output_block (LTO_section_jump_functions);
@@ -3558,7 +3534,8 @@ ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
data_in =
lto_data_in_create (file_data, (const char *) data + string_offset,
- header->string_size, NULL);
+ header->string_size,
+ vec<ld_plugin_symbol_resolution_t>());
count = streamer_read_uhwi (&ib_main);
for (i = 0; i < count; i++)
@@ -3731,7 +3708,8 @@ read_replacements_section (struct lto_file_decl_data *file_data,
header->main_size);
data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
- header->string_size, NULL);
+ header->string_size,
+ vec<ld_plugin_symbol_resolution>());
count = streamer_read_uhwi (&ib_main);
for (i = 0; i < count; i++)
@@ -3812,7 +3790,7 @@ adjust_agg_replacement_values (struct cgraph_node *node,
unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
- VEC (ipa_param_descriptor_t, heap) *descriptors = NULL;
+ vec<ipa_param_descriptor_t> descriptors = vec<ipa_param_descriptor_t>();
struct param_analysis_info *parms_ainfo;
struct ipa_agg_replacement_value *aggval;
gimple_stmt_iterator gsi;
@@ -3838,8 +3816,7 @@ ipcp_transform_function (struct cgraph_node *node)
ipa_dump_agg_replacement_values (dump_file, aggval);
parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
- VEC_safe_grow_cleared (ipa_param_descriptor_t, heap,
- descriptors, param_count);
+ descriptors.safe_grow_cleared (param_count);
ipa_populate_param_decls (node, descriptors);
FOR_EACH_BB (bb)
@@ -3929,10 +3906,9 @@ ipcp_transform_function (struct cgraph_node *node)
cfg_changed = true;
}
- VEC_replace (ipa_agg_replacement_value_p, ipa_node_agg_replacements,
- node->uid, NULL);
+ (*ipa_node_agg_replacements)[node->uid] = NULL;
free_parms_ainfo (parms_ainfo, param_count);
- VEC_free (ipa_param_descriptor_t, heap, descriptors);
+ descriptors.release ();
if (!something_changed)
return 0;
diff --git a/gcc/ipa-prop.h b/gcc/ipa-prop.h
index da6200fbaa4..1d0a9927fd0 100644
--- a/gcc/ipa-prop.h
+++ b/gcc/ipa-prop.h
@@ -142,9 +142,6 @@ typedef struct GTY(()) ipa_agg_jf_item
tree value;
} ipa_agg_jf_item_t;
-DEF_VEC_O (ipa_agg_jf_item_t);
-DEF_VEC_ALLOC_O (ipa_agg_jf_item_t, gc);
-DEF_VEC_ALLOC_O (ipa_agg_jf_item_t, heap);
/* Aggregate jump function - i.e. description of contents of aggregates passed
either by reference or value. */
@@ -152,17 +149,13 @@ DEF_VEC_ALLOC_O (ipa_agg_jf_item_t, heap);
struct GTY(()) ipa_agg_jump_function
{
/* Description of the individual items. */
- VEC (ipa_agg_jf_item_t, gc) *items;
+ vec<ipa_agg_jf_item_t, va_gc> *items;
/* True if the data was passed by reference (as opposed to by value). */
bool by_ref;
};
typedef struct ipa_agg_jump_function *ipa_agg_jump_function_p;
-DEF_VEC_P (ipa_agg_jump_function_p);
-DEF_VEC_ALLOC_P (ipa_agg_jump_function_p, heap);
typedef struct ipa_agg_jump_function ipa_agg_jump_function_t;
-DEF_VEC_P (ipa_agg_jump_function_t);
-DEF_VEC_ALLOC_P (ipa_agg_jump_function_t, heap);
/* A jump function for a callsite represents the values passed as actual
arguments of the callsite. See enum jump_func_type for the various
@@ -186,8 +179,6 @@ typedef struct GTY (()) ipa_jump_func
} GTY ((desc ("%1.type"))) value;
} ipa_jump_func_t;
-DEF_VEC_O (ipa_jump_func_t);
-DEF_VEC_ALLOC_O (ipa_jump_func_t, gc);
/* Return the offset of the component that is decribed by a known type jump
function JFUNC. */
@@ -311,8 +302,6 @@ struct ipa_param_descriptor
};
typedef struct ipa_param_descriptor ipa_param_descriptor_t;
-DEF_VEC_O (ipa_param_descriptor_t);
-DEF_VEC_ALLOC_O (ipa_param_descriptor_t, heap);
struct ipcp_lattice;
/* ipa_node_params stores information related to formal parameters of functions
@@ -323,7 +312,7 @@ struct ipa_node_params
{
/* Information about individual formal parameters that are gathered when
summaries are generated. */
- VEC (ipa_param_descriptor_t, heap) *descriptors;
+ vec<ipa_param_descriptor_t> descriptors;
/* Pointer to an array of structures describing individual formal
parameters. */
struct ipcp_param_lattices *lattices;
@@ -332,7 +321,7 @@ struct ipa_node_params
struct cgraph_node *ipcp_orig_node;
/* If this node is an ipa-cp clone, these are the known values that describe
what it has been specialized for. */
- VEC (tree, heap) *known_vals;
+ vec<tree> known_vals;
/* Whether the param uses analysis has already been performed. */
unsigned uses_analysis_done : 1;
/* Whether the function is enqueued in ipa-cp propagation stack. */
@@ -353,7 +342,7 @@ struct ipa_node_params
static inline int
ipa_get_param_count (struct ipa_node_params *info)
{
- return VEC_length (ipa_param_descriptor_t, info->descriptors);
+ return info->descriptors.length ();
}
/* Return the declaration of Ith formal parameter of the function corresponding
@@ -363,7 +352,7 @@ ipa_get_param_count (struct ipa_node_params *info)
static inline tree
ipa_get_param (struct ipa_node_params *info, int i)
{
- return VEC_index (ipa_param_descriptor_t, info->descriptors, i).decl;
+ return info->descriptors[i].decl;
}
/* Set the used flag corresponding to the Ith formal parameter of the function
@@ -372,7 +361,7 @@ ipa_get_param (struct ipa_node_params *info, int i)
static inline void
ipa_set_param_used (struct ipa_node_params *info, int i, bool val)
{
- VEC_index (ipa_param_descriptor_t, info->descriptors, i).used = val;
+ info->descriptors[i].used = val;
}
/* Return the used flag corresponding to the Ith formal parameter of the
@@ -381,7 +370,7 @@ ipa_set_param_used (struct ipa_node_params *info, int i, bool val)
static inline bool
ipa_is_param_used (struct ipa_node_params *info, int i)
{
- return VEC_index (ipa_param_descriptor_t, info->descriptors, i).used;
+ return info->descriptors[i].used;
}
/* Information about replacements done in aggregates for a given node (each
@@ -399,8 +388,6 @@ struct GTY(()) ipa_agg_replacement_value
};
typedef struct ipa_agg_replacement_value *ipa_agg_replacement_value_p;
-DEF_VEC_P (ipa_agg_replacement_value_p);
-DEF_VEC_ALLOC_P (ipa_agg_replacement_value_p, gc);
void ipa_set_node_agg_value_chain (struct cgraph_node *node,
struct ipa_agg_replacement_value *aggvals);
@@ -410,7 +397,7 @@ void ipa_set_node_agg_value_chain (struct cgraph_node *node,
typedef struct GTY(()) ipa_edge_args
{
/* Vector of the callsite's jump function of each parameter. */
- VEC (ipa_jump_func_t, gc) *jump_functions;
+ vec<ipa_jump_func_t, va_gc> *jump_functions;
} ipa_edge_args_t;
/* ipa_edge_args access functions. Please use these to access fields that
@@ -421,7 +408,7 @@ typedef struct GTY(()) ipa_edge_args
static inline int
ipa_get_cs_argument_count (struct ipa_edge_args *args)
{
- return VEC_length (ipa_jump_func_t, args->jump_functions);
+ return vec_safe_length (args->jump_functions);
}
/* Returns a pointer to the jump function for the ith argument. Please note
@@ -431,31 +418,25 @@ ipa_get_cs_argument_count (struct ipa_edge_args *args)
static inline struct ipa_jump_func *
ipa_get_ith_jump_func (struct ipa_edge_args *args, int i)
{
- return &VEC_index (ipa_jump_func_t, args->jump_functions, i);
+ return &(*args->jump_functions)[i];
}
/* Vectors need to have typedefs of structures. */
typedef struct ipa_node_params ipa_node_params_t;
/* Types of vectors holding the infos. */
-DEF_VEC_O (ipa_node_params_t);
-DEF_VEC_ALLOC_O (ipa_node_params_t, heap);
-DEF_VEC_O (ipa_edge_args_t);
-DEF_VEC_ALLOC_O (ipa_edge_args_t, gc);
/* Vector where the parameter infos are actually stored. */
-extern VEC (ipa_node_params_t, heap) *ipa_node_params_vector;
+extern vec<ipa_node_params_t> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes. */
-extern GTY(()) VEC (ipa_agg_replacement_value_p, gc) *ipa_node_agg_replacements;
+extern GTY(()) vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the parameter infos are actually stored. */
-extern GTY(()) VEC (ipa_edge_args_t, gc) *ipa_edge_args_vector;
+extern GTY(()) vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
/* Return the associated parameter/argument info corresponding to the given
node/edge. */
-#define IPA_NODE_REF(NODE) (&VEC_index (ipa_node_params_t, \
- ipa_node_params_vector, (NODE)->uid))
-#define IPA_EDGE_REF(EDGE) (&VEC_index (ipa_edge_args_t, \
- ipa_edge_args_vector, (EDGE)->uid))
+#define IPA_NODE_REF(NODE) (&ipa_node_params_vector[(NODE)->uid])
+#define IPA_EDGE_REF(EDGE) (&(*ipa_edge_args_vector)[(EDGE)->uid])
/* This macro checks validity of index returned by
ipa_get_param_decl_index function. */
#define IS_VALID_JUMP_FUNC_INDEX(I) ((I) != -1)
@@ -477,14 +458,11 @@ void ipa_register_cgraph_hooks (void);
static inline void
ipa_check_create_node_params (void)
{
- if (!ipa_node_params_vector)
- ipa_node_params_vector = VEC_alloc (ipa_node_params_t, heap,
- cgraph_max_uid);
+ if (!ipa_node_params_vector.exists ())
+ ipa_node_params_vector.create (cgraph_max_uid);
- if (VEC_length (ipa_node_params_t, ipa_node_params_vector)
- <= (unsigned) cgraph_max_uid)
- VEC_safe_grow_cleared (ipa_node_params_t, heap,
- ipa_node_params_vector, cgraph_max_uid + 1);
+ if (ipa_node_params_vector.length () <= (unsigned) cgraph_max_uid)
+ ipa_node_params_vector.safe_grow_cleared (cgraph_max_uid + 1);
}
/* This function ensures the array of edge arguments infos is big enough to
@@ -493,14 +471,8 @@ ipa_check_create_node_params (void)
static inline void
ipa_check_create_edge_args (void)
{
- if (!ipa_edge_args_vector)
- ipa_edge_args_vector = VEC_alloc (ipa_edge_args_t, gc,
- cgraph_edge_max_uid);
-
- if (VEC_length (ipa_edge_args_t, ipa_edge_args_vector)
- <= (unsigned) cgraph_edge_max_uid)
- VEC_safe_grow_cleared (ipa_edge_args_t, gc, ipa_edge_args_vector,
- cgraph_edge_max_uid + 1);
+ if (vec_safe_length (ipa_edge_args_vector) <= (unsigned) cgraph_edge_max_uid)
+ vec_safe_grow_cleared (ipa_edge_args_vector, cgraph_edge_max_uid + 1);
}
/* Returns true if the array of edge infos is large enough to accommodate an
@@ -510,8 +482,7 @@ ipa_check_create_edge_args (void)
static inline bool
ipa_edge_args_info_available_for_edge_p (struct cgraph_edge *edge)
{
- return ((unsigned) edge->uid < VEC_length (ipa_edge_args_t,
- ipa_edge_args_vector));
+ return ((unsigned) edge->uid < vec_safe_length (ipa_edge_args_vector));
}
/* Return the aggregate replacements for NODE, if there are any. */
@@ -519,23 +490,21 @@ ipa_edge_args_info_available_for_edge_p (struct cgraph_edge *edge)
static inline struct ipa_agg_replacement_value *
ipa_get_agg_replacements_for_node (struct cgraph_node *node)
{
- if ((unsigned) node->uid >= VEC_length (ipa_agg_replacement_value_p,
- ipa_node_agg_replacements))
+ if ((unsigned) node->uid >= vec_safe_length (ipa_node_agg_replacements))
return NULL;
- return VEC_index (ipa_agg_replacement_value_p, ipa_node_agg_replacements,
- node->uid);
+ return (*ipa_node_agg_replacements)[node->uid];
}
/* Function formal parameters related computations. */
void ipa_initialize_node_params (struct cgraph_node *node);
bool ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
- VEC (cgraph_edge_p, heap) **new_edges);
+ vec<cgraph_edge_p> *new_edges);
/* Indirect edge and binfo processing. */
tree ipa_get_indirect_edge_target (struct cgraph_edge *ie,
- VEC (tree, heap) *,
- VEC (tree, heap) *,
- VEC (ipa_agg_jump_function_p, heap) *);
+ vec<tree> ,
+ vec<tree> ,
+ vec<ipa_agg_jump_function_p> );
struct cgraph_edge *ipa_make_edge_direct_to_target (struct cgraph_edge *, tree);
/* Functions related to both. */
@@ -616,12 +585,10 @@ struct ipa_parm_adjustment
};
typedef struct ipa_parm_adjustment ipa_parm_adjustment_t;
-DEF_VEC_O (ipa_parm_adjustment_t);
-DEF_VEC_ALLOC_O (ipa_parm_adjustment_t, heap);
-typedef VEC (ipa_parm_adjustment_t, heap) *ipa_parm_adjustment_vec;
+typedef vec<ipa_parm_adjustment_t> ipa_parm_adjustment_vec;
-VEC(tree, heap) *ipa_get_vector_of_formal_parms (tree fndecl);
+vec<tree> ipa_get_vector_of_formal_parms (tree fndecl);
void ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec,
const char *);
void ipa_modify_call_arguments (struct cgraph_edge *, gimple,
diff --git a/gcc/ipa-pure-const.c b/gcc/ipa-pure-const.c
index 5cf974b7b37..341501ea172 100644
--- a/gcc/ipa-pure-const.c
+++ b/gcc/ipa-pure-const.c
@@ -107,9 +107,7 @@ typedef struct funct_state_d * funct_state;
/* Array, indexed by cgraph node uid, of function states. */
-DEF_VEC_P (funct_state);
-DEF_VEC_ALLOC_P (funct_state, heap);
-static VEC (funct_state, heap) *funct_state_vec;
+static vec<funct_state> funct_state_vec;
/* Holders of ipa cgraph hooks: */
static struct cgraph_node_hook_list *function_insertion_hook_holder;
@@ -198,7 +196,7 @@ warn_function_noreturn (tree decl)
static void
finish_state (void)
{
- free (funct_state_vec);
+ funct_state_vec.release ();
}
@@ -207,10 +205,10 @@ finish_state (void)
static inline bool
has_function_state (struct cgraph_node *node)
{
- if (!funct_state_vec
- || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
+ if (!funct_state_vec.exists ()
+ || funct_state_vec.length () <= (unsigned int)node->uid)
return false;
- return VEC_index (funct_state, funct_state_vec, node->uid) != NULL;
+ return funct_state_vec[node->uid] != NULL;
}
/* Return the function state from NODE. */
@@ -218,12 +216,12 @@ has_function_state (struct cgraph_node *node)
static inline funct_state
get_function_state (struct cgraph_node *node)
{
- if (!funct_state_vec
- || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid
- || !VEC_index (funct_state, funct_state_vec, node->uid))
+ if (!funct_state_vec.exists ()
+ || funct_state_vec.length () <= (unsigned int)node->uid
+ || !funct_state_vec[node->uid])
/* We might want to put correct previously_known state into varying. */
return &varying_state;
- return VEC_index (funct_state, funct_state_vec, node->uid);
+ return funct_state_vec[node->uid];
}
/* Set the function state S for NODE. */
@@ -231,10 +229,10 @@ get_function_state (struct cgraph_node *node)
static inline void
set_function_state (struct cgraph_node *node, funct_state s)
{
- if (!funct_state_vec
- || VEC_length (funct_state, funct_state_vec) <= (unsigned int)node->uid)
- VEC_safe_grow_cleared (funct_state, heap, funct_state_vec, node->uid + 1);
- VEC_replace (funct_state, funct_state_vec, node->uid, s);
+ if (!funct_state_vec.exists ()
+ || funct_state_vec.length () <= (unsigned int)node->uid)
+ funct_state_vec.safe_grow_cleared (node->uid + 1);
+ funct_state_vec[node->uid] = s;
}
/* Check to see if the use (or definition when CHECKING_WRITE is true)
@@ -1482,7 +1480,7 @@ propagate (void)
FOR_EACH_DEFINED_FUNCTION (node)
if (has_function_state (node))
free (get_function_state (node));
- VEC_free (funct_state, heap, funct_state_vec);
+ funct_state_vec.release ();
finish_state ();
return 0;
}
diff --git a/gcc/ipa-ref-inline.h b/gcc/ipa-ref-inline.h
index 575bba901ac..d793b5ea127 100644
--- a/gcc/ipa-ref-inline.h
+++ b/gcc/ipa-ref-inline.h
@@ -71,9 +71,9 @@ ipa_ref_referred_ref_list (struct ipa_ref *ref)
static inline struct ipa_ref *
ipa_ref_list_first_reference (struct ipa_ref_list *list)
{
- if (!VEC_length (ipa_ref_t, list->references))
+ if (!vec_safe_length (list->references))
return NULL;
- return &VEC_index (ipa_ref_t, list->references, 0);
+ return &(*list->references)[0];
}
/* Return first referring ref in LIST or NULL if empty. */
@@ -81,9 +81,9 @@ ipa_ref_list_first_reference (struct ipa_ref_list *list)
static inline struct ipa_ref *
ipa_ref_list_first_referring (struct ipa_ref_list *list)
{
- if (!VEC_length (ipa_ref_ptr, list->referring))
+ if (!list->referring.length ())
return NULL;
- return VEC_index (ipa_ref_ptr, list->referring, 0);
+ return list->referring[0];
}
/* Clear reference list. */
@@ -91,7 +91,7 @@ ipa_ref_list_first_referring (struct ipa_ref_list *list)
static inline void
ipa_empty_ref_list (struct ipa_ref_list *list)
{
- list->referring = NULL;
+ list->referring.create (0);
list->references = NULL;
}
@@ -100,10 +100,10 @@ ipa_empty_ref_list (struct ipa_ref_list *list)
static inline unsigned int
ipa_ref_list_nreferences (struct ipa_ref_list *list)
{
- return VEC_length (ipa_ref_t, list->references);
+ return vec_safe_length (list->references);
}
#define ipa_ref_list_reference_iterate(L,I,P) \
- VEC_iterate(ipa_ref_t, (L)->references, (I), (P))
+ vec_safe_iterate ((L)->references, (I), &(P))
#define ipa_ref_list_referring_iterate(L,I,P) \
- VEC_iterate(ipa_ref_ptr, (L)->referring, (I), (P))
+ (L)->referring.iterate ((I), &(P))
diff --git a/gcc/ipa-ref.c b/gcc/ipa-ref.c
index 52850c6f872..75887d293e6 100644
--- a/gcc/ipa-ref.c
+++ b/gcc/ipa-ref.c
@@ -40,33 +40,30 @@ ipa_record_reference (symtab_node referring_node,
{
struct ipa_ref *ref;
struct ipa_ref_list *list, *list2;
- VEC(ipa_ref_t,gc) *old_references;
+ ipa_ref_t *old_references;
gcc_checking_assert (!stmt || is_a <cgraph_node> (referring_node));
gcc_checking_assert (use_type != IPA_REF_ALIAS || !stmt);
list = &referring_node->symbol.ref_list;
- old_references = list->references;
- VEC_safe_grow (ipa_ref_t, gc, list->references,
- VEC_length (ipa_ref_t, list->references) + 1);
- ref = &VEC_last (ipa_ref_t, list->references);
+ old_references = vec_safe_address (list->references);
+ vec_safe_grow (list->references, vec_safe_length (list->references) + 1);
+ ref = &list->references->last ();
list2 = &referred_node->symbol.ref_list;
- VEC_safe_push (ipa_ref_ptr, heap, list2->referring, ref);
- ref->referred_index = VEC_length (ipa_ref_ptr, list2->referring) - 1;
+ list2->referring.safe_push (ref);
+ ref->referred_index = list2->referring.length () - 1;
ref->referring = referring_node;
ref->referred = referred_node;
ref->stmt = stmt;
ref->use = use_type;
/* If vector was moved in memory, update pointers. */
- if (old_references != list->references)
+ if (old_references != list->references->address ())
{
int i;
for (i = 0; ipa_ref_list_reference_iterate (list, i, ref); i++)
- VEC_replace (ipa_ref_ptr,
- ipa_ref_referred_ref_list (ref)->referring,
- ref->referred_index, ref);
+ ipa_ref_referred_ref_list (ref)->referring[ref->referred_index] = ref;
}
return ref;
}
@@ -78,30 +75,26 @@ ipa_remove_reference (struct ipa_ref *ref)
{
struct ipa_ref_list *list = ipa_ref_referred_ref_list (ref);
struct ipa_ref_list *list2 = ipa_ref_referring_ref_list (ref);
- VEC(ipa_ref_t,gc) *old_references = list2->references;
+ vec<ipa_ref_t, va_gc> *old_references = list2->references;
struct ipa_ref *last;
- gcc_assert (VEC_index (ipa_ref_ptr, list->referring, ref->referred_index) == ref);
- last = VEC_last (ipa_ref_ptr, list->referring);
+ gcc_assert (list->referring[ref->referred_index] == ref);
+ last = list->referring.last ();
if (ref != last)
{
- VEC_replace (ipa_ref_ptr, list->referring,
- ref->referred_index,
- VEC_last (ipa_ref_ptr, list->referring));
- VEC_index (ipa_ref_ptr, list->referring,
- ref->referred_index)->referred_index = ref->referred_index;
+ list->referring[ref->referred_index] = list->referring.last ();
+ list->referring[ref->referred_index]->referred_index
+ = ref->referred_index;
}
- VEC_pop (ipa_ref_ptr, list->referring);
+ list->referring.pop ();
- last = &VEC_last (ipa_ref_t, list2->references);
+ last = &list2->references->last ();
if (ref != last)
{
*ref = *last;
- VEC_replace (ipa_ref_ptr,
- ipa_ref_referred_ref_list (ref)->referring,
- ref->referred_index, ref);
+ ipa_ref_referred_ref_list (ref)->referring[ref->referred_index] = ref;
}
- VEC_pop (ipa_ref_t, list2->references);
+ list2->references->pop ();
gcc_assert (list2->references == old_references);
}
@@ -110,10 +103,9 @@ ipa_remove_reference (struct ipa_ref *ref)
void
ipa_remove_all_references (struct ipa_ref_list *list)
{
- while (VEC_length (ipa_ref_t, list->references))
- ipa_remove_reference (&VEC_last (ipa_ref_t, list->references));
- VEC_free (ipa_ref_t, gc, list->references);
- list->references = NULL;
+ while (vec_safe_length (list->references))
+ ipa_remove_reference (&list->references->last ());
+ vec_free (list->references);
}
/* Remove all references in ref list LIST. */
@@ -121,10 +113,9 @@ ipa_remove_all_references (struct ipa_ref_list *list)
void
ipa_remove_all_referring (struct ipa_ref_list *list)
{
- while (VEC_length (ipa_ref_ptr, list->referring))
- ipa_remove_reference (VEC_last (ipa_ref_ptr, list->referring));
- VEC_free (ipa_ref_ptr, heap, list->referring);
- list->referring = NULL;
+ while (list->referring.length ())
+ ipa_remove_reference (list->referring.last ());
+ list->referring.release ();
}
/* Dump references in LIST to FILE. */
diff --git a/gcc/ipa-ref.h b/gcc/ipa-ref.h
index 99273c50fb1..2333cbe91e3 100644
--- a/gcc/ipa-ref.h
+++ b/gcc/ipa-ref.h
@@ -48,19 +48,15 @@ struct GTY(()) ipa_ref
typedef struct ipa_ref ipa_ref_t;
typedef struct ipa_ref *ipa_ref_ptr;
-DEF_VEC_O(ipa_ref_t);
-DEF_VEC_ALLOC_O(ipa_ref_t,gc);
-DEF_VEC_P(ipa_ref_ptr);
-DEF_VEC_ALLOC_P(ipa_ref_ptr,heap);
/* List of references. This is stored in both callgraph and varpool nodes. */
struct GTY(()) ipa_ref_list
{
/* Store actual references in references vector. */
- VEC(ipa_ref_t,gc) *references;
+ vec<ipa_ref_t, va_gc> *references;
/* Referring is vector of pointers to references. It must not live in GGC space
or GGC will try to mark middle of references vectors. */
- VEC(ipa_ref_ptr,heap) * GTY((skip)) referring;
+ vec<ipa_ref_ptr> GTY((skip)) referring;
};
struct ipa_ref * ipa_record_reference (symtab_node,
diff --git a/gcc/ipa-reference.c b/gcc/ipa-reference.c
index 06dc18b2302..ca05a398841 100644
--- a/gcc/ipa-reference.c
+++ b/gcc/ipa-reference.c
@@ -124,38 +124,28 @@ static struct cgraph_node_hook_list *node_removal_hook_holder;
/* Vector where the reference var infos are actually stored.
Indexed by UID of call graph nodes. */
-DEF_VEC_P (ipa_reference_vars_info_t);
-DEF_VEC_ALLOC_P (ipa_reference_vars_info_t, heap);
-static VEC (ipa_reference_vars_info_t, heap) *ipa_reference_vars_vector;
+static vec<ipa_reference_vars_info_t> ipa_reference_vars_vector;
-DEF_VEC_P (ipa_reference_optimization_summary_t);
-DEF_VEC_ALLOC_P (ipa_reference_optimization_summary_t, heap);
-static VEC (ipa_reference_optimization_summary_t, heap) *ipa_reference_opt_sum_vector;
+static vec<ipa_reference_optimization_summary_t> ipa_reference_opt_sum_vector;
/* Return the ipa_reference_vars structure starting from the cgraph NODE. */
static inline ipa_reference_vars_info_t
get_reference_vars_info (struct cgraph_node *node)
{
- if (!ipa_reference_vars_vector
- || VEC_length (ipa_reference_vars_info_t,
- ipa_reference_vars_vector) <= (unsigned int) node->uid)
+ if (!ipa_reference_vars_vector.exists ()
+ || ipa_reference_vars_vector.length () <= (unsigned int) node->uid)
return NULL;
- return VEC_index (ipa_reference_vars_info_t, ipa_reference_vars_vector,
- node->uid);
+ return ipa_reference_vars_vector[node->uid];
}
/* Return the ipa_reference_vars structure starting from the cgraph NODE. */
static inline ipa_reference_optimization_summary_t
get_reference_optimization_summary (struct cgraph_node *node)
{
- if (!ipa_reference_opt_sum_vector
- || (VEC_length (ipa_reference_optimization_summary_t,
- ipa_reference_opt_sum_vector)
- <= (unsigned int) node->uid))
+ if (!ipa_reference_opt_sum_vector.exists ()
+ || (ipa_reference_opt_sum_vector.length () <= (unsigned int) node->uid))
return NULL;
- return VEC_index (ipa_reference_optimization_summary_t,
- ipa_reference_opt_sum_vector,
- node->uid);
+ return ipa_reference_opt_sum_vector[node->uid];
}
/* Return the ipa_reference_vars structure starting from the cgraph NODE. */
@@ -163,13 +153,10 @@ static inline void
set_reference_vars_info (struct cgraph_node *node,
ipa_reference_vars_info_t info)
{
- if (!ipa_reference_vars_vector
- || VEC_length (ipa_reference_vars_info_t,
- ipa_reference_vars_vector) <= (unsigned int) node->uid)
- VEC_safe_grow_cleared (ipa_reference_vars_info_t, heap,
- ipa_reference_vars_vector, node->uid + 1);
- VEC_replace (ipa_reference_vars_info_t, ipa_reference_vars_vector,
- node->uid, info);
+ if (!ipa_reference_vars_vector.exists ()
+ || ipa_reference_vars_vector.length () <= (unsigned int) node->uid)
+ ipa_reference_vars_vector.safe_grow_cleared (node->uid + 1);
+ ipa_reference_vars_vector[node->uid] = info;
}
/* Return the ipa_reference_vars structure starting from the cgraph NODE. */
@@ -177,14 +164,10 @@ static inline void
set_reference_optimization_summary (struct cgraph_node *node,
ipa_reference_optimization_summary_t info)
{
- if (!ipa_reference_opt_sum_vector
- || (VEC_length (ipa_reference_optimization_summary_t,
- ipa_reference_opt_sum_vector)
- <= (unsigned int) node->uid))
- VEC_safe_grow_cleared (ipa_reference_optimization_summary_t,
- heap, ipa_reference_opt_sum_vector, node->uid + 1);
- VEC_replace (ipa_reference_optimization_summary_t,
- ipa_reference_opt_sum_vector, node->uid, info);
+ if (!ipa_reference_opt_sum_vector.exists ()
+ || (ipa_reference_opt_sum_vector.length () <= (unsigned int) node->uid))
+ ipa_reference_opt_sum_vector.safe_grow_cleared (node->uid + 1);
+ ipa_reference_opt_sum_vector[node->uid] = info;
}
/* Return a bitmap indexed by DECL_UID for the static variables that
@@ -746,10 +729,10 @@ propagate (void)
fprintf (dump_file, "Starting cycle with %s/%i\n",
cgraph_node_asm_name (node), node->symbol.order);
- VEC (cgraph_node_p, heap) *cycle_nodes = ipa_get_nodes_in_cycle (node);
+ vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
/* If any node in a cycle is read_all or write_all, they all are. */
- FOR_EACH_VEC_ELT (cgraph_node_p, cycle_nodes, x, w)
+ FOR_EACH_VEC_ELT (cycle_nodes, x, w)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Visiting %s/%i\n",
@@ -771,7 +754,7 @@ propagate (void)
/* Merge the sets of this cycle with all sets of callees reached
from this cycle. */
- FOR_EACH_VEC_ELT (cgraph_node_p, cycle_nodes, x, w)
+ FOR_EACH_VEC_ELT (cycle_nodes, x, w)
{
if (read_all && write_all)
break;
@@ -795,13 +778,13 @@ propagate (void)
}
/* All nodes within a cycle have the same global info bitmaps. */
- FOR_EACH_VEC_ELT (cgraph_node_p, cycle_nodes, x, w)
+ FOR_EACH_VEC_ELT (cycle_nodes, x, w)
{
ipa_reference_vars_info_t w_ri = get_reference_vars_info (w);
w_ri->global = *node_g;
}
- VEC_free (cgraph_node_p, heap, cycle_nodes);
+ cycle_nodes.release ();
}
if (dump_file)
@@ -822,8 +805,8 @@ propagate (void)
ipa_reference_vars_info_t node_info = get_reference_vars_info (node);
ipa_reference_global_vars_info_t node_g = &node_info->global;
- VEC (cgraph_node_p, heap) *cycle_nodes = ipa_get_nodes_in_cycle (node);
- FOR_EACH_VEC_ELT (cgraph_node_p, cycle_nodes, x, w)
+ vec<cgraph_node_ptr> cycle_nodes = ipa_get_nodes_in_cycle (node);
+ FOR_EACH_VEC_ELT (cycle_nodes, x, w)
{
ipa_reference_vars_info_t w_ri = get_reference_vars_info (w);
ipa_reference_local_vars_info_t w_l = &w_ri->local;
@@ -835,7 +818,7 @@ propagate (void)
fprintf (dump_file, "\n locals written: ");
dump_static_vars_set_to_file (dump_file, w_l->statics_written);
}
- VEC_free (cgraph_node_p, heap, cycle_nodes);
+ cycle_nodes.release ();
fprintf (dump_file, "\n globals read: ");
dump_static_vars_set_to_file (dump_file, node_g->statics_read);
@@ -897,8 +880,7 @@ propagate (void)
free (order);
bitmap_obstack_release (&local_info_obstack);
- VEC_free (ipa_reference_vars_info_t, heap, ipa_reference_vars_vector);
- ipa_reference_vars_vector = NULL;
+ ipa_reference_vars_vector.release ();
if (dump_file)
splay_tree_delete (reference_vars_to_consider);
reference_vars_to_consider = NULL;
diff --git a/gcc/ipa-split.c b/gcc/ipa-split.c
index 440b3f92055..ceed8d26624 100644
--- a/gcc/ipa-split.c
+++ b/gcc/ipa-split.c
@@ -99,10 +99,8 @@ typedef struct
unsigned int size;
unsigned int time;
} bb_info;
-DEF_VEC_O(bb_info);
-DEF_VEC_ALLOC_O(bb_info,heap);
-static VEC(bb_info, heap) *bb_info_vec;
+static vec<bb_info> bb_info_vec;
/* Description of split point. */
@@ -192,7 +190,7 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
basic_block return_bb)
{
bitmap seen = BITMAP_ALLOC (NULL);
- VEC (basic_block,heap) *worklist = NULL;
+ vec<basic_block> worklist = vec<basic_block>();
edge e;
edge_iterator ei;
bool ok = true;
@@ -201,14 +199,14 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
if (e->src != ENTRY_BLOCK_PTR
&& !bitmap_bit_p (current->split_bbs, e->src->index))
{
- VEC_safe_push (basic_block, heap, worklist, e->src);
+ worklist.safe_push (e->src);
bitmap_set_bit (seen, e->src->index);
}
- while (!VEC_empty (basic_block, worklist))
+ while (!worklist.is_empty ())
{
gimple_stmt_iterator bsi;
- basic_block bb = VEC_pop (basic_block, worklist);
+ basic_block bb = worklist.pop ();
FOR_EACH_EDGE (e, ei, bb->preds)
if (e->src != ENTRY_BLOCK_PTR
@@ -216,7 +214,7 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
{
gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
e->src->index));
- VEC_safe_push (basic_block, heap, worklist, e->src);
+ worklist.safe_push (e->src);
}
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
@@ -271,7 +269,7 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
}
done:
BITMAP_FREE (seen);
- VEC_free (basic_block, heap, worklist);
+ worklist.release ();
return ok;
}
@@ -868,8 +866,6 @@ typedef struct
/* When false we can not split on this BB. */
bool can_split;
} stack_entry;
-DEF_VEC_O(stack_entry);
-DEF_VEC_ALLOC_O(stack_entry,heap);
/* Find all articulations and call consider_split on them.
@@ -893,7 +889,7 @@ static void
find_split_points (int overall_time, int overall_size)
{
stack_entry first;
- VEC(stack_entry, heap) *stack = NULL;
+ vec<stack_entry> stack = vec<stack_entry>();
basic_block bb;
basic_block return_bb = find_return_bb ();
struct split_point current;
@@ -912,12 +908,12 @@ find_split_points (int overall_time, int overall_size)
first.set_ssa_names = 0;
first.used_ssa_names = 0;
first.bbs_visited = 0;
- VEC_safe_push (stack_entry, heap, stack, first);
+ stack.safe_push (first);
ENTRY_BLOCK_PTR->aux = (void *)(intptr_t)-1;
- while (!VEC_empty (stack_entry, stack))
+ while (!stack.is_empty ())
{
- stack_entry *entry = &VEC_last (stack_entry, stack);
+ stack_entry *entry = &stack.last ();
/* We are walking an acyclic graph, so edge_num counts
succ and pred edges together. However when considering
@@ -926,7 +922,7 @@ find_split_points (int overall_time, int overall_size)
if (entry->edge_num == EDGE_COUNT (entry->bb->succs)
&& entry->bb != ENTRY_BLOCK_PTR)
{
- int pos = VEC_length (stack_entry, stack);
+ int pos = stack.length ();
entry->can_split &= visit_bb (entry->bb, return_bb,
entry->set_ssa_names,
entry->used_ssa_names,
@@ -984,9 +980,9 @@ find_split_points (int overall_time, int overall_size)
new_entry.bb = dest;
new_entry.edge_num = 0;
new_entry.overall_time
- = VEC_index (bb_info, bb_info_vec, dest->index).time;
+ = bb_info_vec[dest->index].time;
new_entry.overall_size
- = VEC_index (bb_info, bb_info_vec, dest->index).size;
+ = bb_info_vec[dest->index].size;
new_entry.earliest = INT_MAX;
new_entry.set_ssa_names = BITMAP_ALLOC (NULL);
new_entry.used_ssa_names = BITMAP_ALLOC (NULL);
@@ -994,8 +990,8 @@ find_split_points (int overall_time, int overall_size)
new_entry.non_ssa_vars = BITMAP_ALLOC (NULL);
new_entry.can_split = true;
bitmap_set_bit (new_entry.bbs_visited, dest->index);
- VEC_safe_push (stack_entry, heap, stack, new_entry);
- dest->aux = (void *)(intptr_t)VEC_length (stack_entry, stack);
+ stack.safe_push (new_entry);
+ dest->aux = (void *)(intptr_t)stack.length ();
}
/* Back edge found, record the earliest point. */
else if ((intptr_t)dest->aux > 0
@@ -1006,8 +1002,7 @@ find_split_points (int overall_time, int overall_size)
and merge stuff we accumulate during the walk. */
else if (entry->bb != ENTRY_BLOCK_PTR)
{
- stack_entry *prev = &VEC_index (stack_entry, stack,
- VEC_length (stack_entry, stack) - 2);
+ stack_entry *prev = &stack[stack.length () - 2];
entry->bb->aux = (void *)(intptr_t)-1;
prev->can_split &= entry->can_split;
@@ -1026,15 +1021,15 @@ find_split_points (int overall_time, int overall_size)
BITMAP_FREE (entry->used_ssa_names);
BITMAP_FREE (entry->bbs_visited);
BITMAP_FREE (entry->non_ssa_vars);
- VEC_pop (stack_entry, stack);
+ stack.pop ();
}
else
- VEC_pop (stack_entry, stack);
+ stack.pop ();
}
ENTRY_BLOCK_PTR->aux = NULL;
FOR_EACH_BB (bb)
bb->aux = NULL;
- VEC_free (stack_entry, heap, stack);
+ stack.release ();
BITMAP_FREE (current.ssa_names_to_pass);
}
@@ -1043,7 +1038,7 @@ find_split_points (int overall_time, int overall_size)
static void
split_function (struct split_point *split_point)
{
- VEC (tree, heap) *args_to_pass = NULL;
+ vec<tree> args_to_pass = vec<tree>();
bitmap args_to_skip;
tree parm;
int num = 0;
@@ -1059,7 +1054,7 @@ split_function (struct split_point *split_point)
gimple last_stmt = NULL;
unsigned int i;
tree arg, ddef;
- VEC(tree, gc) **debug_args = NULL;
+ vec<tree, va_gc> **debug_args = NULL;
if (dump_file)
{
@@ -1092,7 +1087,7 @@ split_function (struct split_point *split_point)
if (!useless_type_conversion_p (DECL_ARG_TYPE (parm), TREE_TYPE (arg)))
arg = fold_convert (DECL_ARG_TYPE (parm), arg);
- VEC_safe_push (tree, heap, args_to_pass, arg);
+ args_to_pass.safe_push (arg);
}
/* See if the split function will return. */
@@ -1188,7 +1183,9 @@ split_function (struct split_point *split_point)
/* Now create the actual clone. */
rebuild_cgraph_edges ();
- node = cgraph_function_versioning (cur_node, NULL, NULL, args_to_skip,
+ node = cgraph_function_versioning (cur_node, vec<cgraph_edge_p>(),
+ NULL,
+ args_to_skip,
!split_part_return_p,
split_point->split_bbs,
split_point->entry_bb, "part");
@@ -1222,16 +1219,16 @@ split_function (struct split_point *split_point)
/* Produce the call statement. */
gsi = gsi_last_bb (call_bb);
- FOR_EACH_VEC_ELT (tree, args_to_pass, i, arg)
+ FOR_EACH_VEC_ELT (args_to_pass, i, arg)
if (!is_gimple_val (arg))
{
arg = force_gimple_operand_gsi (&gsi, arg, true, NULL_TREE,
false, GSI_CONTINUE_LINKING);
- VEC_replace (tree, args_to_pass, i, arg);
+ args_to_pass[i] = arg;
}
call = gimple_build_call_vec (node->symbol.decl, args_to_pass);
gimple_set_block (call, DECL_INITIAL (current_function_decl));
- VEC_free (tree, heap, args_to_pass);
+ args_to_pass.release ();
/* For optimized away parameters, add on the caller side
before the call
@@ -1261,8 +1258,8 @@ split_function (struct split_point *split_point)
DECL_ARTIFICIAL (ddecl) = 1;
TREE_TYPE (ddecl) = TREE_TYPE (parm);
DECL_MODE (ddecl) = DECL_MODE (parm);
- VEC_safe_push (tree, gc, *debug_args, DECL_ORIGIN (parm));
- VEC_safe_push (tree, gc, *debug_args, ddecl);
+ vec_safe_push (*debug_args, DECL_ORIGIN (parm));
+ vec_safe_push (*debug_args, ddecl);
def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg),
call);
gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
@@ -1284,19 +1281,18 @@ split_function (struct split_point *split_point)
push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
var = BLOCK_VARS (DECL_INITIAL (node->symbol.decl));
- i = VEC_length (tree, *debug_args);
+ i = vec_safe_length (*debug_args);
cgsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
do
{
i -= 2;
while (var != NULL_TREE
- && DECL_ABSTRACT_ORIGIN (var)
- != VEC_index (tree, *debug_args, i))
+ && DECL_ABSTRACT_ORIGIN (var) != (**debug_args)[i])
var = TREE_CHAIN (var);
if (var == NULL_TREE)
break;
vexpr = make_node (DEBUG_EXPR_DECL);
- parm = VEC_index (tree, *debug_args, i);
+ parm = (**debug_args)[i];
DECL_ARTIFICIAL (vexpr) = 1;
TREE_TYPE (vexpr) = TREE_TYPE (parm);
DECL_MODE (vexpr) = DECL_MODE (parm);
@@ -1475,7 +1471,8 @@ execute_split_functions (void)
}
/* This can be relaxed; function might become inlinable after splitting
away the uninlinable part. */
- if (inline_edge_summary_vec && !inline_summary (node)->inlinable)
+ if (inline_edge_summary_vec.exists ()
+ && !inline_summary (node)->inlinable)
{
if (dump_file)
fprintf (dump_file, "Not splitting: not inlinable.\n");
@@ -1537,7 +1534,7 @@ execute_split_functions (void)
calculate_dominance_info (CDI_DOMINATORS);
/* Compute local info about basic blocks and determine function size/time. */
- VEC_safe_grow_cleared (bb_info, heap, bb_info_vec, last_basic_block + 1);
+ bb_info_vec.safe_grow_cleared (last_basic_block + 1);
memset (&best_split_point, 0, sizeof (best_split_point));
FOR_EACH_BB (bb)
{
@@ -1568,8 +1565,8 @@ execute_split_functions (void)
}
overall_time += time;
overall_size += size;
- VEC_index (bb_info, bb_info_vec, bb->index).time = time;
- VEC_index (bb_info, bb_info_vec, bb->index).size = size;
+ bb_info_vec[bb->index].time = time;
+ bb_info_vec[bb->index].size = size;
}
find_split_points (overall_time, overall_size);
if (best_split_point.split_bbs)
@@ -1580,8 +1577,7 @@ execute_split_functions (void)
todo = TODO_update_ssa | TODO_cleanup_cfg;
}
BITMAP_FREE (forbidden_dominators);
- VEC_free (bb_info, heap, bb_info_vec);
- bb_info_vec = NULL;
+ bb_info_vec.release ();
return todo;
}
diff --git a/gcc/ipa-utils.c b/gcc/ipa-utils.c
index 91fc3806a2e..d133f79c0a6 100644
--- a/gcc/ipa-utils.c
+++ b/gcc/ipa-utils.c
@@ -237,14 +237,14 @@ ipa_free_postorder_info (void)
/* Get the set of nodes for the cycle in the reduced call graph starting
from NODE. */
-VEC (cgraph_node_p, heap) *
+vec<cgraph_node_ptr>
ipa_get_nodes_in_cycle (struct cgraph_node *node)
{
- VEC (cgraph_node_p, heap) *v = NULL;
+ vec<cgraph_node_ptr> v = vec<cgraph_node_ptr>();
struct ipa_dfs_info *node_dfs_info;
while (node)
{
- VEC_safe_push (cgraph_node_p, heap, v, node);
+ v.safe_push (node);
node_dfs_info = (struct ipa_dfs_info *) node->symbol.aux;
node = node_dfs_info->next_cycle;
}
@@ -371,7 +371,7 @@ cgraph_node_set_new (void)
new_node_set = XCNEW (struct cgraph_node_set_def);
new_node_set->map = pointer_map_create ();
- new_node_set->nodes = NULL;
+ new_node_set->nodes.create (0);
return new_node_set;
}
@@ -388,15 +388,15 @@ cgraph_node_set_add (cgraph_node_set set, struct cgraph_node *node)
if (*slot)
{
int index = (size_t) *slot - 1;
- gcc_checking_assert ((VEC_index (cgraph_node_ptr, set->nodes, index)
+ gcc_checking_assert ((set->nodes[index]
== node));
return;
}
- *slot = (void *)(size_t) (VEC_length (cgraph_node_ptr, set->nodes) + 1);
+ *slot = (void *)(size_t) (set->nodes.length () + 1);
/* Insert into node vector. */
- VEC_safe_push (cgraph_node_ptr, heap, set->nodes, node);
+ set->nodes.safe_push (node);
}
@@ -414,12 +414,12 @@ cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
return;
index = (size_t) *slot - 1;
- gcc_checking_assert (VEC_index (cgraph_node_ptr, set->nodes, index)
+ gcc_checking_assert (set->nodes[index]
== node);
/* Remove from vector. We do this by swapping node with the last element
of the vector. */
- last_node = VEC_pop (cgraph_node_ptr, set->nodes);
+ last_node = set->nodes.pop ();
if (last_node != node)
{
last_slot = pointer_map_contains (set->map, last_node);
@@ -427,7 +427,7 @@ cgraph_node_set_remove (cgraph_node_set set, struct cgraph_node *node)
*last_slot = (void *)(size_t) (index + 1);
/* Move the last element to the original spot of NODE. */
- VEC_replace (cgraph_node_ptr, set->nodes, index, last_node);
+ set->nodes[index] = last_node;
}
/* Remove element from hash table. */
@@ -485,7 +485,7 @@ debug_cgraph_node_set (cgraph_node_set set)
void
free_cgraph_node_set (cgraph_node_set set)
{
- VEC_free (cgraph_node_ptr, heap, set->nodes);
+ set->nodes.release ();
pointer_map_destroy (set->map);
free (set);
}
@@ -500,7 +500,7 @@ varpool_node_set_new (void)
new_node_set = XCNEW (struct varpool_node_set_def);
new_node_set->map = pointer_map_create ();
- new_node_set->nodes = NULL;
+ new_node_set->nodes.create (0);
return new_node_set;
}
@@ -517,15 +517,15 @@ varpool_node_set_add (varpool_node_set set, struct varpool_node *node)
if (*slot)
{
int index = (size_t) *slot - 1;
- gcc_checking_assert ((VEC_index (varpool_node_ptr, set->nodes, index)
+ gcc_checking_assert ((set->nodes[index]
== node));
return;
}
- *slot = (void *)(size_t) (VEC_length (varpool_node_ptr, set->nodes) + 1);
+ *slot = (void *)(size_t) (set->nodes.length () + 1);
/* Insert into node vector. */
- VEC_safe_push (varpool_node_ptr, heap, set->nodes, node);
+ set->nodes.safe_push (node);
}
@@ -543,12 +543,12 @@ varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
return;
index = (size_t) *slot - 1;
- gcc_checking_assert (VEC_index (varpool_node_ptr, set->nodes, index)
+ gcc_checking_assert (set->nodes[index]
== node);
/* Remove from vector. We do this by swapping node with the last element
of the vector. */
- last_node = VEC_pop (varpool_node_ptr, set->nodes);
+ last_node = set->nodes.pop ();
if (last_node != node)
{
last_slot = pointer_map_contains (set->map, last_node);
@@ -556,7 +556,7 @@ varpool_node_set_remove (varpool_node_set set, struct varpool_node *node)
*last_slot = (void *)(size_t) (index + 1);
/* Move the last element to the original spot of NODE. */
- VEC_replace (varpool_node_ptr, set->nodes, index, last_node);
+ set->nodes[index] = last_node;
}
/* Remove element from hash table. */
@@ -605,7 +605,7 @@ dump_varpool_node_set (FILE *f, varpool_node_set set)
void
free_varpool_node_set (varpool_node_set set)
{
- VEC_free (varpool_node_ptr, heap, set->nodes);
+ set->nodes.release ();
pointer_map_destroy (set->map);
free (set);
}
diff --git a/gcc/ipa-utils.h b/gcc/ipa-utils.h
index f9562406059..7f045b245bd 100644
--- a/gcc/ipa-utils.h
+++ b/gcc/ipa-utils.h
@@ -42,7 +42,7 @@ void ipa_print_order (FILE*, const char *, struct cgraph_node**, int);
int ipa_reduced_postorder (struct cgraph_node **, bool, bool,
bool (*ignore_edge) (struct cgraph_edge *));
void ipa_free_postorder_info (void);
-VEC (cgraph_node_p, heap) *ipa_get_nodes_in_cycle (struct cgraph_node *);
+vec<cgraph_node_ptr> ipa_get_nodes_in_cycle (struct cgraph_node *);
int ipa_reverse_postorder (struct cgraph_node **);
tree get_base_var (tree);
diff --git a/gcc/ipa.c b/gcc/ipa.c
index 641c54d389b..0fb0eb5eb7f 100644
--- a/gcc/ipa.c
+++ b/gcc/ipa.c
@@ -447,7 +447,7 @@ symtab_remove_unreachable_nodes (bool before_inlining_p, FILE *file)
#endif
/* If we removed something, perhaps profile could be improved. */
- if (changed && optimize && inline_edge_summary_vec)
+ if (changed && optimize && inline_edge_summary_vec.exists ())
FOR_EACH_DEFINED_FUNCTION (node)
cgraph_propagate_frequency (node);
@@ -735,16 +735,16 @@ function_and_variable_visibility (bool whole_program)
alias_pair *p;
/* Discover aliased nodes. */
- FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
+ FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
{
if (dump_file)
- fprintf (dump_file, "Alias %s->%s",
- IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
- IDENTIFIER_POINTER (p->target));
+ fprintf (dump_file, "Alias %s->%s",
+ IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (p->decl)),
+ IDENTIFIER_POINTER (p->target));
if ((node = cgraph_node_for_asm (p->target)) != NULL
- && !DECL_EXTERNAL (node->symbol.decl))
- {
+ && !DECL_EXTERNAL (node->symbol.decl))
+ {
if (!node->analyzed)
continue;
cgraph_mark_force_output_node (node);
@@ -752,18 +752,18 @@ function_and_variable_visibility (bool whole_program)
if (dump_file)
fprintf (dump_file, " node %s/%i",
cgraph_node_name (node), node->uid);
- }
+ }
else if ((vnode = varpool_node_for_asm (p->target)) != NULL
&& !DECL_EXTERNAL (vnode->symbol.decl))
- {
+ {
vnode->symbol.force_output = 1;
pointer_set_insert (aliased_vnodes, vnode);
if (dump_file)
fprintf (dump_file, " varpool node %s",
varpool_node_name (vnode));
- }
+ }
if (dump_file)
- fprintf (dump_file, "\n");
+ fprintf (dump_file, "\n");
}
FOR_EACH_FUNCTION (node)
@@ -1215,9 +1215,9 @@ cgraph_build_static_cdtor (char which, tree body, int priority)
}
/* A vector of FUNCTION_DECLs declared as static constructors. */
-static VEC(tree, heap) *static_ctors;
+static vec<tree> static_ctors;
/* A vector of FUNCTION_DECLs declared as static destructors. */
-static VEC(tree, heap) *static_dtors;
+static vec<tree> static_dtors;
/* When target does not have ctors and dtors, we call all constructor
and destructor by special initialization/destruction function
@@ -1230,9 +1230,9 @@ static void
record_cdtor_fn (struct cgraph_node *node)
{
if (DECL_STATIC_CONSTRUCTOR (node->symbol.decl))
- VEC_safe_push (tree, heap, static_ctors, node->symbol.decl);
+ static_ctors.safe_push (node->symbol.decl);
if (DECL_STATIC_DESTRUCTOR (node->symbol.decl))
- VEC_safe_push (tree, heap, static_dtors, node->symbol.decl);
+ static_dtors.safe_push (node->symbol.decl);
node = cgraph_get_node (node->symbol.decl);
DECL_DISREGARD_INLINE_LIMITS (node->symbol.decl) = 1;
}
@@ -1243,10 +1243,10 @@ record_cdtor_fn (struct cgraph_node *node)
they are destructors. */
static void
-build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
+build_cdtor (bool ctor_p, vec<tree> cdtors)
{
size_t i,j;
- size_t len = VEC_length (tree, cdtors);
+ size_t len = cdtors.length ();
i = 0;
while (i < len)
@@ -1261,7 +1261,7 @@ build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
do
{
priority_type p;
- fn = VEC_index (tree, cdtors, j);
+ fn = cdtors[j];
p = ctor_p ? DECL_INIT_PRIORITY (fn) : DECL_FINI_PRIORITY (fn);
if (j == i)
priority = p;
@@ -1283,7 +1283,7 @@ build_cdtor (bool ctor_p, VEC (tree, heap) *cdtors)
for (;i < j; i++)
{
tree call;
- fn = VEC_index (tree, cdtors, i);
+ fn = cdtors[i];
call = build_call_expr (fn, 0);
if (ctor_p)
DECL_STATIC_CONSTRUCTOR (fn) = 0;
@@ -1362,17 +1362,17 @@ compare_dtor (const void *p1, const void *p2)
static void
build_cdtor_fns (void)
{
- if (!VEC_empty (tree, static_ctors))
+ if (!static_ctors.is_empty ())
{
gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
- VEC_qsort (tree, static_ctors, compare_ctor);
+ static_ctors.qsort (compare_ctor);
build_cdtor (/*ctor_p=*/true, static_ctors);
}
- if (!VEC_empty (tree, static_dtors))
+ if (!static_dtors.is_empty ())
{
gcc_assert (!targetm.have_ctors_dtors || in_lto_p);
- VEC_qsort (tree, static_dtors, compare_dtor);
+ static_dtors.qsort (compare_dtor);
build_cdtor (/*ctor_p=*/false, static_dtors);
}
}
@@ -1392,8 +1392,8 @@ ipa_cdtor_merge (void)
|| DECL_STATIC_DESTRUCTOR (node->symbol.decl))
record_cdtor_fn (node);
build_cdtor_fns ();
- VEC_free (tree, heap, static_ctors);
- VEC_free (tree, heap, static_dtors);
+ static_ctors.release ();
+ static_dtors.release ();
return 0;
}
diff --git a/gcc/ira-build.c b/gcc/ira-build.c
index 986bb5e5f62..e0b9c1b20cd 100644
--- a/gcc/ira-build.c
+++ b/gcc/ira-build.c
@@ -124,7 +124,7 @@ create_loop_tree_nodes (void)
bool skip_p;
edge_iterator ei;
edge e;
- VEC (edge, heap) *edges;
+ vec<edge> edges;
loop_p loop;
ira_bb_nodes
@@ -150,8 +150,8 @@ create_loop_tree_nodes (void)
}
ira_loop_nodes = ((struct ira_loop_tree_node *)
ira_allocate (sizeof (struct ira_loop_tree_node)
- * VEC_length (loop_p, ira_loops.larray)));
- FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
+ * vec_safe_length (ira_loops.larray)));
+ FOR_EACH_VEC_SAFE_ELT (ira_loops.larray, i, loop)
{
if (loop != ira_loops.tree_root)
{
@@ -167,13 +167,13 @@ create_loop_tree_nodes (void)
if (skip_p)
continue;
edges = get_loop_exit_edges (loop);
- FOR_EACH_VEC_ELT (edge, edges, j, e)
+ FOR_EACH_VEC_ELT (edges, j, e)
if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
{
skip_p = true;
break;
}
- VEC_free (edge, heap, edges);
+ edges.release ();
if (skip_p)
continue;
}
@@ -190,7 +190,7 @@ more_one_region_p (void)
loop_p loop;
if (current_loops != NULL)
- FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
+ FOR_EACH_VEC_SAFE_ELT (ira_loops.larray, i, loop)
if (ira_loop_nodes[i].regno_allocno_map != NULL
&& ira_loop_tree_root != &ira_loop_nodes[i])
return true;
@@ -223,7 +223,7 @@ finish_loop_tree_nodes (void)
if (current_loops == NULL)
finish_loop_tree_node (&ira_loop_nodes[0]);
else
- FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
+ FOR_EACH_VEC_SAFE_ELT (ira_loops.larray, i, loop)
finish_loop_tree_node (&ira_loop_nodes[i]);
ira_free (ira_loop_nodes);
for (i = 0; i < (unsigned int) last_basic_block_before_change; i++)
@@ -379,7 +379,7 @@ rebuild_regno_allocno_maps (void)
ira_assert (current_loops != NULL);
max_regno = max_reg_num ();
- FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, l, loop)
+ FOR_EACH_VEC_SAFE_ELT (ira_loops.larray, l, loop)
if (ira_loop_nodes[l].regno_allocno_map != NULL)
{
ira_free (ira_loop_nodes[l].regno_allocno_map);
@@ -415,11 +415,11 @@ static alloc_pool allocno_pool, live_range_pool, object_pool;
/* Vec containing references to all created allocnos. It is a
container of array allocnos. */
-static VEC(ira_allocno_t,heap) *allocno_vec;
+static vec<ira_allocno_t> allocno_vec;
/* Vec containing references to all created ira_objects. It is a
container of ira_object_id_map. */
-static VEC(ira_object_t,heap) *ira_object_id_map_vec;
+static vec<ira_object_t> ira_object_id_map_vec;
/* Initialize data concerning allocnos. */
static void
@@ -432,12 +432,11 @@ initiate_allocnos (void)
= create_alloc_pool ("allocnos", sizeof (struct ira_allocno), 100);
object_pool
= create_alloc_pool ("objects", sizeof (struct ira_object), 100);
- allocno_vec = VEC_alloc (ira_allocno_t, heap, max_reg_num () * 2);
+ allocno_vec.create (max_reg_num () * 2);
ira_allocnos = NULL;
ira_allocnos_num = 0;
ira_objects_num = 0;
- ira_object_id_map_vec
- = VEC_alloc (ira_object_t, heap, max_reg_num () * 2);
+ ira_object_id_map_vec.create (max_reg_num () * 2);
ira_object_id_map = NULL;
ira_regno_allocno_map
= (ira_allocno_t *) ira_allocate (max_reg_num ()
@@ -468,10 +467,10 @@ ira_create_object (ira_allocno_t a, int subword)
OBJECT_MAX (obj) = -1;
OBJECT_LIVE_RANGES (obj) = NULL;
- VEC_safe_push (ira_object_t, heap, ira_object_id_map_vec, obj);
+ ira_object_id_map_vec.safe_push (obj);
ira_object_id_map
- = VEC_address (ira_object_t, ira_object_id_map_vec);
- ira_objects_num = VEC_length (ira_object_t, ira_object_id_map_vec);
+ = ira_object_id_map_vec.address ();
+ ira_objects_num = ira_object_id_map_vec.length ();
return obj;
}
@@ -530,9 +529,9 @@ ira_create_allocno (int regno, bool cap_p,
ALLOCNO_NUM_OBJECTS (a) = 0;
ALLOCNO_ADD_DATA (a) = NULL;
- VEC_safe_push (ira_allocno_t, heap, allocno_vec, a);
- ira_allocnos = VEC_address (ira_allocno_t, allocno_vec);
- ira_allocnos_num = VEC_length (ira_allocno_t, allocno_vec);
+ allocno_vec.safe_push (a);
+ ira_allocnos = allocno_vec.address ();
+ ira_allocnos_num = allocno_vec.length ();
return a;
}
@@ -1155,8 +1154,8 @@ finish_allocnos (void)
FOR_EACH_ALLOCNO (a, ai)
finish_allocno (a);
ira_free (ira_regno_allocno_map);
- VEC_free (ira_object_t, heap, ira_object_id_map_vec);
- VEC_free (ira_allocno_t, heap, allocno_vec);
+ ira_object_id_map_vec.release ();
+ allocno_vec.release ();
free_alloc_pool (allocno_pool);
free_alloc_pool (object_pool);
free_alloc_pool (live_range_pool);
@@ -1169,7 +1168,7 @@ static alloc_pool copy_pool;
/* Vec containing references to all created copies. It is a
container of array ira_copies. */
-static VEC(ira_copy_t,heap) *copy_vec;
+static vec<ira_copy_t> copy_vec;
/* The function initializes data concerning allocno copies. */
static void
@@ -1177,7 +1176,7 @@ initiate_copies (void)
{
copy_pool
= create_alloc_pool ("copies", sizeof (struct ira_allocno_copy), 100);
- copy_vec = VEC_alloc (ira_copy_t, heap, get_max_uid ());
+ copy_vec.create (get_max_uid ());
ira_copies = NULL;
ira_copies_num = 0;
}
@@ -1229,9 +1228,9 @@ ira_create_copy (ira_allocno_t first, ira_allocno_t second, int freq,
cp->constraint_p = constraint_p;
cp->insn = insn;
cp->loop_tree_node = loop_tree_node;
- VEC_safe_push (ira_copy_t, heap, copy_vec, cp);
- ira_copies = VEC_address (ira_copy_t, copy_vec);
- ira_copies_num = VEC_length (ira_copy_t, copy_vec);
+ copy_vec.safe_push (cp);
+ ira_copies = copy_vec.address ();
+ ira_copies_num = copy_vec.length ();
return cp;
}
@@ -1399,7 +1398,7 @@ finish_copies (void)
FOR_EACH_COPY (cp, ci)
finish_copy (cp);
- VEC_free (ira_copy_t, heap, copy_vec);
+ copy_vec.release ();
free_alloc_pool (copy_pool);
}
@@ -1472,49 +1471,47 @@ finish_cost_vectors (void)
correct post-ordering but it would be less likely that two nodes
connected by an edge in the CFG are neighbours in the topsort. */
-static VEC (ira_loop_tree_node_t, heap) *
+static vec<ira_loop_tree_node_t>
ira_loop_tree_body_rev_postorder (ira_loop_tree_node_t loop_node ATTRIBUTE_UNUSED,
- VEC (ira_loop_tree_node_t, heap) *loop_preorder)
+ vec<ira_loop_tree_node_t> loop_preorder)
{
- VEC (ira_loop_tree_node_t, heap) *topsort_nodes = NULL;
+ vec<ira_loop_tree_node_t> topsort_nodes = vec<ira_loop_tree_node_t>();
unsigned int n_loop_preorder;
- n_loop_preorder = VEC_length (ira_loop_tree_node_t, loop_preorder);
+ n_loop_preorder = loop_preorder.length ();
if (n_loop_preorder != 0)
{
ira_loop_tree_node_t subloop_node;
unsigned int i;
- VEC (ira_loop_tree_node_t, heap) *dfs_stack;
+ vec<ira_loop_tree_node_t> dfs_stack;
/* This is a bit of strange abuse of the BB_VISITED flag: We use
the flag to mark blocks we still have to visit to add them to
our post-order. Define an alias to avoid confusion. */
#define BB_TO_VISIT BB_VISITED
- FOR_EACH_VEC_ELT (ira_loop_tree_node_t, loop_preorder, i, subloop_node)
+ FOR_EACH_VEC_ELT (loop_preorder, i, subloop_node)
{
gcc_checking_assert (! (subloop_node->bb->flags & BB_TO_VISIT));
subloop_node->bb->flags |= BB_TO_VISIT;
}
- topsort_nodes = VEC_alloc (ira_loop_tree_node_t, heap, n_loop_preorder);
- dfs_stack = VEC_alloc (ira_loop_tree_node_t, heap, n_loop_preorder);
+ topsort_nodes.create (n_loop_preorder);
+ dfs_stack.create (n_loop_preorder);
- FOR_EACH_VEC_ELT_REVERSE (ira_loop_tree_node_t, loop_preorder,
- i, subloop_node)
+ FOR_EACH_VEC_ELT_REVERSE (loop_preorder, i, subloop_node)
{
if (! (subloop_node->bb->flags & BB_TO_VISIT))
continue;
subloop_node->bb->flags &= ~BB_TO_VISIT;
- VEC_quick_push (ira_loop_tree_node_t, dfs_stack, subloop_node);
- while (! VEC_empty (ira_loop_tree_node_t, dfs_stack))
+ dfs_stack.quick_push (subloop_node);
+ while (! dfs_stack.is_empty ())
{
edge e;
edge_iterator ei;
- ira_loop_tree_node_t n = VEC_last (ira_loop_tree_node_t,
- dfs_stack);
+ ira_loop_tree_node_t n = dfs_stack.last ();
FOR_EACH_EDGE (e, ei, n->bb->preds)
{
ira_loop_tree_node_t pred_node;
@@ -1528,23 +1525,22 @@ ira_loop_tree_body_rev_postorder (ira_loop_tree_node_t loop_node ATTRIBUTE_UNUSE
&& (pred_node->bb->flags & BB_TO_VISIT))
{
pred_node->bb->flags &= ~BB_TO_VISIT;
- VEC_quick_push (ira_loop_tree_node_t, dfs_stack, pred_node);
+ dfs_stack.quick_push (pred_node);
}
}
- if (n == VEC_last (ira_loop_tree_node_t, dfs_stack))
+ if (n == dfs_stack.last ())
{
- VEC_pop (ira_loop_tree_node_t, dfs_stack);
- VEC_quick_push (ira_loop_tree_node_t, topsort_nodes, n);
+ dfs_stack.pop ();
+ topsort_nodes.quick_push (n);
}
}
}
#undef BB_TO_VISIT
- VEC_free (ira_loop_tree_node_t, heap, dfs_stack);
+ dfs_stack.release ();
}
- gcc_assert (VEC_length (ira_loop_tree_node_t, topsort_nodes)
- == n_loop_preorder);
+ gcc_assert (topsort_nodes.length () == n_loop_preorder);
return topsort_nodes;
}
@@ -1583,7 +1579,8 @@ ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
if (bb_p)
{
- VEC (ira_loop_tree_node_t, heap) *loop_preorder = NULL;
+ vec<ira_loop_tree_node_t>
+ loop_preorder = vec<ira_loop_tree_node_t>();
unsigned int i;
/* Add all nodes to the set of nodes to visit. The IRA loop tree
@@ -1593,24 +1590,22 @@ ira_traverse_loop_tree (bool bb_p, ira_loop_tree_node_t loop_node,
subloop_node != NULL;
subloop_node = subloop_node->next)
if (subloop_node->bb != NULL)
- VEC_safe_push (ira_loop_tree_node_t, heap,
- loop_preorder, subloop_node);
+ loop_preorder.safe_push (subloop_node);
if (preorder_func != NULL)
- FOR_EACH_VEC_ELT (ira_loop_tree_node_t, loop_preorder, i, subloop_node)
+ FOR_EACH_VEC_ELT (loop_preorder, i, subloop_node)
(*preorder_func) (subloop_node);
if (postorder_func != NULL)
{
- VEC (ira_loop_tree_node_t, heap) *loop_rev_postorder =
+ vec<ira_loop_tree_node_t> loop_rev_postorder =
ira_loop_tree_body_rev_postorder (loop_node, loop_preorder);
- FOR_EACH_VEC_ELT_REVERSE (ira_loop_tree_node_t, loop_rev_postorder,
- i, subloop_node)
+ FOR_EACH_VEC_ELT_REVERSE (loop_rev_postorder, i, subloop_node)
(*postorder_func) (subloop_node);
- VEC_free (ira_loop_tree_node_t, heap, loop_rev_postorder);
+ loop_rev_postorder.release ();
}
- VEC_free (ira_loop_tree_node_t, heap, loop_preorder);
+ loop_preorder.release ();
}
for (subloop_node = loop_node->subloops;
@@ -1764,7 +1759,7 @@ create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
int i;
edge_iterator ei;
edge e;
- VEC (edge, heap) *edges;
+ vec<edge> edges;
ira_assert (current_loops != NULL);
FOR_EACH_EDGE (e, ei, loop_node->loop->header->preds)
@@ -1772,9 +1767,9 @@ create_loop_tree_node_allocnos (ira_loop_tree_node_t loop_node)
create_loop_allocnos (e);
edges = get_loop_exit_edges (loop_node->loop);
- FOR_EACH_VEC_ELT (edge, edges, i, e)
+ FOR_EACH_VEC_ELT (edges, i, e)
create_loop_allocnos (e);
- VEC_free (edge, heap, edges);
+ edges.release ();
}
}
@@ -1963,7 +1958,7 @@ loop_with_complex_edge_p (struct loop *loop)
int i;
edge_iterator ei;
edge e;
- VEC (edge, heap) *edges;
+ vec<edge> edges;
bool res;
FOR_EACH_EDGE (e, ei, loop->header->preds)
@@ -1971,13 +1966,13 @@ loop_with_complex_edge_p (struct loop *loop)
return true;
edges = get_loop_exit_edges (loop);
res = false;
- FOR_EACH_VEC_ELT (edge, edges, i, e)
+ FOR_EACH_VEC_ELT (edges, i, e)
if (e->flags & EDGE_COMPLEX)
{
res = true;
break;
}
- VEC_free (edge, heap, edges);
+ edges.release ();
return res;
}
#endif
@@ -2027,9 +2022,8 @@ mark_loops_for_removal (void)
ira_assert (current_loops != NULL);
sorted_loops
= (ira_loop_tree_node_t *) ira_allocate (sizeof (ira_loop_tree_node_t)
- * VEC_length (loop_p,
- ira_loops.larray));
- for (n = i = 0; VEC_iterate (loop_p, ira_loops.larray, i, loop); i++)
+ * vec_safe_length (ira_loops.larray));
+ for (n = i = 0; vec_safe_iterate (ira_loops.larray, i, &loop); i++)
if (ira_loop_nodes[i].regno_allocno_map != NULL)
{
if (ira_loop_nodes[i].parent == NULL)
@@ -2073,7 +2067,7 @@ mark_all_loops_for_removal (void)
loop_p loop;
ira_assert (current_loops != NULL);
- FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
+ FOR_EACH_VEC_SAFE_ELT (ira_loops.larray, i, loop)
if (ira_loop_nodes[i].regno_allocno_map != NULL)
{
if (ira_loop_nodes[i].parent == NULL)
@@ -2095,14 +2089,12 @@ mark_all_loops_for_removal (void)
}
/* Definition of vector of loop tree nodes. */
-DEF_VEC_P(ira_loop_tree_node_t);
-DEF_VEC_ALLOC_P(ira_loop_tree_node_t, heap);
/* Vec containing references to all removed loop tree nodes. */
-static VEC(ira_loop_tree_node_t,heap) *removed_loop_vec;
+static vec<ira_loop_tree_node_t> removed_loop_vec;
/* Vec containing references to all children of loop tree nodes. */
-static VEC(ira_loop_tree_node_t,heap) *children_vec;
+static vec<ira_loop_tree_node_t> children_vec;
/* Remove subregions of NODE if their separate allocation will not
improve the result. */
@@ -2115,22 +2107,22 @@ remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_node_t node)
remove_p = node->to_remove_p;
if (! remove_p)
- VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, node);
- start = VEC_length (ira_loop_tree_node_t, children_vec);
+ children_vec.safe_push (node);
+ start = children_vec.length ();
for (subnode = node->children; subnode != NULL; subnode = subnode->next)
if (subnode->bb == NULL)
remove_uneccesary_loop_nodes_from_loop_tree (subnode);
else
- VEC_safe_push (ira_loop_tree_node_t, heap, children_vec, subnode);
+ children_vec.safe_push (subnode);
node->children = node->subloops = NULL;
if (remove_p)
{
- VEC_safe_push (ira_loop_tree_node_t, heap, removed_loop_vec, node);
+ removed_loop_vec.safe_push (node);
return;
}
- while (VEC_length (ira_loop_tree_node_t, children_vec) > start)
+ while (children_vec.length () > start)
{
- subnode = VEC_pop (ira_loop_tree_node_t, children_vec);
+ subnode = children_vec.pop ();
subnode->parent = node;
subnode->next = node->children;
node->children = subnode;
@@ -2385,21 +2377,17 @@ remove_unnecessary_regions (bool all_p)
mark_all_loops_for_removal ();
else
mark_loops_for_removal ();
- children_vec
- = VEC_alloc (ira_loop_tree_node_t, heap,
- last_basic_block + VEC_length (loop_p, ira_loops.larray));
- removed_loop_vec
- = VEC_alloc (ira_loop_tree_node_t, heap,
- last_basic_block + VEC_length (loop_p, ira_loops.larray));
- remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root) ;
- VEC_free (ira_loop_tree_node_t, heap, children_vec);
+ children_vec.create(last_basic_block + vec_safe_length(ira_loops.larray));
+ removed_loop_vec.create(last_basic_block + vec_safe_length(ira_loops.larray));
+ remove_uneccesary_loop_nodes_from_loop_tree (ira_loop_tree_root);
+ children_vec.release ();
if (all_p)
remove_low_level_allocnos ();
else
remove_unnecessary_allocnos ();
- while (VEC_length (ira_loop_tree_node_t, removed_loop_vec) > 0)
- finish_loop_tree_node (VEC_pop (ira_loop_tree_node_t, removed_loop_vec));
- VEC_free (ira_loop_tree_node_t, heap, removed_loop_vec);
+ while (removed_loop_vec.length () > 0)
+ finish_loop_tree_node (removed_loop_vec.pop ());
+ removed_loop_vec.release ();
}
@@ -3271,7 +3259,7 @@ ira_build (void)
}
}
fprintf (ira_dump_file, " regions=%d, blocks=%d, points=%d\n",
- current_loops == NULL ? 1 : VEC_length (loop_p, ira_loops.larray),
+ current_loops == NULL ? 1 : vec_safe_length (ira_loops.larray),
n_basic_blocks, ira_max_point);
fprintf (ira_dump_file,
" allocnos=%d (big %d), copies=%d, conflicts=%d, ranges=%d\n",
diff --git a/gcc/ira-color.c b/gcc/ira-color.c
index dd4c73b9482..33f9d24b00b 100644
--- a/gcc/ira-color.c
+++ b/gcc/ira-color.c
@@ -159,7 +159,7 @@ static bitmap consideration_allocno_bitmap;
static ira_allocno_t *sorted_allocnos;
/* Vec representing the stack of allocnos used during coloring. */
-static VEC(ira_allocno_t,heap) *allocno_stack_vec;
+static vec<ira_allocno_t> allocno_stack_vec;
/* Helper for qsort comparison callbacks - return a positive integer if
X > Y, or a negative value otherwise. Use a conditional expression
@@ -170,11 +170,9 @@ static VEC(ira_allocno_t,heap) *allocno_stack_vec;
/* Definition of vector of allocno hard registers. */
-DEF_VEC_P(allocno_hard_regs_t);
-DEF_VEC_ALLOC_P(allocno_hard_regs_t, heap);
/* Vector of unique allocno hard registers. */
-static VEC(allocno_hard_regs_t, heap) *allocno_hard_regs_vec;
+static vec<allocno_hard_regs_t> allocno_hard_regs_vec;
/* Returns hash value for allocno hard registers V. */
static hashval_t
@@ -221,7 +219,7 @@ insert_hard_regs (allocno_hard_regs_t hv)
static void
init_allocno_hard_regs (void)
{
- allocno_hard_regs_vec = VEC_alloc (allocno_hard_regs_t, heap, 200);
+ allocno_hard_regs_vec.create (200);
allocno_hard_regs_htab
= htab_create (200, allocno_hard_regs_hash, allocno_hard_regs_eq, NULL);
}
@@ -244,7 +242,7 @@ add_allocno_hard_regs (HARD_REG_SET set, HOST_WIDEST_INT cost)
ira_allocate (sizeof (struct allocno_hard_regs)));
COPY_HARD_REG_SET (hv->set, set);
hv->cost = cost;
- VEC_safe_push (allocno_hard_regs_t, heap, allocno_hard_regs_vec, hv);
+ allocno_hard_regs_vec.safe_push (hv);
insert_hard_regs (hv);
}
return hv;
@@ -258,11 +256,11 @@ finish_allocno_hard_regs (void)
allocno_hard_regs_t hv;
for (i = 0;
- VEC_iterate (allocno_hard_regs_t, allocno_hard_regs_vec, i, hv);
+ allocno_hard_regs_vec.iterate (i, &hv);
i++)
ira_free (hv);
htab_delete (allocno_hard_regs_htab);
- VEC_free (allocno_hard_regs_t, heap, allocno_hard_regs_vec);
+ allocno_hard_regs_vec.release ();
}
/* Sort hard regs according to their frequency of usage. */
@@ -297,11 +295,9 @@ static int node_check_tick;
static allocno_hard_regs_node_t hard_regs_roots;
/* Definition of vector of allocno hard register nodes. */
-DEF_VEC_P(allocno_hard_regs_node_t);
-DEF_VEC_ALLOC_P(allocno_hard_regs_node_t, heap);
/* Vector used to create the forest. */
-static VEC(allocno_hard_regs_node_t, heap) *hard_regs_node_vec;
+static vec<allocno_hard_regs_node_t> hard_regs_node_vec;
/* Create and return allocno hard registers node containing allocno
hard registers HV. */
@@ -344,7 +340,7 @@ add_allocno_hard_regs_to_forest (allocno_hard_regs_node_t *roots,
HARD_REG_SET temp_set;
allocno_hard_regs_t hv2;
- start = VEC_length (allocno_hard_regs_node_t, hard_regs_node_vec);
+ start = hard_regs_node_vec.length ();
for (node = *roots; node != NULL; node = node->next)
{
if (hard_reg_set_equal_p (hv->set, node->hard_regs->set))
@@ -355,8 +351,7 @@ add_allocno_hard_regs_to_forest (allocno_hard_regs_node_t *roots,
return;
}
if (hard_reg_set_subset_p (node->hard_regs->set, hv->set))
- VEC_safe_push (allocno_hard_regs_node_t, heap,
- hard_regs_node_vec, node);
+ hard_regs_node_vec.safe_push (node);
else if (hard_reg_set_intersect_p (hv->set, node->hard_regs->set))
{
COPY_HARD_REG_SET (temp_set, hv->set);
@@ -365,26 +360,26 @@ add_allocno_hard_regs_to_forest (allocno_hard_regs_node_t *roots,
add_allocno_hard_regs_to_forest (&node->first, hv2);
}
}
- if (VEC_length (allocno_hard_regs_node_t, hard_regs_node_vec)
+ if (hard_regs_node_vec.length ()
> start + 1)
{
/* Create a new node which contains nodes in hard_regs_node_vec. */
CLEAR_HARD_REG_SET (temp_set);
for (i = start;
- i < VEC_length (allocno_hard_regs_node_t, hard_regs_node_vec);
+ i < hard_regs_node_vec.length ();
i++)
{
- node = VEC_index (allocno_hard_regs_node_t, hard_regs_node_vec, i);
+ node = hard_regs_node_vec[i];
IOR_HARD_REG_SET (temp_set, node->hard_regs->set);
}
hv = add_allocno_hard_regs (temp_set, hv->cost);
new_node = create_new_allocno_hard_regs_node (hv);
prev = NULL;
for (i = start;
- i < VEC_length (allocno_hard_regs_node_t, hard_regs_node_vec);
+ i < hard_regs_node_vec.length ();
i++)
{
- node = VEC_index (allocno_hard_regs_node_t, hard_regs_node_vec, i);
+ node = hard_regs_node_vec[i];
if (node->prev == NULL)
*roots = node->next;
else
@@ -401,7 +396,7 @@ add_allocno_hard_regs_to_forest (allocno_hard_regs_node_t *roots,
}
add_new_allocno_hard_regs_node_to_forest (roots, new_node);
}
- VEC_truncate (allocno_hard_regs_node_t, hard_regs_node_vec, start);
+ hard_regs_node_vec.truncate (start);
}
/* Add allocno hard registers nodes starting with the forest level
@@ -415,8 +410,7 @@ collect_allocno_hard_regs_cover (allocno_hard_regs_node_t first,
ira_assert (first != NULL);
for (node = first; node != NULL; node = node->next)
if (hard_reg_set_subset_p (node->hard_regs->set, set))
- VEC_safe_push (allocno_hard_regs_node_t, heap, hard_regs_node_vec,
- node);
+ hard_regs_node_vec.safe_push (node);
else if (hard_reg_set_intersect_p (set, node->hard_regs->set))
collect_allocno_hard_regs_cover (node->first, set);
}
@@ -673,7 +667,7 @@ form_allocno_hard_regs_nodes_forest (void)
node_check_tick = 0;
init_allocno_hard_regs ();
hard_regs_roots = NULL;
- hard_regs_node_vec = VEC_alloc (allocno_hard_regs_node_t, heap, 100);
+ hard_regs_node_vec.create (100);
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (! TEST_HARD_REG_BIT (ira_no_alloc_regs, i))
{
@@ -683,7 +677,7 @@ form_allocno_hard_regs_nodes_forest (void)
node = create_new_allocno_hard_regs_node (hv);
add_new_allocno_hard_regs_node_to_forest (&hard_regs_roots, node);
}
- start = VEC_length (allocno_hard_regs_t, allocno_hard_regs_vec);
+ start = allocno_hard_regs_vec.length ();
EXECUTE_IF_SET_IN_BITMAP (coloring_allocno_bitmap, 0, i, bi)
{
a = ira_allocnos[i];
@@ -698,16 +692,15 @@ form_allocno_hard_regs_nodes_forest (void)
SET_HARD_REG_SET (temp);
AND_COMPL_HARD_REG_SET (temp, ira_no_alloc_regs);
add_allocno_hard_regs (temp, 0);
- qsort (VEC_address (allocno_hard_regs_t, allocno_hard_regs_vec) + start,
- VEC_length (allocno_hard_regs_t, allocno_hard_regs_vec) - start,
+ qsort (allocno_hard_regs_vec.address () + start,
+ allocno_hard_regs_vec.length () - start,
sizeof (allocno_hard_regs_t), allocno_hard_regs_compare);
for (i = start;
- VEC_iterate (allocno_hard_regs_t, allocno_hard_regs_vec, i, hv);
+ allocno_hard_regs_vec.iterate (i, &hv);
i++)
{
add_allocno_hard_regs_to_forest (&hard_regs_roots, hv);
- ira_assert (VEC_length (allocno_hard_regs_node_t,
- hard_regs_node_vec) == 0);
+ ira_assert (hard_regs_node_vec.length () == 0);
}
/* We need to set up parent fields for right work of
first_common_ancestor_node. */
@@ -718,14 +711,11 @@ form_allocno_hard_regs_nodes_forest (void)
allocno_data = ALLOCNO_COLOR_DATA (a);
if (hard_reg_set_empty_p (allocno_data->profitable_hard_regs))
continue;
- VEC_truncate (allocno_hard_regs_node_t, hard_regs_node_vec, 0);
+ hard_regs_node_vec.truncate (0);
collect_allocno_hard_regs_cover (hard_regs_roots,
allocno_data->profitable_hard_regs);
allocno_hard_regs_node = NULL;
- for (j = 0;
- VEC_iterate (allocno_hard_regs_node_t, hard_regs_node_vec,
- j, node);
- j++)
+ for (j = 0; hard_regs_node_vec.iterate (j, &node); j++)
allocno_hard_regs_node
= (j == 0
? node
@@ -764,7 +754,7 @@ form_allocno_hard_regs_nodes_forest (void)
allocno_hard_regs_subnodes
= ((allocno_hard_regs_subnode_t)
ira_allocate (sizeof (struct allocno_hard_regs_subnode) * start));
- VEC_free (allocno_hard_regs_node_t, heap, hard_regs_node_vec);
+ hard_regs_node_vec.release ();
}
/* Free tree of allocno hard registers nodes given by its ROOT. */
@@ -1911,7 +1901,7 @@ push_allocno_to_stack (ira_allocno_t a)
data = ALLOCNO_COLOR_DATA (a);
data->in_graph_p = false;
- VEC_safe_push (ira_allocno_t, heap, allocno_stack_vec, a);
+ allocno_stack_vec.safe_push (a);
aclass = ALLOCNO_CLASS (a);
if (aclass == NO_REGS)
return;
@@ -2004,7 +1994,7 @@ ira_loop_edge_freq (ira_loop_tree_node_t loop_node, int regno, bool exit_p)
int freq, i;
edge_iterator ei;
edge e;
- VEC (edge, heap) *edges;
+ vec<edge> edges;
ira_assert (current_loops != NULL && loop_node->loop != NULL
&& (regno < 0 || regno >= FIRST_PSEUDO_REGISTER));
@@ -2021,12 +2011,12 @@ ira_loop_edge_freq (ira_loop_tree_node_t loop_node, int regno, bool exit_p)
else
{
edges = get_loop_exit_edges (loop_node->loop);
- FOR_EACH_VEC_ELT (edge, edges, i, e)
+ FOR_EACH_VEC_ELT (edges, i, e)
if (regno < 0
|| (bitmap_bit_p (df_get_live_out (e->src), regno)
&& bitmap_bit_p (df_get_live_in (e->dest), regno)))
freq += EDGE_FREQUENCY (e);
- VEC_free (edge, heap, edges);
+ edges.release ();
}
return REG_FREQ_FROM_EDGE_FREQ (freq);
@@ -2143,9 +2133,9 @@ pop_allocnos_from_stack (void)
ira_allocno_t allocno;
enum reg_class aclass;
- for (;VEC_length (ira_allocno_t, allocno_stack_vec) != 0;)
+ for (;allocno_stack_vec.length () != 0;)
{
- allocno = VEC_pop (ira_allocno_t, allocno_stack_vec);
+ allocno = allocno_stack_vec.pop ();
aclass = ALLOCNO_CLASS (allocno);
if (internal_flag_ira_verbose > 3 && ira_dump_file != NULL)
{
@@ -4314,12 +4304,12 @@ ira_finish_assign (void)
static void
color (void)
{
- allocno_stack_vec = VEC_alloc (ira_allocno_t, heap, ira_allocnos_num);
+ allocno_stack_vec.create (ira_allocnos_num);
memset (allocated_hardreg_p, 0, sizeof (allocated_hardreg_p));
ira_initiate_assign ();
do_coloring ();
ira_finish_assign ();
- VEC_free (ira_allocno_t, heap, allocno_stack_vec);
+ allocno_stack_vec.release ();
move_spill_restore ();
}
diff --git a/gcc/ira-emit.c b/gcc/ira-emit.c
index 1e7f354102e..291c9a0e0b8 100644
--- a/gcc/ira-emit.c
+++ b/gcc/ira-emit.c
@@ -92,13 +92,11 @@ ira_emit_data_t ira_allocno_emit_data;
/* Definitions for vectors of pointers. */
typedef void *void_p;
-DEF_VEC_P (void_p);
-DEF_VEC_ALLOC_P (void_p,heap);
/* Pointers to data allocated for allocnos being created during
emitting. Usually there are quite few such allocnos because they
are created only for resolving loop in register shuffling. */
-static VEC(void_p, heap) *new_allocno_emit_data_vec;
+static vec<void_p> new_allocno_emit_data_vec;
/* Allocate and initiate the emit data. */
void
@@ -114,7 +112,7 @@ ira_initiate_emit_data (void)
ira_allocnos_num * sizeof (struct ira_emit_data));
FOR_EACH_ALLOCNO (a, ai)
ALLOCNO_ADD_DATA (a) = ira_allocno_emit_data + ALLOCNO_NUM (a);
- new_allocno_emit_data_vec = VEC_alloc (void_p, heap, 50);
+ new_allocno_emit_data_vec.create (50);
}
@@ -129,12 +127,12 @@ ira_finish_emit_data (void)
ira_free (ira_allocno_emit_data);
FOR_EACH_ALLOCNO (a, ai)
ALLOCNO_ADD_DATA (a) = NULL;
- for (;VEC_length (void_p, new_allocno_emit_data_vec) != 0;)
+ for (;new_allocno_emit_data_vec.length () != 0;)
{
- p = VEC_pop (void_p, new_allocno_emit_data_vec);
+ p = new_allocno_emit_data_vec.pop ();
ira_free (p);
}
- VEC_free (void_p, heap, new_allocno_emit_data_vec);
+ new_allocno_emit_data_vec.release ();
}
/* Create and return a new allocno with given REGNO and
@@ -147,7 +145,7 @@ create_new_allocno (int regno, ira_loop_tree_node_t loop_tree_node)
a = ira_create_allocno (regno, false, loop_tree_node);
ALLOCNO_ADD_DATA (a) = ira_allocate (sizeof (struct ira_emit_data));
memset (ALLOCNO_ADD_DATA (a), 0, sizeof (struct ira_emit_data));
- VEC_safe_push (void_p, heap, new_allocno_emit_data_vec, ALLOCNO_ADD_DATA (a));
+ new_allocno_emit_data_vec.safe_push (ALLOCNO_ADD_DATA (a));
return a;
}
@@ -437,7 +435,7 @@ setup_entered_from_non_parent_p (void)
loop_p loop;
ira_assert (current_loops != NULL);
- FOR_EACH_VEC_ELT (loop_p, ira_loops.larray, i, loop)
+ FOR_EACH_VEC_SAFE_ELT (ira_loops.larray, i, loop)
if (ira_loop_nodes[i].regno_allocno_map != NULL)
ira_loop_nodes[i].entered_from_non_parent_p
= entered_from_non_parent_p (&ira_loop_nodes[i]);
@@ -669,7 +667,7 @@ set_allocno_somewhere_renamed_p (void)
/* Return TRUE if move lists on all edges given in vector VEC are
equal. */
static bool
-eq_edge_move_lists_p (VEC(edge,gc) *vec)
+eq_edge_move_lists_p (vec<edge, va_gc> *vec)
{
move_t list;
int i;
@@ -690,7 +688,7 @@ unify_moves (basic_block bb, bool start_p)
int i;
edge e;
move_t list;
- VEC(edge,gc) *vec;
+ vec<edge, va_gc> *vec;
vec = (start_p ? bb->preds : bb->succs);
if (EDGE_COUNT (vec) == 0 || ! eq_edge_move_lists_p (vec))
@@ -729,12 +727,10 @@ static move_t *allocno_last_set;
static int *allocno_last_set_check;
/* Definition of vector of moves. */
-DEF_VEC_P(move_t);
-DEF_VEC_ALLOC_P(move_t, heap);
/* This vec contains moves sorted topologically (depth-first) on their
dependency graph. */
-static VEC(move_t,heap) *move_vec;
+static vec<move_t> move_vec;
/* The variable value is used to check correctness of values of
elements of arrays `hard_regno_last_set' and
@@ -753,7 +749,7 @@ traverse_moves (move_t move)
move->visited_p = true;
for (i = move->deps_num - 1; i >= 0; i--)
traverse_moves (move->deps[i]);
- VEC_safe_push (move_t, heap, move_vec, move);
+ move_vec.safe_push (move);
}
/* Remove unnecessary moves in the LIST, makes topological sorting,
@@ -805,22 +801,22 @@ modify_move_list (move_t list)
}
}
/* Toplogical sorting: */
- VEC_truncate (move_t, move_vec, 0);
+ move_vec.truncate (0);
for (move = list; move != NULL; move = move->next)
traverse_moves (move);
last = NULL;
- for (i = (int) VEC_length (move_t, move_vec) - 1; i >= 0; i--)
+ for (i = (int) move_vec.length () - 1; i >= 0; i--)
{
- move = VEC_index (move_t, move_vec, i);
+ move = move_vec[i];
move->next = NULL;
if (last != NULL)
last->next = move;
last = move;
}
- first = VEC_last (move_t, move_vec);
+ first = move_vec.last ();
/* Removing cycles: */
curr_tick++;
- VEC_truncate (move_t, move_vec, 0);
+ move_vec.truncate (0);
for (move = first; move != NULL; move = move->next)
{
from = move->from;
@@ -868,7 +864,7 @@ modify_move_list (move_t list)
new_move = create_move (set_move->to, new_allocno);
set_move->to = new_allocno;
- VEC_safe_push (move_t, heap, move_vec, new_move);
+ move_vec.safe_push (new_move);
ira_move_loops_num++;
if (internal_flag_ira_verbose > 2 && ira_dump_file != NULL)
fprintf (ira_dump_file,
@@ -886,9 +882,9 @@ modify_move_list (move_t list)
hard_regno_last_set_check[hard_regno + i] = curr_tick;
}
}
- for (i = (int) VEC_length (move_t, move_vec) - 1; i >= 0; i--)
+ for (i = (int) move_vec.length () - 1; i >= 0; i--)
{
- move = VEC_index (move_t, move_vec, i);
+ move = move_vec[i];
move->next = NULL;
last->next = move;
last = move;
@@ -941,8 +937,7 @@ emit_move_list (move_t list, int freq)
|| (ira_reg_equiv[regno].invariant == NULL_RTX
&& ira_reg_equiv[regno].constant == NULL_RTX))
continue; /* regno has no equivalence. */
- ira_assert ((int) VEC_length (reg_equivs_t, reg_equivs)
- > regno);
+ ira_assert ((int) reg_equivs->length () > regno);
reg_equiv_init (regno)
= gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init (regno));
}
@@ -1283,7 +1278,7 @@ ira_emit (bool loops_p)
unify_moves (bb, true);
FOR_EACH_BB (bb)
unify_moves (bb, false);
- move_vec = VEC_alloc (move_t, heap, ira_allocnos_num);
+ move_vec.create (ira_allocnos_num);
emit_moves ();
add_ranges_and_copies ();
/* Clean up: */
@@ -1297,7 +1292,7 @@ ira_emit (bool loops_p)
e->aux = NULL;
}
}
- VEC_free (move_t, heap, move_vec);
+ move_vec.release ();
ira_free (allocno_last_set_check);
ira_free (allocno_last_set);
commit_edge_insertions ();
diff --git a/gcc/ira-int.h b/gcc/ira-int.h
index a64e3a14afe..69db4411b47 100644
--- a/gcc/ira-int.h
+++ b/gcc/ira-int.h
@@ -65,12 +65,6 @@ typedef struct ira_allocno_copy *ira_copy_t;
typedef struct ira_object *ira_object_t;
/* Definition of vector of allocnos and copies. */
-DEF_VEC_P(ira_allocno_t);
-DEF_VEC_ALLOC_P(ira_allocno_t, heap);
-DEF_VEC_P(ira_object_t);
-DEF_VEC_ALLOC_P(ira_object_t, heap);
-DEF_VEC_P(ira_copy_t);
-DEF_VEC_ALLOC_P(ira_copy_t, heap);
/* Typedef for pointer to the subsequent structure. */
typedef struct ira_loop_tree_node *ira_loop_tree_node_t;
diff --git a/gcc/ira.c b/gcc/ira.c
index 9caa4cf843c..2f629338c09 100644
--- a/gcc/ira.c
+++ b/gcc/ira.c
@@ -2269,9 +2269,9 @@ fix_reg_equiv_init (void)
int i, new_regno, max;
rtx x, prev, next, insn, set;
- if (VEC_length (reg_equivs_t, reg_equivs) < max_regno)
+ if (vec_safe_length (reg_equivs) < max_regno)
{
- max = VEC_length (reg_equivs_t, reg_equivs);
+ max = vec_safe_length (reg_equivs);
grow_reg_equivs ();
for (i = FIRST_PSEUDO_REGISTER; i < max; i++)
for (prev = NULL_RTX, x = reg_equiv_init (i);
@@ -3871,7 +3871,7 @@ int first_moveable_pseudo, last_moveable_pseudo;
find_movable_pseudos, with index 0 holding data for the
first_moveable_pseudo. */
/* The original home register. */
-static VEC (rtx, heap) *pseudo_replaced_reg;
+static vec<rtx> pseudo_replaced_reg;
/* Look for instances where we have an instruction that is known to increase
register pressure, and whose result is not used immediately. If it is
@@ -3915,8 +3915,8 @@ find_moveable_pseudos (void)
bitmap_initialize (&interesting, 0);
first_moveable_pseudo = max_regs;
- VEC_free (rtx, heap, pseudo_replaced_reg);
- VEC_safe_grow (rtx, heap, pseudo_replaced_reg, max_regs);
+ pseudo_replaced_reg.release ();
+ pseudo_replaced_reg.safe_grow_cleared (max_regs);
df_analyze ();
calculate_dominance_info (CDI_DOMINATORS);
@@ -4213,7 +4213,7 @@ find_moveable_pseudos (void)
unsigned nregno = REGNO (newreg);
emit_insn_before (gen_move_insn (def_reg, newreg), use_insn);
nregno -= max_regs;
- VEC_replace (rtx, pseudo_replaced_reg, nregno, def_reg);
+ pseudo_replaced_reg[nregno] = def_reg;
}
}
}
@@ -4256,7 +4256,7 @@ move_unallocated_pseudos (void)
if (reg_renumber[i] < 0)
{
int idx = i - first_moveable_pseudo;
- rtx other_reg = VEC_index (rtx, pseudo_replaced_reg, idx);
+ rtx other_reg = pseudo_replaced_reg[idx];
rtx def_insn = DF_REF_INSN (DF_REG_DEF_CHAIN (i));
/* The use must follow all definitions of OTHER_REG, so we can
insert the new definition immediately after any of them. */
@@ -4624,7 +4624,7 @@ do_reload (void)
lra (ira_dump_file);
/* ???!!! Move it before lra () when we use ira_reg_equiv in
LRA. */
- VEC_free (reg_equivs_t, gc, reg_equivs);
+ vec_free (reg_equivs);
reg_equivs = NULL;
need_dce = false;
}
diff --git a/gcc/java/ChangeLog b/gcc/java/ChangeLog
index 0f2722413bf..97aef463673 100644
--- a/gcc/java/ChangeLog
+++ b/gcc/java/ChangeLog
@@ -1,3 +1,17 @@
+2012-11-16 Diego Novillo <dnovillo@google.com>
+
+ Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
+
+ * boehm.c: Use new vec API in vec.h.
+ * class.c: Likewise.
+ * constants.c: Likewise.
+ * decl.c: Likewise.
+ * expr.c: Likewise.
+ * java-tree.h: Likewise.
+ * jcf-parse.c: Likewise.
+ * resource.c: Likewise.
+ * verify-glue.c: Likewise.
+
2012-11-15 Jan Hubicka <jh@suse.cz>
* builtins.c (define_builtin): Accept ECF flags and
diff --git a/gcc/java/boehm.c b/gcc/java/boehm.c
index 0fa8964df45..e125883d443 100644
--- a/gcc/java/boehm.c
+++ b/gcc/java/boehm.c
@@ -233,6 +233,6 @@ uses_jv_markobj_p (tree dtable)
this function is only used with flag_reduced_reflection. No
point in asserting unless we hit the bad case. */
gcc_assert (!flag_reduced_reflection || TARGET_VTABLE_USES_DESCRIPTORS == 0);
- v = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (dtable), 3).value;
+ v = (*CONSTRUCTOR_ELTS (dtable))[3].value;
return (PROCEDURE_OBJECT_DESCRIPTOR == TREE_INT_CST_LOW (v));
}
diff --git a/gcc/java/class.c b/gcc/java/class.c
index a89b83183b6..fb8cd8b444b 100644
--- a/gcc/java/class.c
+++ b/gcc/java/class.c
@@ -40,7 +40,6 @@ The Free Software Foundation is independent of Sun Microsystems, Inc. */
#include "ggc.h"
#include "cgraph.h"
#include "tree-iterator.h"
-#include "vecprim.h"
#include "target.h"
static tree make_method_value (tree);
@@ -98,7 +97,7 @@ static GTY(()) tree class_roots[4];
#define class_list class_roots[2]
#define class_dtable_decl class_roots[3]
-static GTY(()) VEC(tree,gc) *registered_class;
+static GTY(()) vec<tree, va_gc> *registered_class;
/* A tree that returns the address of the class$ of the class
currently being compiled. */
@@ -106,7 +105,7 @@ static GTY(()) tree this_classdollar;
/* A list of static class fields. This is to emit proper debug
info for them. */
-VEC(tree,gc) *pending_static_fields;
+vec<tree, va_gc> *pending_static_fields;
/* Return the node that most closely represents the class whose name
is IDENT. Start the search from NODE (followed by its siblings).
@@ -878,7 +877,7 @@ add_field (tree klass, tree name, tree field_type, int flags)
object file. */
DECL_EXTERNAL (field) = (is_compiled_class (klass) != 2);
if (!DECL_EXTERNAL (field))
- VEC_safe_push (tree, gc, pending_static_fields, field);
+ vec_safe_push (pending_static_fields, field);
}
return field;
@@ -941,7 +940,7 @@ build_utf8_ref (tree name)
int name_hash;
tree ref = IDENTIFIER_UTF8_REF (name);
tree decl;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
if (ref != NULL_TREE)
return ref;
@@ -1422,7 +1421,7 @@ make_field_value (tree fdecl)
int flags;
tree type = TREE_TYPE (fdecl);
int resolved = is_compiled_class (type) && ! flag_indirect_dispatch;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
START_RECORD_CONSTRUCTOR (v, field_type_node);
PUSH_FIELD_VALUE (v, "name", build_utf8_ref (DECL_NAME (fdecl)));
@@ -1480,7 +1479,7 @@ make_method_value (tree mdecl)
tree class_decl;
#define ACC_TRANSLATED 0x4000
int accflags = get_access_flags_from_decl (mdecl) | ACC_TRANSLATED;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
class_decl = DECL_CONTEXT (mdecl);
/* For interfaces, the index field contains the dispatch index. */
@@ -1520,29 +1519,29 @@ make_method_value (tree mdecl)
/* Compute the `throws' information for the method. */
tree table = null_pointer_node;
- if (!VEC_empty (tree, DECL_FUNCTION_THROWS (mdecl)))
+ if (!vec_safe_is_empty (DECL_FUNCTION_THROWS (mdecl)))
{
- int length = 1 + VEC_length (tree, DECL_FUNCTION_THROWS (mdecl));
+ int length = 1 + DECL_FUNCTION_THROWS (mdecl)->length ();
tree t, type, array;
char buf[60];
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
int idx = length - 1;
unsigned ix;
constructor_elt *e;
- v = VEC_alloc (constructor_elt, gc, length);
- VEC_safe_grow_cleared (constructor_elt, gc, v, length);
+ vec_alloc (v, length);
+ v->quick_grow_cleared (length);
- e = &VEC_index (constructor_elt, v, idx--);
+ e = &(*v)[idx--];
e->value = null_pointer_node;
- FOR_EACH_VEC_ELT (tree, DECL_FUNCTION_THROWS (mdecl), ix, t)
+ FOR_EACH_VEC_SAFE_ELT (DECL_FUNCTION_THROWS (mdecl), ix, t)
{
tree sig = DECL_NAME (TYPE_NAME (t));
tree utf8
= build_utf8_ref (unmangle_classname (IDENTIFIER_POINTER (sig),
IDENTIFIER_LENGTH (sig)));
- e = &VEC_index (constructor_elt, v, idx--);
+ e = &(*v)[idx--];
e->value = utf8;
}
gcc_assert (idx == -1);
@@ -1611,7 +1610,7 @@ get_dispatch_table (tree type, tree this_class_addr)
int nvirtuals = TREE_VEC_LENGTH (vtable);
int arraysize;
tree gc_descr;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
constructor_elt *e;
tree arraytype;
@@ -1620,8 +1619,8 @@ get_dispatch_table (tree type, tree this_class_addr)
arraysize *= TARGET_VTABLE_USES_DESCRIPTORS;
arraysize += 2;
- VEC_safe_grow_cleared (constructor_elt, gc, v, arraysize);
- e = &VEC_index (constructor_elt, v, arraysize - 1);
+ vec_safe_grow_cleared (v, arraysize);
+ e = &(*v)[arraysize - 1];
#define CONSTRUCTOR_PREPEND_VALUE(E, V) E->value = V, E--
for (i = nvirtuals; --i >= 0; )
@@ -1674,7 +1673,7 @@ get_dispatch_table (tree type, tree this_class_addr)
/** Pointer to type_info object (to be implemented), according to g++ ABI. */
CONSTRUCTOR_PREPEND_VALUE (e, null_pointer_node);
/** Offset to start of whole object. Always (ptrdiff_t)0 for Java. */
- gcc_assert (e == VEC_address (constructor_elt, v));
+ gcc_assert (e == v->address ());
e->index = integer_zero_node;
e->value = null_pointer_node;
#undef CONSTRUCTOR_PREPEND_VALUE
@@ -1737,8 +1736,8 @@ supers_all_compiled (tree type)
}
static void
-add_table_and_syms (VEC(constructor_elt,gc) **v,
- VEC(method_entry,gc) *methods,
+add_table_and_syms (vec<constructor_elt, va_gc> **v,
+ vec<method_entry, va_gc> *methods,
const char *table_name, tree table_slot, tree table_type,
const char *syms_name, tree syms_slot)
{
@@ -1785,13 +1784,13 @@ make_class_data (tree type)
/** Offset from start of virtual function table declaration
to where objects actually point at, following new g++ ABI. */
tree dtable_start_offset = size_int (2 * POINTER_SIZE / BITS_PER_UNIT);
- VEC(int, heap) *field_indexes;
+ vec<int> field_indexes;
tree first_real_field;
- VEC(constructor_elt,gc) *v1 = NULL, *v2 = NULL;
+ vec<constructor_elt, va_gc> *v1 = NULL, *v2 = NULL;
tree reflection_data;
- VEC(constructor_elt,gc) *static_fields = NULL;
- VEC(constructor_elt,gc) *instance_fields = NULL;
- VEC(constructor_elt,gc) *methods = NULL;
+ vec<constructor_elt, va_gc> *static_fields = NULL;
+ vec<constructor_elt, va_gc> *instance_fields = NULL;
+ vec<constructor_elt, va_gc> *methods = NULL;
this_class_addr = build_static_class_ref (type);
decl = TREE_OPERAND (this_class_addr, 0);
@@ -1850,7 +1849,7 @@ make_class_data (tree type)
}
}
field_count = static_field_count + instance_field_count;
- field_indexes = VEC_alloc (int, heap, field_count);
+ field_indexes.create (field_count);
/* gcj sorts fields so that static fields come first, followed by
instance fields. Unfortunately, by the time this takes place we
@@ -1879,7 +1878,7 @@ make_class_data (tree type)
field_index = instance_count++;
else
continue;
- VEC_quick_push (int, field_indexes, field_index);
+ field_indexes.quick_push (field_index);
}
}
}
@@ -1912,14 +1911,12 @@ make_class_data (tree type)
}
}
- gcc_assert (static_field_count
- == (int) VEC_length (constructor_elt, static_fields));
- gcc_assert (instance_field_count
- == (int) VEC_length (constructor_elt, instance_fields));
+ gcc_assert (static_field_count == (int) vec_safe_length (static_fields));
+ gcc_assert (instance_field_count == (int) vec_safe_length (instance_fields));
if (field_count > 0)
{
- VEC_safe_splice (constructor_elt, gc, static_fields, instance_fields);
+ vec_safe_splice (static_fields, instance_fields);
field_array_type = build_prim_array_type (field_type_node, field_count);
fields_decl = build_decl (input_location,
VAR_DECL, mangled_classname ("_FL_", type),
@@ -2021,8 +2018,8 @@ make_class_data (tree type)
{
int i;
tree interface_array_type, idecl;
- VEC(constructor_elt,gc) *init = VEC_alloc (constructor_elt, gc,
- interface_len);
+ vec<constructor_elt, va_gc> *init;
+ vec_alloc (init, interface_len);
interface_array_type
= build_prim_array_type (class_ptr_type, interface_len);
idecl = build_decl (input_location,
@@ -2143,7 +2140,7 @@ make_class_data (tree type)
"itable_syms", TYPE_ITABLE_SYMS_DECL (type));
PUSH_FIELD_VALUE (v2, "catch_classes",
- build1 (ADDR_EXPR, ptr_type_node, TYPE_CTABLE_DECL (type)));
+ build1 (ADDR_EXPR, ptr_type_node, TYPE_CTABLE_DECL (type)));
PUSH_FIELD_VALUE (v2, "interfaces", interfaces);
PUSH_FIELD_VALUE (v2, "loader", null_pointer_node);
PUSH_FIELD_VALUE (v2, "interface_count",
@@ -2180,8 +2177,8 @@ make_class_data (tree type)
{
int i;
int count = TYPE_REFLECTION_DATASIZE (current_class);
- VEC (constructor_elt, gc) *v
- = VEC_alloc (constructor_elt, gc, count);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, count);
unsigned char *data = TYPE_REFLECTION_DATA (current_class);
tree max_index = build_int_cst (sizetype, count);
tree index = build_index_type (max_index);
@@ -2194,14 +2191,14 @@ make_class_data (tree type)
array = build_decl (input_location,
VAR_DECL, get_identifier (buf), type);
- rewrite_reflection_indexes (field_indexes);
+ rewrite_reflection_indexes (&field_indexes);
for (i = 0; i < count; i++)
{
constructor_elt elt;
elt.index = build_int_cst (sizetype, i);
elt.value = build_int_cstu (byte_type_node, data[i]);
- VEC_quick_push (constructor_elt, v, elt);
+ v->quick_push (elt);
}
DECL_INITIAL (array) = build_constructor (type, v);
@@ -2727,13 +2724,13 @@ register_class (void)
tree node;
if (!registered_class)
- registered_class = VEC_alloc (tree, gc, 8);
+ vec_alloc (registered_class, 8);
if (flag_indirect_classes)
node = current_class;
else
node = TREE_OPERAND (build_class_ref (current_class), 0);
- VEC_safe_push (tree, gc, registered_class, node);
+ vec_safe_push (registered_class, node);
}
/* Emit a function that calls _Jv_RegisterNewClasses with a list of
@@ -2745,15 +2742,16 @@ emit_indirect_register_classes (tree *list_p)
tree klass, t, register_class_fn;
int i;
- int size = VEC_length (tree, registered_class) * 2 + 1;
- VEC(constructor_elt,gc) *init = VEC_alloc (constructor_elt, gc, size);
+ int size = vec_safe_length (registered_class) * 2 + 1;
+ vec<constructor_elt, va_gc> *init;
+ vec_alloc (init, size);
tree class_array_type
= build_prim_array_type (ptr_type_node, size);
tree cdecl = build_decl (input_location,
VAR_DECL, get_identifier ("_Jv_CLS"),
class_array_type);
tree reg_class_list;
- FOR_EACH_VEC_ELT (tree, registered_class, i, klass)
+ FOR_EACH_VEC_SAFE_ELT (registered_class, i, klass)
{
t = fold_convert (ptr_type_node, build_static_class_ref (klass));
CONSTRUCTOR_APPEND_ELT (init, NULL_TREE, t);
@@ -2792,10 +2790,11 @@ emit_register_classes_in_jcr_section (void)
#ifdef JCR_SECTION_NAME
tree klass, cdecl, class_array_type;
int i;
- int size = VEC_length (tree, registered_class);
- VEC(constructor_elt,gc) *init = VEC_alloc (constructor_elt, gc, size);
+ int size = vec_safe_length (registered_class);
+ vec<constructor_elt, va_gc> *init;
+ vec_alloc (init, size);
- FOR_EACH_VEC_ELT (tree, registered_class, i, klass)
+ FOR_EACH_VEC_SAFE_ELT (registered_class, i, klass)
CONSTRUCTOR_APPEND_ELT (init, NULL_TREE, build_fold_addr_expr (klass));
/* ??? I would like to use tree_output_constant_def() but there is no way
@@ -2844,7 +2843,7 @@ emit_Jv_RegisterClass_calls (tree *list_p)
DECL_EXTERNAL (t) = 1;
register_class_fn = t;
- FOR_EACH_VEC_ELT (tree, registered_class, i, klass)
+ FOR_EACH_VEC_SAFE_ELT (registered_class, i, klass)
{
t = build_fold_addr_expr (klass);
t = build_call_expr (register_class_fn, 1, t);
@@ -2890,7 +2889,7 @@ static tree
build_symbol_table_entry (tree clname, tree name, tree signature)
{
tree symbol;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
START_RECORD_CONSTRUCTOR (v, symbol_type);
PUSH_FIELD_VALUE (v, "clname", clname);
@@ -2935,22 +2934,22 @@ build_symbol_entry (tree decl, tree special)
tree
emit_symbol_table (tree name, tree the_table,
- VEC(method_entry,gc) *decl_table,
+ vec<method_entry, va_gc> *decl_table,
tree the_syms_decl, tree the_array_element_type,
int element_size)
{
tree table, null_symbol, table_size, the_array_type;
unsigned index;
method_entry *e;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* Only emit a table if this translation unit actually made any
references via it. */
- if (decl_table == NULL)
+ if (!decl_table)
return the_table;
/* Build a list of _Jv_MethodSymbols for each entry in otable_methods. */
- FOR_EACH_VEC_ELT (method_entry, decl_table, index, e)
+ FOR_EACH_VEC_ELT (*decl_table, index, e)
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE,
build_symbol_entry (e->method, e->special));
@@ -2990,7 +2989,7 @@ make_catch_class_record (tree catch_class, tree classname)
{
tree entry;
tree type = TREE_TYPE (TREE_TYPE (TYPE_CTABLE_DECL (output_class)));
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
START_RECORD_CONSTRUCTOR (v, type);
PUSH_FIELD_VALUE (v, "address", catch_class);
PUSH_FIELD_VALUE (v, "classname", classname);
@@ -3008,13 +3007,12 @@ emit_catch_table (tree this_class)
int n_catch_classes;
constructor_elt *e;
/* Fill in the dummy entry that make_class created. */
- e = &VEC_index (constructor_elt, TYPE_CATCH_CLASSES (this_class), 0);
+ e = &(*TYPE_CATCH_CLASSES (this_class))[0];
e->value = make_catch_class_record (null_pointer_node, null_pointer_node);
CONSTRUCTOR_APPEND_ELT (TYPE_CATCH_CLASSES (this_class), NULL_TREE,
make_catch_class_record (null_pointer_node,
null_pointer_node));
- n_catch_classes = VEC_length (constructor_elt,
- TYPE_CATCH_CLASSES (this_class));
+ n_catch_classes = TYPE_CATCH_CLASSES (this_class)->length ();
table_size = build_index_type (build_int_cst (NULL_TREE, n_catch_classes));
array_type
= build_array_type (TREE_TYPE (TREE_TYPE (TYPE_CTABLE_DECL (this_class))),
@@ -3052,7 +3050,7 @@ build_signature_for_libgcj (tree type)
static tree
build_assertion_table_entry (tree code, tree op1, tree op2)
{
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
tree entry;
START_RECORD_CONSTRUCTOR (v, assertion_entry_type);
@@ -3072,7 +3070,8 @@ add_assertion_table_entry (void **htab_entry, void *ptr)
{
tree entry;
tree code_val, op1_utf8, op2_utf8;
- VEC(constructor_elt,gc) **v = (VEC(constructor_elt,gc) **) ptr;
+ vec<constructor_elt, va_gc> **v
+ = ((vec<constructor_elt, va_gc> **) ptr);
type_assertion *as = (type_assertion *) *htab_entry;
code_val = build_int_cst (NULL_TREE, as->assertion_code);
@@ -3100,7 +3099,7 @@ emit_assertion_table (tree klass)
{
tree null_entry, ctor, table_decl;
htab_t assertions_htab = TYPE_ASSERTIONS (klass);
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* Iterate through the hash table. */
htab_traverse (assertions_htab, add_assertion_table_entry, &v);
@@ -3266,11 +3265,11 @@ in_same_package (tree name1, tree name2)
void
java_write_globals (void)
{
- tree *vec = VEC_address (tree, pending_static_fields);
- int len = VEC_length (tree, pending_static_fields);
+ tree *vec = vec_safe_address (pending_static_fields);
+ int len = vec_safe_length (pending_static_fields);
write_global_declarations ();
emit_debug_global_declarations (vec, len);
- VEC_free (tree, gc, pending_static_fields);
+ vec_free (pending_static_fields);
}
#include "gt-java-class.h"
diff --git a/gcc/java/constants.c b/gcc/java/constants.c
index c709fa40a3c..653c3c266dd 100644
--- a/gcc/java/constants.c
+++ b/gcc/java/constants.c
@@ -502,20 +502,20 @@ build_constants_constructor (void)
CPool *outgoing_cpool = cpool_for_class (current_class);
tree tags_value, data_value;
tree cons;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
int i;
- VEC(constructor_elt,gc) *tags = NULL;
- VEC(constructor_elt,gc) *data = NULL;
+ vec<constructor_elt, va_gc> *tags = NULL;
+ vec<constructor_elt, va_gc> *data = NULL;
constructor_elt *t = NULL;
constructor_elt *d = NULL;
if (outgoing_cpool->count > 0)
{
int c = outgoing_cpool->count;
- VEC_safe_grow_cleared (constructor_elt, gc, tags, c);
- VEC_safe_grow_cleared (constructor_elt, gc, data, c);
- t = &VEC_index (constructor_elt, tags, c-1);
- d = &VEC_index (constructor_elt, data, c-1);
+ vec_safe_grow_cleared (tags, c);
+ vec_safe_grow_cleared (data, c);
+ t = &(*tags)[c-1];
+ d = &(*data)[c-1];
}
#define CONSTRUCTOR_PREPEND_VALUE(E, V) E->value = V, E--
@@ -569,8 +569,8 @@ build_constants_constructor (void)
tree tem;
/* Add dummy 0'th element of constant pool. */
- gcc_assert (t == VEC_address (constructor_elt, tags));
- gcc_assert (d == VEC_address (constructor_elt, data));
+ gcc_assert (t == tags->address ());
+ gcc_assert (d == data->address ());
t->value = get_tag_node (0);
d->value = null_pointer_node;
diff --git a/gcc/java/decl.c b/gcc/java/decl.c
index 30048fd021c..c1ddd57303c 100644
--- a/gcc/java/decl.c
+++ b/gcc/java/decl.c
@@ -1953,7 +1953,7 @@ java_mark_class_local (tree klass)
if (FIELD_STATIC (t))
{
if (DECL_EXTERNAL (t))
- VEC_safe_push (tree, gc, pending_static_fields, t);
+ vec_safe_push (pending_static_fields, t);
java_mark_decl_local (t);
}
diff --git a/gcc/java/expr.c b/gcc/java/expr.c
index 8041cdd99c4..0f6e465b9a1 100644
--- a/gcc/java/expr.c
+++ b/gcc/java/expr.c
@@ -74,7 +74,7 @@ static void expand_cond (enum tree_code, tree, int);
static void expand_java_goto (int);
static tree expand_java_switch (tree, int);
static void expand_java_add_case (tree, int, int);
-static VEC(tree,gc) *pop_arguments (tree);
+static vec<tree, va_gc> *pop_arguments (tree);
static void expand_invoke (int, int, int);
static void expand_java_field_op (int, int, int);
static void java_push_constant_from_pool (struct JCF *, int);
@@ -96,7 +96,7 @@ tree dtable_ident = NULL_TREE;
int always_initialize_class_p = 0;
/* We store the stack state in two places:
- Within a basic block, we use the quick_stack, which is a VEC of expression
+ Within a basic block, we use the quick_stack, which is a vec of expression
nodes.
This is the top part of the stack; below that we use find_stack_slot.
At the end of a basic block, the quick_stack must be flushed
@@ -122,7 +122,7 @@ int always_initialize_class_p = 0;
So dup cannot just add an extra element to the quick_stack, but iadd can.
*/
-static GTY(()) VEC(tree,gc) *quick_stack;
+static GTY(()) vec<tree, va_gc> *quick_stack;
/* The physical memory page size used in this computer. See
build_field_ref(). */
@@ -222,10 +222,10 @@ flush_quick_stack (void)
tree t;
/* Count the number of slots the quick stack is holding. */
- for (ix = 0; VEC_iterate(tree, quick_stack, ix, t); ix++)
+ for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++)
stack_index -= 1 + TYPE_IS_WIDE (TREE_TYPE (t));
- for (ix = 0; VEC_iterate(tree, quick_stack, ix, t); ix++)
+ for (ix = 0; vec_safe_iterate (quick_stack, ix, &t); ix++)
{
tree decl, type = TREE_TYPE (t);
@@ -235,7 +235,7 @@ flush_quick_stack (void)
stack_index += 1 + TYPE_IS_WIDE (type);
}
- VEC_truncate (tree, quick_stack, 0);
+ vec_safe_truncate (quick_stack, 0);
}
/* Push TYPE on the type stack.
@@ -276,7 +276,7 @@ push_value (tree value)
value = convert (type, value);
}
push_type (type);
- VEC_safe_push (tree, gc, quick_stack, value);
+ vec_safe_push (quick_stack, value);
/* If the value has a side effect, then we need to evaluate it
whether or not the result is used. If the value ends up on the
@@ -590,8 +590,8 @@ static tree
pop_value (tree type)
{
type = pop_type (type);
- if (VEC_length (tree, quick_stack) != 0)
- return VEC_pop (tree, quick_stack);
+ if (vec_safe_length (quick_stack) != 0)
+ return quick_stack->pop ();
else
return find_stack_slot (stack_pointer, promote_type (type));
}
@@ -1100,17 +1100,17 @@ static void
expand_java_multianewarray (tree class_type, int ndim)
{
int i;
- VEC(tree,gc) *args = NULL;
+ vec<tree, va_gc> *args = NULL;
- VEC_safe_grow (tree, gc, args, 3 + ndim);
+ vec_safe_grow (args, 3 + ndim);
- VEC_replace (tree, args, 0, build_class_ref (class_type));
- VEC_replace (tree, args, 1, build_int_cst (NULL_TREE, ndim));
+ (*args)[0] = build_class_ref (class_type);
+ (*args)[1] = build_int_cst (NULL_TREE, ndim);
for(i = ndim - 1; i >= 0; i-- )
- VEC_replace (tree, args, (unsigned)(2 + i), pop_value (int_type_node));
+ (*args)[(unsigned)(2 + i)] = pop_value (int_type_node);
- VEC_replace (tree, args, 2 + ndim, null_pointer_node);
+ (*args)[2 + ndim] = null_pointer_node;
push_value (build_call_vec (promote_type (class_type),
build_address_of (soft_multianewarray_node),
@@ -1808,7 +1808,7 @@ char *instruction_bits;
indexed by PC. Each element is a tree vector holding the type
state at that PC. We only note type states at basic block
boundaries. */
-VEC(tree, gc) *type_states;
+vec<tree, va_gc> *type_states;
static void
note_label (int current_pc ATTRIBUTE_UNUSED, int target_pc)
@@ -1901,12 +1901,12 @@ expand_java_add_case (tree switch_expr, int match, int target_pc)
append_to_statement_list (x, &SWITCH_BODY (switch_expr));
}
-static VEC(tree,gc) *
+static vec<tree, va_gc> *
pop_arguments (tree method_type)
{
function_args_iterator fnai;
tree type;
- VEC(tree,gc) *args = NULL;
+ vec<tree, va_gc> *args = NULL;
int arity;
FOREACH_FUNCTION_ARGS (method_type, type, fnai)
@@ -1915,14 +1915,14 @@ pop_arguments (tree method_type)
if (type == void_type_node)
break;
- VEC_safe_push (tree, gc, args, type);
+ vec_safe_push (args, type);
}
- arity = VEC_length (tree, args);
+ arity = vec_safe_length (args);
while (arity--)
{
- tree arg = pop_value (VEC_index (tree, args, arity));
+ tree arg = pop_value ((*args)[arity]);
/* We simply cast each argument to its proper type. This is
needed since we lose type information coming out of the
@@ -1935,7 +1935,7 @@ pop_arguments (tree method_type)
&& INTEGRAL_TYPE_P (type))
arg = convert (integer_type_node, arg);
- VEC_replace (tree, args, arity, arg);
+ (*args)[arity] = arg;
}
return args;
@@ -2063,14 +2063,14 @@ typedef struct
const char *new_classname;
const char *new_signature;
int flags;
- void (*rewrite_arglist) (VEC(tree,gc) **);
+ void (*rewrite_arglist) (vec<tree, va_gc> **);
} rewrite_rule;
/* Add __builtin_return_address(0) to the end of an arglist. */
static void
-rewrite_arglist_getcaller (VEC(tree,gc) **arglist)
+rewrite_arglist_getcaller (vec<tree, va_gc> **arglist)
{
tree retaddr
= build_call_expr (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS),
@@ -2078,15 +2078,15 @@ rewrite_arglist_getcaller (VEC(tree,gc) **arglist)
DECL_UNINLINABLE (current_function_decl) = 1;
- VEC_safe_push (tree, gc, *arglist, retaddr);
+ vec_safe_push (*arglist, retaddr);
}
/* Add this.class to the end of an arglist. */
static void
-rewrite_arglist_getclass (VEC(tree,gc) **arglist)
+rewrite_arglist_getclass (vec<tree, va_gc> **arglist)
{
- VEC_safe_push (tree, gc, *arglist, build_class_ref (output_class));
+ vec_safe_push (*arglist, build_class_ref (output_class));
}
static rewrite_rule rules[] =
@@ -2137,7 +2137,7 @@ special_method_p (tree candidate_method)
method, update SPECIAL.*/
void
-maybe_rewrite_invocation (tree *method_p, VEC(tree,gc) **arg_list_p,
+maybe_rewrite_invocation (tree *method_p, vec<tree, va_gc> **arg_list_p,
tree *method_signature_p, tree *special)
{
tree context = DECL_NAME (TYPE_NAME (DECL_CONTEXT (*method_p)));
@@ -2185,7 +2185,7 @@ maybe_rewrite_invocation (tree *method_p, VEC(tree,gc) **arg_list_p,
tree
build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
tree self_type, tree method_signature ATTRIBUTE_UNUSED,
- VEC(tree,gc) *arg_list ATTRIBUTE_UNUSED, tree special)
+ vec<tree, va_gc> *arg_list ATTRIBUTE_UNUSED, tree special)
{
tree func;
if (is_compiled_class (self_type))
@@ -2261,12 +2261,12 @@ build_known_method_ref (tree method, tree method_type ATTRIBUTE_UNUSED,
}
tree
-invoke_build_dtable (int is_invoke_interface, VEC(tree,gc) *arg_list)
+invoke_build_dtable (int is_invoke_interface, vec<tree, va_gc> *arg_list)
{
tree dtable, objectref;
- tree saved = save_expr (VEC_index (tree, arg_list, 0));
+ tree saved = save_expr ((*arg_list)[0]);
- VEC_replace (tree, arg_list, 0, saved);
+ (*arg_list)[0] = saved;
/* If we're dealing with interfaces and if the objectref
argument is an array then get the dispatch table of the class
@@ -2292,17 +2292,17 @@ invoke_build_dtable (int is_invoke_interface, VEC(tree,gc) *arg_list)
int
get_symbol_table_index (tree t, tree special,
- VEC(method_entry,gc) **symbol_table)
+ vec<method_entry, va_gc> **symbol_table)
{
method_entry *e;
unsigned i;
method_entry elem = {t, special};
- FOR_EACH_VEC_ELT (method_entry, *symbol_table, i, e)
+ FOR_EACH_VEC_SAFE_ELT (*symbol_table, i, e)
if (t == e->method && special == e->special)
goto done;
- VEC_safe_push (method_entry, gc, *symbol_table, elem);
+ vec_safe_push (*symbol_table, elem);
done:
return i + 1;
@@ -2428,7 +2428,7 @@ expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
const char *const self_name
= IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (self_type)));
tree call, func, method, method_type;
- VEC(tree,gc) *arg_list;
+ vec<tree, va_gc> *arg_list;
tree check = NULL_TREE;
tree special = NULL_TREE;
@@ -2569,8 +2569,8 @@ expand_invoke (int opcode, int method_ref_index, int nargs ATTRIBUTE_UNUSED)
We do omit the check if we're calling <init>. */
/* We use a SAVE_EXPR here to make sure we only evaluate
the new `self' expression once. */
- tree save_arg = save_expr (VEC_index (tree, arg_list, 0));
- VEC_replace (tree, arg_list, 0, save_arg);
+ tree save_arg = save_expr ((*arg_list)[0]);
+ (*arg_list)[0] = save_arg;
check = java_check_reference (save_arg, ! DECL_INIT_P (method));
func = build_known_method_ref (method, method_type, self_type,
method_signature, arg_list, special);
@@ -2622,7 +2622,7 @@ build_jni_stub (tree method)
tree method_args;
tree meth_var;
tree bind;
- VEC(tree,gc) *args = NULL;
+ vec<tree, va_gc> *args = NULL;
int args_size = 0;
tree klass = DECL_CONTEXT (method);
@@ -2657,7 +2657,7 @@ build_jni_stub (tree method)
/* The JNIEnv structure is the first argument to the JNI function. */
args_size += int_size_in_bytes (TREE_TYPE (env_var));
- VEC_safe_push (tree, gc, args, env_var);
+ vec_safe_push (args, env_var);
/* For a static method the second argument is the class. For a
non-static method the second argument is `this'; that is already
@@ -2665,7 +2665,7 @@ build_jni_stub (tree method)
if (METHOD_STATIC (method))
{
args_size += int_size_in_bytes (TREE_TYPE (klass));
- VEC_safe_push (tree, gc, args, klass);
+ vec_safe_push (args, klass);
}
/* All the arguments to this method become arguments to the
@@ -2680,7 +2680,7 @@ build_jni_stub (tree method)
#endif
args_size += (arg_bits / BITS_PER_UNIT);
- VEC_safe_push (tree, gc, args, tem);
+ vec_safe_push (args, tem);
}
arg_types = TYPE_ARG_TYPES (TREE_TYPE (method));
@@ -2967,7 +2967,7 @@ static void
load_type_state (int pc)
{
int i;
- tree vec = VEC_index (tree, type_states, pc);
+ tree vec = (*type_states)[pc];
int cur_length = TREE_VEC_LENGTH (vec);
stack_pointer = cur_length - DECL_MAX_LOCALS(current_function_decl);
for (i = 0; i < cur_length; i++)
@@ -3010,8 +3010,8 @@ note_instructions (JCF *jcf, tree method)
byte_ops = jcf->read_ptr;
instruction_bits = XRESIZEVAR (char, instruction_bits, length + 1);
memset (instruction_bits, 0, length + 1);
- type_states = VEC_alloc (tree, gc, length + 1);
- VEC_safe_grow_cleared (tree, gc, type_states, length + 1);
+ vec_alloc (type_states, length + 1);
+ type_states->quick_grow_cleared (length + 1);
/* This pass figures out which PC can be the targets of jumps. */
for (PC = 0; PC < length;)
diff --git a/gcc/java/java-tree.h b/gcc/java/java-tree.h
index 5167b9b75c5..11b780b998d 100644
--- a/gcc/java/java-tree.h
+++ b/gcc/java/java-tree.h
@@ -724,7 +724,7 @@ struct GTY(()) lang_decl_func {
int max_stack;
int arg_slot_count;
source_location last_line; /* End line number for a function decl */
- VEC(tree,gc) *throws_list; /* Exception specified by `throws' */
+ vec<tree, va_gc> *throws_list; /* Exception specified by `throws' */
tree exc_obj; /* Decl holding the exception object. */
/* Class initialization test variables */
@@ -869,8 +869,6 @@ typedef struct GTY(()) method_entry_d {
tree special;
} method_entry;
-DEF_VEC_O(method_entry);
-DEF_VEC_ALLOC_O(method_entry,gc);
/* FIXME: the variable_size annotation here is needed because these types are
variable-sized in some other frontends. Due to gengtype deficiency the GTY
@@ -882,24 +880,24 @@ struct GTY((variable_size)) lang_type {
tree cpool_data_ref; /* Cached */
tree package_list; /* List of package names, progressive */
- VEC(method_entry,gc) *otable_methods; /* List of static decls referred
+ vec<method_entry, va_gc> *otable_methods; /* List of static decls referred
to by this class. */
tree otable_decl; /* The static address table. */
tree otable_syms_decl;
- VEC(method_entry,gc) *atable_methods; /* List of abstract methods
+ vec<method_entry, va_gc> *atable_methods; /* List of abstract methods
referred to by this class. */
tree atable_decl; /* The static address table. */
tree atable_syms_decl;
- VEC(method_entry,gc) *itable_methods; /* List of interface methods
+ vec<method_entry, va_gc> *itable_methods; /* List of interface methods
referred to by this class. */
tree itable_decl; /* The interfaces table. */
tree itable_syms_decl;
tree ctable_decl; /* The table of classes for the runtime
type matcher. */
- VEC(constructor_elt,gc) *catch_classes;
+ vec<constructor_elt, va_gc> *catch_classes;
htab_t GTY ((param_is (struct treetreehash_entry))) type_to_runtime_map;
/* The mapping of classes to exception region
@@ -1016,14 +1014,16 @@ extern void initialize_builtins (void);
extern tree lookup_name (tree);
extern bool special_method_p (tree);
-extern void maybe_rewrite_invocation (tree *, VEC(tree,gc) **, tree *, tree *);
-extern tree build_known_method_ref (tree, tree, tree, tree, VEC(tree,gc) *, tree);
+extern void maybe_rewrite_invocation (tree *, vec<tree, va_gc> **, tree *,
+ tree *);
+extern tree build_known_method_ref (tree, tree, tree, tree, vec<tree, va_gc> *,
+ tree);
extern tree build_class_init (tree, tree);
extern int attach_init_test_initialization_flags (void **, void *);
extern tree build_invokevirtual (tree, tree, tree);
extern tree build_invokeinterface (tree, tree);
extern tree build_jni_stub (tree);
-extern tree invoke_build_dtable (int, VEC(tree,gc) *);
+extern tree invoke_build_dtable (int, vec<tree, va_gc> *);
extern tree build_field_ref (tree, tree, tree);
extern tree java_modify_addr_for_volatile (tree);
extern void pushdecl_force_head (tree);
@@ -1062,7 +1062,7 @@ extern void make_class_data (tree);
extern int alloc_name_constant (int, tree);
extern int alloc_constant_fieldref (tree, tree);
extern void emit_register_classes (tree *);
-extern tree emit_symbol_table (tree, tree, VEC(method_entry,gc) *,
+extern tree emit_symbol_table (tree, tree, vec<method_entry, va_gc> *,
tree, tree, int);
extern void lang_init_source (int);
extern void write_classfile (tree);
@@ -1165,7 +1165,7 @@ extern void register_exception_range(struct eh_range *, int, int);
extern void finish_method (tree);
extern void java_expand_body (tree);
-extern int get_symbol_table_index (tree, tree, VEC(method_entry,gc) **);
+extern int get_symbol_table_index (tree, tree, vec<method_entry, va_gc> **);
extern tree make_catch_class_record (tree, tree);
extern tree emit_catch_table (tree);
@@ -1180,7 +1180,7 @@ extern void rewrite_reflection_indexes (void *);
int cxx_keyword_p (const char *name, int length);
-extern GTY(()) VEC(tree,gc) *pending_static_fields;
+extern GTY(()) vec<tree, va_gc> *pending_static_fields;
extern void java_write_globals (void);
@@ -1268,7 +1268,7 @@ extern void java_write_globals (void);
#define CLASS_COMPLETE_P(DECL) DECL_LANG_FLAG_2 (DECL)
/* A vector used to track type states for the current method. */
-extern VEC(tree, gc) *type_states;
+extern vec<tree, va_gc> *type_states;
/* This maps a bytecode offset (PC) to various flags,
listed below (starting with BCODE_). */
@@ -1419,7 +1419,7 @@ extern tree *type_map;
#define START_RECORD_CONSTRUCTOR(V, CTYPE) \
do \
{ \
- V = VEC_alloc (constructor_elt, gc, 0); \
+ vec_alloc (V, 0); \
CONSTRUCTOR_APPEND_ELT (V, TYPE_FIELDS (CTYPE), NULL); \
} \
while (0)
@@ -1430,7 +1430,7 @@ extern tree *type_map;
#define PUSH_SUPER_VALUE(V, VALUE) \
do \
{ \
- constructor_elt *_elt___ = &VEC_last (constructor_elt, V); \
+ constructor_elt *_elt___ = &(V)->last (); \
tree _next___ = DECL_CHAIN (_elt___->index); \
gcc_assert (!DECL_NAME (_elt___->index)); \
_elt___->value = VALUE; \
@@ -1444,7 +1444,7 @@ extern tree *type_map;
#define PUSH_FIELD_VALUE(V, NAME, VALUE) \
do \
{ \
- constructor_elt *_elt___ = &VEC_last (constructor_elt, V); \
+ constructor_elt *_elt___ = &(V)->last (); \
tree _next___ = DECL_CHAIN (_elt___->index); \
gcc_assert (strcmp (IDENTIFIER_POINTER (DECL_NAME (_elt___->index)), \
NAME) == 0); \
@@ -1457,7 +1457,7 @@ extern tree *type_map;
#define FINISH_RECORD_CONSTRUCTOR(CONS, V, CTYPE) \
do \
{ \
- VEC_pop (constructor_elt, V); \
+ V->pop (); \
CONS = build_constructor (CTYPE, V); \
TREE_CONSTANT (CONS) = 0; \
} \
diff --git a/gcc/java/jcf-parse.c b/gcc/java/jcf-parse.c
index 92f032cb9d0..c97b12f88e3 100644
--- a/gcc/java/jcf-parse.c
+++ b/gcc/java/jcf-parse.c
@@ -39,7 +39,6 @@ The Free Software Foundation is independent of Sun Microsystems, Inc. */
#include "ggc.h"
#include "debug.h"
#include "cgraph.h"
-#include "vecprim.h"
#include "bitmap.h"
#include "target.h"
@@ -86,7 +85,7 @@ static location_t file_start_location;
static GTY(()) struct JCF * main_jcf;
/* A list of all the class DECLs seen so far. */
-static GTY(()) VEC(tree,gc) *all_class_list;
+static GTY(()) vec<tree, va_gc> *all_class_list;
/* The number of source files passed to us by -fsource-filename and an
array of pointers to each name. Used by find_sourcefile(). */
@@ -744,7 +743,7 @@ rewrite_reflection_indexes (void *arg)
{
bitmap_iterator bi;
unsigned int offset;
- VEC(int, heap) *map = (VEC(int, heap) *) arg;
+ vec<int> *map = (vec<int> *) arg;
unsigned char *data = TYPE_REFLECTION_DATA (current_class);
if (map)
@@ -753,7 +752,7 @@ rewrite_reflection_indexes (void *arg)
{
uint16 index = annotation_read_short (data + offset);
annotation_rewrite_short
- (VEC_index (int, map, index), data + offset);
+ ((*map)[index], data + offset);
}
}
}
@@ -933,12 +932,13 @@ handle_signature_attribute (int member_index, JCF *jcf,
#define HANDLE_EXCEPTIONS_ATTRIBUTE(COUNT) \
{ \
int n = COUNT; \
- VEC (tree,gc) *v = VEC_alloc (tree, gc, n); \
- gcc_assert (DECL_FUNCTION_THROWS (current_method) == NULL); \
+ vec<tree, va_gc> *v; \
+ vec_alloc (v, n); \
+ gcc_assert (!DECL_FUNCTION_THROWS (current_method)); \
while (--n >= 0) \
{ \
tree thrown_class = get_class_constant (jcf, JCF_readu2 (jcf)); \
- VEC_quick_push (tree, v, thrown_class); \
+ v->quick_push (thrown_class); \
} \
DECL_FUNCTION_THROWS (current_method) = v; \
}
@@ -1480,7 +1480,7 @@ jcf_parse (JCF* jcf)
if (current_class == object_type_node)
layout_class_methods (object_type_node);
else
- VEC_safe_push (tree, gc, all_class_list, TYPE_NAME (current_class));
+ vec_safe_push (all_class_list, TYPE_NAME (current_class));
}
/* If we came across inner classes, load them now. */
@@ -1512,7 +1512,7 @@ static void
java_layout_seen_class_methods (void)
{
unsigned start = 0;
- unsigned end = VEC_length (tree, all_class_list);
+ unsigned end = vec_safe_length (all_class_list);
while (1)
{
@@ -1521,7 +1521,7 @@ java_layout_seen_class_methods (void)
for (ix = start; ix != end; ix++)
{
- tree decl = VEC_index (tree, all_class_list, ix);
+ tree decl = (*all_class_list)[ix];
tree cls = TREE_TYPE (decl);
input_location = DECL_SOURCE_LOCATION (decl);
@@ -1534,7 +1534,7 @@ java_layout_seen_class_methods (void)
/* Note that new classes might have been added while laying out
methods, changing the value of all_class_list. */
- new_length = VEC_length (tree, all_class_list);
+ new_length = vec_safe_length (all_class_list);
if (end != new_length)
{
start = end;
@@ -1665,12 +1665,12 @@ parse_class_file (void)
input_location = save_location;
}
-static VEC(tree,gc) *predefined_filenames;
+static vec<tree, va_gc> *predefined_filenames;
void
add_predefined_file (tree name)
{
- VEC_safe_push (tree, gc, predefined_filenames, name);
+ vec_safe_push (predefined_filenames, name);
}
int
@@ -1679,7 +1679,7 @@ predefined_filename_p (tree node)
unsigned ix;
tree f;
- FOR_EACH_VEC_ELT (tree, predefined_filenames, ix, f)
+ FOR_EACH_VEC_SAFE_ELT (predefined_filenames, ix, f)
if (f == node)
return 1;
@@ -1853,18 +1853,17 @@ java_parse_file (void)
const char *resource_filename;
/* Only one resource file may be compiled at a time. */
- gcc_assert (VEC_length (tree, all_translation_units) == 1);
+ gcc_assert (all_translation_units->length () == 1);
resource_filename
- = IDENTIFIER_POINTER
- (DECL_NAME (VEC_index (tree, all_translation_units, 0)));
+ = IDENTIFIER_POINTER (DECL_NAME ((*all_translation_units)[0]));
compile_resource_file (resource_name, resource_filename);
goto finish;
}
current_jcf = main_jcf;
- FOR_EACH_VEC_ELT (tree, all_translation_units, ix, node)
+ FOR_EACH_VEC_ELT (*all_translation_units, ix, node)
{
unsigned char magic_string[4];
char *real_path;
@@ -1951,7 +1950,7 @@ java_parse_file (void)
}
}
- FOR_EACH_VEC_ELT (tree, all_translation_units, ix, node)
+ FOR_EACH_VEC_ELT (*all_translation_units, ix, node)
{
input_location = DECL_SOURCE_LOCATION (node);
if (CLASS_FILE_P (node))
diff --git a/gcc/java/resource.c b/gcc/java/resource.c
index 1558dcdff2d..52e99c4906c 100644
--- a/gcc/java/resource.c
+++ b/gcc/java/resource.c
@@ -37,13 +37,13 @@ The Free Software Foundation is independent of Sun Microsystems, Inc. */
#include "cgraph.h"
/* A list of all the resources files. */
-static GTY(()) VEC(tree,gc) *resources;
+static GTY(()) vec<tree, va_gc> *resources;
void
compile_resource_data (const char *name, const char *buffer, int length)
{
tree rtype, field = NULL_TREE, data_type, rinit, data, decl;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
data_type = build_prim_array_type (unsigned_byte_type_node,
strlen (name) + length);
@@ -80,7 +80,7 @@ compile_resource_data (const char *name, const char *buffer, int length)
rest_of_decl_compilation (decl, global_bindings_p (), 0);
varpool_finalize_decl (decl);
- VEC_safe_push (tree, gc, resources, decl);
+ vec_safe_push (resources, decl);
}
void
@@ -100,7 +100,7 @@ write_resource_constructor (tree *list_p)
register_resource_fn = t;
/* Write out entries in the same order in which they were defined. */
- FOR_EACH_VEC_ELT (tree, resources, ix, decl)
+ FOR_EACH_VEC_ELT (*resources, ix, decl)
{
t = build_fold_addr_expr (decl);
t = build_call_expr (register_resource_fn, 1, t);
diff --git a/gcc/java/verify-glue.c b/gcc/java/verify-glue.c
index c9eee07ff7e..86cd28945c9 100644
--- a/gcc/java/verify-glue.c
+++ b/gcc/java/verify-glue.c
@@ -394,7 +394,7 @@ void
vfy_note_stack_depth (vfy_method *method, int pc, int depth)
{
tree val = make_tree_vec (method->max_locals + depth);
- VEC_replace (tree, type_states, pc, val);
+ (*type_states)[pc] = val;
/* Called for side effects. */
lookup_label (pc);
}
@@ -409,7 +409,7 @@ vfy_note_stack_type (vfy_method *method, int pc, int slot, vfy_jclass type)
if (type == object_type_node)
type = object_ptr_type_node;
- vec = VEC_index (tree, type_states, pc);
+ vec = (*type_states)[pc];
TREE_VEC_ELT (vec, slot) = type;
/* Called for side effects. */
lookup_label (pc);
@@ -424,7 +424,7 @@ vfy_note_local_type (vfy_method *method ATTRIBUTE_UNUSED, int pc, int slot,
if (type == object_type_node)
type = object_ptr_type_node;
- vec = VEC_index (tree, type_states, pc);
+ vec = (*type_states)[pc];
TREE_VEC_ELT (vec, slot) = type;
/* Called for side effects. */
lookup_label (pc);
diff --git a/gcc/loop-invariant.c b/gcc/loop-invariant.c
index ba31541f80f..0c54a9ec09a 100644
--- a/gcc/loop-invariant.c
+++ b/gcc/loop-invariant.c
@@ -171,12 +171,10 @@ static unsigned actual_stamp;
typedef struct invariant *invariant_p;
-DEF_VEC_P(invariant_p);
-DEF_VEC_ALLOC_P(invariant_p, heap);
/* The invariants. */
-static VEC(invariant_p,heap) *invariants;
+static vec<invariant_p> invariants;
/* Check the size of the invariant table and realloc if necessary. */
@@ -504,7 +502,7 @@ find_identical_invariants (htab_t eq, struct invariant *inv)
EXECUTE_IF_SET_IN_BITMAP (inv->depends_on, 0, depno, bi)
{
- dep = VEC_index (invariant_p, invariants, depno);
+ dep = invariants[depno];
find_identical_invariants (eq, dep);
}
@@ -528,10 +526,10 @@ merge_identical_invariants (void)
{
unsigned i;
struct invariant *inv;
- htab_t eq = htab_create (VEC_length (invariant_p, invariants),
+ htab_t eq = htab_create (invariants.length (),
hash_invariant_expr, eq_invariant_expr, free);
- FOR_EACH_VEC_ELT (invariant_p, invariants, i, inv)
+ FOR_EACH_VEC_ELT (invariants, i, inv)
find_identical_invariants (eq, inv);
htab_delete (eq);
@@ -732,11 +730,11 @@ create_new_invariant (struct def *def, rtx insn, bitmap depends_on,
inv->stamp = 0;
inv->insn = insn;
- inv->invno = VEC_length (invariant_p, invariants);
+ inv->invno = invariants.length ();
inv->eqto = ~0u;
if (def)
def->invno = inv->invno;
- VEC_safe_push (invariant_p, heap, invariants, inv);
+ invariants.safe_push (inv);
if (dump_file)
{
@@ -1079,7 +1077,7 @@ get_inv_cost (struct invariant *inv, int *comp_cost, unsigned *regs_needed)
bitmap_iterator bi;
/* Find the representative of the class of the equivalent invariants. */
- inv = VEC_index (invariant_p, invariants, inv->eqto);
+ inv = invariants[inv->eqto];
*comp_cost = 0;
if (! flag_ira_loop_pressure)
@@ -1145,7 +1143,7 @@ get_inv_cost (struct invariant *inv, int *comp_cost, unsigned *regs_needed)
{
bool check_p;
- dep = VEC_index (invariant_p, invariants, depno);
+ dep = invariants[depno];
get_inv_cost (dep, &acomp_cost, aregs_needed);
@@ -1276,7 +1274,7 @@ best_gain_for_invariant (struct invariant **best, unsigned *regs_needed,
int i, gain = 0, again;
unsigned aregs_needed[N_REG_CLASSES], invno;
- FOR_EACH_VEC_ELT (invariant_p, invariants, invno, inv)
+ FOR_EACH_VEC_ELT (invariants, invno, inv)
{
if (inv->move)
continue;
@@ -1310,11 +1308,11 @@ best_gain_for_invariant (struct invariant **best, unsigned *regs_needed,
static void
set_move_mark (unsigned invno, int gain)
{
- struct invariant *inv = VEC_index (invariant_p, invariants, invno);
+ struct invariant *inv = invariants[invno];
bitmap_iterator bi;
/* Find the representative of the class of the equivalent invariants. */
- inv = VEC_index (invariant_p, invariants, inv->eqto);
+ inv = invariants[inv->eqto];
if (inv->move)
return;
@@ -1345,7 +1343,7 @@ find_invariants_to_move (bool speed, bool call_p)
unsigned i, regs_used, regs_needed[N_REG_CLASSES], new_regs[N_REG_CLASSES];
struct invariant *inv = NULL;
- if (!VEC_length (invariant_p, invariants))
+ if (!invariants.length ())
return;
if (flag_ira_loop_pressure)
@@ -1425,8 +1423,8 @@ replace_uses (struct invariant *inv, rtx reg, bool in_group)
static bool
move_invariant_reg (struct loop *loop, unsigned invno)
{
- struct invariant *inv = VEC_index (invariant_p, invariants, invno);
- struct invariant *repr = VEC_index (invariant_p, invariants, inv->eqto);
+ struct invariant *inv = invariants[invno];
+ struct invariant *repr = invariants[inv->eqto];
unsigned i;
basic_block preheader = loop_preheader_edge (loop)->src;
rtx reg, set, dest, note;
@@ -1530,11 +1528,11 @@ move_invariants (struct loop *loop)
struct invariant *inv;
unsigned i;
- FOR_EACH_VEC_ELT (invariant_p, invariants, i, inv)
+ FOR_EACH_VEC_ELT (invariants, i, inv)
move_invariant_reg (loop, i);
if (flag_ira_loop_pressure && resize_reg_info ())
{
- FOR_EACH_VEC_ELT (invariant_p, invariants, i, inv)
+ FOR_EACH_VEC_ELT (invariants, i, inv)
if (inv->reg != NULL_RTX)
{
if (inv->orig_regno >= 0)
@@ -1556,7 +1554,7 @@ init_inv_motion_data (void)
{
actual_stamp = 1;
- invariants = VEC_alloc (invariant_p, heap, 100);
+ invariants.create (100);
}
/* Frees the data allocated by invariant motion. */
@@ -1583,12 +1581,12 @@ free_inv_motion_data (void)
}
}
- FOR_EACH_VEC_ELT (invariant_p, invariants, i, inv)
+ FOR_EACH_VEC_ELT (invariants, i, inv)
{
BITMAP_FREE (inv->depends_on);
free (inv);
}
- VEC_free (invariant_p, heap, invariants);
+ invariants.release ();
}
/* Move the invariants out of the LOOP. */
diff --git a/gcc/loop-unroll.c b/gcc/loop-unroll.c
index 5288fd74734..58bd9f23cf3 100644
--- a/gcc/loop-unroll.c
+++ b/gcc/loop-unroll.c
@@ -91,7 +91,7 @@ struct var_to_expand
{
rtx insn; /* The insn in that the variable expansion occurs. */
rtx reg; /* The accumulator which is expanded. */
- VEC(rtx,heap) *var_expansions; /* The copies of the accumulator which is expanded. */
+ vec<rtx> var_expansions; /* The copies of the accumulator which is expanded. */
struct var_to_expand *next; /* Next entry in walking order. */
enum rtx_code op; /* The type of the accumulation - addition, subtraction
or multiplication. */
@@ -453,7 +453,7 @@ peel_loop_completely (struct loop *loop)
sbitmap wont_exit;
unsigned HOST_WIDE_INT npeel;
unsigned i;
- VEC (edge, heap) *remove_edges;
+ vec<edge> remove_edges;
edge ein;
struct niter_desc *desc = get_simple_loop_desc (loop);
struct opt_info *opt_info = NULL;
@@ -470,7 +470,7 @@ peel_loop_completely (struct loop *loop)
if (desc->noloop_assumptions)
bitmap_clear_bit (wont_exit, 1);
- remove_edges = NULL;
+ remove_edges.create (0);
if (flag_split_ivs_in_unroller)
opt_info = analyze_insns_in_loop (loop);
@@ -495,9 +495,9 @@ peel_loop_completely (struct loop *loop)
}
/* Remove the exit edges. */
- FOR_EACH_VEC_ELT (edge, remove_edges, i, ein)
+ FOR_EACH_VEC_ELT (remove_edges, i, ein)
remove_path (ein);
- VEC_free (edge, heap, remove_edges);
+ remove_edges.release ();
}
ein = desc->in_edge;
@@ -639,7 +639,7 @@ unroll_loop_constant_iterations (struct loop *loop)
unsigned exit_mod;
sbitmap wont_exit;
unsigned i;
- VEC (edge, heap) *remove_edges;
+ vec<edge> remove_edges;
edge e;
unsigned max_unroll = loop->lpt_decision.times;
struct niter_desc *desc = get_simple_loop_desc (loop);
@@ -657,7 +657,7 @@ unroll_loop_constant_iterations (struct loop *loop)
wont_exit = sbitmap_alloc (max_unroll + 1);
bitmap_ones (wont_exit);
- remove_edges = NULL;
+ remove_edges.create (0);
if (flag_split_ivs_in_unroller
|| flag_variable_expansion_in_unroller)
opt_info = analyze_insns_in_loop (loop);
@@ -805,9 +805,9 @@ unroll_loop_constant_iterations (struct loop *loop)
desc->niter_expr = GEN_INT (desc->niter);
/* Remove the edges. */
- FOR_EACH_VEC_ELT (edge, remove_edges, i, e)
+ FOR_EACH_VEC_ELT (remove_edges, i, e)
remove_path (e);
- VEC_free (edge, heap, remove_edges);
+ remove_edges.release ();
if (dump_file)
fprintf (dump_file,
@@ -982,11 +982,11 @@ unroll_loop_runtime_iterations (struct loop *loop)
rtx old_niter, niter, init_code, branch_code, tmp;
unsigned i, j, p;
basic_block preheader, *body, swtch, ezc_swtch;
- VEC (basic_block, heap) *dom_bbs;
+ vec<basic_block> dom_bbs;
sbitmap wont_exit;
int may_exit_copy;
unsigned n_peel;
- VEC (edge, heap) *remove_edges;
+ vec<edge> remove_edges;
edge e;
bool extra_zero_check, last_may_exit;
unsigned max_unroll = loop->lpt_decision.times;
@@ -1000,20 +1000,20 @@ unroll_loop_runtime_iterations (struct loop *loop)
opt_info = analyze_insns_in_loop (loop);
/* Remember blocks whose dominators will have to be updated. */
- dom_bbs = NULL;
+ dom_bbs.create (0);
body = get_loop_body (loop);
for (i = 0; i < loop->num_nodes; i++)
{
- VEC (basic_block, heap) *ldom;
+ vec<basic_block> ldom;
basic_block bb;
ldom = get_dominated_by (CDI_DOMINATORS, body[i]);
- FOR_EACH_VEC_ELT (basic_block, ldom, j, bb)
+ FOR_EACH_VEC_ELT (ldom, j, bb)
if (!flow_bb_inside_loop_p (loop, bb))
- VEC_safe_push (basic_block, heap, dom_bbs, bb);
+ dom_bbs.safe_push (bb);
- VEC_free (basic_block, heap, ldom);
+ ldom.release ();
}
free (body);
@@ -1058,7 +1058,7 @@ unroll_loop_runtime_iterations (struct loop *loop)
/* Precondition the loop. */
split_edge_and_insert (loop_preheader_edge (loop), init_code);
- remove_edges = NULL;
+ remove_edges.create (0);
wont_exit = sbitmap_alloc (max_unroll + 2);
@@ -1180,9 +1180,9 @@ unroll_loop_runtime_iterations (struct loop *loop)
}
/* Remove the edges. */
- FOR_EACH_VEC_ELT (edge, remove_edges, i, e)
+ FOR_EACH_VEC_ELT (remove_edges, i, e)
remove_path (e);
- VEC_free (edge, heap, remove_edges);
+ remove_edges.release ();
/* We must be careful when updating the number of iterations due to
preconditioning and the fact that the value must be valid at entry
@@ -1220,7 +1220,7 @@ unroll_loop_runtime_iterations (struct loop *loop)
"in runtime, %i insns\n",
max_unroll, num_loop_insns (loop));
- VEC_free (basic_block, heap, dom_bbs);
+ dom_bbs.release ();
}
/* Decide whether to simply peel LOOP and how much. */
@@ -1759,7 +1759,7 @@ analyze_insn_to_expand_var (struct loop *loop, rtx insn)
ves = XNEW (struct var_to_expand);
ves->insn = insn;
ves->reg = copy_rtx (dest);
- ves->var_expansions = VEC_alloc (rtx, heap, 1);
+ ves->var_expansions.create (1);
ves->next = NULL;
ves->op = GET_CODE (src);
ves->expansion_count = 0;
@@ -1858,7 +1858,7 @@ analyze_insns_in_loop (struct loop *loop)
struct var_to_expand *ves = NULL;
PTR *slot1;
PTR *slot2;
- VEC (edge, heap) *edges = get_loop_exit_edges (loop);
+ vec<edge> edges = get_loop_exit_edges (loop);
edge exit;
bool can_apply = false;
@@ -1877,9 +1877,9 @@ analyze_insns_in_loop (struct loop *loop)
/* Record the loop exit bb and loop preheader before the unrolling. */
opt_info->loop_preheader = loop_preheader_edge (loop)->src;
- if (VEC_length (edge, edges) == 1)
+ if (edges.length () == 1)
{
- exit = VEC_index (edge, edges, 0);
+ exit = edges[0];
if (!(exit->flags & EDGE_COMPLEX))
{
opt_info->loop_exit = split_edge (exit);
@@ -1935,7 +1935,7 @@ analyze_insns_in_loop (struct loop *loop)
}
}
- VEC_free (edge, heap, edges);
+ edges.release ();
free (body);
return opt_info;
}
@@ -2090,9 +2090,9 @@ get_expansion (struct var_to_expand *ve)
if (ve->reuse_expansion == 0)
reg = ve->reg;
else
- reg = VEC_index (rtx, ve->var_expansions, ve->reuse_expansion - 1);
+ reg = ve->var_expansions[ve->reuse_expansion - 1];
- if (VEC_length (rtx, ve->var_expansions) == (unsigned) ve->reuse_expansion)
+ if (ve->var_expansions.length () == (unsigned) ve->reuse_expansion)
ve->reuse_expansion = 0;
else
ve->reuse_expansion++;
@@ -2127,7 +2127,7 @@ expand_var_during_unrolling (struct var_to_expand *ve, rtx insn)
if (apply_change_group ())
if (really_new_expansion)
{
- VEC_safe_push (rtx, heap, ve->var_expansions, new_reg);
+ ve->var_expansions.safe_push (new_reg);
ve->expansion_count++;
}
}
@@ -2168,7 +2168,7 @@ insert_var_expansion_initialization (struct var_to_expand *ve,
enum machine_mode mode = GET_MODE (ve->reg);
bool honor_signed_zero_p = HONOR_SIGNED_ZEROS (mode);
- if (VEC_length (rtx, ve->var_expansions) == 0)
+ if (ve->var_expansions.length () == 0)
return;
start_sequence ();
@@ -2178,7 +2178,7 @@ insert_var_expansion_initialization (struct var_to_expand *ve,
/* Note that we only accumulate FMA via the ADD operand. */
case PLUS:
case MINUS:
- FOR_EACH_VEC_ELT (rtx, ve->var_expansions, i, var)
+ FOR_EACH_VEC_ELT (ve->var_expansions, i, var)
{
if (honor_signed_zero_p)
zero_init = simplify_gen_unary (NEG, mode, CONST0_RTX (mode), mode);
@@ -2189,7 +2189,7 @@ insert_var_expansion_initialization (struct var_to_expand *ve,
break;
case MULT:
- FOR_EACH_VEC_ELT (rtx, ve->var_expansions, i, var)
+ FOR_EACH_VEC_ELT (ve->var_expansions, i, var)
{
zero_init = CONST1_RTX (GET_MODE (var));
emit_move_insn (var, zero_init);
@@ -2217,7 +2217,7 @@ combine_var_copies_in_loop_exit (struct var_to_expand *ve, basic_block place)
rtx expr, seq, var, insn;
unsigned i;
- if (VEC_length (rtx, ve->var_expansions) == 0)
+ if (ve->var_expansions.length () == 0)
return;
start_sequence ();
@@ -2227,12 +2227,12 @@ combine_var_copies_in_loop_exit (struct var_to_expand *ve, basic_block place)
/* Note that we only accumulate FMA via the ADD operand. */
case PLUS:
case MINUS:
- FOR_EACH_VEC_ELT (rtx, ve->var_expansions, i, var)
+ FOR_EACH_VEC_ELT (ve->var_expansions, i, var)
sum = simplify_gen_binary (PLUS, GET_MODE (ve->reg), var, sum);
break;
case MULT:
- FOR_EACH_VEC_ELT (rtx, ve->var_expansions, i, var)
+ FOR_EACH_VEC_ELT (ve->var_expansions, i, var)
sum = simplify_gen_binary (MULT, GET_MODE (ve->reg), var, sum);
break;
@@ -2405,7 +2405,7 @@ free_opt_info (struct opt_info *opt_info)
struct var_to_expand *ves;
for (ves = opt_info->var_to_expand_head; ves; ves = ves->next)
- VEC_free (rtx, heap, ves->var_expansions);
+ ves->var_expansions.release ();
htab_delete (opt_info->insns_with_var_to_expand);
}
free (opt_info);
diff --git a/gcc/lower-subreg.c b/gcc/lower-subreg.c
index 7fbba70fc01..b331273d942 100644
--- a/gcc/lower-subreg.c
+++ b/gcc/lower-subreg.c
@@ -48,8 +48,6 @@ along with GCC; see the file COPYING3. If not see
# define STACK_GROWS_DOWNWARD 0
#endif
-DEF_VEC_P (bitmap);
-DEF_VEC_ALLOC_P (bitmap,heap);
/* Decompose multi-word pseudo-registers into individual
pseudo-registers when possible and profitable. This is possible
@@ -98,7 +96,7 @@ static bitmap subreg_context;
/* Bit N in the bitmap in element M of this array is set if there is a
copy from reg M to reg N. */
-static VEC(bitmap,heap) *reg_copy_graph;
+static vec<bitmap> reg_copy_graph;
struct target_lower_subreg default_target_lower_subreg;
#if SWITCHABLE_TARGET
@@ -383,11 +381,11 @@ find_pseudo_copy (rtx set)
if (HARD_REGISTER_NUM_P (rd) || HARD_REGISTER_NUM_P (rs))
return false;
- b = VEC_index (bitmap, reg_copy_graph, rs);
+ b = reg_copy_graph[rs];
if (b == NULL)
{
b = BITMAP_ALLOC (NULL);
- VEC_replace (bitmap, reg_copy_graph, rs, b);
+ reg_copy_graph[rs] = b;
}
bitmap_set_bit (b, rd);
@@ -419,7 +417,7 @@ propagate_pseudo_copies (void)
EXECUTE_IF_SET_IN_BITMAP (queue, 0, i, iter)
{
- bitmap b = VEC_index (bitmap, reg_copy_graph, i);
+ bitmap b = reg_copy_graph[i];
if (b)
bitmap_ior_and_compl_into (propagate, b, non_decomposable_context);
}
@@ -1405,9 +1403,9 @@ decompose_multiword_subregs (bool decompose_copies)
non_decomposable_context = BITMAP_ALLOC (NULL);
subreg_context = BITMAP_ALLOC (NULL);
- reg_copy_graph = VEC_alloc (bitmap, heap, max);
- VEC_safe_grow (bitmap, heap, reg_copy_graph, max);
- memset (VEC_address (bitmap, reg_copy_graph), 0, sizeof (bitmap) * max);
+ reg_copy_graph.create (max);
+ reg_copy_graph.safe_grow_cleared (max);
+ memset (reg_copy_graph.address (), 0, sizeof (bitmap) * max);
speed_p = optimize_function_for_speed_p (cfun);
FOR_EACH_BB (bb)
@@ -1622,12 +1620,12 @@ decompose_multiword_subregs (bool decompose_copies)
unsigned int i;
bitmap b;
- FOR_EACH_VEC_ELT (bitmap, reg_copy_graph, i, b)
+ FOR_EACH_VEC_ELT (reg_copy_graph, i, b)
if (b)
BITMAP_FREE (b);
}
- VEC_free (bitmap, heap, reg_copy_graph);
+ reg_copy_graph.release ();
BITMAP_FREE (decomposable_context);
BITMAP_FREE (non_decomposable_context);
diff --git a/gcc/lra-lives.c b/gcc/lra-lives.c
index 64643459af0..f2dc359c18b 100644
--- a/gcc/lra-lives.c
+++ b/gcc/lra-lives.c
@@ -382,7 +382,7 @@ bb_has_abnormal_call_pred (basic_block bb)
}
/* Vec containing execution frequencies of program points. */
-static VEC(int,heap) *point_freq_vec;
+static vec<int> point_freq_vec;
/* The start of the above vector elements. */
int *lra_point_freq;
@@ -392,8 +392,8 @@ int *lra_point_freq;
static void
next_program_point (int &point, int freq)
{
- VEC_safe_push (int, heap, point_freq_vec, freq);
- lra_point_freq = VEC_address (int, point_freq_vec);
+ point_freq_vec.safe_push (freq);
+ lra_point_freq = point_freq_vec.address ();
point++;
}
@@ -955,8 +955,8 @@ lra_create_live_ranges (bool all_p)
dead_set = sparseset_alloc (max_regno);
unused_set = sparseset_alloc (max_regno);
curr_point = 0;
- point_freq_vec = VEC_alloc (int, heap, get_max_uid () * 2);
- lra_point_freq = VEC_address (int, point_freq_vec);
+ point_freq_vec.create (get_max_uid () * 2);
+ lra_point_freq = point_freq_vec.address ();
int *post_order_rev_cfg = XNEWVEC (int, last_basic_block);
int n_blocks_inverted = inverted_post_order_compute (post_order_rev_cfg);
lra_assert (n_blocks_inverted == n_basic_blocks);
@@ -991,7 +991,7 @@ lra_clear_live_ranges (void)
for (i = 0; i < max_reg_num (); i++)
free_live_range_list (lra_reg_info[i].live_ranges);
- VEC_free (int, heap, point_freq_vec);
+ point_freq_vec.release ();
}
/* Initialize live ranges data once per function. */
diff --git a/gcc/lra.c b/gcc/lra.c
index b75f4178efb..35b53eccaa5 100644
--- a/gcc/lra.c
+++ b/gcc/lra.c
@@ -1368,11 +1368,8 @@ get_new_reg_value (void)
/* Pools for copies. */
static alloc_pool copy_pool;
-DEF_VEC_P(lra_copy_t);
-DEF_VEC_ALLOC_P(lra_copy_t, heap);
-
/* Vec referring to pseudo copies. */
-static VEC(lra_copy_t,heap) *copy_vec;
+static vec<lra_copy_t> copy_vec;
/* Initialize I-th element of lra_reg_info. */
static inline void
@@ -1408,7 +1405,7 @@ init_reg_info (void)
initialize_lra_reg_info_element (i);
copy_pool
= create_alloc_pool ("lra copies", sizeof (struct lra_copy), 100);
- copy_vec = VEC_alloc (lra_copy_t, heap, 100);
+ copy_vec.create (100);
}
@@ -1423,7 +1420,7 @@ finish_reg_info (void)
free (lra_reg_info);
reg_info_size = 0;
free_alloc_pool (copy_pool);
- VEC_free (lra_copy_t, heap, copy_vec);
+ copy_vec.release ();
}
/* Expand common reg info if it is necessary. */
@@ -1446,9 +1443,9 @@ lra_free_copies (void)
{
lra_copy_t cp;
- while (VEC_length (lra_copy_t, copy_vec) != 0)
+ while (copy_vec.length () != 0)
{
- cp = VEC_pop (lra_copy_t, copy_vec);
+ cp = copy_vec.pop ();
lra_reg_info[cp->regno1].copies = lra_reg_info[cp->regno2].copies = NULL;
pool_free (copy_pool, cp);
}
@@ -1473,7 +1470,7 @@ lra_create_copy (int regno1, int regno2, int freq)
regno1 = temp;
}
cp = (lra_copy_t) pool_alloc (copy_pool);
- VEC_safe_push (lra_copy_t, heap, copy_vec, cp);
+ copy_vec.safe_push (cp);
cp->regno1_dest_p = regno1_dest_p;
cp->freq = freq;
cp->regno1 = regno1;
@@ -1492,9 +1489,9 @@ lra_create_copy (int regno1, int regno2, int freq)
lra_copy_t
lra_get_copy (int n)
{
- if (n >= (int) VEC_length (lra_copy_t, copy_vec))
+ if (n >= (int) copy_vec.length ())
return NULL;
- return VEC_index (lra_copy_t, copy_vec, n);
+ return copy_vec[n];
}
@@ -1730,7 +1727,7 @@ lra_get_insn_regs (int uid)
static sbitmap lra_constraint_insn_stack_bitmap;
/* The stack itself. */
-VEC (rtx, heap) *lra_constraint_insn_stack;
+vec<rtx> lra_constraint_insn_stack;
/* Put INSN on the stack. If ALWAYS_UPDATE is true, always update the reg
info for INSN, otherwise only update it if INSN is not already on the
@@ -1749,7 +1746,7 @@ lra_push_insn_1 (rtx insn, bool always_update)
bitmap_set_bit (lra_constraint_insn_stack_bitmap, uid);
if (! always_update)
lra_update_insn_regno_info (insn);
- VEC_safe_push (rtx, heap, lra_constraint_insn_stack, insn);
+ lra_constraint_insn_stack.safe_push (insn);
}
/* Put INSN on the stack. */
@@ -1777,7 +1774,7 @@ lra_push_insn_by_uid (unsigned int uid)
rtx
lra_pop_insn (void)
{
- rtx insn = VEC_pop (rtx, lra_constraint_insn_stack);
+ rtx insn = lra_constraint_insn_stack.pop ();
bitmap_clear_bit (lra_constraint_insn_stack_bitmap, INSN_UID (insn));
return insn;
}
@@ -1786,7 +1783,7 @@ lra_pop_insn (void)
unsigned int
lra_insn_stack_length (void)
{
- return VEC_length (rtx, lra_constraint_insn_stack);
+ return lra_constraint_insn_stack.length ();
}
/* Push insns FROM to TO (excluding it) going in reverse order. */
@@ -1860,11 +1857,8 @@ struct sloc
typedef struct sloc *sloc_t;
-DEF_VEC_P(sloc_t);
-DEF_VEC_ALLOC_P(sloc_t, heap);
-
/* Locations of the former scratches. */
-static VEC (sloc_t, heap) *scratches;
+static vec<sloc_t> scratches;
/* Bitmap of scratch regnos. */
static bitmap_head scratch_bitmap;
@@ -1899,7 +1893,7 @@ remove_scratches (void)
lra_insn_recog_data_t id;
struct lra_static_insn_data *static_id;
- scratches = VEC_alloc (sloc_t, heap, get_max_uid ());
+ scratches.create (get_max_uid ());
bitmap_initialize (&scratch_bitmap, &reg_obstack);
bitmap_initialize (&scratch_operand_bitmap, &reg_obstack);
FOR_EACH_BB (bb)
@@ -1922,7 +1916,7 @@ remove_scratches (void)
loc = XNEW (struct sloc);
loc->insn = insn;
loc->nop = i;
- VEC_safe_push (sloc_t, heap, scratches, loc);
+ scratches.safe_push (loc);
bitmap_set_bit (&scratch_bitmap, REGNO (*id->operand_loc[i]));
bitmap_set_bit (&scratch_operand_bitmap,
INSN_UID (insn) * MAX_RECOG_OPERANDS + i);
@@ -1942,12 +1936,13 @@ remove_scratches (void)
static void
restore_scratches (void)
{
- int i, regno;
+ int regno;
+ unsigned i;
sloc_t loc;
rtx last = NULL_RTX;
lra_insn_recog_data_t id = NULL;
- for (i = 0; VEC_iterate (sloc_t, scratches, i, loc); i++)
+ for (i = 0; scratches.iterate (i, &loc); i++)
{
if (last != loc->insn)
{
@@ -1970,9 +1965,9 @@ restore_scratches (void)
INSN_UID (loc->insn), loc->nop);
}
}
- for (i = 0; VEC_iterate (sloc_t, scratches, i, loc); i++)
+ for (i = 0; scratches.iterate (i, &loc); i++)
free (loc);
- VEC_free (sloc_t, heap, scratches);
+ scratches.release ();
bitmap_clear (&scratch_bitmap);
bitmap_clear (&scratch_operand_bitmap);
}
@@ -2250,7 +2245,7 @@ lra (FILE *f)
/* We don't DF from now and avoid its using because it is to
expensive when a lot of RTL changes are made. */
df_set_flags (DF_NO_INSN_RESCAN);
- lra_constraint_insn_stack = VEC_alloc (rtx, heap, get_max_uid ());
+ lra_constraint_insn_stack.create (get_max_uid ());
lra_constraint_insn_stack_bitmap = sbitmap_alloc (get_max_uid ());
bitmap_clear (lra_constraint_insn_stack_bitmap);
lra_live_ranges_init ();
@@ -2334,7 +2329,7 @@ lra (FILE *f)
lra_constraints_finish ();
finish_reg_info ();
sbitmap_free (lra_constraint_insn_stack_bitmap);
- VEC_free (rtx, heap, lra_constraint_insn_stack);
+ lra_constraint_insn_stack.release ();
finish_insn_recog_data ();
regstat_free_n_sets_and_refs ();
regstat_free_ri ();
diff --git a/gcc/lto-cgraph.c b/gcc/lto-cgraph.c
index b52a8e3a55c..7d936efa2c5 100644
--- a/gcc/lto-cgraph.c
+++ b/gcc/lto-cgraph.c
@@ -48,7 +48,7 @@ along with GCC; see the file COPYING3. If not see
#include "tree-pass.h"
static void output_cgraph_opt_summary (void);
-static void input_cgraph_opt_summary (VEC (symtab_node, heap) * nodes);
+static void input_cgraph_opt_summary (vec<symtab_node> nodes);
/* Number of LDPR values known to GCC. */
#define LDPR_NUM_KNOWN (LDPR_PREVAILING_DEF_IRONLY_EXP + 1)
@@ -84,7 +84,7 @@ lto_symtab_encoder_new (bool for_input)
if (!for_input)
encoder->map = pointer_map_create ();
- encoder->nodes = NULL;
+ encoder->nodes.create (0);
return encoder;
}
@@ -94,7 +94,7 @@ lto_symtab_encoder_new (bool for_input)
void
lto_symtab_encoder_delete (lto_symtab_encoder_t encoder)
{
- VEC_free (lto_encoder_entry, heap, encoder->nodes);
+ encoder->nodes.release ();
if (encoder->map)
pointer_map_destroy (encoder->map);
free (encoder);
@@ -116,8 +116,8 @@ lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
{
lto_encoder_entry entry = {node, false, false, false};
- ref = VEC_length (lto_encoder_entry, encoder->nodes);
- VEC_safe_push (lto_encoder_entry, heap, encoder->nodes, entry);
+ ref = encoder->nodes.length ();
+ encoder->nodes.safe_push (entry);
return ref;
}
@@ -125,11 +125,11 @@ lto_symtab_encoder_encode (lto_symtab_encoder_t encoder,
if (!slot || !*slot)
{
lto_encoder_entry entry = {node, false, false, false};
- ref = VEC_length (lto_encoder_entry, encoder->nodes);
+ ref = encoder->nodes.length ();
if (!slot)
slot = pointer_map_insert (encoder->map, node);
*slot = (void *) (intptr_t) (ref + 1);
- VEC_safe_push (lto_encoder_entry, heap, encoder->nodes, entry);
+ encoder->nodes.safe_push (entry);
}
else
ref = (size_t) *slot - 1;
@@ -152,13 +152,11 @@ lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
return false;
index = (size_t) *slot - 1;
- gcc_checking_assert (VEC_index (lto_encoder_entry,
- encoder->nodes, index).node
- == node);
+ gcc_checking_assert (encoder->nodes[index].node == node);
/* Remove from vector. We do this by swapping node with the last element
of the vector. */
- last_node = VEC_pop (lto_encoder_entry, encoder->nodes);
+ last_node = encoder->nodes.pop ();
if (last_node.node != node)
{
last_slot = pointer_map_contains (encoder->map, last_node.node);
@@ -166,8 +164,7 @@ lto_symtab_encoder_delete_node (lto_symtab_encoder_t encoder,
*last_slot = (void *)(size_t) (index + 1);
/* Move the last element to the original spot of NODE. */
- VEC_replace (lto_encoder_entry, encoder->nodes, index,
- last_node);
+ encoder->nodes[index] = last_node;
}
/* Remove element from hash table. */
@@ -183,7 +180,7 @@ lto_symtab_encoder_encode_body_p (lto_symtab_encoder_t encoder,
struct cgraph_node *node)
{
int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
- return VEC_index (lto_encoder_entry, encoder->nodes, index).body;
+ return encoder->nodes[index].body;
}
/* Return TRUE if we should encode body of NODE (if any). */
@@ -193,9 +190,8 @@ lto_set_symtab_encoder_encode_body (lto_symtab_encoder_t encoder,
struct cgraph_node *node)
{
int index = lto_symtab_encoder_encode (encoder, (symtab_node)node);
- gcc_checking_assert (VEC_index (lto_encoder_entry, encoder->nodes,
- index).node == (symtab_node)node);
- VEC_index (lto_encoder_entry, encoder->nodes, index).body = true;
+ gcc_checking_assert (encoder->nodes[index].node == (symtab_node)node);
+ encoder->nodes[index].body = true;
}
/* Return TRUE if we should encode initializer of NODE (if any). */
@@ -207,7 +203,7 @@ lto_symtab_encoder_encode_initializer_p (lto_symtab_encoder_t encoder,
int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
if (index == LCC_NOT_FOUND)
return false;
- return VEC_index (lto_encoder_entry, encoder->nodes, index).initializer;
+ return encoder->nodes[index].initializer;
}
/* Return TRUE if we should encode initializer of NODE (if any). */
@@ -217,7 +213,7 @@ lto_set_symtab_encoder_encode_initializer (lto_symtab_encoder_t encoder,
struct varpool_node *node)
{
int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
- VEC_index (lto_encoder_entry, encoder->nodes, index).initializer = true;
+ encoder->nodes[index].initializer = true;
}
/* Return TRUE if we should encode initializer of NODE (if any). */
@@ -229,7 +225,7 @@ lto_symtab_encoder_in_partition_p (lto_symtab_encoder_t encoder,
int index = lto_symtab_encoder_lookup (encoder, (symtab_node)node);
if (index == LCC_NOT_FOUND)
return false;
- return VEC_index (lto_encoder_entry, encoder->nodes, index).in_partition;
+ return encoder->nodes[index].in_partition;
}
/* Return TRUE if we should encode body of NODE (if any). */
@@ -239,7 +235,7 @@ lto_set_symtab_encoder_in_partition (lto_symtab_encoder_t encoder,
symtab_node node)
{
int index = lto_symtab_encoder_encode (encoder, (symtab_node)node);
- VEC_index (lto_encoder_entry, encoder->nodes, index).in_partition = true;
+ encoder->nodes[index].in_partition = true;
}
/* Output the cgraph EDGE to OB using ENCODER. */
@@ -436,9 +432,8 @@ lto_output_node (struct lto_simple_output_block *ob, struct cgraph_node *node,
streamer_write_hwi_stream (ob->main_stream, node->count_materialization_scale);
streamer_write_hwi_stream (ob->main_stream,
- VEC_length (ipa_opt_pass,
- node->ipa_transforms_to_apply));
- FOR_EACH_VEC_ELT (ipa_opt_pass, node->ipa_transforms_to_apply, i, pass)
+ node->ipa_transforms_to_apply.length ());
+ FOR_EACH_VEC_ELT (node->ipa_transforms_to_apply, i, pass)
streamer_write_hwi_stream (ob->main_stream, pass->pass.static_pass_number);
if (tag == LTO_symtab_analyzed_node)
@@ -897,7 +892,7 @@ static struct cgraph_node *
input_node (struct lto_file_decl_data *file_data,
struct lto_input_block *ib,
enum LTO_symtab_tags tag,
- VEC(symtab_node, heap) *nodes)
+ vec<symtab_node> nodes)
{
tree fn_decl;
struct cgraph_node *node;
@@ -916,8 +911,9 @@ input_node (struct lto_file_decl_data *file_data,
if (clone_ref != LCC_NOT_FOUND)
{
- node = cgraph_clone_node (cgraph (VEC_index (symtab_node, nodes, clone_ref)), fn_decl,
- 0, CGRAPH_FREQ_BASE, false, NULL, false);
+ node = cgraph_clone_node (cgraph (nodes[clone_ref]), fn_decl,
+ 0, CGRAPH_FREQ_BASE, false,
+ vec<cgraph_edge_p>(), false);
}
else
node = cgraph_get_create_node (fn_decl);
@@ -930,7 +926,7 @@ input_node (struct lto_file_decl_data *file_data,
node->count_materialization_scale = streamer_read_hwi (ib);
count = streamer_read_hwi (ib);
- node->ipa_transforms_to_apply = NULL;
+ node->ipa_transforms_to_apply = vec<ipa_opt_pass>();
for (i = 0; i < count; i++)
{
struct opt_pass *pass;
@@ -938,8 +934,7 @@ input_node (struct lto_file_decl_data *file_data,
gcc_assert (pid < passes_by_id_size);
pass = passes_by_id[pid];
- VEC_safe_push (ipa_opt_pass, heap, node->ipa_transforms_to_apply,
- (struct ipa_opt_pass_d *) pass);
+ node->ipa_transforms_to_apply.safe_push ((struct ipa_opt_pass_d *) pass);
}
if (tag == LTO_symtab_analyzed_node)
@@ -1045,7 +1040,7 @@ input_varpool_node (struct lto_file_decl_data *file_data,
static void
input_ref (struct lto_input_block *ib,
symtab_node referring_node,
- VEC(symtab_node, heap) *nodes)
+ vec<symtab_node> nodes)
{
symtab_node node = NULL;
struct bitpack_d bp;
@@ -1053,7 +1048,7 @@ input_ref (struct lto_input_block *ib,
bp = streamer_read_bitpack (ib);
use = (enum ipa_ref_use) bp_unpack_value (&bp, 2);
- node = VEC_index (symtab_node, nodes, streamer_read_hwi (ib));
+ node = nodes[streamer_read_hwi (ib)];
ipa_record_reference (referring_node, node, use, NULL);
}
@@ -1063,7 +1058,7 @@ input_ref (struct lto_input_block *ib,
indirect_unknown_callee set). */
static void
-input_edge (struct lto_input_block *ib, VEC(symtab_node, heap) *nodes,
+input_edge (struct lto_input_block *ib, vec<symtab_node> nodes,
bool indirect)
{
struct cgraph_node *caller, *callee;
@@ -1075,13 +1070,13 @@ input_edge (struct lto_input_block *ib, VEC(symtab_node, heap) *nodes,
struct bitpack_d bp;
int ecf_flags = 0;
- caller = cgraph (VEC_index (symtab_node, nodes, streamer_read_hwi (ib)));
+ caller = cgraph (nodes[streamer_read_hwi (ib)]);
if (caller == NULL || caller->symbol.decl == NULL_TREE)
internal_error ("bytecode stream: no caller found while reading edge");
if (!indirect)
{
- callee = cgraph (VEC_index (symtab_node, nodes, streamer_read_hwi (ib)));
+ callee = cgraph (nodes[streamer_read_hwi (ib)]);
if (callee == NULL || callee->symbol.decl == NULL_TREE)
internal_error ("bytecode stream: no callee found while reading edge");
}
@@ -1126,12 +1121,12 @@ input_edge (struct lto_input_block *ib, VEC(symtab_node, heap) *nodes,
/* Read a cgraph from IB using the info in FILE_DATA. */
-static VEC(symtab_node, heap) *
+static vec<symtab_node>
input_cgraph_1 (struct lto_file_decl_data *file_data,
struct lto_input_block *ib)
{
enum LTO_symtab_tags tag;
- VEC(symtab_node, heap) *nodes = NULL;
+ vec<symtab_node> nodes = vec<symtab_node>();
symtab_node node;
unsigned i;
@@ -1146,7 +1141,7 @@ input_cgraph_1 (struct lto_file_decl_data *file_data,
else if (tag == LTO_symtab_variable)
{
node = (symtab_node)input_varpool_node (file_data, ib);
- VEC_safe_push (symtab_node, heap, nodes, node);
+ nodes.safe_push (node);
lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
}
else
@@ -1154,7 +1149,7 @@ input_cgraph_1 (struct lto_file_decl_data *file_data,
node = (symtab_node)input_node (file_data, ib, tag, nodes);
if (node == NULL || node->symbol.decl == NULL_TREE)
internal_error ("bytecode stream: found empty cgraph node");
- VEC_safe_push (symtab_node, heap, nodes, node);
+ nodes.safe_push (node);
lto_symtab_encoder_encode (file_data->symtab_node_encoder, node);
}
@@ -1165,10 +1160,10 @@ input_cgraph_1 (struct lto_file_decl_data *file_data,
/* AUX pointers should be all non-zero for function nodes read from the stream. */
#ifdef ENABLE_CHECKING
- FOR_EACH_VEC_ELT (symtab_node, nodes, i, node)
+ FOR_EACH_VEC_ELT (nodes, i, node)
gcc_assert (node->symbol.aux || !is_a <cgraph_node> (node));
#endif
- FOR_EACH_VEC_ELT (symtab_node, nodes, i, node)
+ FOR_EACH_VEC_ELT (nodes, i, node)
{
int ref;
if (cgraph_node *cnode = dyn_cast <cgraph_node> (node))
@@ -1182,7 +1177,7 @@ input_cgraph_1 (struct lto_file_decl_data *file_data,
/* Fixup inlined_to from reference to pointer. */
if (ref != LCC_NOT_FOUND)
- cnode->global.inlined_to = cgraph (VEC_index (symtab_node, nodes, ref));
+ cgraph (node)->global.inlined_to = cgraph (nodes[ref]);
else
cnode->global.inlined_to = NULL;
}
@@ -1191,11 +1186,11 @@ input_cgraph_1 (struct lto_file_decl_data *file_data,
/* Fixup same_comdat_group from reference to pointer. */
if (ref != LCC_NOT_FOUND)
- node->symbol.same_comdat_group = VEC_index (symtab_node, nodes, ref);
+ node->symbol.same_comdat_group = nodes[ref];
else
node->symbol.same_comdat_group = NULL;
}
- FOR_EACH_VEC_ELT (symtab_node, nodes, i, node)
+ FOR_EACH_VEC_ELT (nodes, i, node)
node->symbol.aux = is_a <cgraph_node> (node) ? (void *)1 : NULL;
return nodes;
}
@@ -1204,7 +1199,7 @@ input_cgraph_1 (struct lto_file_decl_data *file_data,
static void
input_refs (struct lto_input_block *ib,
- VEC(symtab_node, heap) *nodes)
+ vec<symtab_node> nodes)
{
int count;
int idx;
@@ -1215,7 +1210,7 @@ input_refs (struct lto_input_block *ib,
if (!count)
break;
idx = streamer_read_uhwi (ib);
- node = VEC_index (symtab_node, nodes, idx);
+ node = nodes[idx];
while (count)
{
input_ref (ib, node, nodes);
@@ -1345,7 +1340,7 @@ input_symtab (void)
const char *data;
size_t len;
struct lto_input_block *ib;
- VEC(symtab_node, heap) *nodes;
+ vec<symtab_node> nodes;
ib = lto_create_simple_input_block (file_data, LTO_section_symtab_nodes,
&data, &len);
@@ -1366,7 +1361,7 @@ input_symtab (void)
ib, data, len);
if (flag_ltrans)
input_cgraph_opt_summary (nodes);
- VEC_free (symtab_node, heap, nodes);
+ nodes.release ();
}
merge_profile_summaries (file_data_vec);
@@ -1433,9 +1428,8 @@ output_node_opt_summary (struct output_block *ob,
}
else
streamer_write_uhwi (ob, 0);
- streamer_write_uhwi (ob, VEC_length (ipa_replace_map_p,
- node->clone.tree_map));
- FOR_EACH_VEC_ELT (ipa_replace_map_p, node->clone.tree_map, i, map)
+ streamer_write_uhwi (ob, vec_safe_length (node->clone.tree_map));
+ FOR_EACH_VEC_SAFE_ELT (node->clone.tree_map, i, map)
{
int parm_num;
tree parm;
@@ -1543,7 +1537,7 @@ input_node_opt_summary (struct cgraph_node *node,
{
struct ipa_replace_map *map = ggc_alloc_ipa_replace_map ();
- VEC_safe_push (ipa_replace_map_p, gc, node->clone.tree_map, map);
+ vec_safe_push (node->clone.tree_map, map);
map->parm_num = streamer_read_uhwi (ib_main);
map->old_tree = NULL;
map->new_tree = stream_read_tree (ib_main, data_in);
@@ -1561,8 +1555,8 @@ input_node_opt_summary (struct cgraph_node *node,
static void
input_cgraph_opt_section (struct lto_file_decl_data *file_data,
- const char *data, size_t len, VEC (symtab_node,
- heap) * nodes)
+ const char *data, size_t len,
+ vec<symtab_node> nodes)
{
const struct lto_function_header *header =
(const struct lto_function_header *) data;
@@ -1579,13 +1573,14 @@ input_cgraph_opt_section (struct lto_file_decl_data *file_data,
data_in =
lto_data_in_create (file_data, (const char *) data + string_offset,
- header->string_size, NULL);
+ header->string_size,
+ vec<ld_plugin_symbol_resolution_t>());
count = streamer_read_uhwi (&ib_main);
for (i = 0; i < count; i++)
{
int ref = streamer_read_uhwi (&ib_main);
- input_node_opt_summary (cgraph (VEC_index (symtab_node, nodes, ref)),
+ input_node_opt_summary (cgraph (nodes[ref]),
&ib_main, data_in);
}
lto_free_section_data (file_data, LTO_section_cgraph_opt_sum, NULL, data,
@@ -1596,7 +1591,7 @@ input_cgraph_opt_section (struct lto_file_decl_data *file_data,
/* Input optimization summary of cgraph. */
static void
-input_cgraph_opt_summary (VEC (symtab_node, heap) * nodes)
+input_cgraph_opt_summary (vec<symtab_node> nodes)
{
struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
struct lto_file_decl_data *file_data;
diff --git a/gcc/lto-section-out.c b/gcc/lto-section-out.c
index 3de48e03df4..e4e8a594062 100644
--- a/gcc/lto-section-out.c
+++ b/gcc/lto-section-out.c
@@ -42,12 +42,12 @@ along with GCC; see the file COPYING3. If not see
#include "lto-streamer.h"
#include "lto-compress.h"
-static VEC(lto_out_decl_state_ptr, heap) *decl_state_stack;
+static vec<lto_out_decl_state_ptr> decl_state_stack;
/* List of out decl states used by functions. We use this to
generate the decl directory later. */
-VEC(lto_out_decl_state_ptr, heap) *lto_function_decl_states;
+vec<lto_out_decl_state_ptr> lto_function_decl_states;
/* Returns a hash code for P. */
hashval_t
@@ -293,7 +293,7 @@ lto_output_decl_index (struct lto_output_stream *obs,
new_slot->t = name;
new_slot->slot_num = index;
*slot = new_slot;
- VEC_safe_push (tree, heap, encoder->trees, name);
+ encoder->trees.safe_push (name);
new_entry_p = TRUE;
}
else
@@ -481,7 +481,7 @@ lto_delete_out_decl_state (struct lto_out_decl_state *state)
struct lto_out_decl_state *
lto_get_out_decl_state (void)
{
- return VEC_last (lto_out_decl_state_ptr, decl_state_stack);
+ return decl_state_stack.last ();
}
/* Push STATE to top of out decl stack. */
@@ -489,7 +489,7 @@ lto_get_out_decl_state (void)
void
lto_push_out_decl_state (struct lto_out_decl_state *state)
{
- VEC_safe_push (lto_out_decl_state_ptr, heap, decl_state_stack, state);
+ decl_state_stack.safe_push (state);
}
/* Pop the currently used out-decl state from top of stack. */
@@ -497,7 +497,7 @@ lto_push_out_decl_state (struct lto_out_decl_state *state)
struct lto_out_decl_state *
lto_pop_out_decl_state (void)
{
- return VEC_pop (lto_out_decl_state_ptr, decl_state_stack);
+ return decl_state_stack.pop ();
}
/* Record STATE after it has been used in serializing the body of
@@ -518,6 +518,5 @@ lto_record_function_out_decl_state (tree fn_decl,
state->streams[i].tree_hash_table = NULL;
}
state->fn_decl = fn_decl;
- VEC_safe_push (lto_out_decl_state_ptr, heap, lto_function_decl_states,
- state);
+ lto_function_decl_states.safe_push (state);
}
diff --git a/gcc/lto-streamer-in.c b/gcc/lto-streamer-in.c
index 15905f859a4..2c5a760880a 100644
--- a/gcc/lto-streamer-in.c
+++ b/gcc/lto-streamer-in.c
@@ -203,7 +203,7 @@ lto_input_tree_ref (struct lto_input_block *ib, struct data_in *data_in,
case LTO_ssa_name_ref:
ix_u = streamer_read_uhwi (ib);
- result = VEC_index (tree, SSANAMES (fn), ix_u);
+ result = (*SSANAMES (fn))[ix_u];
break;
case LTO_field_decl_ref:
@@ -403,24 +403,22 @@ static void
fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
{
unsigned i;
- VEC(eh_region,gc) *eh_array = fn->eh->region_array;
- VEC(eh_landing_pad,gc) *lp_array = fn->eh->lp_array;
+ vec<eh_region, va_gc> *eh_array = fn->eh->region_array;
+ vec<eh_landing_pad, va_gc> *lp_array = fn->eh->lp_array;
eh_region r;
eh_landing_pad lp;
gcc_assert (eh_array && lp_array);
gcc_assert (root_region >= 0);
- fn->eh->region_tree = VEC_index (eh_region, eh_array, root_region);
+ fn->eh->region_tree = (*eh_array)[root_region];
-#define FIXUP_EH_REGION(r) (r) = VEC_index (eh_region, eh_array, \
- (HOST_WIDE_INT) (intptr_t) (r))
-#define FIXUP_EH_LP(p) (p) = VEC_index (eh_landing_pad, lp_array, \
- (HOST_WIDE_INT) (intptr_t) (p))
+#define FIXUP_EH_REGION(r) (r) = (*eh_array)[(HOST_WIDE_INT) (intptr_t) (r)]
+#define FIXUP_EH_LP(p) (p) = (*lp_array)[(HOST_WIDE_INT) (intptr_t) (p)]
/* Convert all the index numbers stored in pointer fields into
pointers to the corresponding slots in the EH region array. */
- FOR_EACH_VEC_ELT (eh_region, eh_array, i, r)
+ FOR_EACH_VEC_ELT (*eh_array, i, r)
{
/* The array may contain NULL regions. */
if (r == NULL)
@@ -435,7 +433,7 @@ fixup_eh_region_pointers (struct function *fn, HOST_WIDE_INT root_region)
/* Convert all the index numbers stored in pointer fields into
pointers to the corresponding slots in the EH landing pad array. */
- FOR_EACH_VEC_ELT (eh_landing_pad, lp_array, i, lp)
+ FOR_EACH_VEC_ELT (*lp_array, i, lp)
{
/* The array may contain NULL landing pads. */
if (lp == NULL)
@@ -504,11 +502,11 @@ input_eh_regions (struct lto_input_block *ib, struct data_in *data_in,
gcc_assert (len == (int) len);
if (len > 0)
{
- VEC_safe_grow (eh_region, gc, fn->eh->region_array, len);
+ vec_safe_grow_cleared (fn->eh->region_array, len);
for (i = 0; i < len; i++)
{
eh_region r = input_eh_region (ib, data_in, i);
- VEC_replace (eh_region, fn->eh->region_array, i, r);
+ (*fn->eh->region_array)[i] = r;
}
}
@@ -517,11 +515,11 @@ input_eh_regions (struct lto_input_block *ib, struct data_in *data_in,
gcc_assert (len == (int) len);
if (len > 0)
{
- VEC_safe_grow (eh_landing_pad, gc, fn->eh->lp_array, len);
+ vec_safe_grow_cleared (fn->eh->lp_array, len);
for (i = 0; i < len; i++)
{
eh_landing_pad lp = input_eh_lp (ib, data_in, i);
- VEC_replace (eh_landing_pad, fn->eh->lp_array, i, lp);
+ (*fn->eh->lp_array)[i] = lp;
}
}
@@ -530,11 +528,11 @@ input_eh_regions (struct lto_input_block *ib, struct data_in *data_in,
gcc_assert (len == (int) len);
if (len > 0)
{
- VEC_safe_grow (tree, gc, fn->eh->ttype_data, len);
+ vec_safe_grow_cleared (fn->eh->ttype_data, len);
for (i = 0; i < len; i++)
{
tree ttype = stream_read_tree (ib, data_in);
- VEC_replace (tree, fn->eh->ttype_data, i, ttype);
+ (*fn->eh->ttype_data)[i] = ttype;
}
}
@@ -545,20 +543,20 @@ input_eh_regions (struct lto_input_block *ib, struct data_in *data_in,
{
if (targetm.arm_eabi_unwinder)
{
- VEC_safe_grow (tree, gc, fn->eh->ehspec_data.arm_eabi, len);
+ vec_safe_grow_cleared (fn->eh->ehspec_data.arm_eabi, len);
for (i = 0; i < len; i++)
{
tree t = stream_read_tree (ib, data_in);
- VEC_replace (tree, fn->eh->ehspec_data.arm_eabi, i, t);
+ (*fn->eh->ehspec_data.arm_eabi)[i] = t;
}
}
else
{
- VEC_safe_grow (uchar, gc, fn->eh->ehspec_data.other, len);
+ vec_safe_grow_cleared (fn->eh->ehspec_data.other, len);
for (i = 0; i < len; i++)
{
uchar c = streamer_read_uchar (ib);
- VEC_replace (uchar, fn->eh->ehspec_data.other, i, c);
+ (*fn->eh->ehspec_data.other)[i] = c;
}
}
}
@@ -605,13 +603,11 @@ input_cfg (struct lto_input_block *ib, struct function *fn,
bb_count = streamer_read_uhwi (ib);
last_basic_block_for_function (fn) = bb_count;
- if (bb_count > VEC_length (basic_block, basic_block_info_for_function (fn)))
- VEC_safe_grow_cleared (basic_block, gc,
- basic_block_info_for_function (fn), bb_count);
+ if (bb_count > basic_block_info_for_function (fn)->length ())
+ vec_safe_grow_cleared (basic_block_info_for_function (fn), bb_count);
- if (bb_count > VEC_length (basic_block, label_to_block_map_for_function (fn)))
- VEC_safe_grow_cleared (basic_block, gc,
- label_to_block_map_for_function (fn), bb_count);
+ if (bb_count > label_to_block_map_for_function (fn)->length ())
+ vec_safe_grow_cleared (label_to_block_map_for_function (fn), bb_count);
index = streamer_read_hwi (ib);
while (index != -1)
@@ -685,8 +681,8 @@ input_ssa_names (struct lto_input_block *ib, struct data_in *data_in,
bool is_default_def;
/* Skip over the elements that had been freed. */
- while (VEC_length (tree, SSANAMES (fn)) < i)
- VEC_quick_push (tree, SSANAMES (fn), NULL_TREE);
+ while (SSANAMES (fn)->length () < i)
+ SSANAMES (fn)->quick_push (NULL_TREE);
is_default_def = (streamer_read_uchar (ib) != 0);
name = stream_read_tree (ib, data_in);
@@ -762,11 +758,11 @@ input_struct_function_base (struct function *fn, struct data_in *data_in,
if (len > 0)
{
int i;
- VEC_safe_grow (tree, gc, fn->local_decls, len);
+ vec_safe_grow_cleared (fn->local_decls, len);
for (i = 0; i < len; i++)
{
tree t = stream_read_tree (ib, data_in);
- VEC_replace (tree, fn->local_decls, i, t);
+ (*fn->local_decls)[i] = t;
}
}
@@ -929,7 +925,8 @@ lto_read_body (struct lto_file_decl_data *file_data, tree fn_decl,
header->main_size);
data_in = lto_data_in_create (file_data, data + string_offset,
- header->string_size, NULL);
+ header->string_size,
+ vec<ld_plugin_symbol_resolution_t>());
/* Make sure the file was generated by the exact same compiler. */
lto_check_version (header->lto_header.major_version,
@@ -957,16 +954,16 @@ lto_read_body (struct lto_file_decl_data *file_data, tree fn_decl,
input_cfg (&ib_cfg, fn, node->count_materialization_scale);
/* Set up the struct function. */
- from = VEC_length (tree, data_in->reader_cache->nodes);
+ from = data_in->reader_cache->nodes.length ();
input_function (fn_decl, data_in, &ib_main);
/* And fixup types we streamed locally. */
{
struct streamer_tree_cache_d *cache = data_in->reader_cache;
- unsigned len = VEC_length (tree, cache->nodes);
+ unsigned len = cache->nodes.length ();
unsigned i;
for (i = len; i-- > from;)
{
- tree t = VEC_index (tree, cache->nodes, i);
+ tree t = cache->nodes[i];
if (t == NULL_TREE)
continue;
@@ -1127,7 +1124,8 @@ lto_input_toplevel_asms (struct lto_file_decl_data *file_data, int order_base)
header->main_size);
data_in = lto_data_in_create (file_data, data + string_offset,
- header->string_size, NULL);
+ header->string_size,
+ vec<ld_plugin_symbol_resolution_t>());
/* Make sure the file was generated by the exact same compiler. */
lto_check_version (header->lto_header.major_version,
@@ -1166,7 +1164,7 @@ lto_reader_init (void)
struct data_in *
lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
unsigned len,
- VEC(ld_plugin_symbol_resolution_t,heap) *resolutions)
+ vec<ld_plugin_symbol_resolution_t> resolutions)
{
struct data_in *data_in = XCNEW (struct data_in);
data_in->file_data = file_data;
@@ -1184,7 +1182,7 @@ lto_data_in_create (struct lto_file_decl_data *file_data, const char *strings,
void
lto_data_in_delete (struct data_in *data_in)
{
- VEC_free (ld_plugin_symbol_resolution_t, heap, data_in->globals_resolution);
+ data_in->globals_resolution.release ();
streamer_tree_cache_delete (data_in->reader_cache);
free (data_in->labels);
free (data_in);
diff --git a/gcc/lto-streamer-out.c b/gcc/lto-streamer-out.c
index 6b689ee5ba1..0bddb3dfcbb 100644
--- a/gcc/lto-streamer-out.c
+++ b/gcc/lto-streamer-out.c
@@ -543,35 +543,33 @@ output_eh_regions (struct output_block *ob, struct function *fn)
streamer_write_hwi (ob, fn->eh->region_tree->index);
/* Emit all the EH regions in the region array. */
- streamer_write_hwi (ob, VEC_length (eh_region, fn->eh->region_array));
- FOR_EACH_VEC_ELT (eh_region, fn->eh->region_array, i, eh)
+ streamer_write_hwi (ob, vec_safe_length (fn->eh->region_array));
+ FOR_EACH_VEC_SAFE_ELT (fn->eh->region_array, i, eh)
output_eh_region (ob, eh);
/* Emit all landing pads. */
- streamer_write_hwi (ob, VEC_length (eh_landing_pad, fn->eh->lp_array));
- FOR_EACH_VEC_ELT (eh_landing_pad, fn->eh->lp_array, i, lp)
+ streamer_write_hwi (ob, vec_safe_length (fn->eh->lp_array));
+ FOR_EACH_VEC_SAFE_ELT (fn->eh->lp_array, i, lp)
output_eh_lp (ob, lp);
/* Emit all the runtime type data. */
- streamer_write_hwi (ob, VEC_length (tree, fn->eh->ttype_data));
- FOR_EACH_VEC_ELT (tree, fn->eh->ttype_data, i, ttype)
+ streamer_write_hwi (ob, vec_safe_length (fn->eh->ttype_data));
+ FOR_EACH_VEC_SAFE_ELT (fn->eh->ttype_data, i, ttype)
stream_write_tree (ob, ttype, true);
/* Emit the table of action chains. */
if (targetm.arm_eabi_unwinder)
{
tree t;
- streamer_write_hwi (ob, VEC_length (tree,
- fn->eh->ehspec_data.arm_eabi));
- FOR_EACH_VEC_ELT (tree, fn->eh->ehspec_data.arm_eabi, i, t)
+ streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.arm_eabi));
+ FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.arm_eabi, i, t)
stream_write_tree (ob, t, true);
}
else
{
uchar c;
- streamer_write_hwi (ob, VEC_length (uchar,
- fn->eh->ehspec_data.other));
- FOR_EACH_VEC_ELT (uchar, fn->eh->ehspec_data.other, i, c)
+ streamer_write_hwi (ob, vec_safe_length (fn->eh->ehspec_data.other));
+ FOR_EACH_VEC_SAFE_ELT (fn->eh->ehspec_data.other, i, c)
streamer_write_char_stream (ob->main_stream, c);
}
}
@@ -589,12 +587,12 @@ output_ssa_names (struct output_block *ob, struct function *fn)
{
unsigned int i, len;
- len = VEC_length (tree, SSANAMES (fn));
+ len = vec_safe_length (SSANAMES (fn));
streamer_write_uhwi (ob, len);
for (i = 1; i < len; i++)
{
- tree ptr = VEC_index (tree, SSANAMES (fn), i);
+ tree ptr = (*SSANAMES (fn))[i];
if (ptr == NULL_TREE
|| SSA_NAME_IN_FREE_LIST (ptr)
@@ -730,8 +728,8 @@ output_struct_function_base (struct output_block *ob, struct function *fn)
stream_write_tree (ob, fn->nonlocal_goto_save_area, true);
/* Output all the local variables in the function. */
- streamer_write_hwi (ob, VEC_length (tree, fn->local_decls));
- FOR_EACH_VEC_ELT (tree, fn->local_decls, i, t)
+ streamer_write_hwi (ob, vec_safe_length (fn->local_decls));
+ FOR_EACH_VEC_SAFE_ELT (fn->local_decls, i, t)
stream_write_tree (ob, t, true);
/* Output current IL state of the function. */
@@ -940,7 +938,7 @@ copy_function (struct cgraph_node *node)
must be empty where we reach here. */
gcc_assert (lto_tree_ref_encoder_size (encoder) == 0);
for (j = 0; j < n; j++)
- VEC_safe_push (tree, heap, encoder->trees, trees[j]);
+ encoder->trees.safe_push (trees[j]);
encoder->next_index = n;
}
@@ -1342,12 +1340,12 @@ produce_asm_for_decls (void)
/* Write the global symbols. */
out_state = lto_get_out_decl_state ();
- num_fns = VEC_length (lto_out_decl_state_ptr, lto_function_decl_states);
+ num_fns = lto_function_decl_states.length ();
lto_output_decl_state_streams (ob, out_state);
for (idx = 0; idx < num_fns; idx++)
{
fn_out_state =
- VEC_index (lto_out_decl_state_ptr, lto_function_decl_states, idx);
+ lto_function_decl_states[idx];
lto_output_decl_state_streams (ob, fn_out_state);
}
@@ -1364,7 +1362,7 @@ produce_asm_for_decls (void)
for (idx = 0; idx < num_fns; idx++)
{
fn_out_state =
- VEC_index (lto_out_decl_state_ptr, lto_function_decl_states, idx);
+ lto_function_decl_states[idx];
decl_state_size += lto_out_decl_state_written_size (fn_out_state);
}
header.decl_state_size = decl_state_size;
@@ -1387,7 +1385,7 @@ produce_asm_for_decls (void)
for (idx = 0; idx < num_fns; idx++)
{
fn_out_state =
- VEC_index (lto_out_decl_state_ptr, lto_function_decl_states, idx);
+ lto_function_decl_states[idx];
lto_output_decl_state_refs (ob, decl_state_stream, fn_out_state);
}
lto_write_stream (decl_state_stream);
@@ -1410,12 +1408,11 @@ produce_asm_for_decls (void)
for (idx = 0; idx < num_fns; idx++)
{
fn_out_state =
- VEC_index (lto_out_decl_state_ptr, lto_function_decl_states, idx);
+ lto_function_decl_states[idx];
lto_delete_out_decl_state (fn_out_state);
}
lto_symtab_encoder_delete (ob->decl_state->symtab_node_encoder);
- VEC_free (lto_out_decl_state_ptr, heap, lto_function_decl_states);
- lto_function_decl_states = NULL;
+ lto_function_decl_states.release ();
destroy_output_block (ob);
}
diff --git a/gcc/lto-streamer.h b/gcc/lto-streamer.h
index 4ad2bc98c85..befebcabd8e 100644
--- a/gcc/lto-streamer.h
+++ b/gcc/lto-streamer.h
@@ -29,7 +29,6 @@ along with GCC; see the file COPYING3. If not see
#include "target.h"
#include "cgraph.h"
#include "vec.h"
-#include "vecprim.h"
#include "alloc-pool.h"
#include "gcov-io.h"
#include "diagnostic.h"
@@ -266,8 +265,6 @@ typedef enum
} lto_decl_stream_e_t;
typedef enum ld_plugin_symbol_resolution ld_plugin_symbol_resolution_t;
-DEF_VEC_I(ld_plugin_symbol_resolution_t);
-DEF_VEC_ALLOC_I(ld_plugin_symbol_resolution_t, heap);
/* Macro to define convenience functions for type and decl streams
@@ -439,13 +436,11 @@ typedef struct
unsigned int initializer:1;
} lto_encoder_entry;
-DEF_VEC_O(lto_encoder_entry);
-DEF_VEC_ALLOC_O(lto_encoder_entry, heap);
/* Encoder data structure used to stream callgraph nodes. */
struct lto_symtab_encoder_d
{
- VEC(lto_encoder_entry,heap) *nodes;
+ vec<lto_encoder_entry> nodes;
pointer_map_t *map;
};
@@ -486,7 +481,7 @@ struct lto_tree_ref_encoder
{
htab_t tree_hash_table; /* Maps pointers to indices. */
unsigned int next_index; /* Next available index. */
- VEC(tree,heap) *trees; /* Maps indices to pointers. */
+ vec<tree> trees; /* Maps indices to pointers. */
};
@@ -522,8 +517,6 @@ struct lto_out_decl_state
typedef struct lto_out_decl_state *lto_out_decl_state_ptr;
-DEF_VEC_P(lto_out_decl_state_ptr);
-DEF_VEC_ALLOC_P(lto_out_decl_state_ptr, heap);
/* Compact representation of a index <-> resolution pair. Unpacked to an
vector later. */
@@ -534,8 +527,6 @@ struct res_pair
};
typedef struct res_pair res_pair;
-DEF_VEC_O(res_pair);
-DEF_VEC_ALLOC_O(res_pair, heap);
/* One of these is allocated for each object file that being compiled
by lto. This structure contains the tables that are needed by the
@@ -572,7 +563,7 @@ struct GTY(()) lto_file_decl_data
unsigned HOST_WIDE_INT id;
/* Symbol resolutions for this file */
- VEC(res_pair, heap) * GTY((skip)) respairs;
+ vec<res_pair> GTY((skip)) respairs;
unsigned max_index;
struct gcov_ctr_summary GTY((skip)) profile_info;
@@ -705,7 +696,7 @@ struct data_in
int current_col;
/* Maps each reference number to the resolution done by the linker. */
- VEC(ld_plugin_symbol_resolution_t,heap) *globals_resolution;
+ vec<ld_plugin_symbol_resolution_t> globals_resolution;
/* Cache of pickled nodes. */
struct streamer_tree_cache_d *reader_cache;
@@ -810,7 +801,7 @@ extern void lto_input_constructors_and_inits (struct lto_file_decl_data *,
extern void lto_input_toplevel_asms (struct lto_file_decl_data *, int);
extern struct data_in *lto_data_in_create (struct lto_file_decl_data *,
const char *, unsigned,
- VEC(ld_plugin_symbol_resolution_t,heap) *);
+ vec<ld_plugin_symbol_resolution_t> );
extern void lto_data_in_delete (struct data_in *);
extern void lto_input_data_block (struct lto_input_block *, void *, size_t);
location_t lto_input_location (struct bitpack_d *, struct data_in *);
@@ -867,7 +858,7 @@ lto_symtab_encoder_t compute_ltrans_boundary (lto_symtab_encoder_t encoder);
extern void lto_symtab_merge_decls (void);
extern void lto_symtab_merge_cgraph_nodes (void);
extern tree lto_symtab_prevailing_decl (tree decl);
-extern GTY(()) VEC(tree,gc) *lto_global_var_decls;
+extern GTY(()) vec<tree, va_gc> *lto_global_var_decls;
/* In lto-opts.c. */
@@ -882,7 +873,7 @@ extern const char *lto_section_name[];
/* Holds all the out decl states of functions output so far in the
current output file. */
-extern VEC(lto_out_decl_state_ptr, heap) *lto_function_decl_states;
+extern vec<lto_out_decl_state_ptr> lto_function_decl_states;
/* Return true if LTO tag TAG corresponds to a tree code. */
static inline bool
@@ -967,7 +958,7 @@ lto_init_tree_ref_encoder (struct lto_tree_ref_encoder *encoder,
{
encoder->tree_hash_table = htab_create (37, hash_fn, eq_fn, free);
encoder->next_index = 0;
- encoder->trees = NULL;
+ encoder->trees.create (0);
}
@@ -979,14 +970,14 @@ lto_destroy_tree_ref_encoder (struct lto_tree_ref_encoder *encoder)
/* Hash table may be delete already. */
if (encoder->tree_hash_table)
htab_delete (encoder->tree_hash_table);
- VEC_free (tree, heap, encoder->trees);
+ encoder->trees.release ();
}
/* Return the number of trees encoded in ENCODER. */
static inline unsigned int
lto_tree_ref_encoder_size (struct lto_tree_ref_encoder *encoder)
{
- return VEC_length (tree, encoder->trees);
+ return encoder->trees.length ();
}
/* Return the IDX-th tree in ENCODER. */
@@ -994,7 +985,7 @@ static inline tree
lto_tree_ref_encoder_get_tree (struct lto_tree_ref_encoder *encoder,
unsigned int idx)
{
- return VEC_index (tree, encoder->trees, idx);
+ return encoder->trees[idx];
}
@@ -1009,7 +1000,7 @@ emit_label_in_global_context_p (tree label)
static inline int
lto_symtab_encoder_size (lto_symtab_encoder_t encoder)
{
- return VEC_length (lto_encoder_entry, encoder->nodes);
+ return encoder->nodes.length ();
}
/* Value used to represent failure of lto_symtab_encoder_lookup. */
@@ -1044,24 +1035,21 @@ lsei_next (lto_symtab_encoder_iterator *lsei)
static inline symtab_node
lsei_node (lto_symtab_encoder_iterator lsei)
{
- return VEC_index (lto_encoder_entry,
- lsei.encoder->nodes, lsei.index).node;
+ return lsei.encoder->nodes[lsei.index].node;
}
/* Return the node pointed to by LSI. */
static inline struct cgraph_node *
lsei_cgraph_node (lto_symtab_encoder_iterator lsei)
{
- return cgraph (VEC_index (lto_encoder_entry,
- lsei.encoder->nodes, lsei.index).node);
+ return cgraph (lsei.encoder->nodes[lsei.index].node);
}
/* Return the node pointed to by LSI. */
static inline struct varpool_node *
lsei_varpool_node (lto_symtab_encoder_iterator lsei)
{
- return varpool (VEC_index (lto_encoder_entry,
- lsei.encoder->nodes, lsei.index).node);
+ return varpool (lsei.encoder->nodes[lsei.index].node);
}
/* Return the cgraph node corresponding to REF using ENCODER. */
@@ -1072,7 +1060,7 @@ lto_symtab_encoder_deref (lto_symtab_encoder_t encoder, int ref)
if (ref == LCC_NOT_FOUND)
return NULL;
- return VEC_index (lto_encoder_entry, encoder->nodes, ref).node;
+ return encoder->nodes[ref].node;
}
/* Return an iterator to the first node in LSI. */
diff --git a/gcc/lto-symtab.c b/gcc/lto-symtab.c
index 737c38b0f39..bee26f1d944 100644
--- a/gcc/lto-symtab.c
+++ b/gcc/lto-symtab.c
@@ -30,7 +30,7 @@ along with GCC; see the file COPYING3. If not see
#include "lto-streamer.h"
/* Vector to keep track of external variables we've seen so far. */
-VEC(tree,gc) *lto_global_var_decls;
+vec<tree, va_gc> *lto_global_var_decls;
/* Replace the cgraph node NODE with PREVAILING_NODE in the cgraph, merging
all edges and removing the old node. */
@@ -353,7 +353,7 @@ static void
lto_symtab_merge_decls_2 (symtab_node first, bool diagnosed_p)
{
symtab_node prevailing, e;
- VEC(tree, heap) *mismatches = NULL;
+ vec<tree> mismatches = vec<tree>();
unsigned i;
tree decl;
@@ -368,13 +368,13 @@ lto_symtab_merge_decls_2 (symtab_node first, bool diagnosed_p)
{
if (!lto_symtab_merge (prevailing, e)
&& !diagnosed_p)
- VEC_safe_push (tree, heap, mismatches, e->symbol.decl);
+ mismatches.safe_push (e->symbol.decl);
}
- if (VEC_empty (tree, mismatches))
+ if (mismatches.is_empty ())
return;
/* Diagnose all mismatched re-declarations. */
- FOR_EACH_VEC_ELT (tree, mismatches, i, decl)
+ FOR_EACH_VEC_ELT (mismatches, i, decl)
{
if (!types_compatible_p (TREE_TYPE (prevailing->symbol.decl),
TREE_TYPE (decl)))
@@ -395,7 +395,7 @@ lto_symtab_merge_decls_2 (symtab_node first, bool diagnosed_p)
inform (DECL_SOURCE_LOCATION (prevailing->symbol.decl),
"previously declared here");
- VEC_free (tree, heap, mismatches);
+ mismatches.release ();
}
/* Helper to process the decl chain for the symbol table entry *SLOT. */
@@ -445,8 +445,7 @@ lto_symtab_merge_decls_1 (symtab_node first)
/* Record the prevailing variable. */
if (TREE_CODE (prevailing->symbol.decl) == VAR_DECL)
- VEC_safe_push (tree, gc, lto_global_var_decls,
- prevailing->symbol.decl);
+ vec_safe_push (lto_global_var_decls, prevailing->symbol.decl);
/* Diagnose mismatched objects. */
for (e = prevailing->symbol.next_sharing_asm_name;
diff --git a/gcc/lto/ChangeLog b/gcc/lto/ChangeLog
index 19b9c1914d5..841d2500561 100644
--- a/gcc/lto/ChangeLog
+++ b/gcc/lto/ChangeLog
@@ -1,3 +1,12 @@
+2012-11-16 Diego Novillo <dnovillo@google.com>
+
+ Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
+
+ * lto-lang.c: Use new vec API in vec.h.
+ * lto-partition.c: Likewise.
+ * lto-partition.h: Likewise.
+ * lto.c: Likewise.
+
2012-10-31 Lawrence Crowl <crowl@google.com>
* lto.c (lto_wpa_write_files): Change symtab checking to a checked
diff --git a/gcc/lto/lto-lang.c b/gcc/lto/lto-lang.c
index edfab74de0a..04664371e20 100644
--- a/gcc/lto/lto-lang.c
+++ b/gcc/lto/lto-lang.c
@@ -1061,11 +1061,11 @@ lto_getdecls (void)
static void
lto_write_globals (void)
{
- tree *vec = VEC_address (tree, lto_global_var_decls);
- int len = VEC_length (tree, lto_global_var_decls);
+ tree *vec = lto_global_var_decls->address ();
+ int len = lto_global_var_decls->length ();
wrapup_global_declarations (vec, len);
emit_debug_global_declarations (vec, len);
- VEC_free (tree, gc, lto_global_var_decls);
+ vec_free (lto_global_var_decls);
}
static tree
@@ -1235,7 +1235,7 @@ lto_init (void)
lto_register_canonical_types (global_trees[i]);
/* Initialize LTO-specific data structures. */
- lto_global_var_decls = VEC_alloc (tree, gc, 256);
+ vec_alloc (lto_global_var_decls, 256);
in_lto_p = true;
return true;
diff --git a/gcc/lto/lto-partition.c b/gcc/lto/lto-partition.c
index a642a6c5f17..363d2ffbee5 100644
--- a/gcc/lto/lto-partition.c
+++ b/gcc/lto/lto-partition.c
@@ -44,7 +44,7 @@ enum symbol_class
SYMBOL_DUPLICATE
};
-VEC(ltrans_partition, heap) *ltrans_partitions;
+vec<ltrans_partition> ltrans_partitions;
static void add_symbol_to_partition (ltrans_partition part, symtab_node node);
@@ -102,7 +102,7 @@ new_partition (const char *name)
part->encoder = lto_symtab_encoder_new (false);
part->name = name;
part->insns = 0;
- VEC_safe_push (ltrans_partition, heap, ltrans_partitions, part);
+ ltrans_partitions.safe_push (part);
return part;
}
@@ -113,14 +113,14 @@ free_ltrans_partitions (void)
{
unsigned int idx;
ltrans_partition part;
- for (idx = 0; VEC_iterate (ltrans_partition, ltrans_partitions, idx, part); idx++)
+ for (idx = 0; ltrans_partitions.iterate (idx, &part); idx++)
{
if (part->initializers_visited)
pointer_set_destroy (part->initializers_visited);
/* Symtab encoder is freed after streaming. */
free (part);
}
- VEC_free (ltrans_partition, heap, ltrans_partitions);
+ ltrans_partitions.release ();
}
/* Return true if symbol is already in some partition. */
@@ -344,9 +344,8 @@ lto_1_to_1_map (void)
npartitions++;
}
}
- else if (!file_data
- && VEC_length (ltrans_partition, ltrans_partitions))
- partition = VEC_index (ltrans_partition, ltrans_partitions, 0);
+ else if (!file_data && ltrans_partitions.length ())
+ partition = ltrans_partitions[0];
else
{
partition = new_partition ("");
@@ -790,11 +789,11 @@ lto_promote_cross_file_statics (void)
gcc_assert (flag_wpa);
/* First compute boundaries. */
- n_sets = VEC_length (ltrans_partition, ltrans_partitions);
+ n_sets = ltrans_partitions.length ();
for (i = 0; i < n_sets; i++)
{
ltrans_partition part
- = VEC_index (ltrans_partition, ltrans_partitions, i);
+ = ltrans_partitions[i];
part->encoder = compute_ltrans_boundary (part->encoder);
}
@@ -804,7 +803,7 @@ lto_promote_cross_file_statics (void)
lto_symtab_encoder_iterator lsei;
lto_symtab_encoder_t encoder;
ltrans_partition part
- = VEC_index (ltrans_partition, ltrans_partitions, i);
+ = ltrans_partitions[i];
encoder = part->encoder;
for (lsei = lsei_start (encoder); !lsei_end_p (lsei);
diff --git a/gcc/lto/lto-partition.h b/gcc/lto/lto-partition.h
index 5bf4055269f..86b21a0d347 100644
--- a/gcc/lto/lto-partition.h
+++ b/gcc/lto/lto-partition.h
@@ -29,10 +29,8 @@ struct ltrans_partition_def
};
typedef struct ltrans_partition_def *ltrans_partition;
-DEF_VEC_P(ltrans_partition);
-DEF_VEC_ALLOC_P(ltrans_partition,heap);
-extern VEC(ltrans_partition, heap) *ltrans_partitions;
+extern vec<ltrans_partition> ltrans_partitions;
void lto_1_to_1_map (void);
void lto_max_map (void);
diff --git a/gcc/lto/lto.c b/gcc/lto/lto.c
index 857e8f6032b..da55b7efbbe 100644
--- a/gcc/lto/lto.c
+++ b/gcc/lto/lto.c
@@ -305,8 +305,6 @@ struct type_pair_d
signed char same_p;
};
typedef struct type_pair_d *type_pair_t;
-DEF_VEC_P(type_pair_t);
-DEF_VEC_ALLOC_P(type_pair_t,heap);
#define GIMPLE_TYPE_PAIR_SIZE 16381
struct type_pair_d *type_pair_cache;
@@ -432,7 +430,7 @@ compare_type_names_p (tree t1, tree t2)
static bool
gimple_types_compatible_p_1 (tree, tree, type_pair_t,
- VEC(type_pair_t, heap) **,
+ vec<type_pair_t> *,
struct pointer_map_t *, struct obstack *);
/* DFS visit the edge from the callers type pair with state *STATE to
@@ -444,7 +442,7 @@ gimple_types_compatible_p_1 (tree, tree, type_pair_t,
static bool
gtc_visit (tree t1, tree t2,
struct sccs *state,
- VEC(type_pair_t, heap) **sccstack,
+ vec<type_pair_t> *sccstack,
struct pointer_map_t *sccstate,
struct obstack *sccstate_obstack)
{
@@ -558,7 +556,7 @@ gtc_visit (tree t1, tree t2,
static bool
gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
- VEC(type_pair_t, heap) **sccstack,
+ vec<type_pair_t> *sccstack,
struct pointer_map_t *sccstate,
struct obstack *sccstate_obstack)
{
@@ -569,7 +567,7 @@ gimple_types_compatible_p_1 (tree t1, tree t2, type_pair_t p,
state = XOBNEW (sccstate_obstack, struct sccs);
*pointer_map_insert (sccstate, p) = state;
- VEC_safe_push (type_pair_t, heap, *sccstack, p);
+ sccstack->safe_push (p);
state->dfsnum = gtc_next_dfs_num++;
state->low = state->dfsnum;
state->on_sccstack = true;
@@ -857,7 +855,7 @@ pop:
do
{
struct sccs *cstate;
- x = VEC_pop (type_pair_t, *sccstack);
+ x = sccstack->pop ();
cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
cstate->on_sccstack = false;
x->same_p = state->u.same_p;
@@ -875,7 +873,7 @@ pop:
static bool
gimple_types_compatible_p (tree t1, tree t2)
{
- VEC(type_pair_t, heap) *sccstack = NULL;
+ vec<type_pair_t> sccstack = vec<type_pair_t>();
struct pointer_map_t *sccstate;
struct obstack sccstate_obstack;
type_pair_t p = NULL;
@@ -970,7 +968,7 @@ gimple_types_compatible_p (tree t1, tree t2)
gcc_obstack_init (&sccstate_obstack);
res = gimple_types_compatible_p_1 (t1, t2, p,
&sccstack, sccstate, &sccstate_obstack);
- VEC_free (type_pair_t, heap, sccstack);
+ sccstack.release ();
pointer_map_destroy (sccstate);
obstack_free (&sccstate_obstack, NULL);
@@ -978,7 +976,7 @@ gimple_types_compatible_p (tree t1, tree t2)
}
static hashval_t
-iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
+iterative_hash_gimple_type (tree, hashval_t, vec<tree> *,
struct pointer_map_t *, struct obstack *);
/* DFS visit the edge from the callers type with state *STATE to T.
@@ -988,7 +986,7 @@ iterative_hash_gimple_type (tree, hashval_t, VEC(tree, heap) **,
static hashval_t
visit (tree t, struct sccs *state, hashval_t v,
- VEC (tree, heap) **sccstack,
+ vec<tree> *sccstack,
struct pointer_map_t *sccstate,
struct obstack *sccstate_obstack)
{
@@ -1081,7 +1079,7 @@ type_hash_pair_compare (const void *p1_, const void *p2_)
static hashval_t
iterative_hash_gimple_type (tree type, hashval_t val,
- VEC(tree, heap) **sccstack,
+ vec<tree> *sccstack,
struct pointer_map_t *sccstate,
struct obstack *sccstate_obstack)
{
@@ -1094,7 +1092,7 @@ iterative_hash_gimple_type (tree type, hashval_t val,
state = XOBNEW (sccstate_obstack, struct sccs);
*pointer_map_insert (sccstate, type) = state;
- VEC_safe_push (tree, heap, *sccstack, type);
+ sccstack->safe_push (type);
state->dfsnum = next_dfs_num++;
state->low = state->dfsnum;
state->on_sccstack = true;
@@ -1216,7 +1214,7 @@ iterative_hash_gimple_type (tree type, hashval_t val,
struct tree_int_map *m;
/* Pop off the SCC and set its hash values. */
- x = VEC_pop (tree, *sccstack);
+ x = sccstack->pop ();
/* Optimize SCC size one. */
if (x == type)
{
@@ -1234,10 +1232,10 @@ iterative_hash_gimple_type (tree type, hashval_t val,
unsigned first, i, size, j;
struct type_hash_pair *pairs;
/* Pop off the SCC and build an array of type, hash pairs. */
- first = VEC_length (tree, *sccstack) - 1;
- while (VEC_index (tree, *sccstack, first) != type)
+ first = sccstack->length () - 1;
+ while ((*sccstack)[first] != type)
--first;
- size = VEC_length (tree, *sccstack) - first + 1;
+ size = sccstack->length () - first + 1;
pairs = XALLOCAVEC (struct type_hash_pair, size);
i = 0;
cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
@@ -1246,7 +1244,7 @@ iterative_hash_gimple_type (tree type, hashval_t val,
pairs[i].hash = cstate->u.hash;
do
{
- x = VEC_pop (tree, *sccstack);
+ x = sccstack->pop ();
cstate = (struct sccs *)*pointer_map_contains (sccstate, x);
cstate->on_sccstack = false;
++i;
@@ -1300,7 +1298,7 @@ static hashval_t
gimple_type_hash (const void *p)
{
const_tree t = (const_tree) p;
- VEC(tree, heap) *sccstack = NULL;
+ vec<tree> sccstack = vec<tree>();
struct pointer_map_t *sccstate;
struct obstack sccstate_obstack;
hashval_t val;
@@ -1318,7 +1316,7 @@ gimple_type_hash (const void *p)
gcc_obstack_init (&sccstate_obstack);
val = iterative_hash_gimple_type (CONST_CAST_TREE (t), 0,
&sccstack, sccstate, &sccstate_obstack);
- VEC_free (tree, heap, sccstack);
+ sccstack.release ();
pointer_map_destroy (sccstate);
obstack_free (&sccstate_obstack, NULL);
@@ -1581,13 +1579,13 @@ lto_ft_binfo (tree t)
LTO_FIXUP_TREE (BINFO_OFFSET (t));
LTO_FIXUP_TREE (BINFO_VIRTUALS (t));
LTO_FIXUP_TREE (BINFO_VPTR_FIELD (t));
- n = VEC_length (tree, BINFO_BASE_ACCESSES (t));
+ n = vec_safe_length (BINFO_BASE_ACCESSES (t));
for (i = 0; i < n; i++)
{
saved_base = base = BINFO_BASE_ACCESS (t, i);
LTO_FIXUP_TREE (base);
if (base != saved_base)
- VEC_replace (tree, BINFO_BASE_ACCESSES (t), i, base);
+ (*BINFO_BASE_ACCESSES (t))[i] = base;
}
LTO_FIXUP_TREE (BINFO_INHERITANCE_CHAIN (t));
LTO_FIXUP_TREE (BINFO_SUBVTT_INDEX (t));
@@ -1598,7 +1596,7 @@ lto_ft_binfo (tree t)
saved_base = base = BINFO_BASE_BINFO (t, i);
LTO_FIXUP_TREE (base);
if (base != saved_base)
- VEC_replace (tree, BINFO_BASE_BINFOS (t), i, base);
+ (*BINFO_BASE_BINFOS (t))[i] = base;
}
}
@@ -1612,9 +1610,7 @@ lto_ft_constructor (tree t)
lto_ft_typed (t);
- for (idx = 0;
- VEC_iterate(constructor_elt, CONSTRUCTOR_ELTS (t), idx, ce);
- idx++)
+ for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (t), idx, &ce); idx++)
{
LTO_FIXUP_TREE (ce->index);
LTO_FIXUP_TREE (ce->value);
@@ -1713,18 +1709,15 @@ lto_fixup_types (tree t)
static enum ld_plugin_symbol_resolution
get_resolution (struct data_in *data_in, unsigned index)
{
- if (data_in->globals_resolution)
+ if (data_in->globals_resolution.exists ())
{
ld_plugin_symbol_resolution_t ret;
/* We can have references to not emitted functions in
DECL_FUNCTION_PERSONALITY at least. So we can and have
to indeed return LDPR_UNKNOWN in some cases. */
- if (VEC_length (ld_plugin_symbol_resolution_t,
- data_in->globals_resolution) <= index)
+ if (data_in->globals_resolution.length () <= index)
return LDPR_UNKNOWN;
- ret = VEC_index (ld_plugin_symbol_resolution_t,
- data_in->globals_resolution,
- index);
+ ret = data_in->globals_resolution[index];
return ret;
}
else
@@ -1773,7 +1766,7 @@ lto_register_var_decl_in_symtab (struct data_in *data_in, tree decl)
ASM_FORMAT_PRIVATE_NAME (label, name, DECL_UID (decl));
SET_DECL_ASSEMBLER_NAME (decl, get_identifier (label));
rest_of_decl_compilation (decl, 1, 0);
- VEC_safe_push (tree, gc, lto_global_var_decls, decl);
+ vec_safe_push (lto_global_var_decls, decl);
}
/* If this variable has already been declared, queue the
@@ -1855,7 +1848,7 @@ static void
uniquify_nodes (struct data_in *data_in, unsigned from)
{
struct streamer_tree_cache_d *cache = data_in->reader_cache;
- unsigned len = VEC_length (tree, cache->nodes);
+ unsigned len = cache->nodes.length ();
unsigned i;
/* Go backwards because children streamed for the first time come
@@ -1866,7 +1859,7 @@ uniquify_nodes (struct data_in *data_in, unsigned from)
them and computing hashes. */
for (i = len; i-- > from;)
{
- tree t = VEC_index (tree, cache->nodes, i);
+ tree t = cache->nodes[i];
if (t && TYPE_P (t))
{
tree newt = gimple_register_type (t);
@@ -1881,7 +1874,7 @@ uniquify_nodes (struct data_in *data_in, unsigned from)
/* Second fixup all trees in the new cache entries. */
for (i = len; i-- > from;)
{
- tree t = VEC_index (tree, cache->nodes, i);
+ tree t = cache->nodes[i];
tree oldt = t;
if (!t)
continue;
@@ -2042,7 +2035,7 @@ uniquify_nodes (struct data_in *data_in, unsigned from)
make sure it is done last. */
for (i = len; i-- > from;)
{
- tree t = VEC_index (tree, cache->nodes, i);
+ tree t = cache->nodes[i];
if (t == NULL_TREE)
continue;
@@ -2065,7 +2058,7 @@ uniquify_nodes (struct data_in *data_in, unsigned from)
static void
lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
- VEC(ld_plugin_symbol_resolution_t,heap) *resolutions)
+ vec<ld_plugin_symbol_resolution_t> resolutions)
{
const struct lto_decl_header *header = (const struct lto_decl_header *) data;
const int decl_offset = sizeof (struct lto_decl_header);
@@ -2090,7 +2083,7 @@ lto_read_decls (struct lto_file_decl_data *decl_data, const void *data,
while (ib_main.p < ib_main.len)
{
tree t;
- unsigned from = VEC_length (tree, data_in->reader_cache->nodes);
+ unsigned from = data_in->reader_cache->nodes.length ();
t = stream_read_tree (&ib_main, data_in);
gcc_assert (t && ib_main.p <= ib_main.len);
uniquify_nodes (data_in, from);
@@ -2242,7 +2235,7 @@ lto_resolution_read (splay_tree file_ids, FILE *resolution, lto_file *file)
format that is only unpacked later when the subfile is processed. */
rp.res = r;
rp.index = index;
- VEC_safe_push (res_pair, heap, file_data->respairs, rp);
+ file_data->respairs.safe_push (rp);
if (file_data->max_index < index)
file_data->max_index = index;
}
@@ -2324,18 +2317,17 @@ lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
{
const char *data;
size_t len;
- VEC(ld_plugin_symbol_resolution_t,heap) *resolutions = NULL;
+ vec<ld_plugin_symbol_resolution_t>
+ resolutions = vec<ld_plugin_symbol_resolution_t>();
int i;
res_pair *rp;
/* Create vector for fast access of resolution. We do this lazily
to save memory. */
- VEC_safe_grow_cleared (ld_plugin_symbol_resolution_t, heap,
- resolutions,
- file_data->max_index + 1);
- for (i = 0; VEC_iterate (res_pair, file_data->respairs, i, rp); i++)
- VEC_replace (ld_plugin_symbol_resolution_t, resolutions, rp->index, rp->res);
- VEC_free (res_pair, heap, file_data->respairs);
+ resolutions.safe_grow_cleared (file_data->max_index + 1);
+ for (i = 0; file_data->respairs.iterate (i, &rp); i++)
+ resolutions[rp->index] = rp->res;
+ file_data->respairs.release ();
file_data->renaming_hash_table = lto_create_renaming_table ();
file_data->file_name = file->filename;
@@ -2353,7 +2345,7 @@ lto_file_finalize (struct lto_file_decl_data *file_data, lto_file *file)
/* Finalize FILE_DATA in FILE and increase COUNT. */
static int
-lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
+lto_create_files_from_ids (lto_file *file, struct lto_file_decl_data *file_data,
int *count)
{
lto_file_finalize (file_data, file);
@@ -2608,7 +2600,7 @@ lto_wpa_write_files (void)
timevar_push (TV_WHOPR_WPA);
- FOR_EACH_VEC_ELT (ltrans_partition, ltrans_partitions, i, part)
+ FOR_EACH_VEC_ELT (ltrans_partitions, i, part)
lto_stats.num_output_symtab_nodes += lto_symtab_encoder_size (part->encoder);
/* Find out statics that need to be promoted
@@ -2630,17 +2622,18 @@ lto_wpa_write_files (void)
temp_filename[blen - sizeof (".out") + 1] = '\0';
blen = strlen (temp_filename);
- n_sets = VEC_length (ltrans_partition, ltrans_partitions);
+ n_sets = ltrans_partitions.length ();
/* Sort partitions by size so small ones are compiled last.
FIXME: Even when not reordering we may want to output one list for parallel make
and other for final link command. */
- VEC_qsort (ltrans_partition, ltrans_partitions,
- flag_toplevel_reorder ? cmp_partitions_size : cmp_partitions_order);
+ ltrans_partitions.qsort (flag_toplevel_reorder
+ ? cmp_partitions_size
+ : cmp_partitions_order);
for (i = 0; i < n_sets; i++)
{
size_t len;
- ltrans_partition part = VEC_index (ltrans_partition, ltrans_partitions, i);
+ ltrans_partition part = ltrans_partitions[i];
/* Write all the nodes in SET. */
sprintf (temp_filename + blen, "%u.o", i);
@@ -3091,9 +3084,7 @@ read_cgraph_and_symbols (unsigned nfiles, const char **fnames)
this field into ltrans compilation. */
if (flag_ltrans)
FOR_EACH_DEFINED_FUNCTION (node)
- VEC_safe_push (ipa_opt_pass, heap,
- node->ipa_transforms_to_apply,
- (ipa_opt_pass)&pass_ipa_inline);
+ node->ipa_transforms_to_apply.safe_push ((ipa_opt_pass)&pass_ipa_inline);
timevar_pop (TV_IPA_LTO_CGRAPH_MERGE);
@@ -3148,7 +3139,7 @@ materialize_cgraph (void)
set_cfun (NULL);
/* Inform the middle end about the global variables we have seen. */
- FOR_EACH_VEC_ELT (tree, lto_global_var_decls, i, decl)
+ FOR_EACH_VEC_ELT (*lto_global_var_decls, i, decl)
rest_of_decl_compilation (decl, 1, 0);
if (!quiet_flag)
@@ -3253,8 +3244,7 @@ do_whole_program_analysis (void)
FOR_EACH_SYMBOL (node)
node->symbol.aux = NULL;
- lto_stats.num_cgraph_partitions += VEC_length (ltrans_partition,
- ltrans_partitions);
+ lto_stats.num_cgraph_partitions += ltrans_partitions.length ();
timevar_pop (TV_WHOPR_PARTITIONING);
timevar_stop (TV_PHASE_OPT_GEN);
diff --git a/gcc/mcf.c b/gcc/mcf.c
index f5985c134ab..99454965fda 100644
--- a/gcc/mcf.c
+++ b/gcc/mcf.c
@@ -95,13 +95,11 @@ typedef struct fixup_edge_d
typedef fixup_edge_type *fixup_edge_p;
-DEF_VEC_P (fixup_edge_p);
-DEF_VEC_ALLOC_P (fixup_edge_p, heap);
/* Structure to represent a vertex in the fixup graph. */
typedef struct fixup_vertex_d
{
- VEC (fixup_edge_p, heap) *succ_edges;
+ vec<fixup_edge_p> succ_edges;
} fixup_vertex_type;
typedef fixup_vertex_type *fixup_vertex_p;
@@ -295,9 +293,9 @@ dump_fixup_graph (FILE *file, fixup_graph_type *fixup_graph, const char *msg)
{
pfvertex = fvertex_list + i;
fprintf (file, "vertex_list[%d]: %d succ fixup edges.\n",
- i, VEC_length (fixup_edge_p, pfvertex->succ_edges));
+ i, pfvertex->succ_edges.length ());
- for (j = 0; VEC_iterate (fixup_edge_p, pfvertex->succ_edges, j, pfedge);
+ for (j = 0; pfvertex->succ_edges.iterate (j, &pfedge);
j++)
{
/* Distinguish forward edges and backward edges in the residual flow
@@ -375,7 +373,7 @@ add_edge (fixup_graph_type *fixup_graph, int src, int dest, gcov_type cost)
fixup_graph->num_edges++;
if (dump_file)
dump_fixup_edge (dump_file, fixup_graph, curr_edge);
- VEC_safe_push (fixup_edge_p, heap, curr_vertex->succ_edges, curr_edge);
+ curr_vertex->succ_edges.safe_push (curr_edge);
return curr_edge;
}
@@ -424,7 +422,7 @@ find_fixup_edge (fixup_graph_type *fixup_graph, int src, int dest)
pfvertex = fixup_graph->vertex_list + src;
- for (j = 0; VEC_iterate (fixup_edge_p, pfvertex->succ_edges, j, pfedge);
+ for (j = 0; pfvertex->succ_edges.iterate (j, &pfedge);
j++)
if (pfedge->dest == dest)
return pfedge;
@@ -443,7 +441,7 @@ delete_fixup_graph (fixup_graph_type *fixup_graph)
fixup_vertex_p pfvertex = fixup_graph->vertex_list;
for (i = 0; i < fnum_vertices; i++, pfvertex++)
- VEC_free (fixup_edge_p, heap, pfvertex->succ_edges);
+ pfvertex->succ_edges.release ();
free (fixup_graph->vertex_list);
free (fixup_graph->edge_list);
@@ -990,7 +988,7 @@ find_augmenting_path (fixup_graph_type *fixup_graph,
u = dequeue (queue_list);
is_visited[u] = 1;
pfvertex = fvertex_list + u;
- for (i = 0; VEC_iterate (fixup_edge_p, pfvertex->succ_edges, i, pfedge);
+ for (i = 0; pfvertex->succ_edges.iterate (i, &pfedge);
i++)
{
int dest = pfedge->dest;
@@ -1365,7 +1363,7 @@ find_minimum_cost_flow (fixup_graph_type *fixup_graph)
/* Compute the sum of the edge counts in TO_EDGES. */
gcov_type
-sum_edge_counts (VEC (edge, gc) *to_edges)
+sum_edge_counts (vec<edge, va_gc> *to_edges)
{
gcov_type sum = 0;
edge e;
diff --git a/gcc/modulo-sched.c b/gcc/modulo-sched.c
index 2d7c9e932a4..04d297d579d 100644
--- a/gcc/modulo-sched.c
+++ b/gcc/modulo-sched.c
@@ -160,8 +160,6 @@ struct ps_reg_move_info
};
typedef struct ps_reg_move_info ps_reg_move_info;
-DEF_VEC_O (ps_reg_move_info);
-DEF_VEC_ALLOC_O (ps_reg_move_info, heap);
/* Holds the partial schedule as an array of II rows. Each entry of the
array points to a linked list of PS_INSNs, which represents the
@@ -176,7 +174,7 @@ struct partial_schedule
/* All the moves added for this partial schedule. Index X has
a ps_insn id of X + g->num_nodes. */
- VEC (ps_reg_move_info, heap) *reg_moves;
+ vec<ps_reg_move_info> reg_moves;
/* rows_length[i] holds the number of instructions in the row.
It is used only (as an optimization) to back off quickly from
@@ -229,7 +227,7 @@ static void remove_node_from_ps (partial_schedule_ptr, ps_insn_ptr);
#define NODE_ASAP(node) ((node)->aux.count)
-#define SCHED_PARAMS(x) (&VEC_index (node_sched_params, node_sched_param_vec, x))
+#define SCHED_PARAMS(x) (&node_sched_param_vec[x])
#define SCHED_TIME(x) (SCHED_PARAMS (x)->time)
#define SCHED_ROW(x) (SCHED_PARAMS (x)->row)
#define SCHED_STAGE(x) (SCHED_PARAMS (x)->stage)
@@ -249,8 +247,6 @@ typedef struct node_sched_params
} *node_sched_params_ptr;
typedef struct node_sched_params node_sched_params;
-DEF_VEC_O (node_sched_params);
-DEF_VEC_ALLOC_O (node_sched_params, heap);
/* The following three functions are copied from the current scheduler
code in order to use sched_analyze() for computing the dependencies.
@@ -305,7 +301,7 @@ static struct ps_reg_move_info *
ps_reg_move (partial_schedule_ptr ps, int id)
{
gcc_checking_assert (id >= ps->g->num_nodes);
- return &VEC_index (ps_reg_move_info, ps->reg_moves, id - ps->g->num_nodes);
+ return &ps->reg_moves[id - ps->g->num_nodes];
}
/* Return the rtl instruction that is being scheduled by partial schedule
@@ -443,24 +439,22 @@ res_MII (ddg_ptr g)
/* A vector that contains the sched data for each ps_insn. */
-static VEC (node_sched_params, heap) *node_sched_param_vec;
+static vec<node_sched_params> node_sched_param_vec;
/* Allocate sched_params for each node and initialize it. */
static void
set_node_sched_params (ddg_ptr g)
{
- VEC_truncate (node_sched_params, node_sched_param_vec, 0);
- VEC_safe_grow_cleared (node_sched_params, heap,
- node_sched_param_vec, g->num_nodes);
+ node_sched_param_vec.truncate (0);
+ node_sched_param_vec.safe_grow_cleared (g->num_nodes);
}
/* Make sure that node_sched_param_vec has an entry for every move in PS. */
static void
extend_node_sched_params (partial_schedule_ptr ps)
{
- VEC_safe_grow_cleared (node_sched_params, heap, node_sched_param_vec,
- ps->g->num_nodes + VEC_length (ps_reg_move_info,
- ps->reg_moves));
+ node_sched_param_vec.safe_grow_cleared (ps->g->num_nodes
+ + ps->reg_moves.length ());
}
/* Update the sched_params (time, row and stage) for node U using the II,
@@ -747,9 +741,8 @@ schedule_reg_moves (partial_schedule_ptr ps)
continue;
/* Create NREG_MOVES register moves. */
- first_move = VEC_length (ps_reg_move_info, ps->reg_moves);
- VEC_safe_grow_cleared (ps_reg_move_info, heap, ps->reg_moves,
- first_move + nreg_moves);
+ first_move = ps->reg_moves.length ();
+ ps->reg_moves.safe_grow_cleared (first_move + nreg_moves);
extend_node_sched_params (ps);
/* Record the moves associated with this node. */
@@ -824,7 +817,7 @@ apply_reg_moves (partial_schedule_ptr ps)
ps_reg_move_info *move;
int i;
- FOR_EACH_VEC_ELT (ps_reg_move_info, ps->reg_moves, i, move)
+ FOR_EACH_VEC_ELT (ps->reg_moves, i, move)
{
unsigned int i_use;
sbitmap_iterator sbi;
@@ -1758,7 +1751,7 @@ sms_schedule (void)
}
free_partial_schedule (ps);
- VEC_free (node_sched_params, heap, node_sched_param_vec);
+ node_sched_param_vec.release ();
free (node_order);
free_ddg (g);
}
@@ -2849,7 +2842,7 @@ create_partial_schedule (int ii, ddg_ptr g, int history)
partial_schedule_ptr ps = XNEW (struct partial_schedule);
ps->rows = (ps_insn_ptr *) xcalloc (ii, sizeof (ps_insn_ptr));
ps->rows_length = (int *) xcalloc (ii, sizeof (int));
- ps->reg_moves = NULL;
+ ps->reg_moves.create (0);
ps->ii = ii;
ps->history = history;
ps->min_cycle = INT_MAX;
@@ -2890,9 +2883,9 @@ free_partial_schedule (partial_schedule_ptr ps)
if (!ps)
return;
- FOR_EACH_VEC_ELT (ps_reg_move_info, ps->reg_moves, i, move)
+ FOR_EACH_VEC_ELT (ps->reg_moves, i, move)
sbitmap_free (move->uses);
- VEC_free (ps_reg_move_info, heap, ps->reg_moves);
+ ps->reg_moves.release ();
free_ps_insns (ps);
free (ps->rows);
diff --git a/gcc/objc/ChangeLog b/gcc/objc/ChangeLog
index 575d4733925..965ed0e427c 100644
--- a/gcc/objc/ChangeLog
+++ b/gcc/objc/ChangeLog
@@ -1,3 +1,16 @@
+2012-11-16 Diego Novillo <dnovillo@google.com>
+
+ Adjust for new vec API (http://gcc.gnu.org/wiki/cxx-conversion/cxx-vec)
+
+ * objc-act.c: Use new vec API in vec.h.
+ * objc-act.h: Likewise.
+ * objc-gnu-runtime-abi-01.c: Likewise.
+ * objc-next-runtime-abi-01.c: Likewise.
+ * objc-next-runtime-abi-02.c: Likewise.
+ * objc-runtime-hooks.h: Likewise.
+ * objc-runtime-shared-support.c: Likewise.
+ * objc-runtime-shared-support.h: Likewise.
+
2012-10-01 Lawrence Crowl <crowl@google.com>
* Make-lang.in (OBJC_OBJS): Add dependence on hash-table.o.
diff --git a/gcc/objc/objc-act.c b/gcc/objc/objc-act.c
index ed1a28f63c4..3ef84cd3687 100644
--- a/gcc/objc/objc-act.c
+++ b/gcc/objc/objc-act.c
@@ -246,7 +246,7 @@ static char *errbuf; /* Buffer for error diagnostics */
/* An array of all the local variables in the current function that
need to be marked as volatile. */
-VEC(tree,gc) *local_variables_to_volatilize = NULL;
+vec<tree, va_gc> *local_variables_to_volatilize = NULL;
/* Store all constructed constant strings in a hash table so that
they get uniqued properly. */
@@ -2067,7 +2067,7 @@ objc_build_struct (tree klass, tree fields, tree super_name)
tree s = objc_start_struct (name);
tree super = (super_name ? xref_tag (RECORD_TYPE, super_name) : NULL_TREE);
tree t;
- VEC(tree,heap) *objc_info = NULL;
+ vec<tree> objc_info = vec<tree>();
int i;
if (super)
@@ -2127,7 +2127,7 @@ objc_build_struct (tree klass, tree fields, tree super_name)
for (t = TYPE_MAIN_VARIANT (s); t; t = TYPE_NEXT_VARIANT (t))
{
INIT_TYPE_OBJC_INFO (t);
- VEC_safe_push (tree, heap, objc_info, TYPE_OBJC_INFO (t));
+ objc_info.safe_push (TYPE_OBJC_INFO (t));
}
s = objc_finish_struct (s, fields);
@@ -2158,12 +2158,12 @@ objc_build_struct (tree klass, tree fields, tree super_name)
/* Replace TYPE_OBJC_INFO with the saved one. This restores any
protocol information that may have been associated with the
type. */
- TYPE_OBJC_INFO (t) = VEC_index (tree, objc_info, i);
+ TYPE_OBJC_INFO (t) = objc_info[i];
/* Replace the IDENTIFIER_NODE with an actual @interface now
that we have it. */
TYPE_OBJC_INTERFACE (t) = klass;
}
- VEC_free (tree, heap, objc_info);
+ objc_info.release ();
/* Use TYPE_BINFO structures to point at the super class, if any. */
objc_xref_basetypes (s, super);
@@ -2187,9 +2187,9 @@ objc_volatilize_decl (tree decl)
|| TREE_CODE (decl) == PARM_DECL))
{
if (local_variables_to_volatilize == NULL)
- local_variables_to_volatilize = VEC_alloc (tree, gc, 8);
+ vec_alloc (local_variables_to_volatilize, 8);
- VEC_safe_push (tree, gc, local_variables_to_volatilize, decl);
+ vec_safe_push (local_variables_to_volatilize, decl);
}
}
@@ -2208,7 +2208,7 @@ objc_finish_function (void)
{
int i;
tree decl;
- FOR_EACH_VEC_ELT (tree, local_variables_to_volatilize, i, decl)
+ FOR_EACH_VEC_ELT (*local_variables_to_volatilize, i, decl)
{
tree t = TREE_TYPE (decl);
@@ -2223,7 +2223,7 @@ objc_finish_function (void)
}
/* Now we delete the vector. This sets it to NULL as well. */
- VEC_free (tree, gc, local_variables_to_volatilize);
+ vec_free (local_variables_to_volatilize);
}
}
@@ -2688,7 +2688,7 @@ objc_xref_basetypes (tree ref, tree basetype)
tree base_binfo = objc_copy_binfo (TYPE_BINFO (basetype));
BINFO_INHERITANCE_CHAIN (base_binfo) = binfo;
- BINFO_BASE_ACCESSES (binfo) = VEC_alloc (tree, gc, 1);
+ vec_alloc (BINFO_BASE_ACCESSES (binfo), 1);
BINFO_BASE_APPEND (binfo, base_binfo);
BINFO_BASE_ACCESS_APPEND (binfo, access_public_node);
}
@@ -3198,7 +3198,7 @@ objc_build_string_object (tree string)
with type TYPE and elements ELTS. */
tree
-objc_build_constructor (tree type, VEC(constructor_elt,gc) *elts)
+objc_build_constructor (tree type, vec<constructor_elt, va_gc> *elts)
{
tree constructor = build_constructor (type, elts);
@@ -3209,7 +3209,7 @@ objc_build_constructor (tree type, VEC(constructor_elt,gc) *elts)
#ifdef OBJCPLUS
/* Adjust for impedance mismatch. We should figure out how to build
CONSTRUCTORs that consistently please both the C and C++ gods. */
- if (!VEC_index (constructor_elt, elts, 0).index)
+ if (!(*elts)[0].index)
TREE_TYPE (constructor) = init_list_type_node;
#endif
@@ -4995,7 +4995,7 @@ tree
build_function_type_for_method (tree return_type, tree method,
int context, bool super_flag)
{
- VEC(tree,gc) *argtypes = make_tree_vector ();
+ vec<tree, va_gc> *argtypes = make_tree_vector ();
tree t, ftype;
bool is_varargs = false;
@@ -5016,7 +5016,7 @@ build_function_type_for_method (tree return_type, tree method,
appropriate. */
arg_type = objc_decay_parm_type (arg_type);
- VEC_safe_push (tree, gc, argtypes, arg_type);
+ vec_safe_push (argtypes, arg_type);
}
if (METHOD_ADD_ARGS (method))
@@ -5028,7 +5028,7 @@ build_function_type_for_method (tree return_type, tree method,
arg_type = objc_decay_parm_type (arg_type);
- VEC_safe_push (tree, gc, argtypes, arg_type);
+ vec_safe_push (argtypes, arg_type);
}
if (METHOD_ADD_ARGS_ELLIPSIS_P (method))
diff --git a/gcc/objc/objc-act.h b/gcc/objc/objc-act.h
index e958e669af0..b0c3d0a48fd 100644
--- a/gcc/objc/objc-act.h
+++ b/gcc/objc/objc-act.h
@@ -254,7 +254,7 @@ struct GTY(()) hashed_entry {
/* An array of all the local variables in the current function that
need to be marked as volatile. */
-extern GTY(()) VEC(tree,gc) *local_variables_to_volatilize;
+extern GTY(()) vec<tree, va_gc> *local_variables_to_volatilize;
/* Objective-C/Objective-C++ @implementation list. */
diff --git a/gcc/objc/objc-gnu-runtime-abi-01.c b/gcc/objc/objc-gnu-runtime-abi-01.c
index 6bdffac10e7..5cd39ef9307 100644
--- a/gcc/objc/objc-gnu-runtime-abi-01.c
+++ b/gcc/objc/objc-gnu-runtime-abi-01.c
@@ -104,8 +104,8 @@ static tree gnu_runtime_abi_01_get_class_super_ref (location_t, struct imp_entry
static tree gnu_runtime_abi_01_get_category_super_ref (location_t, struct imp_entry *, bool);
static tree gnu_runtime_abi_01_receiver_is_class_object (tree);
-static void gnu_runtime_abi_01_get_arg_type_list_base (VEC(tree,gc) **, tree,
- int, int);
+static void gnu_runtime_abi_01_get_arg_type_list_base (vec<tree, va_gc> **,
+ tree, int, int);
static tree gnu_runtime_abi_01_build_objc_method_call (location_t, tree, tree,
tree, tree, tree, int);
@@ -585,8 +585,8 @@ gnu_runtime_abi_01_get_class_reference (tree ident)
prototype. */
static void
-gnu_runtime_abi_01_get_arg_type_list_base (VEC(tree,gc) **argtypes, tree meth,
- int context,
+gnu_runtime_abi_01_get_arg_type_list_base (vec<tree, va_gc> **argtypes,
+ tree meth, int context,
int superflag ATTRIBUTE_UNUSED)
{
tree receiver_type;
@@ -596,9 +596,9 @@ gnu_runtime_abi_01_get_arg_type_list_base (VEC(tree,gc) **argtypes, tree meth,
else
receiver_type = objc_object_type;
- VEC_safe_push (tree, gc, *argtypes, receiver_type);
+ vec_safe_push (*argtypes, receiver_type);
/* Selector type - will eventually change to `int'. */
- VEC_safe_push (tree, gc, *argtypes, objc_selector_type);
+ vec_safe_push (*argtypes, objc_selector_type);
}
/* Unused for GNU runtime. */
@@ -662,8 +662,8 @@ build_objc_method_call (location_t loc, int super_flag, tree method_prototype,
: (flag_objc_direct_dispatch ? umsg_fast_decl
: umsg_decl));
tree rcv_p = (super_flag ? objc_super_type : objc_object_type);
- VEC(tree, gc) *parms;
- VEC(tree, gc) *tv;
+ vec<tree, va_gc> *parms;
+ vec<tree, va_gc> *tv;
unsigned nparm = (method_params ? list_length (method_params) : 0);
/* If a prototype for the method to be called exists, then cast
@@ -692,30 +692,30 @@ build_objc_method_call (location_t loc, int super_flag, tree method_prototype,
lookup_object = save_expr (lookup_object);
/* Param list + 2 slots for object and selector. */
- parms = VEC_alloc (tree, gc, nparm + 2);
- tv = VEC_alloc (tree, gc, 2);
+ vec_alloc (parms, nparm + 2);
+ vec_alloc (tv, 2);
/* First, call the lookup function to get a pointer to the method,
then cast the pointer, then call it with the method arguments. */
- VEC_quick_push (tree, tv, lookup_object);
- VEC_quick_push (tree, tv, selector);
+ tv->quick_push (lookup_object);
+ tv->quick_push (selector);
method = build_function_call_vec (loc, sender, tv, NULL);
- VEC_free (tree, gc, tv);
+ vec_free (tv);
/* Pass the appropriate object to the method. */
- VEC_quick_push (tree, parms, (super_flag ? self_decl : lookup_object));
+ parms->quick_push ((super_flag ? self_decl : lookup_object));
/* Pass the selector to the method. */
- VEC_quick_push (tree, parms, selector);
+ parms->quick_push (selector);
/* Now append the remainder of the parms. */
if (nparm)
for (; method_params; method_params = TREE_CHAIN (method_params))
- VEC_quick_push (tree, parms, TREE_VALUE (method_params));
+ parms->quick_push (TREE_VALUE (method_params));
/* Build an obj_type_ref, with the correct cast for the method call. */
t = build3 (OBJ_TYPE_REF, sender_cast, method, lookup_object, size_zero_node);
t = build_function_call_vec (loc, t, parms, NULL);
- VEC_free (tree, gc, parms);
+ vec_free (parms);
return t;
}
@@ -900,7 +900,7 @@ gnu_runtime_abi_01_build_const_string_constructor (location_t loc, tree string,
int length)
{
tree constructor, fields;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* GNU: (NXConstantString *) & ((__builtin_ObjCString) { NULL, string, length }) */
fields = TYPE_FIELDS (internal_const_str_type);
@@ -1087,7 +1087,7 @@ build_protocol_initializer (tree type, tree protocol_name, tree protocol_list,
{
tree expr, ttyp;
location_t loc;
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
/* TODO: pass the loc in or find it from args. */
loc = input_location;
@@ -1122,7 +1122,7 @@ static tree
generate_protocol_list (tree i_or_p, tree klass_ctxt)
{
tree array_type, ptype, refs_decl, lproto, e, plist;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
char buf[BUFSIZE];
int size = 0;
@@ -1200,7 +1200,7 @@ generate_v1_meth_descriptor_table (tree chain, tree protocol, const char *prefix
{
tree method_list_template, initlist, decl;
int size;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
char buf[BUFSIZE];
if (!chain || !prefix)
@@ -1348,7 +1348,7 @@ static tree
generate_dispatch_table (tree chain, const char *name)
{
tree decl, method_list_template, initlist;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
int size = list_length (chain);
if (!objc_method_template)
@@ -1380,7 +1380,7 @@ build_category_initializer (tree type, tree cat_name, tree class_name,
{
tree expr, ltyp;
location_t loc;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* TODO: pass the loc in or find it from args. */
/* TODO: pass the loc in or find it from args. */
@@ -1488,7 +1488,7 @@ build_shared_structure_initializer (tree type, tree isa, tree super,
tree protocol_list)
{
tree expr, ltyp;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* isa = */
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, isa);
@@ -1584,7 +1584,7 @@ generate_ivars_list (tree chain, const char *name)
{
tree initlist, ivar_list_template, decl;
int size;
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
if (!chain)
return NULL_TREE;
@@ -1807,8 +1807,8 @@ static void
build_gnu_selector_translation_table (void)
{
tree chain, expr;
- VEC(constructor_elt,gc) *inits = NULL;
- VEC(constructor_elt,gc) *v ;
+ vec<constructor_elt, va_gc> *inits = NULL;
+ vec<constructor_elt, va_gc> *v ;
/* Cause the selector table (previously forward-declared)
to be actually output. */
@@ -1857,7 +1857,7 @@ generate_static_references (void)
= build_array_type (build_pointer_type (void_type_node), NULL_TREE);
int num_inst, num_class;
char buf[BUFSIZE];
- VEC(constructor_elt,gc) *decls = NULL;
+ vec<constructor_elt, va_gc> *decls = NULL;
/* FIXME: Remove NeXT runtime code. */
if (flag_next_runtime)
@@ -1866,7 +1866,7 @@ generate_static_references (void)
for (cl_chain = objc_static_instances, num_class = 0;
cl_chain; cl_chain = TREE_CHAIN (cl_chain), num_class++)
{
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
for (num_inst = 0, in_chain = TREE_PURPOSE (cl_chain);
in_chain; num_inst++, in_chain = TREE_CHAIN (in_chain));
@@ -1917,7 +1917,7 @@ init_def_list (tree type)
tree expr;
struct imp_entry *impent;
location_t loc;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
if (imp_count)
for (impent = imp_list; impent; impent = impent->next)
@@ -2004,7 +2004,7 @@ init_objc_symtab (tree type)
{
tree field, expr, ltyp;
location_t loc;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
loc = UNKNOWN_LOCATION;
@@ -2207,12 +2207,13 @@ static tree
build_throw_stmt (location_t loc, tree throw_expr, bool rethrown ATTRIBUTE_UNUSED)
{
tree t;
- VEC(tree, gc) *parms = VEC_alloc (tree, gc, 1);
+ vec<tree, va_gc> *parms;
+ vec_alloc (parms, 1);
/* A throw is just a call to the runtime throw function with the
object as a parameter. */
- VEC_quick_push (tree, parms, throw_expr);
+ parms->quick_push (throw_expr);
t = build_function_call_vec (loc, objc_exception_throw_decl, parms, NULL);
- VEC_free (tree, gc, parms);
+ vec_free (parms);
return add_stmt (t);
}
diff --git a/gcc/objc/objc-next-runtime-abi-01.c b/gcc/objc/objc-next-runtime-abi-01.c
index cf245911b90..598141c68ad 100644
--- a/gcc/objc/objc-next-runtime-abi-01.c
+++ b/gcc/objc/objc-next-runtime-abi-01.c
@@ -123,8 +123,8 @@ static tree next_runtime_abi_01_get_class_super_ref (location_t, struct imp_entr
static tree next_runtime_abi_01_get_category_super_ref (location_t, struct imp_entry *, bool);
static tree next_runtime_abi_01_receiver_is_class_object (tree);
-static void next_runtime_abi_01_get_arg_type_list_base (VEC(tree,gc) **, tree,
- int, int);
+static void next_runtime_abi_01_get_arg_type_list_base (vec<tree, va_gc> **,
+ tree, int, int);
static tree next_runtime_abi_01_build_objc_method_call (location_t, tree, tree,
tree, tree, tree, int);
static bool next_runtime_abi_01_setup_const_string_class_decl (void);
@@ -730,8 +730,9 @@ next_runtime_abi_01_get_class_reference (tree ident)
prototype. */
static void
-next_runtime_abi_01_get_arg_type_list_base (VEC(tree,gc) **argtypes, tree meth,
- int context, int superflag)
+next_runtime_abi_01_get_arg_type_list_base (vec<tree, va_gc> **argtypes,
+ tree meth, int context,
+ int superflag)
{
tree receiver_type;
@@ -742,9 +743,9 @@ next_runtime_abi_01_get_arg_type_list_base (VEC(tree,gc) **argtypes, tree meth,
else
receiver_type = objc_object_type;
- VEC_safe_push (tree, gc, *argtypes, receiver_type);
+ vec_safe_push (*argtypes, receiver_type);
/* Selector type - will eventually change to `int'. */
- VEC_safe_push (tree, gc, *argtypes, objc_selector_type);
+ vec_safe_push (*argtypes, objc_selector_type);
}
static tree
@@ -820,7 +821,7 @@ build_objc_method_call (location_t loc, int super_flag, tree method_prototype,
{
tree sender, sender_cast, method, t;
tree rcv_p = (super_flag ? objc_super_type : objc_object_type);
- VEC(tree, gc) *parms;
+ vec<tree, va_gc> *parms;
unsigned nparm = (method_params ? list_length (method_params) : 0);
/* If a prototype for the method to be called exists, then cast
@@ -846,7 +847,7 @@ build_objc_method_call (location_t loc, int super_flag, tree method_prototype,
lookup_object = save_expr (lookup_object);
/* Param list + 2 slots for object and selector. */
- parms = VEC_alloc (tree, gc, nparm + 2);
+ vec_alloc (parms, nparm + 2);
/* If we are returning a struct in memory, and the address
of that memory location is passed as a hidden first
@@ -869,19 +870,19 @@ build_objc_method_call (location_t loc, int super_flag, tree method_prototype,
method = build_fold_addr_expr_loc (loc, sender);
/* Pass the object to the method. */
- VEC_quick_push (tree, parms, lookup_object);
+ parms->quick_push (lookup_object);
/* Pass the selector to the method. */
- VEC_quick_push (tree, parms, selector);
+ parms->quick_push (selector);
/* Now append the remainder of the parms. */
if (nparm)
for (; method_params; method_params = TREE_CHAIN (method_params))
- VEC_quick_push (tree, parms, TREE_VALUE (method_params));
+ parms->quick_push (TREE_VALUE (method_params));
/* Build an obj_type_ref, with the correct cast for the method call. */
t = build3 (OBJ_TYPE_REF, sender_cast, method,
lookup_object, size_zero_node);
t = build_function_call_vec (loc, t, parms, NULL);
- VEC_free (tree, gc, parms);
+ vec_free (parms);
return t;
}
@@ -1005,7 +1006,7 @@ next_runtime_abi_01_build_const_string_constructor (location_t loc, tree string,
int length)
{
tree constructor, fields, var;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* NeXT: (NSConstantString *) & ((__builtin_ObjCString) { isa, string, length }) */
fields = TYPE_FIELDS (internal_const_str_type);
@@ -1145,7 +1146,7 @@ generate_v1_meth_descriptor_table (tree chain, tree protocol,
{
tree method_list_template, initlist, decl;
int size;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
char buf[BUFSIZE];
if (!chain || !prefix)
@@ -1186,7 +1187,7 @@ generate_v1_objc_protocol_extension (tree proto_interface,
{
int size;
location_t loc;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
tree decl, expr;
char buf[BUFSIZE];
@@ -1277,7 +1278,7 @@ static tree
build_v1_property_table_initializer (tree type, tree context)
{
tree x;
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
if (TREE_CODE (context) == PROTOCOL_INTERFACE_TYPE)
x = CLASS_PROPERTY_DECL (context);
@@ -1286,7 +1287,7 @@ build_v1_property_table_initializer (tree type, tree context)
for (; x; x = TREE_CHAIN (x))
{
- VEC(constructor_elt,gc) *elemlist = NULL;
+ vec<constructor_elt, va_gc> *elemlist = NULL;
tree attribute, name_ident = PROPERTY_NAME (x);
CONSTRUCTOR_APPEND_ELT (elemlist, NULL_TREE,
@@ -1313,7 +1314,7 @@ generate_v1_property_table (tree context, tree klass_ctxt)
{
tree x, decl, initlist, property_list_template;
bool is_proto = false;
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
int init_val, size = 0;
char buf[BUFSIZE];
@@ -1365,7 +1366,7 @@ generate_v1_protocol_list (tree i_or_p, tree klass_ctxt)
{
tree array_type, ptype, refs_decl, lproto, e, plist, attr;
int size = 0;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
char buf[BUFSIZE];
switch (TREE_CODE (i_or_p))
@@ -1447,7 +1448,7 @@ build_v1_protocol_initializer (tree type, tree protocol_name, tree protocol_list
{
tree expr, ttyp;
location_t loc;
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
if (!objc_protocol_extension_template)
build_v1_objc_protocol_extension_template ();
@@ -1659,7 +1660,7 @@ static tree
generate_dispatch_table (tree chain, const char *name, tree attr)
{
tree decl, method_list_template, initlist;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
int size;;
if (!chain || !name || !(size = list_length (chain)))
@@ -1694,7 +1695,7 @@ build_v1_category_initializer (tree type, tree cat_name, tree class_name,
location_t loc)
{
tree expr, ltyp;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, cat_name);
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, class_name);
@@ -1807,7 +1808,7 @@ generate_objc_class_ext (tree property_list, tree context)
tree weak_ivar_layout_tree;
int size;
location_t loc;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
char buf[BUFSIZE];
/* TODO: pass the loc in or find it from args. */
@@ -1879,7 +1880,7 @@ build_v1_shared_structure_initializer (tree type, tree isa, tree super,
{
tree expr, ltyp;
location_t loc;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* TODO: fish the location out of the input data. */
loc = UNKNOWN_LOCATION;
@@ -1964,7 +1965,7 @@ generate_ivars_list (tree chain, const char *name, tree attr)
{
tree initlist, ivar_list_template, decl;
int size;
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
if (!chain)
return NULL_TREE;
@@ -2138,7 +2139,7 @@ init_def_list (tree type)
tree expr;
location_t loc;
struct imp_entry *impent;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
if (imp_count)
for (impent = imp_list; impent; impent = impent->next)
@@ -2215,7 +2216,7 @@ build_objc_symtab_template (void)
static tree
init_objc_symtab (tree type)
{
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* sel_ref_cnt = { ..., 5, ... } */
@@ -2345,7 +2346,7 @@ generate_objc_image_info (void)
int flags
= ((flag_replace_objc_classes && imp_count ? 1 : 0)
| (flag_objc_gc ? 2 : 0));
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
tree array_type;
array_type = build_sized_array_type (integer_type_node, 2);
@@ -2892,12 +2893,13 @@ static tree
build_throw_stmt (location_t loc, tree throw_expr, bool rethrown ATTRIBUTE_UNUSED)
{
tree t;
- VEC(tree, gc) *parms = VEC_alloc (tree, gc, 1);
+ vec<tree, va_gc> *parms;
+ vec_alloc (parms, 1);
/* A throw is just a call to the runtime throw function with the
object as a parameter. */
- VEC_quick_push (tree, parms, throw_expr);
+ parms->quick_push (throw_expr);
t = build_function_call_vec (loc, objc_exception_throw_decl, parms, NULL);
- VEC_free (tree, gc, parms);
+ vec_free (parms);
return add_stmt (t);
}
diff --git a/gcc/objc/objc-next-runtime-abi-02.c b/gcc/objc/objc-next-runtime-abi-02.c
index cf899d379bd..636c3cf029e 100644
--- a/gcc/objc/objc-next-runtime-abi-02.c
+++ b/gcc/objc/objc-next-runtime-abi-02.c
@@ -208,8 +208,8 @@ static tree next_runtime_abi_02_get_class_super_ref (location_t, struct imp_entr
static tree next_runtime_abi_02_get_category_super_ref (location_t, struct imp_entry *, bool);
static tree next_runtime_abi_02_receiver_is_class_object (tree);
-static void next_runtime_abi_02_get_arg_type_list_base (VEC(tree,gc) **, tree,
- int, int);
+static void next_runtime_abi_02_get_arg_type_list_base (vec<tree, va_gc> **,
+ tree, int, int);
static tree next_runtime_abi_02_build_objc_method_call (location_t, tree, tree,
tree, tree, tree, int);
static bool next_runtime_abi_02_setup_const_string_class_decl (void);
@@ -1015,8 +1015,6 @@ typedef struct GTY(()) ident_data_tuple {
tree ident;
tree data;
} ident_data_tuple ;
-DEF_VEC_O(ident_data_tuple);
-DEF_VEC_ALLOC_O(ident_data_tuple, gc);
/* This routine creates a file scope static variable of type 'Class'
to hold the address of a class. */
@@ -1038,7 +1036,7 @@ build_v2_class_reference_decl (tree ident)
ident is replaced with address of the class metadata (of type
'Class') in the output routine. */
-static GTY (()) VEC (ident_data_tuple, gc) * classrefs;
+static GTY (()) vec<ident_data_tuple, va_gc> *classrefs;
static tree
objc_v2_get_class_reference (tree ident)
@@ -1049,7 +1047,7 @@ objc_v2_get_class_reference (tree ident)
{
int count;
ident_data_tuple *ref;
- FOR_EACH_VEC_ELT (ident_data_tuple, classrefs, count, ref)
+ FOR_EACH_VEC_ELT (*classrefs, count, ref)
{
if (ref->ident == ident)
{
@@ -1061,14 +1059,14 @@ objc_v2_get_class_reference (tree ident)
}
else
/* Somewhat arbitrary initial provision. */
- classrefs = VEC_alloc (ident_data_tuple, gc, 16);
+ vec_alloc (classrefs, 16);
/* We come here if we don't find the entry - or if the table was yet
to be created. */
decl = build_v2_class_reference_decl (ident);
e.ident = ident;
e.data = decl;
- VEC_safe_push (ident_data_tuple, gc, classrefs, e);
+ vec_safe_push (classrefs, e);
return decl;
}
@@ -1080,17 +1078,17 @@ next_runtime_abi_02_get_class_reference (tree ident)
else
{
/* We fall back to using objc_getClass (). */
- VEC(tree,gc) *vec = VEC_alloc (tree, gc, 1);
+ vec<tree, va_gc> *v;
+ vec_alloc (v, 1);
tree t;
/* ??? add_class_reference (ident); - is pointless, since the
system lib does not export the equivalent symbols. Maybe we
need to build a class ref anyway. */
t = my_build_string_pointer (IDENTIFIER_LENGTH (ident) + 1,
IDENTIFIER_POINTER (ident));
- VEC_quick_push (tree, vec, t);
- t = build_function_call_vec (input_location, objc_get_class_decl,
- vec, NULL);
- VEC_free (tree, gc, vec);
+ v->quick_push (t);
+ t = build_function_call_vec (input_location, objc_get_class_decl, v, 0);
+ vec_free (v);
return t;
}
}
@@ -1103,8 +1101,9 @@ next_runtime_abi_02_get_class_reference (tree ident)
prototype. */
static void
-next_runtime_abi_02_get_arg_type_list_base (VEC(tree,gc) **argtypes, tree meth,
- int context, int superflag)
+next_runtime_abi_02_get_arg_type_list_base (vec<tree, va_gc> **argtypes,
+ tree meth, int context,
+ int superflag)
{
tree receiver_type;
@@ -1115,12 +1114,11 @@ next_runtime_abi_02_get_arg_type_list_base (VEC(tree,gc) **argtypes, tree meth,
else
receiver_type = objc_object_type;
- VEC_safe_push (tree, gc, *argtypes, receiver_type);
+ vec_safe_push (*argtypes, receiver_type);
/* Selector type - will eventually change to `int'. */
- VEC_safe_push (tree, gc, *argtypes,
- (superflag
- ? objc_v2_super_selector_type
- : objc_v2_selector_type));
+ vec_safe_push (*argtypes,
+ superflag ? objc_v2_super_selector_type
+ : objc_v2_selector_type);
}
/* TODO: Merge this with the message refs. */
@@ -1201,10 +1199,8 @@ typedef struct GTY(()) msgref_entry {
tree selname;
tree refdecl;
} msgref_entry;
-DEF_VEC_O(msgref_entry);
-DEF_VEC_ALLOC_O(msgref_entry, gc);
-static GTY (()) VEC (msgref_entry, gc) * msgrefs;
+static GTY (()) vec<msgref_entry, va_gc> *msgrefs;
/* Build the list of (objc_msgSend_fixup_xxx, selector name), used
later on to initialize the table of 'struct message_ref_t'
@@ -1219,13 +1215,13 @@ build_v2_selector_messenger_reference (tree sel_name, tree message_func_decl)
{
int count;
msgref_entry *ref;
- FOR_EACH_VEC_ELT (msgref_entry, msgrefs, count, ref)
+ FOR_EACH_VEC_ELT (*msgrefs, count, ref)
if (ref->func == message_func_decl && ref->selname == sel_name)
return ref->refdecl;
}
else
/* Somewhat arbitrary initial provision. */
- msgrefs = VEC_alloc (msgref_entry, gc, 32);
+ vec_alloc (msgrefs, 32);
/* We come here if we don't find a match or at the start. */
decl = build_v2_message_reference_decl (sel_name,
@@ -1233,7 +1229,7 @@ build_v2_selector_messenger_reference (tree sel_name, tree message_func_decl)
e.func = message_func_decl;
e.selname = sel_name;
e.refdecl = decl;
- VEC_safe_push (msgref_entry, gc, msgrefs, e);
+ vec_safe_push (msgrefs, e);
return decl;
}
@@ -1258,9 +1254,7 @@ typedef struct GTY(()) prot_list_entry {
tree id;
tree refdecl;
} prot_list_entry;
-DEF_VEC_O(prot_list_entry);
-DEF_VEC_ALLOC_O(prot_list_entry, gc);
-static GTY (()) VEC (prot_list_entry, gc) * protrefs;
+static GTY (()) vec<prot_list_entry, va_gc> *protrefs;
static tree
objc_v2_get_protocol_reference (tree ident)
@@ -1271,7 +1265,7 @@ objc_v2_get_protocol_reference (tree ident)
{
int count;
prot_list_entry *ref;
- FOR_EACH_VEC_ELT (prot_list_entry, protrefs, count, ref)
+ FOR_EACH_VEC_ELT (*protrefs, count, ref)
{
if (ref->id == ident)
{
@@ -1283,14 +1277,14 @@ objc_v2_get_protocol_reference (tree ident)
}
else
/* Somewhat arbitrary initial provision. */
- protrefs = VEC_alloc (prot_list_entry, gc, 32);
+ vec_alloc (protrefs, 32);
/* We come here if we don't find the entry - or if the table was yet
to be created. */
decl = build_v2_protocollist_ref_decl (ident);
e.id = ident;
e.refdecl = decl;
- VEC_safe_push (prot_list_entry, gc, protrefs, e);
+ vec_safe_push (protrefs, e);
return decl;
}
@@ -1436,8 +1430,8 @@ build_v2_superclass_ref_decl (tree ident, bool inst)
return decl;
}
-static GTY (()) VEC (ident_data_tuple, gc) * class_super_refs;
-static GTY (()) VEC (ident_data_tuple, gc) * metaclass_super_refs;
+static GTY (()) vec<ident_data_tuple, va_gc> *class_super_refs;
+static GTY (()) vec<ident_data_tuple, va_gc> *metaclass_super_refs;
static tree
next_runtime_abi_02_get_class_super_ref (location_t loc ATTRIBUTE_UNUSED,
@@ -1446,14 +1440,14 @@ next_runtime_abi_02_get_class_super_ref (location_t loc ATTRIBUTE_UNUSED,
tree decl;
ident_data_tuple e;
tree id = CLASS_NAME (imp->imp_context);
- VEC (ident_data_tuple, gc) *list = inst_meth ? class_super_refs
+ vec<ident_data_tuple, va_gc> *list = inst_meth ? class_super_refs
: metaclass_super_refs;
if (list)
{
int count;
ident_data_tuple *ref;
- FOR_EACH_VEC_ELT (ident_data_tuple, list, count, ref)
+ FOR_EACH_VEC_ELT (*list, count, ref)
{
if (ref->ident == id)
{
@@ -1467,16 +1461,22 @@ next_runtime_abi_02_get_class_super_ref (location_t loc ATTRIBUTE_UNUSED,
{
/* Somewhat arbitrary initial provision. */
if (inst_meth)
- list = class_super_refs = VEC_alloc (ident_data_tuple, gc, 16);
+ {
+ vec_alloc (class_super_refs, 16);
+ list = class_super_refs;
+ }
else
- list = metaclass_super_refs = VEC_alloc (ident_data_tuple, gc, 16);
+ {
+ vec_alloc (metaclass_super_refs, 16);
+ list = metaclass_super_refs;
+ }
}
/* We come here if we don't find the entry - or if the table was yet
to be created. */
decl = build_v2_superclass_ref_decl (id, inst_meth);
e.ident = id;
e.data = decl;
- VEC_safe_push (ident_data_tuple, gc, list, e);
+ vec_safe_push (list, e);
return decl;
}
@@ -1522,13 +1522,12 @@ next_runtime_abi_02_receiver_is_class_object (tree receiver)
{
if (TREE_CODE (receiver) == VAR_DECL
&& IS_CLASS (TREE_TYPE (receiver))
- && classrefs
- && VEC_length (ident_data_tuple, classrefs))
+ && vec_safe_length (classrefs))
{
int count;
ident_data_tuple *ref;
/* The receiver is a variable created by build_class_reference_decl. */
- FOR_EACH_VEC_ELT (ident_data_tuple, classrefs, count, ref)
+ FOR_EACH_VEC_ELT (*classrefs, count, ref)
if (ref->data == receiver)
return ref->ident;
}
@@ -1626,7 +1625,7 @@ build_v2_build_objc_method_call (int super_flag, tree method_prototype,
if (TREE_CODE (ret_type) == RECORD_TYPE
|| TREE_CODE (ret_type) == UNION_TYPE)
{
- VEC(constructor_elt,gc) *rtt = NULL;
+ vec<constructor_elt, va_gc> *rtt = NULL;
/* ??? CHECKME. hmmm..... think we need something more
here. */
CONSTRUCTOR_APPEND_ELT (rtt, NULL_TREE, NULL_TREE);
@@ -1748,7 +1747,7 @@ next_runtime_abi_02_build_const_string_constructor (location_t loc, tree string,
int length)
{
tree constructor, fields, var;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* NeXT: (NSConstantString *) & ((__builtin_ObjCString) { isa, string, length }) */
fields = TYPE_FIELDS (internal_const_str_type);
@@ -1896,12 +1895,12 @@ void build_v2_message_ref_translation_table (void)
int count;
msgref_entry *ref;
- if (!msgrefs || !VEC_length (msgref_entry,msgrefs))
+ if (!vec_safe_length (msgrefs))
return;
- FOR_EACH_VEC_ELT (msgref_entry, msgrefs, count, ref)
+ FOR_EACH_VEC_ELT (*msgrefs, count, ref)
{
- VEC(constructor_elt,gc) *initializer;
+ vec<constructor_elt, va_gc> *initializer;
tree expr, constructor;
tree struct_type = TREE_TYPE (ref->refdecl);
location_t loc = DECL_SOURCE_LOCATION (ref->refdecl);
@@ -1929,10 +1928,10 @@ build_v2_classrefs_table (void)
int count;
ident_data_tuple *ref;
- if (!classrefs || !VEC_length (ident_data_tuple, classrefs))
+ if (!vec_safe_length (classrefs))
return;
- FOR_EACH_VEC_ELT (ident_data_tuple, classrefs, count, ref)
+ FOR_EACH_VEC_ELT (*classrefs, count, ref)
{
tree expr = ref->ident;
tree decl = ref->data;
@@ -1960,13 +1959,13 @@ build_v2_super_classrefs_table (bool metaclass)
{
int count;
ident_data_tuple *ref;
- VEC (ident_data_tuple, gc) *list = metaclass ? metaclass_super_refs
+ vec<ident_data_tuple, va_gc> *list = metaclass ? metaclass_super_refs
: class_super_refs;
- if (!list || !VEC_length (ident_data_tuple, list))
+ if (!vec_safe_length (list))
return;
- FOR_EACH_VEC_ELT (ident_data_tuple, list, count, ref)
+ FOR_EACH_VEC_ELT (*list, count, ref)
{
tree expr = ref->ident;
tree decl = ref->data;
@@ -1986,17 +1985,15 @@ build_v2_super_classrefs_table (bool metaclass)
/* Add the global class meta-data declaration to the list which later
on ends up in the __class_list section. */
-static GTY(()) VEC(tree,gc) *class_list;
+static GTY(()) vec<tree, va_gc> *class_list;
static void
objc_v2_add_to_class_list (tree global_class_decl)
{
- if (!class_list)
- class_list = VEC_alloc (tree, gc, imp_count?imp_count:1);
- VEC_safe_push (tree, gc, class_list, global_class_decl);
+ vec_safe_push (class_list, global_class_decl);
}
-static GTY(()) VEC(tree,gc) *nonlazy_class_list;
+static GTY(()) vec<tree, va_gc> *nonlazy_class_list;
/* Add the global class meta-data declaration to the list which later
on ends up in the __nonlazy_class section. */
@@ -2004,12 +2001,10 @@ static GTY(()) VEC(tree,gc) *nonlazy_class_list;
static void
objc_v2_add_to_nonlazy_class_list (tree global_class_decl)
{
- if (!nonlazy_class_list)
- nonlazy_class_list = VEC_alloc (tree, gc, imp_count?imp_count:1);
- VEC_safe_push (tree, gc, nonlazy_class_list, global_class_decl);
+ vec_safe_push (nonlazy_class_list, global_class_decl);
}
-static GTY(()) VEC(tree,gc) *category_list;
+static GTY(()) vec<tree, va_gc> *category_list;
/* Add the category meta-data declaration to the list which later on
ends up in the __nonlazy_category section. */
@@ -2017,12 +2012,10 @@ static GTY(()) VEC(tree,gc) *category_list;
static void
objc_v2_add_to_category_list (tree decl)
{
- if (!category_list)
- category_list = VEC_alloc (tree, gc, cat_count?cat_count:1);
- VEC_safe_push (tree, gc, category_list, decl);
+ vec_safe_push (category_list, decl);
}
-static GTY(()) VEC(tree,gc) *nonlazy_category_list;
+static GTY(()) vec<tree, va_gc> *nonlazy_category_list;
/* Add the category meta-data declaration to the list which later on
ends up in the __category_list section. */
@@ -2030,9 +2023,7 @@ static GTY(()) VEC(tree,gc) *nonlazy_category_list;
static void
objc_v2_add_to_nonlazy_category_list (tree decl)
{
- if (!nonlazy_category_list)
- nonlazy_category_list = VEC_alloc (tree, gc, cat_count?cat_count:1);
- VEC_safe_push (tree, gc, nonlazy_category_list, decl);
+ vec_safe_push (nonlazy_category_list, decl);
}
static bool
@@ -2054,16 +2045,16 @@ has_load_impl (tree clsmeth)
all @implemented {class,category} meta-data. */
static void
-build_v2_address_table (VEC(tree,gc) *src, const char *nam, tree attr)
+build_v2_address_table (vec<tree, va_gc> *src, const char *nam, tree attr)
{
int count=0;
tree type, decl, expr;
- VEC(constructor_elt,gc) *initlist = NULL;
+ vec<constructor_elt, va_gc> *initlist = NULL;
- if (!src || !VEC_length(tree,src))
+ if (!vec_safe_length (src))
return;
- FOR_EACH_VEC_ELT (tree, src, count, decl)
+ FOR_EACH_VEC_ELT (*src, count, decl)
{
#ifndef OBJCPLUS
tree purpose = build_int_cst (NULL_TREE, count);
@@ -2098,7 +2089,7 @@ build_v2_protocol_list_translation_table (void)
if (!protrefs)
return;
- FOR_EACH_VEC_ELT (prot_list_entry, protrefs, count, ref)
+ FOR_EACH_VEC_ELT (*protrefs, count, ref)
{
char buf[BUFSIZE];
tree expr;
@@ -2112,7 +2103,7 @@ build_v2_protocol_list_translation_table (void)
/* TODO: Maybe we could explicitly delete the vec. now? */
}
-static GTY (()) VEC (prot_list_entry, gc) * protlist;
+static GTY (()) vec<prot_list_entry, va_gc> *protlist;
/* Add the local protocol meta-data declaration to the list which
later on ends up in the __protocol_list section. */
@@ -2123,10 +2114,10 @@ objc_add_to_protocol_list (tree protocol_interface_decl, tree protocol_decl)
prot_list_entry e;
if (!protlist)
/* Arbitrary init count. */
- protlist = VEC_alloc (prot_list_entry, gc, 32);
+ vec_alloc (protlist, 32);
e.id = protocol_interface_decl;
e.refdecl = protocol_decl;
- VEC_safe_push (prot_list_entry, gc, protlist, e);
+ vec_safe_push (protlist, e);
}
/* Build the __protocol_list section table containing address of all
@@ -2137,10 +2128,10 @@ build_v2_protocol_list_address_table (void)
{
int count;
prot_list_entry *ref;
- if (!protlist || !VEC_length (prot_list_entry, protlist))
+ if (!vec_safe_length (protlist))
return;
- FOR_EACH_VEC_ELT (prot_list_entry, protlist, count, ref)
+ FOR_EACH_VEC_ELT (*protlist, count, ref)
{
tree decl, expr;
char buf[BUFSIZE];
@@ -2165,7 +2156,7 @@ generate_v2_protocol_list (tree i_or_p, tree klass_ctxt)
{
tree refs_decl, lproto, e, plist, ptempl_p_t;
int size = 0;
- VEC(constructor_elt,gc) *initlist = NULL;
+ vec<constructor_elt, va_gc> *initlist = NULL;
char buf[BUFSIZE];
if (TREE_CODE (i_or_p) == CLASS_INTERFACE_TYPE
@@ -2243,10 +2234,10 @@ generate_v2_protocol_list (tree i_or_p, tree klass_ctxt)
static tree
build_v2_descriptor_table_initializer (tree type, tree entries)
{
- VEC(constructor_elt,gc) *initlist = NULL;
+ vec<constructor_elt, va_gc> *initlist = NULL;
do
{
- VEC(constructor_elt,gc) *eltlist = NULL;
+ vec<constructor_elt, va_gc> *eltlist = NULL;
CONSTRUCTOR_APPEND_ELT (eltlist, NULL_TREE,
build_selector (METHOD_SEL_NAME (entries)));
CONSTRUCTOR_APPEND_ELT (eltlist, NULL_TREE,
@@ -2301,7 +2292,7 @@ generate_v2_meth_descriptor_table (tree chain, tree protocol,
{
tree method_list_template, initlist, decl, methods;
int size, entsize;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
char buf[BUFSIZE];
if (!chain || !prefix)
@@ -2345,7 +2336,7 @@ static tree
build_v2_property_table_initializer (tree type, tree context)
{
tree x;
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
if (TREE_CODE (context) == PROTOCOL_INTERFACE_TYPE)
x = CLASS_PROPERTY_DECL (context);
else
@@ -2353,7 +2344,7 @@ build_v2_property_table_initializer (tree type, tree context)
for (; x; x = TREE_CHAIN (x))
{
- VEC(constructor_elt,gc) *elemlist = NULL;
+ vec<constructor_elt, va_gc> *elemlist = NULL;
/* NOTE! sections where property name/attribute go MUST change
later. */
tree attribute, name_ident = PROPERTY_NAME (x);
@@ -2412,7 +2403,7 @@ generate_v2_property_table (tree context, tree klass_ctxt)
{
tree x, decl, initlist, property_list_template;
bool is_proto = false;
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
int init_val, size = 0;
char buf[BUFSIZE];
@@ -2468,7 +2459,7 @@ build_v2_protocol_initializer (tree type, tree protocol_name, tree protocol_list
{
tree expr, ttyp;
location_t loc;
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
/* TODO: find a better representation of location from the inputs. */
loc = UNKNOWN_LOCATION;
@@ -2615,7 +2606,7 @@ static tree
generate_v2_dispatch_table (tree chain, const char *name, tree attr)
{
tree decl, method_list_template, initlist;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
int size, init_val;
if (!chain || !name || !(size = list_length (chain)))
@@ -2649,7 +2640,7 @@ build_v2_category_initializer (tree type, tree cat_name, tree class_name,
location_t loc)
{
tree expr, ltyp;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, cat_name);
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, class_name);
@@ -2766,10 +2757,8 @@ typedef struct GTY(()) ivarref_entry
tree decl;
tree offset;
} ivarref_entry;
-DEF_VEC_O(ivarref_entry);
-DEF_VEC_ALLOC_O(ivarref_entry, gc);
-static GTY (()) VEC (ivarref_entry, gc) * ivar_offset_refs;
+static GTY (()) vec<ivarref_entry, va_gc> *ivar_offset_refs;
static tree
ivar_offset_ref (tree class_name, tree field_decl)
@@ -2786,13 +2775,13 @@ ivar_offset_ref (tree class_name, tree field_decl)
{
int count;
ivarref_entry *ref;
- FOR_EACH_VEC_ELT (ivarref_entry, ivar_offset_refs, count, ref)
+ FOR_EACH_VEC_ELT (*ivar_offset_refs, count, ref)
if (DECL_NAME (ref->decl) == field_decl_id)
return ref->decl;
}
else
/* Somewhat arbitrary initial provision. */
- ivar_offset_refs = VEC_alloc (ivarref_entry, gc, 32);
+ vec_alloc (ivar_offset_refs, 32);
/* We come here if we don't find a match or at the start. */
global_var = (TREE_PUBLIC (field_decl) || TREE_PROTECTED (field_decl));
@@ -2806,7 +2795,7 @@ ivar_offset_ref (tree class_name, tree field_decl)
e.decl = decl;
e.offset = byte_position (field_decl);
- VEC_safe_push (ivarref_entry, gc, ivar_offset_refs, e);
+ vec_safe_push (ivar_offset_refs, e);
return decl;
}
@@ -2818,11 +2807,11 @@ ivar_offset_ref (tree class_name, tree field_decl)
static tree
build_v2_ivar_list_initializer (tree class_name, tree type, tree field_decl)
{
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
do
{
- VEC(constructor_elt,gc) *ivar = NULL;
+ vec<constructor_elt, va_gc> *ivar = NULL;
int val;
tree id;
@@ -2914,7 +2903,7 @@ static tree
generate_v2_ivars_list (tree chain, const char *name, tree attr, tree templ)
{
tree decl, initlist, ivar_list_template;
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
int size, ivar_t_size;
if (!chain || !name || !(size = ivar_list_length (chain)))
@@ -2947,7 +2936,7 @@ static tree
build_v2_class_t_initializer (tree type, tree isa, tree superclass,
tree ro, tree cache, tree vtable)
{
- VEC(constructor_elt,gc) *initlist = NULL;
+ vec<constructor_elt, va_gc> *initlist = NULL;
/* isa */
CONSTRUCTOR_APPEND_ELT (initlist, NULL_TREE, isa);
@@ -2985,7 +2974,7 @@ build_v2_class_ro_t_initializer (tree type, tree name,
{
tree expr, unsigned_char_star, ltyp;
location_t loc;
- VEC(constructor_elt,gc) *initlist = NULL;
+ vec<constructor_elt, va_gc> *initlist = NULL;
/* TODO: fish out the real location from somewhere. */
loc = UNKNOWN_LOCATION;
@@ -3059,7 +3048,7 @@ build_v2_class_ro_t_initializer (tree type, tree name,
return objc_build_constructor (type, initlist);
}
-static GTY (()) VEC (ident_data_tuple, gc) * ehtype_list;
+static GTY (()) vec<ident_data_tuple, va_gc> *ehtype_list;
/* Record a name as needing a catcher. */
static void
@@ -3071,18 +3060,18 @@ objc_v2_add_to_ehtype_list (tree name)
int count = 0;
ident_data_tuple *ref;
- FOR_EACH_VEC_ELT (ident_data_tuple, ehtype_list, count, ref)
+ FOR_EACH_VEC_ELT (*ehtype_list, count, ref)
if (ref->ident == name)
return; /* Already entered. */
}
else
/* Arbitrary initial count. */
- ehtype_list = VEC_alloc (ident_data_tuple, gc, 8);
+ vec_alloc (ehtype_list, 8);
/* Not found, or new list. */
e.ident = name;
e.data = NULL_TREE;
- VEC_safe_push (ident_data_tuple, gc, ehtype_list, e);
+ vec_safe_push (ehtype_list, e);
}
static void
@@ -3333,10 +3322,10 @@ build_v2_ivar_offset_ref_table (void)
int count;
ivarref_entry *ref;
- if (!ivar_offset_refs || !VEC_length (ivarref_entry, ivar_offset_refs))
+ if (!vec_safe_length (ivar_offset_refs))
return;
- FOR_EACH_VEC_ELT (ivarref_entry, ivar_offset_refs, count, ref)
+ FOR_EACH_VEC_ELT (*ivar_offset_refs, count, ref)
finish_var_decl (ref->decl, ref->offset);
}
@@ -3346,7 +3335,7 @@ static void
generate_v2_objc_image_info (void)
{
tree decl, array_type;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
int flags =
((flag_replace_objc_classes && imp_count ? 1 : 0)
| (flag_objc_gc ? 2 : 0));
@@ -3467,7 +3456,7 @@ build_v2_ehtype_template (void)
static tree
objc2_build_ehtype_initializer (tree name, tree cls)
{
- VEC(constructor_elt,gc) *initlist = NULL;
+ vec<constructor_elt, va_gc> *initlist = NULL;
tree addr, offs;
/* This is done the same way as c++, missing the two first entries
@@ -3555,10 +3544,10 @@ static void build_v2_eh_catch_objects (void)
int count=0;
ident_data_tuple *ref;
- if (!ehtype_list || !VEC_length (ident_data_tuple, ehtype_list))
+ if (!vec_safe_length (ehtype_list))
return;
- FOR_EACH_VEC_ELT (ident_data_tuple, ehtype_list, count, ref)
+ FOR_EACH_VEC_ELT (*ehtype_list, count, ref)
{
char buf[BUFSIZE];
bool impl = is_implemented (ref->ident);
@@ -3579,10 +3568,10 @@ lookup_ehtype_ref (tree id)
int count=0;
ident_data_tuple *ref;
- if (!ehtype_list || !VEC_length (ident_data_tuple, ehtype_list))
+ if (!vec_safe_length (ehtype_list))
return NULL_TREE;
- FOR_EACH_VEC_ELT (ident_data_tuple, ehtype_list, count, ref)
+ FOR_EACH_VEC_ELT (*ehtype_list, count, ref)
if (ref->ident == id)
return ref->data;
return NULL_TREE;
@@ -3662,10 +3651,11 @@ build_throw_stmt (location_t loc, tree throw_expr, bool rethrown)
else
{
/* Throw like the others... */
- VEC(tree, gc) *parms = VEC_alloc (tree, gc, 1);
- VEC_quick_push (tree, parms, throw_expr);
- t = build_function_call_vec (loc, objc_exception_throw_decl, parms, NULL);
- VEC_free (tree, gc, parms);
+ vec<tree, va_gc> *parms;
+ vec_alloc (parms, 1);
+ parms->quick_push (throw_expr);
+ t = build_function_call_vec (loc, objc_exception_throw_decl, parms, 0);
+ vec_free (parms);
}
return add_stmt (t);
}
diff --git a/gcc/objc/objc-runtime-hooks.h b/gcc/objc/objc-runtime-hooks.h
index 9145681bfa9..8f307cfb78e 100644
--- a/gcc/objc/objc-runtime-hooks.h
+++ b/gcc/objc/objc-runtime-hooks.h
@@ -75,7 +75,7 @@ typedef struct _objc_runtime_hooks_r
/* Receiver is class Object, check runtime-specific. */
tree (*receiver_is_class_object) (tree);
/* Get the start of a method argument type list (receiver, _cmd). */
- void (*get_arg_type_list_base) (VEC(tree,gc) **, tree, int, int);
+ void (*get_arg_type_list_base) (vec<tree, va_gc> **, tree, int, int);
/* Build method call. */
tree (*build_objc_method_call) (location_t, tree, tree, tree, tree, tree, int);
diff --git a/gcc/objc/objc-runtime-shared-support.c b/gcc/objc/objc-runtime-shared-support.c
index 0b6f93716b8..594b66bb1fc 100644
--- a/gcc/objc/objc-runtime-shared-support.c
+++ b/gcc/objc/objc-runtime-shared-support.c
@@ -346,11 +346,11 @@ add_objc_string (tree ident, string_section section)
tree
build_descriptor_table_initializer (tree type, tree entries)
{
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
do
{
- VEC(constructor_elt,gc) *elts = NULL;
+ vec<constructor_elt, va_gc> *elts = NULL;
CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE,
build_selector (METHOD_SEL_NAME (entries)));
@@ -371,11 +371,11 @@ build_descriptor_table_initializer (tree type, tree entries)
tree
build_dispatch_table_initializer (tree type, tree entries)
{
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
do
{
- VEC(constructor_elt,gc) *elems = NULL;
+ vec<constructor_elt, va_gc> *elems = NULL;
tree expr;
CONSTRUCTOR_APPEND_ELT (elems, NULL_TREE,
@@ -433,7 +433,7 @@ init_module_descriptor (tree type, long vers)
{
tree expr, ltyp;
location_t loc;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
/* No really useful place to point to. */
loc = UNKNOWN_LOCATION;
@@ -528,11 +528,11 @@ build_module_descriptor (long vers, tree attr)
tree
build_ivar_list_initializer (tree type, tree field_decl)
{
- VEC(constructor_elt,gc) *inits = NULL;
+ vec<constructor_elt, va_gc> *inits = NULL;
do
{
- VEC(constructor_elt,gc) *ivar = NULL;
+ vec<constructor_elt, va_gc> *ivar = NULL;
tree id;
/* Set name. */
diff --git a/gcc/objc/objc-runtime-shared-support.h b/gcc/objc/objc-runtime-shared-support.h
index 0db8bed1240..e5dc644e366 100644
--- a/gcc/objc/objc-runtime-shared-support.h
+++ b/gcc/objc/objc-runtime-shared-support.h
@@ -29,7 +29,7 @@ extern tree add_field_decl (tree, const char *, tree **);
extern tree build_sized_array_type (tree, int);
extern tree lookup_interface (tree);
-extern tree objc_build_constructor (tree, VEC(constructor_elt,gc) *);
+extern tree objc_build_constructor (tree, vec<constructor_elt, va_gc> *);
extern tree start_var_decl (tree, const char *);
extern void finish_var_decl (tree, tree);
diff --git a/gcc/omp-low.c b/gcc/omp-low.c
index a59841384ca..54997072df4 100644
--- a/gcc/omp-low.c
+++ b/gcc/omp-low.c
@@ -551,12 +551,12 @@ workshare_safe_to_combine_p (basic_block ws_entry_bb)
parallel+workshare call. WS_STMT is the workshare directive being
expanded. */
-static VEC(tree,gc) *
+static vec<tree, va_gc> *
get_ws_args_for (gimple ws_stmt)
{
tree t;
location_t loc = gimple_location (ws_stmt);
- VEC(tree,gc) *ws_args;
+ vec<tree, va_gc> *ws_args;
if (gimple_code (ws_stmt) == GIMPLE_OMP_FOR)
{
@@ -564,21 +564,21 @@ get_ws_args_for (gimple ws_stmt)
extract_omp_for_data (ws_stmt, &fd, NULL);
- ws_args = VEC_alloc (tree, gc, 3 + (fd.chunk_size != 0));
+ vec_alloc (ws_args, 3 + (fd.chunk_size != 0));
t = fold_convert_loc (loc, long_integer_type_node, fd.loop.n1);
- VEC_quick_push (tree, ws_args, t);
+ ws_args->quick_push (t);
t = fold_convert_loc (loc, long_integer_type_node, fd.loop.n2);
- VEC_quick_push (tree, ws_args, t);
+ ws_args->quick_push (t);
t = fold_convert_loc (loc, long_integer_type_node, fd.loop.step);
- VEC_quick_push (tree, ws_args, t);
+ ws_args->quick_push (t);
if (fd.chunk_size)
{
t = fold_convert_loc (loc, long_integer_type_node, fd.chunk_size);
- VEC_quick_push (tree, ws_args, t);
+ ws_args->quick_push (t);
}
return ws_args;
@@ -590,8 +590,8 @@ get_ws_args_for (gimple ws_stmt)
the exit of the sections region. */
basic_block bb = single_succ (gimple_bb (ws_stmt));
t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
- ws_args = VEC_alloc (tree, gc, 1);
- VEC_quick_push (tree, ws_args, t);
+ vec_alloc (ws_args, 1);
+ ws_args->quick_push (t);
return ws_args;
}
@@ -2937,7 +2937,7 @@ gimple_build_cond_empty (tree cond)
static void
expand_parallel_call (struct omp_region *region, basic_block bb,
- gimple entry_stmt, VEC(tree,gc) *ws_args)
+ gimple entry_stmt, vec<tree, va_gc> *ws_args)
{
tree t, t1, t2, val, cond, c, clauses;
gimple_stmt_iterator gsi;
@@ -2945,7 +2945,7 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
enum built_in_function start_ix;
int start_ix2;
location_t clause_loc;
- VEC(tree,gc) *args;
+ vec<tree, va_gc> *args;
clauses = gimple_omp_parallel_clauses (entry_stmt);
@@ -3076,11 +3076,12 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
t1 = build_fold_addr_expr (t);
t2 = build_fold_addr_expr (gimple_omp_parallel_child_fn (entry_stmt));
- args = VEC_alloc (tree, gc, 3 + VEC_length (tree, ws_args));
- VEC_quick_push (tree, args, t2);
- VEC_quick_push (tree, args, t1);
- VEC_quick_push (tree, args, val);
- VEC_splice (tree, args, ws_args);
+ vec_alloc (args, 3 + vec_safe_length (ws_args));
+ args->quick_push (t2);
+ args->quick_push (t1);
+ args->quick_push (val);
+ if (ws_args)
+ args->splice (*ws_args);
t = build_call_expr_loc_vec (UNKNOWN_LOCATION,
builtin_decl_explicit (start_ix), args);
@@ -3190,12 +3191,12 @@ maybe_catch_exception (gimple_seq body)
/* Chain all the DECLs in LIST by their TREE_CHAIN fields. */
static tree
-vec2chain (VEC(tree,gc) *v)
+vec2chain (vec<tree, va_gc> *v)
{
tree chain = NULL_TREE, t;
unsigned ix;
- FOR_EACH_VEC_ELT_REVERSE (tree, v, ix, t)
+ FOR_EACH_VEC_SAFE_ELT_REVERSE (v, ix, t)
{
DECL_CHAIN (t) = chain;
chain = t;
@@ -3388,7 +3389,7 @@ expand_omp_taskreg (struct omp_region *region)
gimple_stmt_iterator gsi;
gimple entry_stmt, stmt;
edge e;
- VEC(tree,gc) *ws_args;
+ vec<tree, va_gc> *ws_args;
entry_stmt = last_stmt (region->entry);
child_fn = gimple_omp_taskreg_child_fn (entry_stmt);
@@ -3563,18 +3564,18 @@ expand_omp_taskreg (struct omp_region *region)
single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
/* Remove non-local VAR_DECLs from child_cfun->local_decls list. */
- num = VEC_length (tree, child_cfun->local_decls);
+ num = vec_safe_length (child_cfun->local_decls);
for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
{
- t = VEC_index (tree, child_cfun->local_decls, srcidx);
+ t = (*child_cfun->local_decls)[srcidx];
if (DECL_CONTEXT (t) == cfun->decl)
continue;
if (srcidx != dstidx)
- VEC_replace (tree, child_cfun->local_decls, dstidx, t);
+ (*child_cfun->local_decls)[dstidx] = t;
dstidx++;
}
if (dstidx != num)
- VEC_truncate (tree, child_cfun->local_decls, dstidx);
+ vec_safe_truncate (child_cfun->local_decls, dstidx);
/* Inform the callgraph about the new function. */
DECL_STRUCT_FUNCTION (child_fn)->curr_properties
@@ -4561,7 +4562,7 @@ expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
gimple_stmt_iterator psi;
gimple phi;
edge re, ene;
- edge_var_map_vector head;
+ edge_var_map_vector *head;
edge_var_map *vm;
size_t i;
@@ -4574,7 +4575,7 @@ expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
ene = single_succ_edge (entry_bb);
psi = gsi_start_phis (fin_bb);
- for (i = 0; !gsi_end_p (psi) && VEC_iterate (edge_var_map, head, i, vm);
+ for (i = 0; !gsi_end_p (psi) && head->iterate (i, &vm);
gsi_next (&psi), ++i)
{
gimple nphi;
@@ -4596,7 +4597,7 @@ expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
locus = redirect_edge_var_map_location (vm);
add_phi_arg (nphi, redirect_edge_var_map_def (vm), re, locus);
}
- gcc_assert (!gsi_end_p (psi) && i == VEC_length (edge_var_map, head));
+ gcc_assert (!gsi_end_p (psi) && i == head->length ());
redirect_edge_var_map_clear (re);
while (1)
{
@@ -4721,7 +4722,7 @@ static void
expand_omp_sections (struct omp_region *region)
{
tree t, u, vin = NULL, vmain, vnext, l2;
- VEC (tree,heap) *label_vec;
+ vec<tree> label_vec;
unsigned len;
basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
gimple_stmt_iterator si, switch_si;
@@ -4772,9 +4773,9 @@ expand_omp_sections (struct omp_region *region)
and a default case to abort if something goes wrong. */
len = EDGE_COUNT (l0_bb->succs);
- /* Use VEC_quick_push on label_vec throughout, since we know the size
+ /* Use vec::quick_push on label_vec throughout, since we know the size
in advance. */
- label_vec = VEC_alloc (tree, heap, len);
+ label_vec.create (len);
/* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
GIMPLE_OMP_SECTIONS statement. */
@@ -4819,7 +4820,7 @@ expand_omp_sections (struct omp_region *region)
}
t = build_case_label (build_int_cst (unsigned_type_node, 0), NULL, l2);
- VEC_quick_push (tree, label_vec, t);
+ label_vec.quick_push (t);
i = 1;
/* Convert each GIMPLE_OMP_SECTION into a CASE_LABEL_EXPR. */
@@ -4843,7 +4844,7 @@ expand_omp_sections (struct omp_region *region)
t = gimple_block_label (s_entry_bb);
u = build_int_cst (unsigned_type_node, casei);
u = build_case_label (u, NULL, t);
- VEC_quick_push (tree, label_vec, u);
+ label_vec.quick_push (u);
si = gsi_last_bb (s_entry_bb);
gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_SECTION);
@@ -4869,7 +4870,7 @@ expand_omp_sections (struct omp_region *region)
stmt = gimple_build_switch (vmain, u, label_vec);
gsi_insert_after (&switch_si, stmt, GSI_SAME_STMT);
gsi_remove (&switch_si, true);
- VEC_free (tree, heap, label_vec);
+ label_vec.release ();
si = gsi_start_bb (default_bb);
stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_TRAP), 0);
diff --git a/gcc/opts-common.c b/gcc/opts-common.c
index e024537fa0f..356d941761e 100644
--- a/gcc/opts-common.c
+++ b/gcc/opts-common.c
@@ -1142,13 +1142,15 @@ set_option (struct gcc_options *opts, struct gcc_options *opts_set,
case CLVC_DEFER:
{
- VEC(cl_deferred_option,heap) *vec
- = (VEC(cl_deferred_option,heap) *) *(void **) flag_var;
+ vec<cl_deferred_option> *v
+ = (vec<cl_deferred_option> *) *(void **) flag_var;
cl_deferred_option p = {opt_index, arg, value};
- VEC_safe_push (cl_deferred_option, heap, vec, p);
- *(void **) flag_var = vec;
+ if (!v)
+ v = XCNEW (vec<cl_deferred_option>);
+ v->safe_push (p);
+ *(void **) flag_var = v;
if (set_flag_var)
- *(void **) set_flag_var = vec;
+ *(void **) set_flag_var = v;
}
break;
}
diff --git a/gcc/opts-global.c b/gcc/opts-global.c
index 5ebc6309e6b..38cd62bc361 100644
--- a/gcc/opts-global.c
+++ b/gcc/opts-global.c
@@ -39,10 +39,8 @@ along with GCC; see the file COPYING3. If not see
#include "tree-pass.h"
typedef const char *const_char_p; /* For DEF_VEC_P. */
-DEF_VEC_P(const_char_p);
-DEF_VEC_ALLOC_P(const_char_p,heap);
-static VEC(const_char_p,heap) *ignored_options;
+static vec<const_char_p> ignored_options;
/* Input file names. */
const char **in_fnames;
@@ -122,7 +120,7 @@ complain_wrong_lang (const struct cl_decoded_option *decoded,
static void
postpone_unknown_option_warning (const char *opt)
{
- VEC_safe_push (const_char_p, heap, ignored_options, opt);
+ ignored_options.safe_push (opt);
}
/* Produce a warning for each option previously buffered. */
@@ -130,11 +128,11 @@ postpone_unknown_option_warning (const char *opt)
void
print_ignored_options (void)
{
- while (!VEC_empty (const_char_p, ignored_options))
+ while (!ignored_options.is_empty ())
{
const char *opt;
- opt = VEC_pop (const_char_p, ignored_options);
+ opt = ignored_options.pop ();
warning_at (UNKNOWN_LOCATION, 0,
"unrecognized command line option \"%s\"", opt);
}
@@ -350,8 +348,12 @@ handle_common_deferred_options (void)
{
unsigned int i;
cl_deferred_option *opt;
- VEC(cl_deferred_option,heap) *vec
- = (VEC(cl_deferred_option,heap) *) common_deferred_options;
+ vec<cl_deferred_option> v;
+
+ if (common_deferred_options)
+ v = *((vec<cl_deferred_option> *) common_deferred_options);
+ else
+ v = vec<cl_deferred_option>();
if (flag_dump_all_passed)
enable_rtl_dump_file ();
@@ -359,7 +361,7 @@ handle_common_deferred_options (void)
if (flag_opt_info)
opt_info_switch_p (NULL);
- FOR_EACH_VEC_ELT (cl_deferred_option, vec, i, opt)
+ FOR_EACH_VEC_ELT (v, i, opt)
{
switch (opt->opt_index)
{
diff --git a/gcc/opts.c b/gcc/opts.c
index 34c5698ba30..26a0bb878c5 100644
--- a/gcc/opts.c
+++ b/gcc/opts.c
@@ -190,8 +190,6 @@ base_of_path (const char *path, const char **base_out)
static const char undocumented_msg[] = N_("This switch lacks documentation");
typedef char *char_p; /* For DEF_VEC_P. */
-DEF_VEC_P(char_p);
-DEF_VEC_ALLOC_P(char_p,heap);
static void handle_param (struct gcc_options *opts,
struct gcc_options *opts_set, location_t loc,
@@ -239,7 +237,9 @@ add_comma_separated_to_vector (void **pvec, const char *arg)
char *r;
char *w;
char *token_start;
- VEC(char_p,heap) *vec = (VEC(char_p,heap) *) *pvec;
+ vec<char_p> *v = (vec<char_p> *) *pvec;
+
+ vec_check_alloc (v, 1);
/* We never free this string. */
tmp = xstrdup (arg);
@@ -254,7 +254,7 @@ add_comma_separated_to_vector (void **pvec, const char *arg)
{
*w++ = '\0';
++r;
- VEC_safe_push (char_p, heap, vec, token_start);
+ v->safe_push (token_start);
token_start = w;
}
if (*r == '\\' && r[1] == ',')
@@ -266,9 +266,9 @@ add_comma_separated_to_vector (void **pvec, const char *arg)
*w++ = *r++;
}
if (*token_start != '\0')
- VEC_safe_push (char_p, heap, vec, token_start);
+ v->safe_push (token_start);
- *pvec = vec;
+ *pvec = v;
}
/* Initialize OPTS and OPTS_SET before using them in parsing options. */
diff --git a/gcc/opts.h b/gcc/opts.h
index a9336af74bc..b6a43612935 100644
--- a/gcc/opts.h
+++ b/gcc/opts.h
@@ -257,8 +257,6 @@ typedef struct
const char *arg;
int value;
} cl_deferred_option;
-DEF_VEC_O(cl_deferred_option);
-DEF_VEC_ALLOC_O(cl_deferred_option,heap);
/* Structure describing a single option-handling callback. */
diff --git a/gcc/passes.c b/gcc/passes.c
index d4115b38ae7..7e224fb7a4c 100644
--- a/gcc/passes.c
+++ b/gcc/passes.c
@@ -633,9 +633,7 @@ register_pass_name (struct opt_pass *pass, const char *name)
/* Map from pass id to canonicalized pass name. */
typedef const char *char_ptr;
-DEF_VEC_P(char_ptr);
-DEF_VEC_ALLOC_P(char_ptr, heap);
-static VEC(char_ptr, heap) *pass_tab = NULL;
+static vec<char_ptr> pass_tab = vec<char_ptr>();
/* Callback function for traversing NAME_TO_PASS_MAP. */
@@ -646,10 +644,9 @@ pass_traverse (void **slot, void *data ATTRIBUTE_UNUSED)
struct opt_pass *pass = (*p)->pass;
gcc_assert (pass->static_pass_number > 0);
- gcc_assert (pass_tab);
+ gcc_assert (pass_tab.exists ());
- VEC_replace (char_ptr, pass_tab, pass->static_pass_number,
- (*p)->unique_name);
+ pass_tab[pass->static_pass_number] = (*p)->unique_name;
return 1;
}
@@ -663,8 +660,7 @@ create_pass_tab (void)
if (!flag_dump_passes)
return;
- VEC_safe_grow_cleared (char_ptr, heap,
- pass_tab, passes_by_id_size + 1);
+ pass_tab.safe_grow_cleared (passes_by_id_size + 1);
htab_traverse (name_to_pass_map, pass_traverse, NULL);
}
@@ -686,7 +682,7 @@ dump_one_pass (struct opt_pass *pass, int pass_indent)
if (pass->static_pass_number <= 0)
pn = pass->name;
else
- pn = VEC_index (char_ptr, pass_tab, pass->static_pass_number);
+ pn = pass_tab[pass->static_pass_number];
fprintf (stderr, "%*s%-40s%*s:%s%s\n", indent, " ", pn,
(15 - indent < 0 ? 0 : 15 - indent), " ",
@@ -772,11 +768,11 @@ struct uid_range
typedef struct uid_range *uid_range_p;
-DEF_VEC_P(uid_range_p);
-DEF_VEC_ALLOC_P(uid_range_p, heap);
-static VEC(uid_range_p, heap) *enabled_pass_uid_range_tab = NULL;
-static VEC(uid_range_p, heap) *disabled_pass_uid_range_tab = NULL;
+static vec<uid_range_p>
+ enabled_pass_uid_range_tab = vec<uid_range_p>();
+static vec<uid_range_p>
+ disabled_pass_uid_range_tab = vec<uid_range_p>();
/* Parse option string for -fdisable- and -fenable-
@@ -795,7 +791,7 @@ enable_disable_pass (const char *arg, bool is_enable)
struct opt_pass *pass;
char *range_str, *phase_name;
char *argstr = xstrdup (arg);
- VEC(uid_range_p, heap) **tab = 0;
+ vec<uid_range_p> *tab = 0;
range_str = strchr (argstr,'=');
if (range_str)
@@ -830,9 +826,8 @@ enable_disable_pass (const char *arg, bool is_enable)
else
tab = &disabled_pass_uid_range_tab;
- if ((unsigned) pass->static_pass_number >= VEC_length (uid_range_p, *tab))
- VEC_safe_grow_cleared (uid_range_p, heap,
- *tab, pass->static_pass_number + 1);
+ if ((unsigned) pass->static_pass_number >= tab->length ())
+ tab->safe_grow_cleared (pass->static_pass_number + 1);
if (!range_str)
{
@@ -842,10 +837,9 @@ enable_disable_pass (const char *arg, bool is_enable)
new_range->start = 0;
new_range->last = (unsigned)-1;
- slot = VEC_index (uid_range_p, *tab, pass->static_pass_number);
+ slot = (*tab)[pass->static_pass_number];
new_range->next = slot;
- VEC_replace (uid_range_p, *tab, pass->static_pass_number,
- new_range);
+ (*tab)[pass->static_pass_number] = new_range;
if (is_enable)
inform (UNKNOWN_LOCATION, "enable pass %s for functions in the range "
"of [%u, %u]", phase_name, new_range->start, new_range->last);
@@ -925,10 +919,9 @@ enable_disable_pass (const char *arg, bool is_enable)
new_range->last = (unsigned) last;
}
- slot = VEC_index (uid_range_p, *tab, pass->static_pass_number);
+ slot = (*tab)[pass->static_pass_number];
new_range->next = slot;
- VEC_replace (uid_range_p, *tab, pass->static_pass_number,
- new_range);
+ (*tab)[pass->static_pass_number] = new_range;
if (is_enable)
{
if (new_range->assem_name)
@@ -980,18 +973,18 @@ disable_pass (const char *arg)
static bool
is_pass_explicitly_enabled_or_disabled (struct opt_pass *pass,
tree func,
- VEC(uid_range_p, heap) *tab)
+ vec<uid_range_p> tab)
{
uid_range_p slot, range;
int cgraph_uid;
const char *aname = NULL;
- if (!tab
- || (unsigned) pass->static_pass_number >= VEC_length (uid_range_p, tab)
+ if (!tab.exists ()
+ || (unsigned) pass->static_pass_number >= tab.length ()
|| pass->static_pass_number == -1)
return false;
- slot = VEC_index (uid_range_p, tab, pass->static_pass_number);
+ slot = tab[pass->static_pass_number];
if (!slot)
return false;
@@ -2203,18 +2196,13 @@ execute_all_ipa_transforms (void)
return;
node = cgraph_get_node (current_function_decl);
- if (node->ipa_transforms_to_apply)
+ if (node->ipa_transforms_to_apply.exists ())
{
unsigned int i;
- for (i = 0; i < VEC_length (ipa_opt_pass, node->ipa_transforms_to_apply);
- i++)
- execute_one_ipa_transform_pass (node,
- VEC_index (ipa_opt_pass,
- node->ipa_transforms_to_apply,
- i));
- VEC_free (ipa_opt_pass, heap, node->ipa_transforms_to_apply);
- node->ipa_transforms_to_apply = NULL;
+ for (i = 0; i < node->ipa_transforms_to_apply.length (); i++)
+ execute_one_ipa_transform_pass (node, node->ipa_transforms_to_apply[i]);
+ node->ipa_transforms_to_apply.release ();
}
}
@@ -2224,7 +2212,7 @@ static void
apply_ipa_transforms (void *data)
{
struct cgraph_node *node = cgraph_get_node (current_function_decl);
- if (!node->global.inlined_to && node->ipa_transforms_to_apply)
+ if (!node->global.inlined_to && node->ipa_transforms_to_apply.exists ())
{
*(bool *)data = true;
execute_all_ipa_transforms();
@@ -2372,8 +2360,7 @@ execute_one_pass (struct opt_pass *pass)
{
struct cgraph_node *node;
FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
- VEC_safe_push (ipa_opt_pass, heap, node->ipa_transforms_to_apply,
- (struct ipa_opt_pass_d *)pass);
+ node->ipa_transforms_to_apply.safe_push ((struct ipa_opt_pass_d *)pass);
}
if (!current_function_decl)
diff --git a/gcc/predict.c b/gcc/predict.c
index 8b9d62b286b..aceca1dc3c9 100644
--- a/gcc/predict.c
+++ b/gcc/predict.c
@@ -1391,7 +1391,7 @@ predict_loops (void)
{
basic_block bb, *bbs;
unsigned j, n_exits;
- VEC (edge, heap) *exits;
+ vec<edge> exits;
struct tree_niter_desc niter_desc;
edge ex;
struct nb_iter_bound *nb_iter;
@@ -1402,14 +1402,14 @@ predict_loops (void)
gimple stmt = NULL;
exits = get_loop_exit_edges (loop);
- n_exits = VEC_length (edge, exits);
+ n_exits = exits.length ();
if (!n_exits)
{
- VEC_free (edge, heap, exits);
+ exits.release ();
continue;
}
- FOR_EACH_VEC_ELT (edge, exits, j, ex)
+ FOR_EACH_VEC_ELT (exits, j, ex)
{
tree niter = NULL;
HOST_WIDE_INT nitercst;
@@ -1452,7 +1452,7 @@ predict_loops (void)
probability = ((REG_BR_PROB_BASE + nitercst / 2) / nitercst);
predict_edge (ex, predictor, probability);
}
- VEC_free (edge, heap, exits);
+ exits.release ();
/* Find information about loop bound variables. */
for (nb_iter = loop->bounds; nb_iter;
diff --git a/gcc/print-tree.c b/gcc/print-tree.c
index 27fb72f8eb4..b12d1f38637 100644
--- a/gcc/print-tree.c
+++ b/gcc/print-tree.c
@@ -65,7 +65,7 @@ debug_tree (tree node)
down to a depth of six. */
DEBUG_FUNCTION void
-debug_vec_tree (VEC(tree,gc) *vec)
+debug_vec_tree (vec<tree, va_gc> *vec)
{
table = XCNEWVEC (struct bucket *, HASH_SIZE);
print_vec_tree (stderr, "", vec, 0);
@@ -880,7 +880,7 @@ print_node (FILE *file, const char *prefix, tree node, int indent)
{
unsigned HOST_WIDE_INT cnt;
tree index, value;
- len = VEC_length (constructor_elt, CONSTRUCTOR_ELTS (node));
+ len = vec_safe_length (CONSTRUCTOR_ELTS (node));
fprintf (file, " lngt %d", len);
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (node),
cnt, index, value)
@@ -994,7 +994,7 @@ print_node (FILE *file, const char *prefix, tree node, int indent)
starting in column INDENT. */
void
-print_vec_tree (FILE *file, const char *prefix, VEC(tree,gc) *vec, int indent)
+print_vec_tree (FILE *file, const char *prefix, vec<tree, va_gc> *vec, int indent)
{
tree elt;
unsigned ix;
@@ -1004,9 +1004,9 @@ print_vec_tree (FILE *file, const char *prefix, VEC(tree,gc) *vec, int indent)
/* Print the slot this node is in, and its code, and address. */
fprintf (file, "%s <VEC", prefix);
- dump_addr (file, " ", vec);
+ dump_addr (file, " ", vec->address ());
- FOR_EACH_VEC_ELT (tree, vec, ix, elt)
+ FOR_EACH_VEC_ELT (*vec, ix, elt)
{
char temp[10];
sprintf (temp, "elt %d", ix);
diff --git a/gcc/profile.c b/gcc/profile.c
index 6d4a8d8beaf..194ef91f24f 100644
--- a/gcc/profile.c
+++ b/gcc/profile.c
@@ -156,9 +156,9 @@ instrument_values (histogram_values values)
/* Emit code to generate the histograms before the insns. */
- for (i = 0; i < VEC_length (histogram_value, values); i++)
+ for (i = 0; i < values.length (); i++)
{
- histogram_value hist = VEC_index (histogram_value, values, i);
+ histogram_value hist = values[i];
unsigned t = COUNTER_FOR_HIST_TYPE (hist->type);
if (!coverage_counter_alloc (t, hist->n_counters))
@@ -385,7 +385,7 @@ get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum)
static bool
-is_edge_inconsistent (VEC(edge,gc) *edges)
+is_edge_inconsistent (vec<edge, va_gc> *edges)
{
edge e;
edge_iterator ei;
@@ -950,9 +950,9 @@ compute_value_histograms (histogram_values values, unsigned cfg_checksum,
for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
n_histogram_counters[t] = 0;
- for (i = 0; i < VEC_length (histogram_value, values); i++)
+ for (i = 0; i < values.length (); i++)
{
- histogram_value hist = VEC_index (histogram_value, values, i);
+ histogram_value hist = values[i];
n_histogram_counters[(int) hist->type] += hist->n_counters;
}
@@ -976,9 +976,9 @@ compute_value_histograms (histogram_values values, unsigned cfg_checksum,
if (!any)
return;
- for (i = 0; i < VEC_length (histogram_value, values); i++)
+ for (i = 0; i < values.length (); i++)
{
- histogram_value hist = VEC_index (histogram_value, values, i);
+ histogram_value hist = values[i];
gimple stmt = hist->hvalue.stmt;
t = (int) hist->type;
@@ -1069,7 +1069,7 @@ branch_prob (void)
unsigned num_edges, ignored_edges;
unsigned num_instrumented;
struct edge_list *el;
- histogram_values values = NULL;
+ histogram_values values = histogram_values();
unsigned cfg_checksum, lineno_checksum;
total_num_times_called++;
@@ -1396,7 +1396,7 @@ branch_prob (void)
free_aux_for_edges ();
- VEC_free (histogram_value, heap, values);
+ values.release ();
free_edge_list (el);
coverage_end_function (lineno_checksum, cfg_checksum);
}
diff --git a/gcc/profile.h b/gcc/profile.h
index a77d3c51e1d..c97d8efee54 100644
--- a/gcc/profile.h
+++ b/gcc/profile.h
@@ -42,7 +42,7 @@ struct edge_info
a minimum cost flow algorithm. */
extern void mcf_smooth_cfg (void);
-extern gcov_type sum_edge_counts (VEC (edge, gc) *edges);
+extern gcov_type sum_edge_counts (vec<edge, va_gc> *edges);
extern void init_node_map (void);
extern void del_node_map (void);
diff --git a/gcc/read-rtl.c b/gcc/read-rtl.c
index 30c2fb69484..027ad91acc8 100644
--- a/gcc/read-rtl.c
+++ b/gcc/read-rtl.c
@@ -59,8 +59,6 @@ struct mapping {
/* Vector definitions for the above. */
typedef struct mapping *mapping_ptr;
-DEF_VEC_P (mapping_ptr);
-DEF_VEC_ALLOC_P (mapping_ptr, heap);
/* A structure for abstracting the common parts of iterators. */
struct iterator_group {
@@ -87,8 +85,6 @@ struct iterator_use {
/* Vector definitions for the above. */
typedef struct iterator_use iterator_use;
-DEF_VEC_O (iterator_use);
-DEF_VEC_ALLOC_O (iterator_use, heap);
/* Records one use of an attribute (the "<[iterator:]attribute>" syntax)
in a non-string rtx field. */
@@ -105,8 +101,6 @@ struct attribute_use {
/* Vector definitions for the above. */
typedef struct attribute_use attribute_use;
-DEF_VEC_O (attribute_use);
-DEF_VEC_ALLOC_O (attribute_use, heap);
static void validate_const_int (const char *);
static rtx read_rtx_code (const char *);
@@ -117,13 +111,13 @@ static rtx read_rtx_variadic (rtx);
static struct iterator_group modes, codes, ints;
/* All iterators used in the current rtx. */
-static VEC (mapping_ptr, heap) *current_iterators;
+static vec<mapping_ptr> current_iterators;
/* The list of all iterator uses in the current rtx. */
-static VEC (iterator_use, heap) *iterator_uses;
+static vec<iterator_use> iterator_uses;
/* The list of all attribute uses in the current rtx. */
-static VEC (attribute_use, heap) *attribute_uses;
+static vec<attribute_use> attribute_uses;
/* Implementations of the iterator_group callbacks for modes. */
@@ -211,7 +205,7 @@ map_attr_string (const char *p)
attr++;
}
- FOR_EACH_VEC_ELT (mapping_ptr, current_iterators, i, iterator)
+ FOR_EACH_VEC_ELT (current_iterators, i, iterator)
{
/* If an iterator name was specified, check that it matches. */
if (iterator_name_len >= 0
@@ -372,7 +366,7 @@ apply_attribute_uses (void)
attribute_use *ause;
unsigned int i;
- FOR_EACH_VEC_ELT (attribute_use, attribute_uses, i, ause)
+ FOR_EACH_VEC_ELT (attribute_uses, i, ause)
{
v = map_attr_string (ause->value);
if (!v)
@@ -392,7 +386,7 @@ add_current_iterators (void **slot, void *data ATTRIBUTE_UNUSED)
iterator = (struct mapping *) *slot;
if (iterator->current_value)
- VEC_safe_push (mapping_ptr, heap, current_iterators, iterator);
+ current_iterators.safe_push (iterator);
return 1;
}
@@ -409,7 +403,7 @@ apply_iterators (rtx original, rtx *queue)
struct map_value *v;
rtx x;
- if (VEC_empty (iterator_use, iterator_uses))
+ if (iterator_uses.is_empty ())
{
/* Raise an error if any attributes were used. */
apply_attribute_uses ();
@@ -419,12 +413,12 @@ apply_iterators (rtx original, rtx *queue)
}
/* Clear out the iterators from the previous run. */
- FOR_EACH_VEC_ELT (mapping_ptr, current_iterators, i, iterator)
+ FOR_EACH_VEC_ELT (current_iterators, i, iterator)
iterator->current_value = NULL;
- VEC_truncate (mapping_ptr, current_iterators, 0);
+ current_iterators.truncate (0);
/* Mark the iterators that we need this time. */
- FOR_EACH_VEC_ELT (iterator_use, iterator_uses, i, iuse)
+ FOR_EACH_VEC_ELT (iterator_uses, i, iuse)
iuse->iterator->current_value = iuse->iterator->values;
/* Get the list of iterators that are in use, preserving the
@@ -432,14 +426,14 @@ apply_iterators (rtx original, rtx *queue)
htab_traverse (modes.iterators, add_current_iterators, NULL);
htab_traverse (codes.iterators, add_current_iterators, NULL);
htab_traverse (ints.iterators, add_current_iterators, NULL);
- gcc_assert (!VEC_empty (mapping_ptr, current_iterators));
+ gcc_assert (!current_iterators.is_empty ());
for (;;)
{
/* Apply the current iterator values. Accumulate a condition to
say when the resulting rtx can be used. */
condition = NULL;
- FOR_EACH_VEC_ELT (iterator_use, iterator_uses, i, iuse)
+ FOR_EACH_VEC_ELT (iterator_uses, i, iuse)
{
v = iuse->iterator->current_value;
iuse->iterator->group->apply_iterator (iuse->ptr, v->number);
@@ -456,13 +450,13 @@ apply_iterators (rtx original, rtx *queue)
/* Lexicographically increment the iterator value sequence.
That is, cycle through iterator values, starting from the right,
and stopping when one of them doesn't wrap around. */
- i = VEC_length (mapping_ptr, current_iterators);
+ i = current_iterators.length ();
for (;;)
{
if (i == 0)
return;
i--;
- iterator = VEC_index (mapping_ptr, current_iterators, i);
+ iterator = current_iterators[i];
iterator->current_value = iterator->current_value->next;
if (iterator->current_value)
break;
@@ -685,7 +679,7 @@ static void
record_iterator_use (struct mapping *iterator, void *ptr)
{
struct iterator_use iuse = {iterator, ptr};
- VEC_safe_push (iterator_use, heap, iterator_uses, iuse);
+ iterator_uses.safe_push (iuse);
}
/* Record that PTR uses attribute VALUE, which must match a built-in
@@ -696,7 +690,7 @@ record_attribute_use (struct iterator_group *group, void *ptr,
const char *value)
{
struct attribute_use ause = {group, value, ptr};
- VEC_safe_push (attribute_use, heap, attribute_uses, ause);
+ attribute_uses.safe_push (ause);
}
/* Interpret NAME as either a built-in value, iterator or attribute
@@ -858,8 +852,8 @@ read_rtx (const char *rtx_name, rtx *x)
}
apply_iterators (read_rtx_code (rtx_name), &queue_head);
- VEC_truncate (iterator_use, iterator_uses, 0);
- VEC_truncate (attribute_use, attribute_uses, 0);
+ iterator_uses.truncate (0);
+ attribute_uses.truncate (0);
*x = queue_head;
return true;
diff --git a/gcc/ree.c b/gcc/ree.c
index 15667524000..0279b3d3d61 100644
--- a/gcc/ree.c
+++ b/gcc/ree.c
@@ -258,8 +258,6 @@ typedef struct GTY(()) ext_cand
rtx insn;
} ext_cand;
-DEF_VEC_O(ext_cand);
-DEF_VEC_ALLOC_O(ext_cand, heap);
static int max_insn_uid;
@@ -407,7 +405,7 @@ transform_ifelse (ext_cand *cand, rtx def_insn)
of the definitions onto DEST. */
static struct df_link *
-get_defs (rtx insn, rtx reg, VEC (rtx,heap) **dest)
+get_defs (rtx insn, rtx reg, vec<rtx> *dest)
{
df_ref reg_info, *uses;
struct df_link *ref_chain, *ref_link;
@@ -438,7 +436,7 @@ get_defs (rtx insn, rtx reg, VEC (rtx,heap) **dest)
if (dest)
for (ref_link = ref_chain; ref_link; ref_link = ref_link->next)
- VEC_safe_push (rtx, heap, *dest, DF_REF_INSN (ref_link->ref));
+ dest->safe_push (DF_REF_INSN (ref_link->ref));
return ref_chain;
}
@@ -492,13 +490,13 @@ struct ext_modified
/* Vectors used by combine_reaching_defs and its helpers. */
typedef struct ext_state
{
- /* In order to avoid constant VEC_alloc/VEC_free, we keep these
+ /* In order to avoid constant alloc/free, we keep these
4 vectors live through the entire find_and_remove_re and just
- VEC_truncate them each time. */
- VEC (rtx, heap) *defs_list;
- VEC (rtx, heap) *copies_list;
- VEC (rtx, heap) *modified_list;
- VEC (rtx, heap) *work_list;
+ truncate them each time. */
+ vec<rtx> defs_list;
+ vec<rtx> copies_list;
+ vec<rtx> modified_list;
+ vec<rtx> work_list;
/* For instructions that have been successfully modified, this is
the original mode from which the insn is extending and
@@ -526,7 +524,7 @@ make_defs_and_copies_lists (rtx extend_insn, const_rtx set_pat,
bool *is_insn_visited;
bool ret = true;
- VEC_truncate (rtx, state->work_list, 0);
+ state->work_list.truncate (0);
/* Initialize the work list. */
if (!get_defs (extend_insn, src_reg, &state->work_list))
@@ -535,9 +533,9 @@ make_defs_and_copies_lists (rtx extend_insn, const_rtx set_pat,
is_insn_visited = XCNEWVEC (bool, max_insn_uid);
/* Perform transitive closure for conditional copies. */
- while (!VEC_empty (rtx, state->work_list))
+ while (!state->work_list.is_empty ())
{
- rtx def_insn = VEC_pop (rtx, state->work_list);
+ rtx def_insn = state->work_list.pop ();
rtx reg1, reg2;
gcc_assert (INSN_UID (def_insn) < max_insn_uid);
@@ -549,7 +547,7 @@ make_defs_and_copies_lists (rtx extend_insn, const_rtx set_pat,
if (is_cond_copy_insn (def_insn, &reg1, &reg2))
{
/* Push it onto the copy list first. */
- VEC_safe_push (rtx, heap, state->copies_list, def_insn);
+ state->copies_list.safe_push (def_insn);
/* Now perform the transitive closure. */
if (!get_defs (def_insn, reg1, &state->work_list)
@@ -560,7 +558,7 @@ make_defs_and_copies_lists (rtx extend_insn, const_rtx set_pat,
}
}
else
- VEC_safe_push (rtx, heap, state->defs_list, def_insn);
+ state->defs_list.safe_push (def_insn);
}
XDELETEVEC (is_insn_visited);
@@ -655,8 +653,8 @@ combine_reaching_defs (ext_cand *cand, const_rtx set_pat, ext_state *state)
int defs_ix;
bool outcome;
- VEC_truncate (rtx, state->defs_list, 0);
- VEC_truncate (rtx, state->copies_list, 0);
+ state->defs_list.truncate (0);
+ state->copies_list.truncate (0);
outcome = make_defs_and_copies_lists (cand->insn, set_pat, state);
@@ -685,11 +683,11 @@ combine_reaching_defs (ext_cand *cand, const_rtx set_pat, ext_state *state)
/* Go through the defs vector and try to merge all the definitions
in this vector. */
- VEC_truncate (rtx, state->modified_list, 0);
- FOR_EACH_VEC_ELT (rtx, state->defs_list, defs_ix, def_insn)
+ state->modified_list.truncate (0);
+ FOR_EACH_VEC_ELT (state->defs_list, defs_ix, def_insn)
{
if (merge_def_and_ext (cand, def_insn, state))
- VEC_safe_push (rtx, heap, state->modified_list, def_insn);
+ state->modified_list.safe_push (def_insn);
else
{
merge_successful = false;
@@ -701,10 +699,10 @@ combine_reaching_defs (ext_cand *cand, const_rtx set_pat, ext_state *state)
the copies in this vector. */
if (merge_successful)
{
- FOR_EACH_VEC_ELT (rtx, state->copies_list, i, def_insn)
+ FOR_EACH_VEC_ELT (state->copies_list, i, def_insn)
{
if (transform_ifelse (cand, def_insn))
- VEC_safe_push (rtx, heap, state->modified_list, def_insn);
+ state->modified_list.safe_push (def_insn);
else
{
merge_successful = false;
@@ -725,7 +723,7 @@ combine_reaching_defs (ext_cand *cand, const_rtx set_pat, ext_state *state)
if (dump_file)
fprintf (dump_file, "All merges were successful.\n");
- FOR_EACH_VEC_ELT (rtx, state->modified_list, i, def_insn)
+ FOR_EACH_VEC_ELT (state->modified_list, i, def_insn)
if (state->modified[INSN_UID (def_insn)].kind == EXT_MODIFIED_NONE)
state->modified[INSN_UID (def_insn)].kind
= (cand->code == ZERO_EXTEND
@@ -742,7 +740,7 @@ combine_reaching_defs (ext_cand *cand, const_rtx set_pat, ext_state *state)
{
fprintf (dump_file,
"Merge cancelled, non-mergeable definitions:\n");
- FOR_EACH_VEC_ELT (rtx, state->modified_list, i, def_insn)
+ FOR_EACH_VEC_ELT (state->modified_list, i, def_insn)
print_rtl_single (dump_file, def_insn);
}
}
@@ -760,7 +758,7 @@ combine_reaching_defs (ext_cand *cand, const_rtx set_pat, ext_state *state)
static void
add_removable_extension (const_rtx expr, rtx insn,
- VEC (ext_cand, heap) **insn_list,
+ vec<ext_cand> *insn_list,
unsigned *def_map)
{
enum rtx_code code;
@@ -802,7 +800,7 @@ add_removable_extension (const_rtx expr, rtx insn,
different extension. FIXME: this obviously can be improved. */
for (def = defs; def; def = def->next)
if ((idx = def_map[INSN_UID(DF_REF_INSN (def->ref))])
- && (cand = &VEC_index (ext_cand, *insn_list, idx - 1))
+ && (cand = &(*insn_list)[idx - 1])
&& cand->code != code)
{
if (dump_file)
@@ -817,8 +815,8 @@ add_removable_extension (const_rtx expr, rtx insn,
/* Then add the candidate to the list and insert the reaching definitions
into the definition map. */
ext_cand e = {expr, code, mode, insn};
- VEC_safe_push (ext_cand, heap, *insn_list, e);
- idx = VEC_length (ext_cand, *insn_list);
+ insn_list->safe_push (e);
+ idx = insn_list->length ();
for (def = defs; def; def = def->next)
def_map[INSN_UID(DF_REF_INSN (def->ref))] = idx;
@@ -828,10 +826,10 @@ add_removable_extension (const_rtx expr, rtx insn,
/* Traverse the instruction stream looking for extensions and return the
list of candidates. */
-static VEC (ext_cand, heap)*
+static vec<ext_cand>
find_removable_extensions (void)
{
- VEC (ext_cand, heap) *insn_list = NULL;
+ vec<ext_cand> insn_list = vec<ext_cand>();
basic_block bb;
rtx insn, set;
unsigned *def_map = XCNEWVEC (unsigned, max_insn_uid);
@@ -862,8 +860,8 @@ find_and_remove_re (void)
ext_cand *curr_cand;
rtx curr_insn = NULL_RTX;
int num_re_opportunities = 0, num_realized = 0, i;
- VEC (ext_cand, heap) *reinsn_list;
- VEC (rtx, heap) *reinsn_del_list;
+ vec<ext_cand> reinsn_list;
+ vec<rtx> reinsn_del_list;
ext_state state;
/* Construct DU chain to get all reaching definitions of each
@@ -874,18 +872,18 @@ find_and_remove_re (void)
df_set_flags (DF_DEFER_INSN_RESCAN);
max_insn_uid = get_max_uid ();
- reinsn_del_list = NULL;
+ reinsn_del_list.create (0);
reinsn_list = find_removable_extensions ();
- state.defs_list = NULL;
- state.copies_list = NULL;
- state.modified_list = NULL;
- state.work_list = NULL;
- if (VEC_empty (ext_cand, reinsn_list))
+ state.defs_list.create (0);
+ state.copies_list.create (0);
+ state.modified_list.create (0);
+ state.work_list.create (0);
+ if (reinsn_list.is_empty ())
state.modified = NULL;
else
state.modified = XCNEWVEC (struct ext_modified, max_insn_uid);
- FOR_EACH_VEC_ELT (ext_cand, reinsn_list, i, curr_cand)
+ FOR_EACH_VEC_ELT (reinsn_list, i, curr_cand)
{
num_re_opportunities++;
@@ -901,21 +899,21 @@ find_and_remove_re (void)
if (dump_file)
fprintf (dump_file, "Eliminated the extension.\n");
num_realized++;
- VEC_safe_push (rtx, heap, reinsn_del_list, curr_cand->insn);
+ reinsn_del_list.safe_push (curr_cand->insn);
state.modified[INSN_UID (curr_cand->insn)].deleted = 1;
}
}
/* Delete all useless extensions here in one sweep. */
- FOR_EACH_VEC_ELT (rtx, reinsn_del_list, i, curr_insn)
+ FOR_EACH_VEC_ELT (reinsn_del_list, i, curr_insn)
delete_insn (curr_insn);
- VEC_free (ext_cand, heap, reinsn_list);
- VEC_free (rtx, heap, reinsn_del_list);
- VEC_free (rtx, heap, state.defs_list);
- VEC_free (rtx, heap, state.copies_list);
- VEC_free (rtx, heap, state.modified_list);
- VEC_free (rtx, heap, state.work_list);
+ reinsn_list.release ();
+ reinsn_del_list.release ();
+ state.defs_list.release ();
+ state.copies_list.release ();
+ state.modified_list.release ();
+ state.work_list.release ();
XDELETEVEC (state.modified);
if (dump_file && num_re_opportunities > 0)
diff --git a/gcc/reg-stack.c b/gcc/reg-stack.c
index c23c770af93..0f48000b5e1 100644
--- a/gcc/reg-stack.c
+++ b/gcc/reg-stack.c
@@ -170,7 +170,6 @@
#include "tree-pass.h"
#include "target.h"
#include "df.h"
-#include "vecprim.h"
#include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
#ifdef STACK_REGS
@@ -181,7 +180,7 @@
Indexed by insn UIDs. A value of zero is uninitialized, one indicates
the insn uses stack registers, two indicates the insn does not use
stack registers. */
-static VEC(char,heap) *stack_regs_mentioned_data;
+static vec<char> stack_regs_mentioned_data;
#define REG_STACK_SIZE (LAST_STACK_REG - FIRST_STACK_REG + 1)
@@ -306,25 +305,25 @@ stack_regs_mentioned (const_rtx insn)
unsigned int uid, max;
int test;
- if (! INSN_P (insn) || !stack_regs_mentioned_data)
+ if (! INSN_P (insn) || !stack_regs_mentioned_data.exists ())
return 0;
uid = INSN_UID (insn);
- max = VEC_length (char, stack_regs_mentioned_data);
+ max = stack_regs_mentioned_data.length ();
if (uid >= max)
{
/* Allocate some extra size to avoid too many reallocs, but
do not grow too quickly. */
max = uid + uid / 20 + 1;
- VEC_safe_grow_cleared (char, heap, stack_regs_mentioned_data, max);
+ stack_regs_mentioned_data.safe_grow_cleared (max);
}
- test = VEC_index (char, stack_regs_mentioned_data, uid);
+ test = stack_regs_mentioned_data[uid];
if (test == 0)
{
/* This insn has yet to be examined. Do so now. */
test = stack_regs_mentioned_p (PATTERN (insn)) ? 1 : 2;
- VEC_replace (char, stack_regs_mentioned_data, uid, test);
+ stack_regs_mentioned_data[uid] = test;
}
return test == 1;
@@ -3199,8 +3198,7 @@ reg_to_stack (void)
int max_uid;
/* Clean up previous run. */
- if (stack_regs_mentioned_data != NULL)
- VEC_free (char, heap, stack_regs_mentioned_data);
+ stack_regs_mentioned_data.release ();
/* See if there is something to do. Flow analysis is quite
expensive so we might save some compilation time. */
@@ -3279,8 +3277,8 @@ reg_to_stack (void)
/* Allocate a cache for stack_regs_mentioned. */
max_uid = get_max_uid ();
- stack_regs_mentioned_data = VEC_alloc (char, heap, max_uid + 1);
- memset (VEC_address (char, stack_regs_mentioned_data),
+ stack_regs_mentioned_data.create (max_uid + 1);
+ memset (stack_regs_mentioned_data.address (),
0, sizeof (char) * (max_uid + 1));
convert_regs ();
diff --git a/gcc/regrename.c b/gcc/regrename.c
index 10787a23db1..b15deee9b7a 100644
--- a/gcc/regrename.c
+++ b/gcc/regrename.c
@@ -106,7 +106,7 @@ static struct obstack rename_obstack;
/* If nonnull, the code calling into the register renamer requested
information about insn operands, and we store it here. */
-VEC(insn_rr_info, heap) *insn_rr;
+vec<insn_rr_info> insn_rr;
static void scan_rtx (rtx, rtx *, enum reg_class, enum scan_actions,
enum op_type);
@@ -116,7 +116,7 @@ static bool build_def_use (basic_block);
static unsigned current_id;
/* A mapping of unique id numbers to chains. */
-static VEC(du_head_p, heap) *id_to_chain;
+static vec<du_head_p> id_to_chain;
/* List of currently open chains. */
static struct du_head *open_chains;
@@ -142,12 +142,12 @@ static operand_rr_info *cur_operand;
du_head_p
regrename_chain_from_id (unsigned int id)
{
- du_head_p first_chain = VEC_index (du_head_p, id_to_chain, id);
+ du_head_p first_chain = id_to_chain[id];
du_head_p chain = first_chain;
while (chain->id != id)
{
id = chain->id;
- chain = VEC_index (du_head_p, id_to_chain, id);
+ chain = id_to_chain[id];
}
first_chain->id = id;
return chain;
@@ -160,7 +160,7 @@ dump_def_use_chain (int from)
{
du_head_p head;
int i;
- FOR_EACH_VEC_ELT_FROM (du_head_p, id_to_chain, i, head, from)
+ FOR_EACH_VEC_ELT_FROM (id_to_chain, i, head, from)
{
struct du_chain *this_du = head->first;
@@ -182,10 +182,10 @@ free_chain_data (void)
{
int i;
du_head_p ptr;
- for (i = 0; VEC_iterate(du_head_p, id_to_chain, i, ptr); i++)
+ for (i = 0; id_to_chain.iterate (i, &ptr); i++)
bitmap_clear (&ptr->conflicts);
- VEC_free (du_head_p, heap, id_to_chain);
+ id_to_chain.release ();
}
/* Walk all chains starting with CHAINS and record that they conflict with
@@ -232,7 +232,7 @@ create_new_chain (unsigned this_regno, unsigned this_nregs, rtx *loc,
head->need_caller_save_reg = 0;
head->cannot_rename = 0;
- VEC_safe_push (du_head_p, heap, id_to_chain, head);
+ id_to_chain.safe_push (head);
head->id = current_id++;
bitmap_initialize (&head->conflicts, &bitmap_default_obstack);
@@ -429,7 +429,7 @@ rename_chains (void)
#endif
}
- FOR_EACH_VEC_ELT (du_head_p, id_to_chain, i, this_head)
+ FOR_EACH_VEC_ELT (id_to_chain, i, this_head)
{
int best_new_reg;
int n_uses;
@@ -687,7 +687,7 @@ regrename_analyze (bitmap bb_mask)
}
current_id = 0;
- id_to_chain = VEC_alloc (du_head_p, heap, 0);
+ id_to_chain.create (0);
bitmap_initialize (&open_chains_set, &bitmap_default_obstack);
/* The order in which we visit blocks ensures that whenever
@@ -702,7 +702,7 @@ regrename_analyze (bitmap bb_mask)
bool success;
edge e;
edge_iterator ei;
- int old_length = VEC_length (du_head_p, id_to_chain);
+ int old_length = id_to_chain.length ();
this_info = (struct bb_rename_info *) bb1->aux;
if (this_info == NULL)
@@ -719,17 +719,16 @@ regrename_analyze (bitmap bb_mask)
if (dump_file)
fprintf (dump_file, "failed\n");
bb1->aux = NULL;
- VEC_truncate (du_head_p, id_to_chain, old_length);
+ id_to_chain.truncate (old_length);
current_id = old_length;
bitmap_clear (&this_info->incoming_open_chains_set);
open_chains = NULL;
- if (insn_rr != NULL)
+ if (insn_rr.exists ())
{
rtx insn;
FOR_BB_INSNS (bb1, insn)
{
- insn_rr_info *p = &VEC_index (insn_rr_info, insn_rr,
- INSN_UID (insn));
+ insn_rr_info *p = &insn_rr[INSN_UID (insn)];
p->op_info = NULL;
}
}
@@ -1578,9 +1577,9 @@ build_def_use (basic_block bb)
n_ops = recog_data.n_operands;
untracked_operands = 0;
- if (insn_rr != NULL)
+ if (insn_rr.exists ())
{
- insn_info = &VEC_index (insn_rr_info, insn_rr, INSN_UID (insn));
+ insn_info = &insn_rr[INSN_UID (insn)];
insn_info->op_info = XOBNEWVEC (&rename_obstack, operand_rr_info,
recog_data.n_operands);
memset (insn_info->op_info, 0,
@@ -1796,16 +1795,16 @@ void
regrename_init (bool insn_info)
{
gcc_obstack_init (&rename_obstack);
- insn_rr = NULL;
+ insn_rr.create (0);
if (insn_info)
- VEC_safe_grow_cleared (insn_rr_info, heap, insn_rr, get_max_uid ());
+ insn_rr.safe_grow_cleared (get_max_uid ());
}
/* Free all global data used by the register renamer. */
void
regrename_finish (void)
{
- VEC_free (insn_rr_info, heap, insn_rr);
+ insn_rr.release ();
free_chain_data ();
obstack_free (&rename_obstack, NULL);
}
diff --git a/gcc/regrename.h b/gcc/regrename.h
index f3969a14fc2..0048d78fb1d 100644
--- a/gcc/regrename.h
+++ b/gcc/regrename.h
@@ -49,8 +49,6 @@ struct du_head
};
typedef struct du_head *du_head_p;
-DEF_VEC_P (du_head_p);
-DEF_VEC_ALLOC_P (du_head_p, heap);
/* This struct describes a single occurrence of a register. */
struct du_chain
@@ -85,10 +83,8 @@ typedef struct
operand_rr_info *op_info;
} insn_rr_info;
-DEF_VEC_O (insn_rr_info);
-DEF_VEC_ALLOC_O (insn_rr_info, heap);
-extern VEC(insn_rr_info, heap) *insn_rr;
+extern vec<insn_rr_info> insn_rr;
extern void regrename_init (bool);
extern void regrename_finish (void);
diff --git a/gcc/reload.c b/gcc/reload.c
index aea20727068..5fd43a3dc13 100644
--- a/gcc/reload.c
+++ b/gcc/reload.c
@@ -6313,9 +6313,9 @@ subst_reloads (rtx insn)
for (check_regno = 0; check_regno < max_regno; check_regno++)
{
#define CHECK_MODF(ARRAY) \
- gcc_assert (!VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY \
+ gcc_assert (!reg_equivs[check_regno].ARRAY \
|| !loc_mentioned_in_p (r->where, \
- VEC_index (reg_equivs_t, reg_equivs, check_regno).ARRAY))
+ reg_equivs[check_regno].ARRAY))
CHECK_MODF (equiv_constant);
CHECK_MODF (equiv_memory_loc);
diff --git a/gcc/reload.h b/gcc/reload.h
index a672ddc45db..05ee881bcf9 100644
--- a/gcc/reload.h
+++ b/gcc/reload.h
@@ -243,23 +243,21 @@ typedef struct reg_equivs
} reg_equivs_t;
#define reg_equiv_constant(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT)).constant
+ (*reg_equivs)[(ELT)].constant
#define reg_equiv_invariant(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT)).invariant
+ (*reg_equivs)[(ELT)].invariant
#define reg_equiv_memory_loc(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT)).memory_loc
+ (*reg_equivs)[(ELT)].memory_loc
#define reg_equiv_address(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT)).address
+ (*reg_equivs)[(ELT)].address
#define reg_equiv_mem(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT)).mem
+ (*reg_equivs)[(ELT)].mem
#define reg_equiv_alt_mem_list(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT)).alt_mem_list
+ (*reg_equivs)[(ELT)].alt_mem_list
#define reg_equiv_init(ELT) \
- VEC_index (reg_equivs_t, reg_equivs, (ELT)).init
+ (*reg_equivs)[(ELT)].init
-DEF_VEC_O(reg_equivs_t);
-DEF_VEC_ALLOC_O(reg_equivs_t, gc);
-extern VEC(reg_equivs_t,gc) *reg_equivs;
+extern vec<reg_equivs_t, va_gc> *reg_equivs;
/* All the "earlyclobber" operands of the current insn
are recorded here. */
diff --git a/gcc/reload1.c b/gcc/reload1.c
index 03bd7247795..0c9468f541b 100644
--- a/gcc/reload1.c
+++ b/gcc/reload1.c
@@ -328,7 +328,7 @@ static int first_label_num;
static char *offsets_known_at;
static HOST_WIDE_INT (*offsets_at)[NUM_ELIMINABLE_REGS];
-VEC(reg_equivs_t,gc) *reg_equivs;
+vec<reg_equivs_t, va_gc> *reg_equivs;
/* Stack of addresses where an rtx has been changed. We can undo the
changes by popping items off the stack and restoring the original
@@ -341,9 +341,7 @@ VEC(reg_equivs_t,gc) *reg_equivs;
rtx expression would be changed. See PR 42431. */
typedef rtx *rtx_p;
-DEF_VEC_P(rtx_p);
-DEF_VEC_ALLOC_P(rtx_p,heap);
-static VEC(rtx_p,heap) *substitute_stack;
+static vec<rtx_p> substitute_stack;
/* Number of labels in the current function. */
@@ -656,15 +654,15 @@ has_nonexceptional_receiver (void)
void
grow_reg_equivs (void)
{
- int old_size = VEC_length (reg_equivs_t, reg_equivs);
+ int old_size = vec_safe_length (reg_equivs);
int max_regno = max_reg_num ();
int i;
reg_equivs_t ze;
memset (&ze, 0, sizeof (reg_equivs_t));
- VEC_reserve (reg_equivs_t, gc, reg_equivs, max_regno);
+ vec_safe_reserve (reg_equivs, max_regno);
for (i = old_size; i < max_regno; i++)
- VEC_quick_insert (reg_equivs_t, reg_equivs, i, ze);
+ reg_equivs->quick_insert (i, ze);
}
@@ -1323,7 +1321,7 @@ reload (rtx first, int global)
REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = BITS_PER_UNIT;
#endif
- VEC_free (rtx_p, heap, substitute_stack);
+ substitute_stack.release ();
gcc_assert (bitmap_empty_p (&spilled_pseudos));
@@ -3007,7 +3005,7 @@ elimination_effects (rtx x, enum machine_mode mem_mode)
}
else if (reg_renumber[regno] < 0
- && reg_equivs != 0
+ && reg_equivs
&& reg_equiv_constant (regno)
&& ! function_invariant_p (reg_equiv_constant (regno)))
elimination_effects (reg_equiv_constant (regno), mem_mode);
@@ -3078,7 +3076,7 @@ elimination_effects (rtx x, enum machine_mode mem_mode)
if (REG_P (SUBREG_REG (x))
&& (GET_MODE_SIZE (GET_MODE (x))
<= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
- && reg_equivs != 0
+ && reg_equivs
&& reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
return;
@@ -4229,7 +4227,6 @@ free_reg_equiv (void)
{
int i;
-
free (offsets_known_at);
free (offsets_at);
offsets_at = 0;
@@ -4238,9 +4235,7 @@ free_reg_equiv (void)
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (reg_equiv_alt_mem_list (i))
free_EXPR_LIST_list (&reg_equiv_alt_mem_list (i));
- VEC_free (reg_equivs_t, gc, reg_equivs);
- reg_equivs = NULL;
-
+ vec_free (reg_equivs);
}
/* Kick all pseudos out of hard register REGNO.
@@ -5589,7 +5584,7 @@ substitute (rtx *where, const_rtx what, rtx repl)
if (*where == what || rtx_equal_p (*where, what))
{
/* Record the location of the changed rtx. */
- VEC_safe_push (rtx_p, heap, substitute_stack, where);
+ substitute_stack.safe_push (where);
*where = repl;
return;
}
@@ -5688,9 +5683,9 @@ gen_reload_chain_without_interm_reg_p (int r1, int r2)
}
/* Restore the original value at each changed address within R1. */
- while (!VEC_empty (rtx_p, substitute_stack))
+ while (!substitute_stack.is_empty ())
{
- rtx *where = VEC_pop (rtx_p, substitute_stack);
+ rtx *where = substitute_stack.pop ();
*where = rld[r2].in;
}
diff --git a/gcc/rtl.h b/gcc/rtl.h
index 16004e2c107..5617dc145d1 100644
--- a/gcc/rtl.h
+++ b/gcc/rtl.h
@@ -27,7 +27,6 @@ along with GCC; see the file COPYING3. If not see
#include "input.h"
#include "real.h"
#include "vec.h"
-#include "vecir.h"
#include "fixed-value.h"
#include "alias.h"
#include "hashtab.h"
@@ -239,7 +238,7 @@ struct GTY(()) object_block {
!SYMBOL_REF_ANCHOR_P (X)
SYMBOL_REF_BLOCK (X) == [address of this structure]
SYMBOL_REF_BLOCK_OFFSET (X) >= 0. */
- VEC(rtx,gc) *objects;
+ vec<rtx, va_gc> *objects;
/* All the anchor SYMBOL_REFs used to address these objects, sorted
in order of increasing offset, and then increasing TLS model.
@@ -249,7 +248,7 @@ struct GTY(()) object_block {
SYMBOL_REF_ANCHOR_P (X)
SYMBOL_REF_BLOCK (X) == [address of this structure]
SYMBOL_REF_BLOCK_OFFSET (X) >= 0. */
- VEC(rtx,gc) *anchors;
+ vec<rtx, va_gc> *anchors;
};
/* RTL expression ("rtx"). */
diff --git a/gcc/sched-deps.c b/gcc/sched-deps.c
index 78c5269d603..52e154132fa 100644
--- a/gcc/sched-deps.c
+++ b/gcc/sched-deps.c
@@ -58,7 +58,8 @@ along with GCC; see the file COPYING3. If not see
struct sched_deps_info_def *sched_deps_info;
/* The data is specific to the Haifa scheduler. */
-VEC(haifa_deps_insn_data_def, heap) *h_d_i_d = NULL;
+vec<haifa_deps_insn_data_def>
+ h_d_i_d = vec<haifa_deps_insn_data_def>();
/* Return the major type present in the DS. */
enum reg_note
@@ -3932,12 +3933,9 @@ remove_from_deps (struct deps_desc *deps, rtx insn)
static void
init_deps_data_vector (void)
{
- int reserve = (sched_max_luid + 1
- - VEC_length (haifa_deps_insn_data_def, h_d_i_d));
- if (reserve > 0
- && ! VEC_space (haifa_deps_insn_data_def, h_d_i_d, reserve))
- VEC_safe_grow_cleared (haifa_deps_insn_data_def, heap, h_d_i_d,
- 3 * sched_max_luid / 2);
+ int reserve = (sched_max_luid + 1 - h_d_i_d.length ());
+ if (reserve > 0 && ! h_d_i_d.space (reserve))
+ h_d_i_d.safe_grow_cleared (3 * sched_max_luid / 2);
}
/* If it is profitable to use them, initialize or extend (depending on
@@ -4024,7 +4022,7 @@ sched_deps_finish (void)
free_alloc_pool_if_empty (&dl_pool);
gcc_assert (dn_pool == NULL && dl_pool == NULL);
- VEC_free (haifa_deps_insn_data_def, heap, h_d_i_d);
+ h_d_i_d.release ();
cache_size = 0;
if (true_dependency_cache)
diff --git a/gcc/sched-int.h b/gcc/sched-int.h
index d3a15aa90a7..1a80f5272f8 100644
--- a/gcc/sched-int.h
+++ b/gcc/sched-int.h
@@ -30,9 +30,6 @@ along with GCC; see the file COPYING3. If not see
#include "df.h"
#include "basic-block.h"
-/* For VEC (int, heap). */
-#include "vecprim.h"
-
/* Identificator of a scheduler pass. */
enum sched_pass_id_t { SCHED_PASS_UNKNOWN, SCHED_RGN_PASS, SCHED_EBB_PASS,
SCHED_SMS_PASS, SCHED_SEL_PASS };
@@ -45,9 +42,9 @@ enum sched_pressure_algorithm
SCHED_PRESSURE_MODEL
};
-typedef VEC (basic_block, heap) *bb_vec_t;
-typedef VEC (rtx, heap) *insn_vec_t;
-typedef VEC (rtx, heap) *rtx_vec_t;
+typedef vec<basic_block> bb_vec_t;
+typedef vec<rtx> insn_vec_t;
+typedef vec<rtx> rtx_vec_t;
extern void sched_init_bbs (void);
@@ -115,12 +112,12 @@ extern int sched_emulate_haifa_p;
/* Mapping from INSN_UID to INSN_LUID. In the end all other per insn data
structures should be indexed by luid. */
-extern VEC (int, heap) *sched_luids;
-#define INSN_LUID(INSN) (VEC_index (int, sched_luids, INSN_UID (INSN)))
-#define LUID_BY_UID(UID) (VEC_index (int, sched_luids, UID))
+extern vec<int> sched_luids;
+#define INSN_LUID(INSN) (sched_luids[INSN_UID (INSN)])
+#define LUID_BY_UID(UID) (sched_luids[UID])
#define SET_INSN_LUID(INSN, LUID) \
-(VEC_replace (int, sched_luids, INSN_UID (INSN), (LUID)))
+(sched_luids[INSN_UID (INSN)] = (LUID))
/* The highest INSN_LUID. */
extern int sched_max_luid;
@@ -893,12 +890,10 @@ struct _haifa_insn_data
typedef struct _haifa_insn_data haifa_insn_data_def;
typedef haifa_insn_data_def *haifa_insn_data_t;
-DEF_VEC_O (haifa_insn_data_def);
-DEF_VEC_ALLOC_O (haifa_insn_data_def, heap);
-extern VEC(haifa_insn_data_def, heap) *h_i_d;
+extern vec<haifa_insn_data_def> h_i_d;
-#define HID(INSN) (&VEC_index (haifa_insn_data_def, h_i_d, INSN_UID (INSN)))
+#define HID(INSN) (&h_i_d[INSN_UID (INSN)])
/* Accessor macros for h_i_d. There are more in haifa-sched.c and
sched-rgn.c. */
@@ -915,13 +910,10 @@ extern VEC(haifa_insn_data_def, heap) *h_i_d;
typedef struct _haifa_deps_insn_data haifa_deps_insn_data_def;
typedef haifa_deps_insn_data_def *haifa_deps_insn_data_t;
-DEF_VEC_O (haifa_deps_insn_data_def);
-DEF_VEC_ALLOC_O (haifa_deps_insn_data_def, heap);
-extern VEC(haifa_deps_insn_data_def, heap) *h_d_i_d;
+extern vec<haifa_deps_insn_data_def> h_d_i_d;
-#define HDID(INSN) (&VEC_index (haifa_deps_insn_data_def, h_d_i_d, \
- INSN_LUID (INSN)))
+#define HDID(INSN) (&h_d_i_d[INSN_LUID (INSN)])
#define INSN_DEP_COUNT(INSN) (HDID (INSN)->dep_count)
#define HAS_INTERNAL_DEP(INSN) (HDID (INSN)->has_internal_dep)
#define INSN_FORW_DEPS(INSN) (HDID (INSN)->forw_deps)
@@ -933,8 +925,7 @@ extern VEC(haifa_deps_insn_data_def, heap) *h_d_i_d;
#define INSN_REVERSE_COND(INSN) (HDID (INSN)->reverse_cond)
#define INSN_COND_DEPS(INSN) (HDID (INSN)->cond_deps)
#define CANT_MOVE(INSN) (HDID (INSN)->cant_move)
-#define CANT_MOVE_BY_LUID(LUID) (VEC_index (haifa_deps_insn_data_def, h_d_i_d, \
- LUID).cant_move)
+#define CANT_MOVE_BY_LUID(LUID) (h_d_i_d[LUID].cant_move)
#define INSN_PRIORITY(INSN) (HID (INSN)->priority)
diff --git a/gcc/sdbout.c b/gcc/sdbout.c
index 5413c6cd153..a1f4845ec88 100644
--- a/gcc/sdbout.c
+++ b/gcc/sdbout.c
@@ -58,7 +58,7 @@ static GTY(()) int unnamed_struct_number;
/* Declarations whose debug info was deferred till end of compilation. */
-static GTY(()) VEC(tree,gc) *deferred_global_decls;
+static GTY(()) vec<tree, va_gc> *deferred_global_decls;
/* The C front end may call sdbout_symbol before sdbout_init runs.
We save all such decls in this list and output them when we get
@@ -1427,7 +1427,7 @@ sdbout_global_decl (tree decl)
if (!DECL_INITIAL (decl) || !TREE_PUBLIC (decl))
sdbout_symbol (decl, 0);
else
- VEC_safe_push (tree, gc, deferred_global_decls, decl);
+ vec_safe_push (deferred_global_decls, decl);
/* Output COFF information for non-global file-scope initialized
variables. */
@@ -1445,7 +1445,7 @@ sdbout_finish (const char *main_filename ATTRIBUTE_UNUSED)
size_t i;
tree decl;
- FOR_EACH_VEC_ELT (tree, deferred_global_decls, i, decl)
+ FOR_EACH_VEC_SAFE_ELT (deferred_global_decls, i, decl)
sdbout_symbol (decl, 0);
}
@@ -1621,7 +1621,7 @@ sdbout_init (const char *input_file_name ATTRIBUTE_UNUSED)
{
tree t;
- deferred_global_decls = VEC_alloc (tree, gc, 12);
+ vec_alloc (deferred_global_decls, 12);
/* Emit debug information which was queued by sdbout_symbol before
we got here. */
diff --git a/gcc/sel-sched-dump.c b/gcc/sel-sched-dump.c
index 999c50c3a5a..9d9accb96b7 100644
--- a/gcc/sel-sched-dump.c
+++ b/gcc/sel-sched-dump.c
@@ -465,7 +465,7 @@ dump_insn_vector (rtx_vec_t succs)
int i;
rtx succ;
- FOR_EACH_VEC_ELT (rtx, succs, i, succ)
+ FOR_EACH_VEC_ELT (succs, i, succ)
if (succ)
dump_insn (succ);
else
@@ -502,7 +502,7 @@ sel_print_insn (const_rtx insn, int aligned ATTRIBUTE_UNUSED)
/* '+' before insn means it is a new cycle start and it's not been
scheduled yet. '>' - has been scheduled. */
- if (s_i_d && INSN_LUID (insn) > 0)
+ if (s_i_d.exists () && INSN_LUID (insn) > 0)
if (GET_MODE (insn) == TImode)
sprintf (buf, "%s %4d",
INSN_SCHED_TIMES (insn) > 0 ? "> " : "< ",
@@ -606,7 +606,7 @@ sel_dump_cfg_insn (insn_t insn, int flags)
{
int insn_flags = DUMP_INSN_UID | DUMP_INSN_PATTERN;
- if (sched_luids != NULL && INSN_LUID (insn) > 0)
+ if (sched_luids.exists () && INSN_LUID (insn) > 0)
{
if (flags & SEL_DUMP_CFG_INSN_SEQNO)
insn_flags |= DUMP_INSN_SEQNO | DUMP_INSN_SCHED_CYCLE | DUMP_INSN_EXPR;
diff --git a/gcc/sel-sched-ir.c b/gcc/sel-sched-ir.c
index e0239dc3637..e5929299c11 100644
--- a/gcc/sel-sched-ir.c
+++ b/gcc/sel-sched-ir.c
@@ -49,10 +49,12 @@ along with GCC; see the file COPYING3. If not see
#include "sel-sched-dump.h"
/* A vector holding bb info for whole scheduling pass. */
-VEC(sel_global_bb_info_def, heap) *sel_global_bb_info = NULL;
+vec<sel_global_bb_info_def>
+ sel_global_bb_info = vec<sel_global_bb_info_def>();
/* A vector holding bb info. */
-VEC(sel_region_bb_info_def, heap) *sel_region_bb_info = NULL;
+vec<sel_region_bb_info_def>
+ sel_region_bb_info = vec<sel_region_bb_info_def>();
/* A pool for allocating all lists. */
alloc_pool sched_lists_pool;
@@ -68,7 +70,7 @@ struct loop *current_loop_nest;
/* LOOP_NESTS is a vector containing the corresponding loop nest for
each region. */
-static VEC(loop_p, heap) *loop_nests = NULL;
+static vec<loop_p> loop_nests = vec<loop_p>();
/* Saves blocks already in loop regions, indexed by bb->index. */
static sbitmap bbs_in_loop_rgns = NULL;
@@ -147,7 +149,7 @@ static expr_t set_insn_init (expr_t, vinsn_t, int);
static void cfg_preds (basic_block, insn_t **, int *);
static void prepare_insn_expr (insn_t, int);
-static void free_history_vect (VEC (expr_history_def, heap) **);
+static void free_history_vect (vec<expr_history_def> &);
static void move_bb_info (basic_block, basic_block);
static void remove_empty_bb (basic_block, bool);
@@ -261,7 +263,7 @@ init_fence_for_scheduling (fence_t f)
/* Add new fence consisting of INSN and STATE to the list pointed to by LP. */
static void
flist_add (flist_t *lp, insn_t insn, state_t state, deps_t dc, void *tc,
- insn_t last_scheduled_insn, VEC(rtx,gc) *executing_insns,
+ insn_t last_scheduled_insn, vec<rtx, va_gc> *executing_insns,
int *ready_ticks, int ready_ticks_size, insn_t sched_next,
int cycle, int cycle_issued_insns, int issue_more,
bool starts_cycle_p, bool after_stall_p)
@@ -586,7 +588,7 @@ fence_clear (fence_t f)
if (tc != NULL)
delete_target_context (tc);
- VEC_free (rtx, gc, FENCE_EXECUTING_INSNS (f));
+ vec_free (FENCE_EXECUTING_INSNS (f));
free (FENCE_READY_TICKS (f));
FENCE_READY_TICKS (f) = NULL;
}
@@ -636,7 +638,7 @@ init_fences (insn_t old_fence)
static void
merge_fences (fence_t f, insn_t insn,
state_t state, deps_t dc, void *tc,
- rtx last_scheduled_insn, VEC(rtx, gc) *executing_insns,
+ rtx last_scheduled_insn, vec<rtx, va_gc> *executing_insns,
int *ready_ticks, int ready_ticks_size,
rtx sched_next, int cycle, int issue_more, bool after_stall_p)
{
@@ -669,11 +671,11 @@ merge_fences (fence_t f, insn_t insn,
FENCE_LAST_SCHEDULED_INSN (f) = NULL;
FENCE_ISSUE_MORE (f) = issue_rate;
- VEC_free (rtx, gc, executing_insns);
+ vec_free (executing_insns);
free (ready_ticks);
if (FENCE_EXECUTING_INSNS (f))
- VEC_block_remove (rtx, FENCE_EXECUTING_INSNS (f), 0,
- VEC_length (rtx, FENCE_EXECUTING_INSNS (f)));
+ FENCE_EXECUTING_INSNS (f)->block_remove (0,
+ FENCE_EXECUTING_INSNS (f)->length ());
if (FENCE_READY_TICKS (f))
memset (FENCE_READY_TICKS (f), 0, FENCE_READY_TICKS_SIZE (f));
}
@@ -757,13 +759,13 @@ merge_fences (fence_t f, insn_t insn,
{
reset_deps_context (FENCE_DC (f));
delete_deps_context (dc);
- VEC_free (rtx, gc, executing_insns);
+ vec_free (executing_insns);
free (ready_ticks);
FENCE_CYCLE (f) = MAX (FENCE_CYCLE (f), cycle);
if (FENCE_EXECUTING_INSNS (f))
- VEC_block_remove (rtx, FENCE_EXECUTING_INSNS (f), 0,
- VEC_length (rtx, FENCE_EXECUTING_INSNS (f)));
+ FENCE_EXECUTING_INSNS (f)->block_remove (0,
+ FENCE_EXECUTING_INSNS (f)->length ());
if (FENCE_READY_TICKS (f))
memset (FENCE_READY_TICKS (f), 0, FENCE_READY_TICKS_SIZE (f));
}
@@ -772,7 +774,7 @@ merge_fences (fence_t f, insn_t insn,
{
delete_deps_context (FENCE_DC (f));
FENCE_DC (f) = dc;
- VEC_free (rtx, gc, FENCE_EXECUTING_INSNS (f));
+ vec_free (FENCE_EXECUTING_INSNS (f));
FENCE_EXECUTING_INSNS (f) = executing_insns;
free (FENCE_READY_TICKS (f));
FENCE_READY_TICKS (f) = ready_ticks;
@@ -783,7 +785,7 @@ merge_fences (fence_t f, insn_t insn,
{
/* Leave DC and CYCLE untouched. */
delete_deps_context (dc);
- VEC_free (rtx, gc, executing_insns);
+ vec_free (executing_insns);
free (ready_ticks);
}
}
@@ -802,7 +804,7 @@ merge_fences (fence_t f, insn_t insn,
static void
add_to_fences (flist_tail_t new_fences, insn_t insn,
state_t state, deps_t dc, void *tc, rtx last_scheduled_insn,
- VEC(rtx, gc) *executing_insns, int *ready_ticks,
+ vec<rtx, va_gc> *executing_insns, int *ready_ticks,
int ready_ticks_size, rtx sched_next, int cycle,
int cycle_issued_insns, int issue_rate,
bool starts_cycle_p, bool after_stall_p)
@@ -886,7 +888,7 @@ add_dirty_fence_to_fences (flist_tail_t new_fences, insn_t succ, fence_t fence)
create_copy_of_deps_context (FENCE_DC (fence)),
create_copy_of_target_context (FENCE_TC (fence)),
FENCE_LAST_SCHEDULED_INSN (fence),
- VEC_copy (rtx, gc, FENCE_EXECUTING_INSNS (fence)),
+ vec_safe_copy (FENCE_EXECUTING_INSNS (fence)),
new_ready_ticks,
FENCE_READY_TICKS_SIZE (fence),
FENCE_SCHED_NEXT (fence),
@@ -1438,12 +1440,12 @@ sel_move_insn (expr_t expr, int seqno, insn_t after)
the search has stopped, such that inserting the new element at INDP will
retain VECT's sort order. */
static bool
-find_in_history_vect_1 (VEC(expr_history_def, heap) *vect,
+find_in_history_vect_1 (vec<expr_history_def> vect,
unsigned uid, vinsn_t new_vinsn,
bool compare_vinsns, int *indp)
{
expr_history_def *arr;
- int i, j, len = VEC_length (expr_history_def, vect);
+ int i, j, len = vect.length ();
if (len == 0)
{
@@ -1451,7 +1453,7 @@ find_in_history_vect_1 (VEC(expr_history_def, heap) *vect,
return false;
}
- arr = VEC_address (expr_history_def, vect);
+ arr = vect.address ();
i = 0, j = len - 1;
while (i <= j)
@@ -1483,7 +1485,7 @@ find_in_history_vect_1 (VEC(expr_history_def, heap) *vect,
the position found or -1, if no such value is in vector.
Search also for UIDs of insn's originators, if ORIGINATORS_P is true. */
int
-find_in_history_vect (VEC(expr_history_def, heap) *vect, rtx insn,
+find_in_history_vect (vec<expr_history_def> vect, rtx insn,
vinsn_t new_vinsn, bool originators_p)
{
int ind;
@@ -1510,12 +1512,12 @@ find_in_history_vect (VEC(expr_history_def, heap) *vect, rtx insn,
UID/NEW_EXPR_VINSN pair. TYPE, OLD_EXPR_VINSN and SPEC_DS save
the history of a transformation. */
void
-insert_in_history_vect (VEC (expr_history_def, heap) **pvect,
+insert_in_history_vect (vec<expr_history_def> *pvect,
unsigned uid, enum local_trans_type type,
vinsn_t old_expr_vinsn, vinsn_t new_expr_vinsn,
ds_t spec_ds)
{
- VEC(expr_history_def, heap) *vect = *pvect;
+ vec<expr_history_def> vect = *pvect;
expr_history_def temp;
bool res;
int ind;
@@ -1524,7 +1526,7 @@ insert_in_history_vect (VEC (expr_history_def, heap) **pvect,
if (res)
{
- expr_history_def *phist = &VEC_index (expr_history_def, vect, ind);
+ expr_history_def *phist = &vect[ind];
/* It is possible that speculation types of expressions that were
propagated through different paths will be different here. In this
@@ -1542,42 +1544,39 @@ insert_in_history_vect (VEC (expr_history_def, heap) **pvect,
vinsn_attach (old_expr_vinsn);
vinsn_attach (new_expr_vinsn);
- VEC_safe_insert (expr_history_def, heap, vect, ind, temp);
+ vect.safe_insert (ind, temp);
*pvect = vect;
}
/* Free history vector PVECT. */
static void
-free_history_vect (VEC (expr_history_def, heap) **pvect)
+free_history_vect (vec<expr_history_def> &pvect)
{
unsigned i;
expr_history_def *phist;
- if (! *pvect)
+ if (! pvect.exists ())
return;
- for (i = 0;
- VEC_iterate (expr_history_def, *pvect, i, phist);
- i++)
+ for (i = 0; pvect.iterate (i, &phist); i++)
{
vinsn_detach (phist->old_expr_vinsn);
vinsn_detach (phist->new_expr_vinsn);
}
- VEC_free (expr_history_def, heap, *pvect);
- *pvect = NULL;
+ pvect.release ();
}
/* Merge vector FROM to PVECT. */
static void
-merge_history_vect (VEC (expr_history_def, heap) **pvect,
- VEC (expr_history_def, heap) *from)
+merge_history_vect (vec<expr_history_def> *pvect,
+ vec<expr_history_def> from)
{
expr_history_def *phist;
int i;
/* We keep this vector sorted. */
- for (i = 0; VEC_iterate (expr_history_def, from, i, phist); i++)
+ for (i = 0; from.iterate (i, &phist); i++)
insert_in_history_vect (pvect, phist->uid, phist->type,
phist->old_expr_vinsn, phist->new_expr_vinsn,
phist->spec_ds);
@@ -1619,7 +1618,8 @@ static void
init_expr (expr_t expr, vinsn_t vi, int spec, int use, int priority,
int sched_times, int orig_bb_index, ds_t spec_done_ds,
ds_t spec_to_check_ds, int orig_sched_cycle,
- VEC(expr_history_def, heap) *history, signed char target_available,
+ vec<expr_history_def> history,
+ signed char target_available,
bool was_substituted, bool was_renamed, bool needs_spec_check_p,
bool cant_move)
{
@@ -1636,10 +1636,10 @@ init_expr (expr_t expr, vinsn_t vi, int spec, int use, int priority,
EXPR_SPEC_DONE_DS (expr) = spec_done_ds;
EXPR_SPEC_TO_CHECK_DS (expr) = spec_to_check_ds;
- if (history)
+ if (history.exists ())
EXPR_HISTORY_OF_CHANGES (expr) = history;
else
- EXPR_HISTORY_OF_CHANGES (expr) = NULL;
+ EXPR_HISTORY_OF_CHANGES (expr).create (0);
EXPR_TARGET_AVAILABLE (expr) = target_available;
EXPR_WAS_SUBSTITUTED (expr) = was_substituted;
@@ -1652,16 +1652,16 @@ init_expr (expr_t expr, vinsn_t vi, int spec, int use, int priority,
void
copy_expr (expr_t to, expr_t from)
{
- VEC(expr_history_def, heap) *temp = NULL;
+ vec<expr_history_def> temp = vec<expr_history_def>();
- if (EXPR_HISTORY_OF_CHANGES (from))
+ if (EXPR_HISTORY_OF_CHANGES (from).exists ())
{
unsigned i;
expr_history_def *phist;
- temp = VEC_copy (expr_history_def, heap, EXPR_HISTORY_OF_CHANGES (from));
+ temp = EXPR_HISTORY_OF_CHANGES (from).copy ();
for (i = 0;
- VEC_iterate (expr_history_def, temp, i, phist);
+ temp.iterate (i, &phist);
i++)
{
vinsn_attach (phist->old_expr_vinsn);
@@ -1686,7 +1686,8 @@ copy_expr_onside (expr_t to, expr_t from)
{
init_expr (to, EXPR_VINSN (from), EXPR_SPEC (from), EXPR_USEFULNESS (from),
EXPR_PRIORITY (from), EXPR_SCHED_TIMES (from), 0,
- EXPR_SPEC_DONE_DS (from), EXPR_SPEC_TO_CHECK_DS (from), 0, NULL,
+ EXPR_SPEC_DONE_DS (from), EXPR_SPEC_TO_CHECK_DS (from), 0,
+ vec<expr_history_def>(),
EXPR_TARGET_AVAILABLE (from), EXPR_WAS_SUBSTITUTED (from),
EXPR_WAS_RENAMED (from), EXPR_NEEDS_SPEC_CHECK_P (from),
EXPR_CANT_MOVE (from));
@@ -1718,7 +1719,7 @@ prepare_insn_expr (insn_t insn, int seqno)
if (ds)
EXPR_SPEC_DONE_DS (expr) = ds_get_max_dep_weak (ds);
- free_history_vect (&EXPR_HISTORY_OF_CHANGES (expr));
+ free_history_vect (EXPR_HISTORY_OF_CHANGES (expr));
}
/* Update target_available bits when merging exprs TO and FROM. SPLIT_POINT
@@ -1882,7 +1883,7 @@ clear_expr (expr_t expr)
vinsn_detach (EXPR_VINSN (expr));
EXPR_VINSN (expr) = NULL;
- free_history_vect (&EXPR_HISTORY_OF_CHANGES (expr));
+ free_history_vect (EXPR_HISTORY_OF_CHANGES (expr));
}
/* For a given LV_SET, mark EXPR having unavailable target register. */
@@ -2781,14 +2782,14 @@ sched_scan (const struct sched_scan_info_def *ssi, bb_vec_t bbs)
ssi->extend_bb ();
if (ssi->init_bb)
- FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
+ FOR_EACH_VEC_ELT (bbs, i, bb)
ssi->init_bb (bb);
if (ssi->extend_insn)
ssi->extend_insn ();
if (ssi->init_insn)
- FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
+ FOR_EACH_VEC_ELT (bbs, i, bb)
{
rtx insn;
@@ -3003,8 +3004,8 @@ init_global_and_expr_for_insn (insn_t insn)
/* Initialize INSN's expr. */
init_expr (INSN_EXPR (insn), vinsn_create (insn, force_unique_p), 0,
REG_BR_PROB_BASE, INSN_PRIORITY (insn), 0, BLOCK_NUM (insn),
- spec_done_ds, 0, 0, NULL, true, false, false, false,
- CANT_MOVE (insn));
+ spec_done_ds, 0, 0, vec<expr_history_def>(), true,
+ false, false, false, CANT_MOVE (insn));
}
init_first_time_insn_data (insn);
@@ -3065,10 +3066,10 @@ sel_finish_global_and_expr (void)
bb_vec_t bbs;
int i;
- bbs = VEC_alloc (basic_block, heap, current_nr_blocks);
+ bbs.create (current_nr_blocks);
for (i = 0; i < current_nr_blocks; i++)
- VEC_quick_push (basic_block, bbs, BASIC_BLOCK (BB_TO_BLOCK (i)));
+ bbs.quick_push (BASIC_BLOCK (BB_TO_BLOCK (i)));
/* Clear AV_SETs and INSN_EXPRs. */
{
@@ -3083,7 +3084,7 @@ sel_finish_global_and_expr (void)
sched_scan (&ssi, bbs);
}
- VEC_free (basic_block, heap, bbs);
+ bbs.release ();
}
finish_insns ();
@@ -3563,7 +3564,7 @@ insn_sid (insn_t insn)
bool
sel_insn_is_speculation_check (rtx insn)
{
- return s_i_d && !! INSN_SPEC_CHECKED_DS (insn);
+ return s_i_d.exists () && !! INSN_SPEC_CHECKED_DS (insn);
}
/* Extracts machine mode MODE and destination location DST_LOC
@@ -3665,7 +3666,7 @@ static bool
maybe_tidy_empty_bb (basic_block bb)
{
basic_block succ_bb, pred_bb, note_bb;
- VEC (basic_block, heap) *dom_bbs;
+ vec<basic_block> dom_bbs;
edge e;
edge_iterator ei;
bool rescan_p;
@@ -3717,7 +3718,7 @@ maybe_tidy_empty_bb (basic_block bb)
succ_bb = single_succ (bb);
rescan_p = true;
pred_bb = NULL;
- dom_bbs = NULL;
+ dom_bbs.create (0);
/* Save a pred/succ from the current region to attach the notes to. */
note_bb = NULL;
@@ -3749,7 +3750,7 @@ maybe_tidy_empty_bb (basic_block bb)
sel_redirect_edge_and_branch will take care of it. */
if (e->dest != bb
&& single_pred_p (e->dest))
- VEC_safe_push (basic_block, heap, dom_bbs, e->dest);
+ dom_bbs.safe_push (e->dest);
sel_redirect_edge_and_branch (e, succ_bb);
rescan_p = true;
break;
@@ -3784,11 +3785,11 @@ maybe_tidy_empty_bb (basic_block bb)
remove_empty_bb (bb, true);
}
- if (!VEC_empty (basic_block, dom_bbs))
+ if (!dom_bbs.is_empty ())
{
- VEC_safe_push (basic_block, heap, dom_bbs, succ_bb);
+ dom_bbs.safe_push (succ_bb);
iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, false);
- VEC_free (basic_block, heap, dom_bbs);
+ dom_bbs.release ();
}
return true;
@@ -4100,16 +4101,14 @@ get_seqno_by_preds (rtx insn)
void
sel_extend_global_bb_info (void)
{
- VEC_safe_grow_cleared (sel_global_bb_info_def, heap, sel_global_bb_info,
- last_basic_block);
+ sel_global_bb_info.safe_grow_cleared (last_basic_block);
}
/* Extend region-scope data structures for basic blocks. */
static void
extend_region_bb_info (void)
{
- VEC_safe_grow_cleared (sel_region_bb_info_def, heap, sel_region_bb_info,
- last_basic_block);
+ sel_region_bb_info.safe_grow_cleared (last_basic_block);
}
/* Extend all data structures to fit for all basic blocks. */
@@ -4124,19 +4123,19 @@ extend_bb_info (void)
void
sel_finish_global_bb_info (void)
{
- VEC_free (sel_global_bb_info_def, heap, sel_global_bb_info);
+ sel_global_bb_info.release ();
}
/* Finalize region-scope data structures for basic blocks. */
static void
finish_region_bb_info (void)
{
- VEC_free (sel_region_bb_info_def, heap, sel_region_bb_info);
+ sel_region_bb_info.release ();
}
/* Data for each insn in current region. */
-VEC (sel_insn_data_def, heap) *s_i_d = NULL;
+vec<sel_insn_data_def> s_i_d = vec<sel_insn_data_def>();
/* Extend data structures for insns from current region. */
static void
@@ -4148,10 +4147,8 @@ extend_insn_data (void)
sched_deps_init (false);
/* Extend data structures for insns from current region. */
- reserve = (sched_max_luid + 1
- - VEC_length (sel_insn_data_def, s_i_d));
- if (reserve > 0
- && ! VEC_space (sel_insn_data_def, s_i_d, reserve))
+ reserve = (sched_max_luid + 1 - s_i_d.length ());
+ if (reserve > 0 && ! s_i_d.space (reserve))
{
int size;
@@ -4161,7 +4158,7 @@ extend_insn_data (void)
size = 3 * sched_max_luid / 2;
- VEC_safe_grow_cleared (sel_insn_data_def, heap, s_i_d, size);
+ s_i_d.safe_grow_cleared (size);
}
}
@@ -4173,9 +4170,9 @@ finish_insns (void)
/* Clear here all dependence contexts that may have left from insns that were
removed during the scheduling. */
- for (i = 0; i < VEC_length (sel_insn_data_def, s_i_d); i++)
+ for (i = 0; i < s_i_d.length (); i++)
{
- sel_insn_data_def *sid_entry = &VEC_index (sel_insn_data_def, s_i_d, i);
+ sel_insn_data_def *sid_entry = &s_i_d[i];
if (sid_entry->live)
return_regset_to_pool (sid_entry->live);
@@ -4196,7 +4193,7 @@ finish_insns (void)
}
}
- VEC_free (sel_insn_data_def, heap, s_i_d);
+ s_i_d.release ();
}
/* A proxy to pass initialization data to init_insn (). */
@@ -4255,7 +4252,8 @@ static void
init_simplejump_data (insn_t insn)
{
init_expr (INSN_EXPR (insn), vinsn_create (insn, false), 0,
- REG_BR_PROB_BASE, 0, 0, 0, 0, 0, 0, NULL, true, false, false,
+ REG_BR_PROB_BASE, 0, 0, 0, 0, 0, 0,
+ vec<expr_history_def>(), true, false, false,
false, true);
INSN_SEQNO (insn) = get_seqno_for_a_jump (insn);
init_first_time_insn_data (insn);
@@ -4505,7 +4503,8 @@ get_av_level (insn_t insn)
/* The basic block that already has been processed by the sched_data_update (),
but hasn't been in sel_add_bb () yet. */
-static VEC (basic_block, heap) *last_added_blocks = NULL;
+static vec<basic_block>
+ last_added_blocks = vec<basic_block>();
/* A pool for allocating successor infos. */
static struct
@@ -4707,9 +4706,9 @@ alloc_succs_info (void)
gcc_unreachable ();
i = ++succs_info_pool.top;
- succs_info_pool.stack[i].succs_ok = VEC_alloc (rtx, heap, 10);
- succs_info_pool.stack[i].succs_other = VEC_alloc (rtx, heap, 10);
- succs_info_pool.stack[i].probs_ok = VEC_alloc (int, heap, 10);
+ succs_info_pool.stack[i].succs_ok.create (10);
+ succs_info_pool.stack[i].succs_other.create (10);
+ succs_info_pool.stack[i].probs_ok.create (10);
}
else
succs_info_pool.top++;
@@ -4726,12 +4725,9 @@ free_succs_info (struct succs_info * sinfo)
succs_info_pool.top--;
/* Clear stale info. */
- VEC_block_remove (rtx, sinfo->succs_ok,
- 0, VEC_length (rtx, sinfo->succs_ok));
- VEC_block_remove (rtx, sinfo->succs_other,
- 0, VEC_length (rtx, sinfo->succs_other));
- VEC_block_remove (int, sinfo->probs_ok,
- 0, VEC_length (int, sinfo->probs_ok));
+ sinfo->succs_ok.block_remove (0, sinfo->succs_ok.length ());
+ sinfo->succs_other.block_remove (0, sinfo->succs_other.length ());
+ sinfo->probs_ok.block_remove (0, sinfo->probs_ok.length ());
sinfo->all_prob = 0;
sinfo->succs_ok_n = 0;
sinfo->all_succs_n = 0;
@@ -4755,17 +4751,15 @@ compute_succs_info (insn_t insn, short flags)
if (current_flags & flags)
{
- VEC_safe_push (rtx, heap, sinfo->succs_ok, succ);
- VEC_safe_push (int, heap, sinfo->probs_ok,
- /* FIXME: Improve calculation when skipping
- inner loop to exits. */
- (si.bb_end
- ? si.e1->probability
- : REG_BR_PROB_BASE));
+ sinfo->succs_ok.safe_push (succ);
+ sinfo->probs_ok.safe_push (
+ /* FIXME: Improve calculation when skipping
+ inner loop to exits. */
+ si.bb_end ? si.e1->probability : REG_BR_PROB_BASE);
sinfo->succs_ok_n++;
}
else
- VEC_safe_push (rtx, heap, sinfo->succs_other, succ);
+ sinfo->succs_other.safe_push (succ);
/* Compute all_prob. */
if (!si.bb_end)
@@ -4972,18 +4966,18 @@ return_bb_to_pool (basic_block bb)
/* It turns out that current cfg infrastructure does not support
reuse of basic blocks. Don't bother for now. */
- /*VEC_safe_push (rtx, heap, bb_note_pool, note);*/
+ /*bb_note_pool.safe_push (note);*/
}
/* Get a bb_note from pool or return NULL_RTX if pool is empty. */
static rtx
get_bb_note_from_pool (void)
{
- if (VEC_empty (rtx, bb_note_pool))
+ if (bb_note_pool.is_empty ())
return NULL_RTX;
else
{
- rtx note = VEC_pop (rtx, bb_note_pool);
+ rtx note = bb_note_pool.pop ();
PREV_INSN (note) = NULL_RTX;
NEXT_INSN (note) = NULL_RTX;
@@ -4996,7 +4990,7 @@ get_bb_note_from_pool (void)
void
free_bb_note_pool (void)
{
- VEC_free (rtx, heap, bb_note_pool);
+ bb_note_pool.release ();
}
/* Setup scheduler pool and successor structure. */
@@ -5025,9 +5019,9 @@ free_sched_pools (void)
gcc_assert (succs_info_pool.top == -1);
for (i = 0; i < succs_info_pool.max_top; i++)
{
- VEC_free (rtx, heap, succs_info_pool.stack[i].succs_ok);
- VEC_free (rtx, heap, succs_info_pool.stack[i].succs_other);
- VEC_free (int, heap, succs_info_pool.stack[i].probs_ok);
+ succs_info_pool.stack[i].succs_ok.release ();
+ succs_info_pool.stack[i].succs_other.release ();
+ succs_info_pool.stack[i].probs_ok.release ();
}
free (succs_info_pool.stack);
}
@@ -5194,13 +5188,12 @@ sel_add_bb (basic_block bb)
/* When bb is passed explicitly, the vector should contain
the only element that equals to bb; otherwise, the vector
should not be NULL. */
- gcc_assert (last_added_blocks != NULL);
+ gcc_assert (last_added_blocks.exists ());
if (bb != NULL)
{
- gcc_assert (VEC_length (basic_block, last_added_blocks) == 1
- && VEC_index (basic_block,
- last_added_blocks, 0) == bb);
+ gcc_assert (last_added_blocks.length () == 1
+ && last_added_blocks[0] == bb);
add_block_to_current_region (bb);
/* We associate creating/deleting data sets with the first insn
@@ -5208,7 +5201,7 @@ sel_add_bb (basic_block bb)
if (!sel_bb_empty_p (bb) && BB_LV_SET (bb) == NULL)
create_initial_data_sets (bb);
- VEC_free (basic_block, heap, last_added_blocks);
+ last_added_blocks.release ();
}
else
/* BB is NULL - process LAST_ADDED_BLOCKS instead. */
@@ -5217,7 +5210,7 @@ sel_add_bb (basic_block bb)
basic_block temp_bb = NULL;
for (i = 0;
- VEC_iterate (basic_block, last_added_blocks, i, bb); i++)
+ last_added_blocks.iterate (i, &bb); i++)
{
add_block_to_current_region (bb);
temp_bb = bb;
@@ -5228,7 +5221,7 @@ sel_add_bb (basic_block bb)
gcc_assert (temp_bb != NULL);
bb = temp_bb;
- VEC_free (basic_block, heap, last_added_blocks);
+ last_added_blocks.release ();
}
rgn_setup_region (CONTAINING_RGN (bb->index));
@@ -5345,7 +5338,7 @@ sel_create_basic_block (void *headp, void *endp, basic_block after)
insn_t new_bb_note;
gcc_assert (flag_sel_sched_pipelining_outer_loops
- || last_added_blocks == NULL);
+ || !last_added_blocks.exists ());
new_bb_note = get_bb_note_from_pool ();
@@ -5358,7 +5351,7 @@ sel_create_basic_block (void *headp, void *endp, basic_block after)
new_bb->aux = NULL;
}
- VEC_safe_push (basic_block, heap, last_added_blocks, new_bb);
+ last_added_blocks.safe_push (new_bb);
return new_bb;
}
@@ -5489,7 +5482,7 @@ sel_split_edge (edge e)
/* Some of the basic blocks might not have been added to the loop.
Add them here, until this is fixed in force_fallthru. */
for (i = 0;
- VEC_iterate (basic_block, last_added_blocks, i, bb); i++)
+ last_added_blocks.iterate (i, &bb); i++)
if (!bb->loop_father)
{
add_bb_to_loop (bb, e->dest->loop_father);
@@ -5523,10 +5516,10 @@ sel_create_empty_bb (basic_block after)
/* We'll explicitly initialize NEW_BB via sel_init_only_bb () a bit
later. */
- gcc_assert (VEC_length (basic_block, last_added_blocks) == 1
- && VEC_index (basic_block, last_added_blocks, 0) == new_bb);
+ gcc_assert (last_added_blocks.length () == 1
+ && last_added_blocks[0] == new_bb);
- VEC_free (basic_block, heap, last_added_blocks);
+ last_added_blocks.release ();
return new_bb;
}
@@ -5640,7 +5633,7 @@ sel_redirect_edge_and_branch (edge e, basic_block to)
redirected = redirect_edge_and_branch (e, to);
- gcc_assert (redirected && last_added_blocks == NULL);
+ gcc_assert (redirected && !last_added_blocks.exists ());
/* When we've redirected a latch edge, update the header. */
if (latch_edge_p)
@@ -6034,7 +6027,7 @@ make_region_from_loop (struct loop *loop)
/* Create a new region from preheader blocks LOOP_BLOCKS. */
void
-make_region_from_loop_preheader (VEC(basic_block, heap) **loop_blocks)
+make_region_from_loop_preheader (vec<basic_block> *&loop_blocks)
{
unsigned int i;
int new_rgn_number = -1;
@@ -6045,15 +6038,14 @@ make_region_from_loop_preheader (VEC(basic_block, heap) **loop_blocks)
new_rgn_number = sel_create_new_region ();
- FOR_EACH_VEC_ELT (basic_block, *loop_blocks, i, bb)
+ FOR_EACH_VEC_ELT (*loop_blocks, i, bb)
{
gcc_assert (new_rgn_number >= 0);
sel_add_block_to_region (bb, &bb_ord_index, new_rgn_number);
}
- VEC_free (basic_block, heap, *loop_blocks);
- gcc_assert (*loop_blocks == NULL);
+ vec_free (loop_blocks);
}
@@ -6078,7 +6070,7 @@ make_regions_from_loop_nest (struct loop *loop)
if (rgn_number < 0)
return false;
- VEC_safe_push (loop_p, heap, loop_nests, loop);
+ loop_nests.safe_push (loop);
return true;
}
@@ -6105,8 +6097,8 @@ get_loop_nest_for_rgn (unsigned int rgn)
{
/* Regions created with extend_rgns don't have corresponding loop nests,
because they don't represent loops. */
- if (rgn < VEC_length (loop_p, loop_nests))
- return VEC_index (loop_p, loop_nests, rgn);
+ if (rgn < loop_nests.length ())
+ return loop_nests[rgn];
else
return NULL;
}
@@ -6128,7 +6120,7 @@ considered_for_pipelining_p (struct loop *loop)
{
int rgn = CONTAINING_RGN (loop->latch->index);
- gcc_assert ((unsigned) rgn < VEC_length (loop_p, loop_nests));
+ gcc_assert ((unsigned) rgn < loop_nests.length ());
return true;
}
@@ -6224,7 +6216,7 @@ void sel_finish_pipelining (void)
loop_optimizer_finalize ();
- VEC_free (loop_p, heap, loop_nests);
+ loop_nests.release ();
free (rev_top_order_index);
rev_top_order_index = NULL;
@@ -6267,19 +6259,20 @@ sel_add_loop_preheaders (bb_vec_t *bbs)
{
int i;
basic_block bb;
- VEC(basic_block, heap) *preheader_blocks
+ vec<basic_block> *preheader_blocks
= LOOP_PREHEADER_BLOCKS (current_loop_nest);
- for (i = 0;
- VEC_iterate (basic_block, preheader_blocks, i, bb);
- i++)
+ if (!preheader_blocks)
+ return;
+
+ for (i = 0; preheader_blocks->iterate (i, &bb); i++)
{
- VEC_safe_push (basic_block, heap, *bbs, bb);
- VEC_safe_push (basic_block, heap, last_added_blocks, bb);
+ bbs->safe_push (bb);
+ last_added_blocks.safe_push (bb);
sel_add_bb (bb);
}
- VEC_free (basic_block, heap, preheader_blocks);
+ vec_free (preheader_blocks);
}
/* While pipelining outer loops, returns TRUE if BB is a loop preheader.
@@ -6350,11 +6343,13 @@ sel_remove_loop_preheader (void)
int cur_rgn = CONTAINING_RGN (BB_TO_BLOCK (0));
basic_block bb;
bool all_empty_p = true;
- VEC(basic_block, heap) *preheader_blocks
+ vec<basic_block> *preheader_blocks
= LOOP_PREHEADER_BLOCKS (loop_outer (current_loop_nest));
+ vec_check_alloc (preheader_blocks, 0);
+
gcc_assert (current_loop_nest);
- old_len = VEC_length (basic_block, preheader_blocks);
+ old_len = preheader_blocks->length ();
/* Add blocks that aren't within the current loop to PREHEADER_BLOCKS. */
for (i = 0; i < RGN_NR_BLOCKS (cur_rgn); i++)
@@ -6365,18 +6360,16 @@ sel_remove_loop_preheader (void)
corresponding loop, then it should be a preheader. */
if (sel_is_loop_preheader_p (bb))
{
- VEC_safe_push (basic_block, heap, preheader_blocks, bb);
+ preheader_blocks->safe_push (bb);
if (BB_END (bb) != bb_note (bb))
all_empty_p = false;
}
}
/* Remove these blocks only after iterating over the whole region. */
- for (i = VEC_length (basic_block, preheader_blocks) - 1;
- i >= old_len;
- i--)
+ for (i = preheader_blocks->length () - 1; i >= old_len; i--)
{
- bb = VEC_index (basic_block, preheader_blocks, i);
+ bb = (*preheader_blocks)[i];
sel_remove_bb (bb, false);
}
@@ -6384,12 +6377,12 @@ sel_remove_loop_preheader (void)
{
if (!all_empty_p)
/* Immediately create new region from preheader. */
- make_region_from_loop_preheader (&preheader_blocks);
+ make_region_from_loop_preheader (preheader_blocks);
else
{
/* If all preheader blocks are empty - dont create new empty region.
Instead, remove them completely. */
- FOR_EACH_VEC_ELT (basic_block, preheader_blocks, i, bb)
+ FOR_EACH_VEC_ELT (*preheader_blocks, i, bb)
{
edge e;
edge_iterator ei;
@@ -6424,7 +6417,7 @@ sel_remove_loop_preheader (void)
next_bb));
}
}
- VEC_free (basic_block, heap, preheader_blocks);
+ vec_free (preheader_blocks);
}
else
/* Store preheader within the father's loop structure. */
diff --git a/gcc/sel-sched-ir.h b/gcc/sel-sched-ir.h
index ef884a56f12..04714230cb0 100644
--- a/gcc/sel-sched-ir.h
+++ b/gcc/sel-sched-ir.h
@@ -30,7 +30,6 @@ along with GCC; see the file COPYING3. If not see
#include "rtl.h"
#include "ggc.h"
#include "bitmap.h"
-#include "vecprim.h"
#include "sched-int.h"
#include "cfgloop.h"
@@ -99,8 +98,6 @@ struct expr_history_def_1
typedef struct expr_history_def_1 expr_history_def;
-DEF_VEC_O (expr_history_def);
-DEF_VEC_ALLOC_O (expr_history_def, heap);
/* Expression information. */
struct _expr
@@ -146,7 +143,7 @@ struct _expr
int orig_sched_cycle;
/* This vector contains the history of insn's transformations. */
- VEC(expr_history_def, heap) *history_of_changes;
+ vec<expr_history_def> history_of_changes;
/* True (1) when original target (register or memory) of this instruction
is available for scheduling, false otherwise. -1 means we're not sure;
@@ -195,8 +192,7 @@ typedef expr_def *expr_t;
#define EXPR_WAS_RENAMED(EXPR) ((EXPR)->was_renamed)
#define EXPR_CANT_MOVE(EXPR) ((EXPR)->cant_move)
-#define EXPR_WAS_CHANGED(EXPR) (VEC_length (expr_history_def, \
- EXPR_HISTORY_OF_CHANGES (EXPR)) > 0)
+#define EXPR_WAS_CHANGED(EXPR) (EXPR_HISTORY_OF_CHANGES (EXPR).length () > 0)
/* Insn definition for list of original insns in find_used_regs. */
struct _def
@@ -285,7 +281,7 @@ struct _fence
tc_t tc;
/* A vector of insns that are scheduled but not yet completed. */
- VEC (rtx,gc) *executing_insns;
+ vec<rtx, va_gc> *executing_insns;
/* A vector indexed by UIDs that caches the earliest cycle on which
an insn can be scheduled on this fence. */
@@ -760,13 +756,11 @@ struct _sel_insn_data
typedef struct _sel_insn_data sel_insn_data_def;
typedef sel_insn_data_def *sel_insn_data_t;
-DEF_VEC_O (sel_insn_data_def);
-DEF_VEC_ALLOC_O (sel_insn_data_def, heap);
-extern VEC (sel_insn_data_def, heap) *s_i_d;
+extern vec<sel_insn_data_def> s_i_d;
/* Accessor macros for s_i_d. */
-#define SID(INSN) (&VEC_index (sel_insn_data_def, s_i_d, INSN_LUID (INSN)))
-#define SID_BY_UID(UID) (&VEC_index (sel_insn_data_def, s_i_d, LUID_BY_UID (UID)))
+#define SID(INSN) (&s_i_d[INSN_LUID (INSN)])
+#define SID_BY_UID(UID) (&s_i_d[LUID_BY_UID (UID)])
extern sel_insn_data_def insn_sid (insn_t);
@@ -848,7 +842,7 @@ extern rtx exit_insn;
/* Saved loop preheader to transfer when scheduling the loop. */
#define LOOP_PREHEADER_BLOCKS(LOOP) ((size_t)((LOOP)->aux) == 1 \
? NULL \
- : ((VEC(basic_block, heap) *) (LOOP)->aux))
+ : ((vec<basic_block> *) (LOOP)->aux))
#define SET_LOOP_PREHEADER_BLOCKS(LOOP,BLOCKS) ((LOOP)->aux \
= (BLOCKS != NULL \
? BLOCKS \
@@ -886,18 +880,16 @@ typedef struct
typedef sel_global_bb_info_def *sel_global_bb_info_t;
-DEF_VEC_O (sel_global_bb_info_def);
-DEF_VEC_ALLOC_O (sel_global_bb_info_def, heap);
/* Per basic block data. This array is indexed by basic block index. */
-extern VEC (sel_global_bb_info_def, heap) *sel_global_bb_info;
+extern vec<sel_global_bb_info_def> sel_global_bb_info;
extern void sel_extend_global_bb_info (void);
extern void sel_finish_global_bb_info (void);
/* Get data for BB. */
#define SEL_GLOBAL_BB_INFO(BB) \
- (&VEC_index (sel_global_bb_info_def, sel_global_bb_info, (BB)->index))
+ (&sel_global_bb_info[(BB)->index])
/* Access macros. */
#define BB_LV_SET(BB) (SEL_GLOBAL_BB_INFO (BB)->lv_set)
@@ -920,15 +912,12 @@ typedef struct
typedef sel_region_bb_info_def *sel_region_bb_info_t;
-DEF_VEC_O (sel_region_bb_info_def);
-DEF_VEC_ALLOC_O (sel_region_bb_info_def, heap);
/* Per basic block data. This array is indexed by basic block index. */
-extern VEC (sel_region_bb_info_def, heap) *sel_region_bb_info;
+extern vec<sel_region_bb_info_def> sel_region_bb_info;
/* Get data for BB. */
-#define SEL_REGION_BB_INFO(BB) (&VEC_index (sel_region_bb_info_def, \
- sel_region_bb_info, (BB)->index))
+#define SEL_REGION_BB_INFO(BB) (&sel_region_bb_info[(BB)->index])
/* Get BB's note_list.
A note_list is a list of various notes that was scattered across BB
@@ -992,7 +981,7 @@ typedef struct
/* If skip to loop exits, save here information about loop exits. */
int current_exit;
- VEC (edge, heap) *loop_exits;
+ vec<edge> loop_exits;
} succ_iterator;
/* A structure returning all successor's information. */
@@ -1006,7 +995,7 @@ struct succs_info
/* Their probabilities. As of now, we don't need this for other
successors. */
- VEC(int,heap) *probs_ok;
+ vec<int> probs_ok;
/* Other successors. */
insn_vec_t succs_other;
@@ -1058,10 +1047,10 @@ inner_loop_header_p (basic_block bb)
}
/* Return exit edges of LOOP, filtering out edges with the same dest bb. */
-static inline VEC (edge, heap) *
+static inline vec<edge>
get_loop_exit_edges_unique_dests (const struct loop *loop)
{
- VEC (edge, heap) *edges = NULL;
+ vec<edge> edges = vec<edge>();
struct loop_exit *exit;
gcc_assert (loop->latch != EXIT_BLOCK_PTR
@@ -1073,7 +1062,7 @@ get_loop_exit_edges_unique_dests (const struct loop *loop)
edge e;
bool was_dest = false;
- for (i = 0; VEC_iterate (edge, edges, i, e); i++)
+ for (i = 0; edges.iterate (i, &e); i++)
if (e->dest == exit->e->dest)
{
was_dest = true;
@@ -1081,7 +1070,7 @@ get_loop_exit_edges_unique_dests (const struct loop *loop)
}
if (!was_dest)
- VEC_safe_push (edge, heap, edges, exit->e);
+ edges.safe_push (exit->e);
}
return edges;
}
@@ -1112,10 +1101,10 @@ sel_bb_empty_or_nop_p (basic_block bb)
traverse all of them and if any of them turns out to be another loop header
(after skipping empty BBs), add its loop exits to the resulting vector
as well. */
-static inline VEC(edge, heap) *
+static inline vec<edge>
get_all_loop_exits (basic_block bb)
{
- VEC(edge, heap) *exits = NULL;
+ vec<edge> exits = vec<edge>();
/* If bb is empty, and we're skipping to loop exits, then
consider bb as a possible gate to the inner loop now. */
@@ -1148,24 +1137,24 @@ get_all_loop_exits (basic_block bb)
exits = get_loop_exit_edges_unique_dests (this_loop);
/* Traverse all loop headers. */
- for (i = 0; VEC_iterate (edge, exits, i, e); i++)
+ for (i = 0; exits.iterate (i, &e); i++)
if (in_current_region_p (e->dest)
|| inner_loop_header_p (e->dest))
{
- VEC(edge, heap) *next_exits = get_all_loop_exits (e->dest);
+ vec<edge> next_exits = get_all_loop_exits (e->dest);
- if (next_exits)
+ if (next_exits.exists ())
{
int j;
edge ne;
/* Add all loop exits for the current edge into the
resulting vector. */
- for (j = 0; VEC_iterate (edge, next_exits, j, ne); j++)
- VEC_safe_push (edge, heap, exits, ne);
+ for (j = 0; next_exits.iterate (j, &ne); j++)
+ exits.safe_push (ne);
/* Remove the original edge. */
- VEC_ordered_remove (edge, exits, i);
+ exits.ordered_remove (i);
/* Decrease the loop counter so we won't skip anything. */
i--;
@@ -1216,7 +1205,7 @@ _succ_iter_start (insn_t *succp, insn_t insn, int flags)
i.bb = bb;
i.current_flags = 0;
i.current_exit = -1;
- i.loop_exits = NULL;
+ i.loop_exits.create (0);
if (bb != EXIT_BLOCK_PTR && BB_END (bb) != insn)
{
@@ -1224,7 +1213,7 @@ _succ_iter_start (insn_t *succp, insn_t insn, int flags)
/* Avoid 'uninitialized' warning. */
i.ei.index = 0;
- i.ei.container = NULL;
+ i.ei.container = 0;
}
else
{
@@ -1257,18 +1246,17 @@ _succ_iter_cond (succ_iterator *ip, rtx *succp, rtx insn,
edge e_tmp = NULL;
/* First, try loop exits, if we have them. */
- if (ip->loop_exits)
+ if (ip->loop_exits.exists ())
{
do
{
- VEC_iterate (edge, ip->loop_exits,
- ip->current_exit, e_tmp);
+ ip->loop_exits.iterate (ip->current_exit, &e_tmp);
ip->current_exit++;
}
while (e_tmp && !check (e_tmp, ip));
if (!e_tmp)
- VEC_free (edge, heap, ip->loop_exits);
+ ip->loop_exits.release ();
}
/* If we have found a successor, then great. */
@@ -1293,7 +1281,7 @@ _succ_iter_cond (succ_iterator *ip, rtx *succp, rtx insn,
/* Get all loop exits recursively. */
ip->loop_exits = get_all_loop_exits (bb);
- if (ip->loop_exits)
+ if (ip->loop_exits.exists ())
{
ip->current_exit = 0;
/* Move the iterator now, because we won't do
@@ -1312,7 +1300,7 @@ _succ_iter_cond (succ_iterator *ip, rtx *succp, rtx insn,
/* If loop_exits are non null, we have found an inner loop;
do one more iteration to fetch an edge from these exits. */
- if (ip->loop_exits)
+ if (ip->loop_exits.exists ())
continue;
/* Otherwise, we've found an edge in a usual way. Break now. */
@@ -1346,7 +1334,7 @@ _succ_iter_next (succ_iterator *ip)
{
gcc_assert (!ip->e2 || ip->e1);
- if (ip->bb_end && ip->e1 && !ip->loop_exits)
+ if (ip->bb_end && ip->e1 && !ip->loop_exits.exists ())
ei_next (&(ip->ei));
}
@@ -1545,9 +1533,9 @@ extern void merge_expr (expr_t, expr_t, insn_t);
extern void clear_expr (expr_t);
extern unsigned expr_dest_regno (expr_t);
extern rtx expr_dest_reg (expr_t);
-extern int find_in_history_vect (VEC(expr_history_def, heap) *,
+extern int find_in_history_vect (vec<expr_history_def> ,
rtx, vinsn_t, bool);
-extern void insert_in_history_vect (VEC(expr_history_def, heap) **,
+extern void insert_in_history_vect (vec<expr_history_def> *,
unsigned, enum local_trans_type,
vinsn_t, vinsn_t, ds_t);
extern void mark_unavailable_targets (av_set_t, av_set_t, regset);
@@ -1630,7 +1618,7 @@ extern void sel_finish_pipelining (void);
extern void sel_sched_region (int);
extern loop_p get_loop_nest_for_rgn (unsigned int);
extern bool considered_for_pipelining_p (struct loop *);
-extern void make_region_from_loop_preheader (VEC(basic_block, heap) **);
+extern void make_region_from_loop_preheader (vec<basic_block> *&);
extern void sel_add_loop_preheaders (bb_vec_t *);
extern bool sel_is_loop_preheader_p (basic_block);
extern void clear_outdated_rtx_info (basic_block);
diff --git a/gcc/sel-sched.c b/gcc/sel-sched.c
index fd018242288..f1d1a771f01 100644
--- a/gcc/sel-sched.c
+++ b/gcc/sel-sched.c
@@ -500,31 +500,25 @@ static int max_ws;
static int num_insns_scheduled;
/* A vector of expressions is used to be able to sort them. */
-DEF_VEC_P(expr_t);
-DEF_VEC_ALLOC_P(expr_t,heap);
-static VEC(expr_t, heap) *vec_av_set = NULL;
+static vec<expr_t> vec_av_set = vec<expr_t>();
/* A vector of vinsns is used to hold temporary lists of vinsns. */
-DEF_VEC_P(vinsn_t);
-DEF_VEC_ALLOC_P(vinsn_t,heap);
-typedef VEC(vinsn_t, heap) *vinsn_vec_t;
+typedef vec<vinsn_t> vinsn_vec_t;
/* This vector has the exprs which may still present in av_sets, but actually
can't be moved up due to bookkeeping created during code motion to another
fence. See comment near the call to update_and_record_unavailable_insns
for the detailed explanations. */
-static vinsn_vec_t vec_bookkeeping_blocked_vinsns = NULL;
+static vinsn_vec_t vec_bookkeeping_blocked_vinsns = vinsn_vec_t();
/* This vector has vinsns which are scheduled with renaming on the first fence
and then seen on the second. For expressions with such vinsns, target
availability information may be wrong. */
-static vinsn_vec_t vec_target_unavailable_vinsns = NULL;
+static vinsn_vec_t vec_target_unavailable_vinsns = vinsn_vec_t();
/* Vector to store temporary nops inserted in move_op to prevent removal
of empty bbs. */
-DEF_VEC_P(insn_t);
-DEF_VEC_ALLOC_P(insn_t,heap);
-static VEC(insn_t, heap) *vec_temp_moveop_nops = NULL;
+static vec<insn_t> vec_temp_moveop_nops = vec<insn_t>();
/* These bitmaps record original instructions scheduled on the current
iteration and bookkeeping copies created by them. */
@@ -587,12 +581,12 @@ advance_one_cycle (fence_t fence)
can_issue_more = issue_rate;
FENCE_ISSUE_MORE (fence) = can_issue_more;
- for (i = 0; VEC_iterate (rtx, FENCE_EXECUTING_INSNS (fence), i, insn); )
+ for (i = 0; vec_safe_iterate (FENCE_EXECUTING_INSNS (fence), i, &insn); )
{
if (INSN_READY_CYCLE (insn) < cycle)
{
remove_from_deps (FENCE_DC (fence), insn);
- VEC_unordered_remove (rtx, FENCE_EXECUTING_INSNS (fence), i);
+ FENCE_EXECUTING_INSNS (fence)->unordered_remove (i);
continue;
}
i++;
@@ -1938,9 +1932,7 @@ undo_transformations (av_set_t *av_ptr, rtx insn)
{
expr_history_def *phist;
- phist = &VEC_index (expr_history_def,
- EXPR_HISTORY_OF_CHANGES (expr),
- index);
+ phist = &EXPR_HISTORY_OF_CHANGES (expr)[index];
switch (phist->type)
{
@@ -2735,7 +2727,7 @@ compute_av_set_at_bb_end (insn_t insn, ilist_t p, int ws)
/* Add insn to the tail of current path. */
ilist_add (&p, insn);
- FOR_EACH_VEC_ELT (rtx, sinfo->succs_ok, is, succ)
+ FOR_EACH_VEC_ELT (sinfo->succs_ok, is, succ)
{
av_set_t succ_set;
@@ -2743,7 +2735,7 @@ compute_av_set_at_bb_end (insn_t insn, ilist_t p, int ws)
succ_set = compute_av_set_inside_bb (succ, p, ws, true);
av_set_split_usefulness (succ_set,
- VEC_index (int, sinfo->probs_ok, is),
+ sinfo->probs_ok[is],
sinfo->all_prob);
if (sinfo->all_succs_n > 1)
@@ -2789,7 +2781,7 @@ compute_av_set_at_bb_end (insn_t insn, ilist_t p, int ws)
/* Check liveness restrictions via hard way when there are more than
two successors. */
if (sinfo->succs_ok_n > 2)
- FOR_EACH_VEC_ELT (rtx, sinfo->succs_ok, is, succ)
+ FOR_EACH_VEC_ELT (sinfo->succs_ok, is, succ)
{
basic_block succ_bb = BLOCK_FOR_INSN (succ);
@@ -2800,7 +2792,7 @@ compute_av_set_at_bb_end (insn_t insn, ilist_t p, int ws)
/* Finally, check liveness restrictions on paths leaving the region. */
if (sinfo->all_succs_n > sinfo->succs_ok_n)
- FOR_EACH_VEC_ELT (rtx, sinfo->succs_other, is, succ)
+ FOR_EACH_VEC_ELT (sinfo->succs_other, is, succ)
mark_unavailable_targets
(av1, NULL, BB_LV_SET (BLOCK_FOR_INSN (succ)));
@@ -3577,13 +3569,10 @@ vinsn_vec_has_expr_p (vinsn_vec_t vinsn_vec, expr_t expr)
of expr taken from its history vector. */
for (i = 0, expr_vinsn = EXPR_VINSN (expr);
expr_vinsn;
- expr_vinsn = (i < VEC_length (expr_history_def,
- EXPR_HISTORY_OF_CHANGES (expr))
- ? VEC_index (expr_history_def,
- EXPR_HISTORY_OF_CHANGES (expr),
- i++).old_expr_vinsn
+ expr_vinsn = (i < EXPR_HISTORY_OF_CHANGES (expr).length ()
+ ? EXPR_HISTORY_OF_CHANGES (expr)[i++].old_expr_vinsn
: NULL))
- FOR_EACH_VEC_ELT (vinsn_t, vinsn_vec, n, vinsn)
+ FOR_EACH_VEC_ELT (vinsn_vec, n, vinsn)
if (VINSN_SEPARABLE_P (vinsn))
{
if (vinsn_equal_p (vinsn, expr_vinsn))
@@ -3651,15 +3640,15 @@ av_set_could_be_blocked_by_bookkeeping_p (av_set_t orig_ops, void *static_params
static void
vinsn_vec_clear (vinsn_vec_t *vinsn_vec)
{
- unsigned len = VEC_length (vinsn_t, *vinsn_vec);
+ unsigned len = vinsn_vec->length ();
if (len > 0)
{
vinsn_t vinsn;
int n;
- FOR_EACH_VEC_ELT (vinsn_t, *vinsn_vec, n, vinsn)
+ FOR_EACH_VEC_ELT (*vinsn_vec, n, vinsn)
vinsn_detach (vinsn);
- VEC_block_remove (vinsn_t, *vinsn_vec, 0, len);
+ vinsn_vec->block_remove (0, len);
}
}
@@ -3668,15 +3657,14 @@ static void
vinsn_vec_add (vinsn_vec_t *vinsn_vec, expr_t expr)
{
vinsn_attach (EXPR_VINSN (expr));
- VEC_safe_push (vinsn_t, heap, *vinsn_vec, EXPR_VINSN (expr));
+ vinsn_vec->safe_push (EXPR_VINSN (expr));
}
/* Free the vector representing blocked expressions. */
static void
-vinsn_vec_free (vinsn_vec_t *vinsn_vec)
+vinsn_vec_free (vinsn_vec_t &vinsn_vec)
{
- if (*vinsn_vec)
- VEC_free (vinsn_t, heap, *vinsn_vec);
+ vinsn_vec.release ();
}
/* Increase EXPR_PRIORITY_ADJ for INSN by AMOUNT. */
@@ -3713,15 +3701,15 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
return false;
/* Empty vector from the previous stuff. */
- if (VEC_length (expr_t, vec_av_set) > 0)
- VEC_block_remove (expr_t, vec_av_set, 0, VEC_length (expr_t, vec_av_set));
+ if (vec_av_set.length () > 0)
+ vec_av_set.block_remove (0, vec_av_set.length ());
/* Turn the set into a vector for sorting and call sel_target_adjust_priority
for each insn. */
- gcc_assert (VEC_empty (expr_t, vec_av_set));
+ gcc_assert (vec_av_set.is_empty ());
FOR_EACH_EXPR (expr, si, av)
{
- VEC_safe_push (expr_t, heap, vec_av_set, expr);
+ vec_av_set.safe_push (expr);
gcc_assert (EXPR_PRIORITY_ADJ (expr) == 0 || *pneed_stall);
@@ -3730,7 +3718,7 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
}
/* Sort the vector. */
- VEC_qsort (expr_t, vec_av_set, sel_rank_for_schedule);
+ vec_av_set.qsort (sel_rank_for_schedule);
/* We record maximal priority of insns in av set for current instruction
group. */
@@ -3738,11 +3726,11 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
av_max_prio = est_ticks_till_branch = INT_MIN;
/* Filter out inappropriate expressions. Loop's direction is reversed to
- visit "best" instructions first. We assume that VEC_unordered_remove
+ visit "best" instructions first. We assume that vec::unordered_remove
moves last element in place of one being deleted. */
- for (n = VEC_length (expr_t, vec_av_set) - 1, stalled = 0; n >= 0; n--)
+ for (n = vec_av_set.length () - 1, stalled = 0; n >= 0; n--)
{
- expr_t expr = VEC_index (expr_t, vec_av_set, n);
+ expr_t expr = vec_av_set[n];
insn_t insn = EXPR_INSN_RTX (expr);
signed char target_available;
bool is_orig_reg_p = true;
@@ -3751,7 +3739,7 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
/* Don't allow any insns other than from SCHED_GROUP if we have one. */
if (FENCE_SCHED_NEXT (fence) && insn != FENCE_SCHED_NEXT (fence))
{
- VEC_unordered_remove (expr_t, vec_av_set, n);
+ vec_av_set.unordered_remove (n);
continue;
}
@@ -3775,7 +3763,7 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
we have to recompute the set of available registers for it. */
if (vinsn_vec_has_expr_p (vec_bookkeeping_blocked_vinsns, expr))
{
- VEC_unordered_remove (expr_t, vec_av_set, n);
+ vec_av_set.unordered_remove (n);
if (sched_verbose >= 4)
sel_print ("Expr %d is blocked by bookkeeping inserted earlier\n",
INSN_UID (insn));
@@ -3792,12 +3780,12 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
(target_available == false
&& !EXPR_SEPARABLE_P (expr))
/* Don't try to find a register for low-priority expression. */
- || (int) VEC_length (expr_t, vec_av_set) - 1 - n >= max_insns_to_rename
+ || (int) vec_av_set.length () - 1 - n >= max_insns_to_rename
/* ??? FIXME: Don't try to rename data speculation. */
|| (EXPR_SPEC_DONE_DS (expr) & BEGIN_DATA)
|| ! find_best_reg_for_expr (expr, bnds, &is_orig_reg_p))
{
- VEC_unordered_remove (expr_t, vec_av_set, n);
+ vec_av_set.unordered_remove (n);
if (sched_verbose >= 4)
sel_print ("Expr %d has no suitable target register\n",
INSN_UID (insn));
@@ -3817,7 +3805,7 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
if ((int) current_loop_nest->ninsns < 9)
{
- VEC_unordered_remove (expr_t, vec_av_set, n);
+ vec_av_set.unordered_remove (n);
if (sched_verbose >= 4)
sel_print ("Pipelining expr %d will likely cause stall\n",
INSN_UID (insn));
@@ -3828,7 +3816,7 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
< need_n_ticks_till_branch * issue_rate / 2
&& est_ticks_till_branch < need_n_ticks_till_branch)
{
- VEC_unordered_remove (expr_t, vec_av_set, n);
+ vec_av_set.unordered_remove (n);
if (sched_verbose >= 4)
sel_print ("Pipelining expr %d will likely cause stall\n",
INSN_UID (insn));
@@ -3843,7 +3831,7 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
{
stalled++;
min_need_stall = min_need_stall < 0 ? 1 : MIN (min_need_stall, 1);
- VEC_unordered_remove (expr_t, vec_av_set, n);
+ vec_av_set.unordered_remove (n);
if (sched_verbose >= 4)
sel_print ("Delaying speculation check %d until its first use\n",
INSN_UID (insn));
@@ -3870,7 +3858,7 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
min_need_stall = (min_need_stall < 0
? need_cycles
: MIN (min_need_stall, need_cycles));
- VEC_unordered_remove (expr_t, vec_av_set, n);
+ vec_av_set.unordered_remove (n);
if (sched_verbose >= 4)
sel_print ("Expr %d is not ready until cycle %d (cached)\n",
@@ -3908,7 +3896,7 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
? need_cycles
: MIN (min_need_stall, need_cycles));
- VEC_unordered_remove (expr_t, vec_av_set, n);
+ vec_av_set.unordered_remove (n);
if (sched_verbose >= 4)
sel_print ("Expr %d is not ready yet until cycle %d\n",
@@ -3933,7 +3921,7 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
if (min_need_stall < 0)
min_need_stall = 0;
- if (VEC_empty (expr_t, vec_av_set))
+ if (vec_av_set.is_empty ())
{
/* We need to set *pneed_stall here, because later we skip this code
when ready list is empty. */
@@ -3944,14 +3932,14 @@ fill_vec_av_set (av_set_t av, blist_t bnds, fence_t fence,
gcc_assert (min_need_stall == 0);
/* Sort the vector. */
- VEC_qsort (expr_t, vec_av_set, sel_rank_for_schedule);
+ vec_av_set.qsort (sel_rank_for_schedule);
if (sched_verbose >= 4)
{
sel_print ("Total ready exprs: %d, stalled: %d\n",
- VEC_length (expr_t, vec_av_set), stalled);
- sel_print ("Sorted av set (%d): ", VEC_length (expr_t, vec_av_set));
- FOR_EACH_VEC_ELT (expr_t, vec_av_set, n, expr)
+ vec_av_set.length (), stalled);
+ sel_print ("Sorted av set (%d): ", vec_av_set.length ());
+ FOR_EACH_VEC_ELT (vec_av_set, n, expr)
dump_expr (expr);
sel_print ("\n");
}
@@ -3969,7 +3957,7 @@ convert_vec_av_set_to_ready (void)
expr_t expr;
/* Allocate and fill the ready list from the sorted vector. */
- ready.n_ready = VEC_length (expr_t, vec_av_set);
+ ready.n_ready = vec_av_set.length ();
ready.first = ready.n_ready - 1;
gcc_assert (ready.n_ready > 0);
@@ -3980,7 +3968,7 @@ convert_vec_av_set_to_ready (void)
sched_extend_ready_list (ready.n_ready);
}
- FOR_EACH_VEC_ELT (expr_t, vec_av_set, n, expr)
+ FOR_EACH_VEC_ELT (vec_av_set, n, expr)
{
vinsn_t vi = EXPR_VINSN (expr);
insn_t insn = VINSN_INSN_RTX (vi);
@@ -4126,7 +4114,7 @@ invoke_reorder_hooks (fence_t fence)
{
int i, j, n;
rtx *arr = ready.vec;
- expr_t *vec = VEC_address (expr_t, vec_av_set);
+ expr_t *vec = vec_av_set.address ();
for (i = 0, n = ready.n_ready; i < n; i++)
if (EXPR_INSN_RTX (vec[i]) != arr[i])
@@ -4159,7 +4147,7 @@ find_expr_for_ready (int index, bool follow_ready_element)
real_index = follow_ready_element ? ready.first - index : index;
- expr = VEC_index (expr_t, vec_av_set, real_index);
+ expr = vec_av_set[real_index];
gcc_assert (ready.vec[real_index] == EXPR_INSN_RTX (expr));
return expr;
@@ -4994,16 +4982,15 @@ remove_temp_moveop_nops (bool full_tidying)
int i;
insn_t insn;
- FOR_EACH_VEC_ELT (insn_t, vec_temp_moveop_nops, i, insn)
+ FOR_EACH_VEC_ELT (vec_temp_moveop_nops, i, insn)
{
gcc_assert (INSN_NOP_P (insn));
return_nop_to_pool (insn, full_tidying);
}
/* Empty the vector. */
- if (VEC_length (insn_t, vec_temp_moveop_nops) > 0)
- VEC_block_remove (insn_t, vec_temp_moveop_nops, 0,
- VEC_length (insn_t, vec_temp_moveop_nops));
+ if (vec_temp_moveop_nops.length () > 0)
+ vec_temp_moveop_nops.block_remove (0, vec_temp_moveop_nops.length ());
}
/* Records the maximal UID before moving up an instruction. Used for
@@ -5336,7 +5323,7 @@ update_fence_and_insn (fence_t fence, insn_t insn, int need_stall)
/* First, reflect that something is scheduled on this fence. */
asm_p = advance_state_on_fence (fence, insn);
FENCE_LAST_SCHEDULED_INSN (fence) = insn;
- VEC_safe_push (rtx, gc, FENCE_EXECUTING_INSNS (fence), insn);
+ vec_safe_push (FENCE_EXECUTING_INSNS (fence), insn);
if (SCHED_GROUP_P (insn))
{
FENCE_SCHED_NEXT (fence) = INSN_SCHED_NEXT (insn);
@@ -5983,7 +5970,7 @@ remove_insn_from_stream (rtx insn, bool only_disconnect)
{
insn_t nop = get_nop_from_pool (insn);
gcc_assert (INSN_NOP_P (nop));
- VEC_safe_push (insn_t, heap, vec_temp_moveop_nops, nop);
+ vec_temp_moveop_nops.safe_push (nop);
}
sel_remove_insn (insn, only_disconnect, false);
@@ -6876,10 +6863,10 @@ sel_region_init (int rgn)
if (current_region_empty_p ())
return true;
- bbs = VEC_alloc (basic_block, heap, current_nr_blocks);
+ bbs.create (current_nr_blocks);
for (i = 0; i < current_nr_blocks; i++)
- VEC_quick_push (basic_block, bbs, BASIC_BLOCK (BB_TO_BLOCK (i)));
+ bbs.quick_push (BASIC_BLOCK (BB_TO_BLOCK (i)));
sel_init_bbs (bbs);
@@ -6905,7 +6892,7 @@ sel_region_init (int rgn)
sel_setup_sched_infos ();
sel_init_global_and_expr (bbs);
- VEC_free (basic_block, heap, bbs);
+ bbs.release ();
blocks_to_reschedule = BITMAP_ALLOC (NULL);
@@ -7253,13 +7240,12 @@ sel_region_finish (bool reset_sched_cycles_p)
free_nop_pool ();
/* Free the vectors. */
- if (vec_av_set)
- VEC_free (expr_t, heap, vec_av_set);
+ vec_av_set.release ();
BITMAP_FREE (current_copies);
BITMAP_FREE (current_originators);
BITMAP_FREE (code_motion_visited_blocks);
- vinsn_vec_free (&vec_bookkeeping_blocked_vinsns);
- vinsn_vec_free (&vec_target_unavailable_vinsns);
+ vinsn_vec_free (vec_bookkeeping_blocked_vinsns);
+ vinsn_vec_free (vec_target_unavailable_vinsns);
/* If LV_SET of the region head should be updated, do it now because
there will be no other chance. */
@@ -7289,7 +7275,7 @@ sel_region_finish (bool reset_sched_cycles_p)
finish_deps_global ();
sched_finish_luids ();
- VEC_free (haifa_deps_insn_data_def, heap, h_d_i_d);
+ h_d_i_d.release ();
sel_finish_bbs ();
BITMAP_FREE (blocks_to_reschedule);
diff --git a/gcc/sese.c b/gcc/sese.c
index bfb797150ba..47578e82a6e 100644
--- a/gcc/sese.c
+++ b/gcc/sese.c
@@ -142,7 +142,7 @@ sese_record_loop (sese region, loop_p loop)
return;
bitmap_set_bit (SESE_LOOPS (region), loop->num);
- VEC_safe_push (loop_p, heap, SESE_LOOP_NEST (region), loop);
+ SESE_LOOP_NEST (region).safe_push (loop);
}
/* Build the loop nests contained in REGION. Returns true when the
@@ -169,16 +169,16 @@ build_sese_loop_nests (sese region)
/* Make sure that the loops in the SESE_LOOP_NEST are ordered. It
can be the case that an inner loop is inserted before an outer
loop. To avoid this, semi-sort once. */
- FOR_EACH_VEC_ELT (loop_p, SESE_LOOP_NEST (region), i, loop0)
+ FOR_EACH_VEC_ELT (SESE_LOOP_NEST (region), i, loop0)
{
- if (VEC_length (loop_p, SESE_LOOP_NEST (region)) == i + 1)
+ if (SESE_LOOP_NEST (region).length () == i + 1)
break;
- loop1 = VEC_index (loop_p, SESE_LOOP_NEST (region), i + 1);
+ loop1 = SESE_LOOP_NEST (region)[i + 1];
if (loop0->num > loop1->num)
{
- VEC_replace (loop_p, SESE_LOOP_NEST (region), i, loop1);
- VEC_replace (loop_p, SESE_LOOP_NEST (region), i + 1, loop0);
+ SESE_LOOP_NEST (region)[i] = loop1;
+ SESE_LOOP_NEST (region)[i + 1] = loop0;
}
}
}
@@ -319,9 +319,9 @@ new_sese (edge entry, edge exit)
SESE_ENTRY (region) = entry;
SESE_EXIT (region) = exit;
SESE_LOOPS (region) = BITMAP_ALLOC (NULL);
- SESE_LOOP_NEST (region) = VEC_alloc (loop_p, heap, 3);
+ SESE_LOOP_NEST (region).create (3);
SESE_ADD_PARAMS (region) = true;
- SESE_PARAMS (region) = VEC_alloc (tree, heap, 3);
+ SESE_PARAMS (region).create (3);
return region;
}
@@ -334,8 +334,8 @@ free_sese (sese region)
if (SESE_LOOPS (region))
SESE_LOOPS (region) = BITMAP_ALLOC (NULL);
- VEC_free (tree, heap, SESE_PARAMS (region));
- VEC_free (loop_p, heap, SESE_LOOP_NEST (region));
+ SESE_PARAMS (region).release ();
+ SESE_LOOP_NEST (region).release ();
XDELETE (region);
}
@@ -461,7 +461,7 @@ set_rename (htab_t rename_map, tree old_name, tree expr)
static bool
rename_uses (gimple copy, htab_t rename_map, gimple_stmt_iterator *gsi_tgt,
- sese region, loop_p loop, VEC (tree, heap) *iv_map,
+ sese region, loop_p loop, vec<tree> iv_map,
bool *gloog_error)
{
use_operand_p use_p;
@@ -567,7 +567,7 @@ rename_uses (gimple copy, htab_t rename_map, gimple_stmt_iterator *gsi_tgt,
static void
graphite_copy_stmts_from_block (basic_block bb, basic_block new_bb,
htab_t rename_map,
- VEC (tree, heap) *iv_map, sese region,
+ vec<tree> iv_map, sese region,
bool *gloog_error)
{
gimple_stmt_iterator gsi, gsi_tgt;
@@ -630,7 +630,7 @@ graphite_copy_stmts_from_block (basic_block bb, basic_block new_bb,
edge
copy_bb_and_scalar_dependences (basic_block bb, sese region,
- edge next_e, VEC (tree, heap) *iv_map,
+ edge next_e, vec<tree> iv_map,
bool *gloog_error)
{
basic_block new_bb = split_edge (next_e);
diff --git a/gcc/sese.h b/gcc/sese.h
index 7c6a9c68896..2f4a7cbe4ce 100644
--- a/gcc/sese.h
+++ b/gcc/sese.h
@@ -31,11 +31,11 @@ typedef struct sese_s
edge entry, exit;
/* Parameters used within the SCOP. */
- VEC (tree, heap) *params;
+ vec<tree> params;
/* Loops completely contained in the SCOP. */
bitmap loops;
- VEC (loop_p, heap) *loop_nest;
+ vec<loop_p> loop_nest;
/* Are we allowed to add more params? This is for debugging purpose. We
can only add new params before generating the bb domains, otherwise they
@@ -57,7 +57,7 @@ extern void free_sese (sese);
extern void sese_insert_phis_for_liveouts (sese, basic_block, edge, edge);
extern void build_sese_loop_nests (sese);
extern edge copy_bb_and_scalar_dependences (basic_block, sese, edge,
- VEC (tree, heap) *, bool *);
+ vec<tree> , bool *);
extern struct loop *outermost_loop_in_sese (sese, basic_block);
extern void insert_loop_close_phis (htab_t, loop_p);
extern void insert_guard_phis (basic_block, edge, edge, htab_t, htab_t);
@@ -76,7 +76,7 @@ sese_contains_loop (sese sese, struct loop *loop)
static inline unsigned
sese_nb_params (sese region)
{
- return VEC_length (tree, SESE_PARAMS (region));
+ return SESE_PARAMS (region).length ();
}
/* Checks whether BB is contained in the region delimited by ENTRY and
@@ -259,8 +259,6 @@ typedef struct rename_map_elt_s
tree old_name, expr;
} *rename_map_elt;
-DEF_VEC_P(rename_map_elt);
-DEF_VEC_ALLOC_P (rename_map_elt, heap);
extern void debug_rename_map (htab_t);
extern hashval_t rename_map_elt_info (const void *);
@@ -341,9 +339,9 @@ typedef struct gimple_bb
corresponding element in CONDITION_CASES is not NULL_TREE. For a
SWITCH_EXPR the corresponding element in CONDITION_CASES is a
CASE_LABEL_EXPR. */
- VEC (gimple, heap) *conditions;
- VEC (gimple, heap) *condition_cases;
- VEC (data_reference_p, heap) *data_refs;
+ vec<gimple> conditions;
+ vec<gimple> condition_cases;
+ vec<data_reference_p> data_refs;
} *gimple_bb_p;
#define GBB_BB(GBB) (GBB)->bb
diff --git a/gcc/statistics.h b/gcc/statistics.h
index 6bb8166165c..add37a47a17 100644
--- a/gcc/statistics.h
+++ b/gcc/statistics.h
@@ -39,10 +39,11 @@
#define FINAL_PASS_MEM_STAT , ALONE_FINAL_PASS_MEM_STAT
#define MEM_STAT_INFO , ALONE_MEM_STAT_INFO
#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 8)
-#define CXX_MEM_STAT_INFO , const char * _loc_name = __builtin_FILE (), int _loc_line = __builtin_LINE (), const char * _loc_function = __builtin_FUNCTION ()
+#define ALONE_CXX_MEM_STAT_INFO const char * _loc_name = __builtin_FILE (), int _loc_line = __builtin_LINE (), const char * _loc_function = __builtin_FUNCTION ()
#else
-#define CXX_MEM_STAT_INFO , const char * _loc_name = __FILE__, int _loc_line = __LINE__, const char * _loc_function = NULL
+#define ALONE_CXX_MEM_STAT_INFO const char * _loc_name = __FILE__, int _loc_line = __LINE__, const char * _loc_function = NULL
#endif
+#define CXX_MEM_STAT_INFO , ALONE_CXX_MEM_STAT_INFO
#else
#define ALONE_MEM_STAT_DECL void
#define ALONE_FINAL_MEM_STAT_DECL GCC_MEM_STAT_ARGUMENTS
@@ -54,6 +55,7 @@
#define PASS_MEM_STAT
#define FINAL_PASS_MEM_STAT , ALONE_FINAL_PASS_MEM_STAT
#define MEM_STAT_INFO ALONE_MEM_STAT_INFO
+#define ALONE_CXX_MEM_STAT_INFO
#define CXX_MEM_STAT_INFO
#endif
diff --git a/gcc/stmt.c b/gcc/stmt.c
index b84fa670e70..700dbb508ce 100644
--- a/gcc/stmt.c
+++ b/gcc/stmt.c
@@ -2224,12 +2224,12 @@ expand_case (gimple stmt)
void
expand_sjlj_dispatch_table (rtx dispatch_index,
- VEC(tree,heap) *dispatch_table)
+ vec<tree> dispatch_table)
{
tree index_type = integer_type_node;
enum machine_mode index_mode = TYPE_MODE (index_type);
- int ncases = VEC_length (tree, dispatch_table);
+ int ncases = dispatch_table.length ();
do_pending_stack_adjust ();
rtx before_case = get_last_insn ();
@@ -2239,7 +2239,7 @@ expand_sjlj_dispatch_table (rtx dispatch_index,
and seems to be a reasonable compromise between the "old way"
of expanding as a decision tree or dispatch table vs. the "new
way" with decrement chain or dispatch table. */
- if (VEC_length (tree, dispatch_table) <= 5
+ if (dispatch_table.length () <= 5
|| (!HAVE_casesi && !HAVE_tablejump)
|| !flag_jump_tables)
{
@@ -2261,7 +2261,7 @@ expand_sjlj_dispatch_table (rtx dispatch_index,
rtx zero = CONST0_RTX (index_mode);
for (int i = 0; i < ncases; i++)
{
- tree elt = VEC_index (tree, dispatch_table, i);
+ tree elt = dispatch_table[i];
rtx lab = label_rtx (CASE_LABEL (elt));
do_jump_if_equal (index_mode, index, zero, lab, 0, -1);
force_expand_binop (index_mode, sub_optab,
@@ -2278,13 +2278,13 @@ expand_sjlj_dispatch_table (rtx dispatch_index,
ncases);
tree index_expr = make_tree (index_type, dispatch_index);
tree minval = build_int_cst (index_type, 0);
- tree maxval = CASE_LOW (VEC_last (tree, dispatch_table));
+ tree maxval = CASE_LOW (dispatch_table.last ());
tree range = maxval;
rtx default_label = gen_label_rtx ();
for (int i = ncases - 1; i > 0; --i)
{
- tree elt = VEC_index (tree, dispatch_table, i);
+ tree elt = dispatch_table[i];
tree low = CASE_LOW (elt);
tree lab = CASE_LABEL (elt);
case_list = add_case_node (case_list, low, low, lab, 0, case_node_pool);
diff --git a/gcc/stor-layout.c b/gcc/stor-layout.c
index 674f88801bc..20c49f97041 100644
--- a/gcc/stor-layout.c
+++ b/gcc/stor-layout.c
@@ -98,7 +98,7 @@ variable_size (tree size)
}
/* An array of functions used for self-referential size computation. */
-static GTY(()) VEC (tree, gc) *size_functions;
+static GTY(()) vec<tree, va_gc> *size_functions;
/* Look inside EXPR into simple arithmetic operations involving constants.
Return the outermost non-arithmetic or non-constant node. */
@@ -189,12 +189,12 @@ static tree
self_referential_size (tree size)
{
static unsigned HOST_WIDE_INT fnno = 0;
- VEC (tree, heap) *self_refs = NULL;
+ vec<tree> self_refs = vec<tree>();
tree param_type_list = NULL, param_decl_list = NULL;
tree t, ref, return_type, fntype, fnname, fndecl;
unsigned int i;
char buf[128];
- VEC(tree,gc) *args = NULL;
+ vec<tree, va_gc> *args = NULL;
/* Do not factor out simple operations. */
t = skip_simple_constant_arithmetic (size);
@@ -203,7 +203,7 @@ self_referential_size (tree size)
/* Collect the list of self-references in the expression. */
find_placeholder_in_expr (size, &self_refs);
- gcc_assert (VEC_length (tree, self_refs) > 0);
+ gcc_assert (self_refs.length () > 0);
/* Obtain a private copy of the expression. */
t = size;
@@ -213,8 +213,8 @@ self_referential_size (tree size)
/* Build the parameter and argument lists in parallel; also
substitute the former for the latter in the expression. */
- args = VEC_alloc (tree, gc, VEC_length (tree, self_refs));
- FOR_EACH_VEC_ELT (tree, self_refs, i, ref)
+ vec_alloc (args, self_refs.length ());
+ FOR_EACH_VEC_ELT (self_refs, i, ref)
{
tree subst, param_name, param_type, param_decl;
@@ -249,10 +249,10 @@ self_referential_size (tree size)
param_type_list = tree_cons (NULL_TREE, param_type, param_type_list);
param_decl_list = chainon (param_decl, param_decl_list);
- VEC_quick_push (tree, args, ref);
+ args->quick_push (ref);
}
- VEC_free (tree, heap, self_refs);
+ self_refs.release ();
/* Append 'void' to indicate that the number of parameters is fixed. */
param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
@@ -297,7 +297,7 @@ self_referential_size (tree size)
TREE_STATIC (fndecl) = 1;
/* Put it onto the list of size functions. */
- VEC_safe_push (tree, gc, size_functions, fndecl);
+ vec_safe_push (size_functions, fndecl);
/* Replace the original expression with a call to the size function. */
return build_call_expr_loc_vec (UNKNOWN_LOCATION, fndecl, args);
@@ -316,7 +316,7 @@ finalize_size_functions (void)
unsigned int i;
tree fndecl;
- for (i = 0; VEC_iterate(tree, size_functions, i, fndecl); i++)
+ for (i = 0; size_functions && size_functions->iterate (i, &fndecl); i++)
{
dump_function (TDI_original, fndecl);
gimplify_function_tree (fndecl);
@@ -324,7 +324,7 @@ finalize_size_functions (void)
cgraph_finalize_function (fndecl, false);
}
- VEC_free (tree, gc, size_functions);
+ vec_free (size_functions);
}
/* Return the machine mode to use for a nonscalar of SIZE bits. The
@@ -777,7 +777,7 @@ start_record_layout (tree t)
rli->offset = size_zero_node;
rli->bitpos = bitsize_zero_node;
rli->prev_field = 0;
- rli->pending_statics = NULL;
+ rli->pending_statics = 0;
rli->packed_maybe_necessary = 0;
rli->remaining_in_alignment = 0;
@@ -885,7 +885,7 @@ debug_rli (record_layout_info rli)
if (rli->packed_maybe_necessary)
fprintf (stderr, "packed may be necessary\n");
- if (!VEC_empty (tree, rli->pending_statics))
+ if (!vec_safe_is_empty (rli->pending_statics))
{
fprintf (stderr, "pending statics:\n");
debug_vec_tree (rli->pending_statics);
@@ -1099,7 +1099,7 @@ place_field (record_layout_info rli, tree field)
it *after* the record is laid out. */
if (TREE_CODE (field) == VAR_DECL)
{
- VEC_safe_push (tree, gc, rli->pending_statics, field);
+ vec_safe_push (rli->pending_statics, field);
return;
}
@@ -1988,13 +1988,13 @@ finish_record_layout (record_layout_info rli, int free_p)
/* Lay out any static members. This is done now because their type
may use the record's type. */
- while (!VEC_empty (tree, rli->pending_statics))
- layout_decl (VEC_pop (tree, rli->pending_statics), 0);
+ while (!vec_safe_is_empty (rli->pending_statics))
+ layout_decl (rli->pending_statics->pop (), 0);
/* Clean up. */
if (free_p)
{
- VEC_free (tree, gc, rli->pending_statics);
+ vec_free (rli->pending_statics);
free (rli);
}
}
diff --git a/gcc/store-motion.c b/gcc/store-motion.c
index 1cf883297f5..57793f209e7 100644
--- a/gcc/store-motion.c
+++ b/gcc/store-motion.c
@@ -54,7 +54,7 @@ along with GCC; see the file COPYING3. If not see
invalidate REG_EQUAL/REG_EQUIV notes for?).
- pattern_regs in st_expr should be a regset (on its own obstack).
- antic_stores and avail_stores should be VECs instead of lists.
- - store_motion_mems should be a VEC instead of a list.
+ - store_motion_mems should be a vec instead of a list.
- there should be an alloc pool for struct st_expr objects.
- investigate whether it is helpful to make the address of an st_expr
a cselib VALUE.
diff --git a/gcc/tlink.c b/gcc/tlink.c
index 4c3feca39d1..2ec0f876131 100644
--- a/gcc/tlink.c
+++ b/gcc/tlink.c
@@ -69,13 +69,11 @@ typedef struct file_hash_entry
} file;
typedef const char *str;
-DEF_VEC_P(str);
-DEF_VEC_ALLOC_P(str,heap);
typedef struct demangled_hash_entry
{
const char *key;
- VEC(str,heap) *mangled;
+ vec<str> mangled;
} demangled;
/* Hash and comparison functions for these hash tables. */
@@ -609,7 +607,7 @@ demangle_new_symbols (void)
continue;
dem = demangled_hash_lookup (p, true);
- VEC_safe_push (str, heap, dem->mangled, sym->key);
+ dem->mangled.safe_push (sym->key);
}
}
@@ -775,9 +773,9 @@ scan_linker_output (const char *fname)
on the next attempt we will switch all of them the other way
and that will cause it to succeed. */
int chosen = 0;
- int len = VEC_length (str, dem->mangled);
+ int len = dem->mangled.length ();
ok = true;
- FOR_EACH_VEC_ELT (str, dem->mangled, ix, s)
+ FOR_EACH_VEC_ELT (dem->mangled, ix, s)
{
sym = symbol_hash_lookup (s, false);
if (ix == 0)
diff --git a/gcc/toplev.c b/gcc/toplev.c
index 7c0cef58ae3..f7e20998069 100644
--- a/gcc/toplev.c
+++ b/gcc/toplev.c
@@ -502,16 +502,16 @@ check_global_declaration_1 (tree decl)
"%q+D defined but not used", decl);
}
-/* Issue appropriate warnings for the global declarations in VEC (of
+/* Issue appropriate warnings for the global declarations in V (of
which there are LEN). */
void
-check_global_declarations (tree *vec, int len)
+check_global_declarations (tree *v, int len)
{
int i;
for (i = 0; i < len; i++)
- check_global_declaration_1 (vec[i]);
+ check_global_declaration_1 (v[i]);
}
/* Emit debugging information for all global declarations in VEC. */
diff --git a/gcc/trans-mem.c b/gcc/trans-mem.c
index a7b4a9c0484..4edb98598ec 100644
--- a/gcc/trans-mem.c
+++ b/gcc/trans-mem.c
@@ -884,7 +884,7 @@ static htab_t tm_log;
/* Addresses to log with a save/restore sequence. These should be in
dominator order. */
-static VEC(tree,heap) *tm_log_save_addresses;
+static vec<tree> tm_log_save_addresses;
/* Map for an SSA_NAME originally pointing to a non aliased new piece
of memory (malloc, alloc, etc). */
@@ -947,7 +947,7 @@ static void
tm_log_free (void *p)
{
struct tm_log_entry *lp = (struct tm_log_entry *) p;
- VEC_free (gimple, heap, lp->stmts);
+ lp->stmts.release ();
free (lp);
}
@@ -957,7 +957,7 @@ tm_log_init (void)
{
tm_log = htab_create (10, tm_log_hash, tm_log_eq, tm_log_free);
tm_new_mem_hash = htab_create (5, struct_ptr_hash, struct_ptr_eq, free);
- tm_log_save_addresses = VEC_alloc (tree, heap, 5);
+ tm_log_save_addresses.create (5);
}
/* Free logging data structures. */
@@ -966,7 +966,7 @@ tm_log_delete (void)
{
htab_delete (tm_log);
htab_delete (tm_new_mem_hash);
- VEC_free (tree, heap, tm_log_save_addresses);
+ tm_log_save_addresses.release ();
}
/* Return true if MEM is a transaction invariant memory for the TM
@@ -1031,18 +1031,18 @@ tm_log_add (basic_block entry_block, tree addr, gimple stmt)
&& !TREE_ADDRESSABLE (type))
{
lp->save_var = create_tmp_reg (TREE_TYPE (lp->addr), "tm_save");
- lp->stmts = NULL;
+ lp->stmts.create (0);
lp->entry_block = entry_block;
/* Save addresses separately in dominator order so we don't
get confused by overlapping addresses in the save/restore
sequence. */
- VEC_safe_push (tree, heap, tm_log_save_addresses, lp->addr);
+ tm_log_save_addresses.safe_push (lp->addr);
}
else
{
/* Use the logging functions. */
- lp->stmts = VEC_alloc (gimple, heap, 5);
- VEC_quick_push (gimple, lp->stmts, stmt);
+ lp->stmts.create (5);
+ lp->stmts.quick_push (stmt);
lp->save_var = NULL;
}
}
@@ -1058,7 +1058,7 @@ tm_log_add (basic_block entry_block, tree addr, gimple stmt)
if (lp->save_var)
return;
- for (i = 0; VEC_iterate (gimple, lp->stmts, i, oldstmt); ++i)
+ for (i = 0; lp->stmts.iterate (i, &oldstmt); ++i)
{
if (stmt == oldstmt)
return;
@@ -1072,7 +1072,7 @@ tm_log_add (basic_block entry_block, tree addr, gimple stmt)
gimple_bb (oldstmt), gimple_bb (stmt)));
}
/* Store is on a different code path. */
- VEC_safe_push (gimple, heap, lp->stmts, stmt);
+ lp->stmts.safe_push (stmt);
}
}
@@ -1178,7 +1178,7 @@ tm_log_emit (void)
{
if (dump_file)
fprintf (dump_file, "DUMPING with logging functions\n");
- for (i = 0; VEC_iterate (gimple, lp->stmts, i, stmt); ++i)
+ for (i = 0; lp->stmts.iterate (i, &stmt); ++i)
tm_log_emit_stmt (lp->addr, stmt);
}
}
@@ -1195,9 +1195,9 @@ tm_log_emit_saves (basic_block entry_block, basic_block bb)
gimple stmt;
struct tm_log_entry l, *lp;
- for (i = 0; i < VEC_length (tree, tm_log_save_addresses); ++i)
+ for (i = 0; i < tm_log_save_addresses.length (); ++i)
{
- l.addr = VEC_index (tree, tm_log_save_addresses, i);
+ l.addr = tm_log_save_addresses[i];
lp = (struct tm_log_entry *) *htab_find_slot (tm_log, &l, NO_INSERT);
gcc_assert (lp->save_var != NULL);
@@ -1231,9 +1231,9 @@ tm_log_emit_restores (basic_block entry_block, basic_block bb)
gimple_stmt_iterator gsi;
gimple stmt;
- for (i = VEC_length (tree, tm_log_save_addresses) - 1; i >= 0; i--)
+ for (i = tm_log_save_addresses.length () - 1; i >= 0; i--)
{
- l.addr = VEC_index (tree, tm_log_save_addresses, i);
+ l.addr = tm_log_save_addresses[i];
lp = (struct tm_log_entry *) *htab_find_slot (tm_log, &l, NO_INSERT);
gcc_assert (lp->save_var != NULL);
@@ -1740,8 +1740,6 @@ struct tm_region
};
typedef struct tm_region *tm_region_p;
-DEF_VEC_P (tm_region_p);
-DEF_VEC_ALLOC_P (tm_region_p, heap);
/* True if there are pending edge statements to be committed for the
current function being scanned in the tmmark pass. */
@@ -1843,10 +1841,10 @@ tm_region_init (struct tm_region *region)
edge_iterator ei;
edge e;
basic_block bb;
- VEC(basic_block, heap) *queue = NULL;
+ vec<basic_block> queue = vec<basic_block>();
bitmap visited_blocks = BITMAP_ALLOC (NULL);
struct tm_region *old_region;
- VEC(tm_region_p, heap) *bb_regions = NULL;
+ vec<tm_region_p> bb_regions = vec<tm_region_p>();
all_tm_regions = region;
bb = single_succ (ENTRY_BLOCK_PTR);
@@ -1854,15 +1852,15 @@ tm_region_init (struct tm_region *region)
/* We could store this information in bb->aux, but we may get called
through get_all_tm_blocks() from another pass that may be already
using bb->aux. */
- VEC_safe_grow_cleared (tm_region_p, heap, bb_regions, last_basic_block);
+ bb_regions.safe_grow_cleared (last_basic_block);
- VEC_safe_push (basic_block, heap, queue, bb);
- VEC_replace (tm_region_p, bb_regions, bb->index, region);
+ queue.safe_push (bb);
+ bb_regions[bb->index] = region;
do
{
- bb = VEC_pop (basic_block, queue);
- region = VEC_index (tm_region_p, bb_regions, bb->index);
- VEC_replace (tm_region_p, bb_regions, bb->index, NULL);
+ bb = queue.pop ();
+ region = bb_regions[bb->index];
+ bb_regions[bb->index] = NULL;
/* Record exit and irrevocable blocks. */
region = tm_region_init_1 (region, bb);
@@ -1878,21 +1876,21 @@ tm_region_init (struct tm_region *region)
if (!bitmap_bit_p (visited_blocks, e->dest->index))
{
bitmap_set_bit (visited_blocks, e->dest->index);
- VEC_safe_push (basic_block, heap, queue, e->dest);
+ queue.safe_push (e->dest);
/* If the current block started a new region, make sure that only
the entry block of the new region is associated with this region.
Other successors are still part of the old region. */
if (old_region != region && e->dest != region->entry_block)
- VEC_replace (tm_region_p, bb_regions, e->dest->index, old_region);
+ bb_regions[e->dest->index] = old_region;
else
- VEC_replace (tm_region_p, bb_regions, e->dest->index, region);
+ bb_regions[e->dest->index] = region;
}
}
- while (!VEC_empty (basic_block, queue));
- VEC_free (basic_block, heap, queue);
+ while (!queue.is_empty ());
+ queue.release ();
BITMAP_FREE (visited_blocks);
- VEC_free (tm_region_p, heap, bb_regions);
+ bb_regions.release ();
}
/* The "gate" function for all transactional memory expansion and optimization
@@ -2097,7 +2095,7 @@ build_tm_store (location_t loc, tree lhs, tree rhs, gimple_stmt_iterator *gsi)
if (TREE_CODE (rhs) == CONSTRUCTOR)
{
/* Handle the easy initialization to zero. */
- if (CONSTRUCTOR_ELTS (rhs) == 0)
+ if (!CONSTRUCTOR_ELTS (rhs))
rhs = build_int_cst (simple_type, 0);
else
{
@@ -2398,26 +2396,26 @@ expand_block_tm (struct tm_region *region, basic_block bb)
STOP_AT_IRREVOCABLE_P is true if caller is uninterested in blocks
following a TM_IRREVOCABLE call. */
-static VEC (basic_block, heap) *
+static vec<basic_block>
get_tm_region_blocks (basic_block entry_block,
bitmap exit_blocks,
bitmap irr_blocks,
bitmap all_region_blocks,
bool stop_at_irrevocable_p)
{
- VEC(basic_block, heap) *bbs = NULL;
+ vec<basic_block> bbs = vec<basic_block>();
unsigned i;
edge e;
edge_iterator ei;
bitmap visited_blocks = BITMAP_ALLOC (NULL);
i = 0;
- VEC_safe_push (basic_block, heap, bbs, entry_block);
+ bbs.safe_push (entry_block);
bitmap_set_bit (visited_blocks, entry_block->index);
do
{
- basic_block bb = VEC_index (basic_block, bbs, i++);
+ basic_block bb = bbs[i++];
if (exit_blocks &&
bitmap_bit_p (exit_blocks, bb->index))
@@ -2432,10 +2430,10 @@ get_tm_region_blocks (basic_block entry_block,
if (!bitmap_bit_p (visited_blocks, e->dest->index))
{
bitmap_set_bit (visited_blocks, e->dest->index);
- VEC_safe_push (basic_block, heap, bbs, e->dest);
+ bbs.safe_push (e->dest);
}
}
- while (i < VEC_length (basic_block, bbs));
+ while (i < bbs.length ());
if (all_region_blocks)
bitmap_ior_into (all_region_blocks, visited_blocks);
@@ -2448,8 +2446,8 @@ get_tm_region_blocks (basic_block entry_block,
static void *
collect_bb2reg (struct tm_region *region, void *data)
{
- VEC(tm_region_p, heap) *bb2reg = (VEC(tm_region_p, heap) *) data;
- VEC (basic_block, heap) *queue;
+ vec<tm_region_p> *bb2reg = (vec<tm_region_p> *) data;
+ vec<basic_block> queue;
unsigned int i;
basic_block bb;
@@ -2461,10 +2459,10 @@ collect_bb2reg (struct tm_region *region, void *data)
// We expect expand_region to perform a post-order traversal of the region
// tree. Therefore the last region seen for any bb is the innermost.
- FOR_EACH_VEC_ELT (basic_block, queue, i, bb)
- VEC_replace (tm_region_p, bb2reg, bb->index, region);
+ FOR_EACH_VEC_ELT (queue, i, bb)
+ (*bb2reg)[bb->index] = region;
- VEC_free (basic_block, heap, queue);
+ queue.release ();
return NULL;
}
@@ -2490,15 +2488,15 @@ collect_bb2reg (struct tm_region *region, void *data)
// ??? There is currently a hack inside tree-ssa-pre.c to work around the
// only known instance of this block sharing.
-static VEC(tm_region_p, heap) *
+static vec<tm_region_p>
get_bb_regions_instrumented (void)
{
unsigned n = last_basic_block;
- VEC(tm_region_p, heap) *ret;
+ vec<tm_region_p> ret;
- ret = VEC_alloc (tm_region_p, heap, n);
- VEC_safe_grow_cleared (tm_region_p, heap, ret, n);
- expand_regions (all_tm_regions, collect_bb2reg, ret);
+ ret.create (n);
+ ret.safe_grow_cleared (n);
+ expand_regions (all_tm_regions, collect_bb2reg, &ret);
return ret;
}
@@ -2510,7 +2508,7 @@ void
compute_transaction_bits (void)
{
struct tm_region *region;
- VEC (basic_block, heap) *queue;
+ vec<basic_block> queue;
unsigned int i;
basic_block bb;
@@ -2528,9 +2526,9 @@ compute_transaction_bits (void)
region->irr_blocks,
NULL,
/*stop_at_irr_p=*/true);
- for (i = 0; VEC_iterate (basic_block, queue, i, bb); ++i)
+ for (i = 0; queue.iterate (i, &bb); ++i)
bb->flags |= BB_IN_TRANSACTION;
- VEC_free (basic_block, heap, queue);
+ queue.release ();
}
if (all_tm_regions)
@@ -2603,7 +2601,7 @@ expand_transaction (struct tm_region *region, void *data ATTRIBUTE_UNUSED)
}
// Generate log saves.
- if (!VEC_empty (tree, tm_log_save_addresses))
+ if (!tm_log_save_addresses.is_empty ())
tm_log_emit_saves (region->entry_block, transaction_bb);
// In the beginning, we've no tests to perform on transaction restart.
@@ -2612,7 +2610,7 @@ expand_transaction (struct tm_region *region, void *data ATTRIBUTE_UNUSED)
region->restart_block = region->entry_block;
// Generate log restores.
- if (!VEC_empty (tree, tm_log_save_addresses))
+ if (!tm_log_save_addresses.is_empty ())
{
basic_block test_bb = create_empty_bb (transaction_bb);
basic_block code_bb = create_empty_bb (test_bb);
@@ -2830,13 +2828,13 @@ execute_tm_mark (void)
tm_log_init ();
- VEC(tm_region_p, heap) *bb_regions = get_bb_regions_instrumented ();
+ vec<tm_region_p> bb_regions = get_bb_regions_instrumented ();
struct tm_region *r;
unsigned i;
// Expand memory operations into calls into the runtime.
// This collects log entries as well.
- FOR_EACH_VEC_ELT (tm_region_p, bb_regions, i, r)
+ FOR_EACH_VEC_ELT (bb_regions, i, r)
if (r != NULL)
expand_block_tm (r, BASIC_BLOCK (i));
@@ -3000,15 +2998,15 @@ expand_block_edges (struct tm_region *const region, basic_block bb)
static unsigned int
execute_tm_edges (void)
{
- VEC(tm_region_p, heap) *bb_regions = get_bb_regions_instrumented ();
+ vec<tm_region_p> bb_regions = get_bb_regions_instrumented ();
struct tm_region *r;
unsigned i;
- FOR_EACH_VEC_ELT (tm_region_p, bb_regions, i, r)
+ FOR_EACH_VEC_ELT (bb_regions, i, r)
if (r != NULL)
expand_block_edges (r, BASIC_BLOCK (i));
- VEC_free (tm_region_p, heap, bb_regions);
+ bb_regions.release ();
/* We've got to release the dominance info now, to indicate that it
must be rebuilt completely. Otherwise we'll crash trying to update
@@ -3265,12 +3263,12 @@ dump_tm_memopt_set (const char *set_name, bitmap bits)
/* Prettily dump all of the memopt sets in BLOCKS. */
static void
-dump_tm_memopt_sets (VEC (basic_block, heap) *blocks)
+dump_tm_memopt_sets (vec<basic_block> blocks)
{
size_t i;
basic_block bb;
- for (i = 0; VEC_iterate (basic_block, blocks, i, bb); ++i)
+ for (i = 0; blocks.iterate (i, &bb); ++i)
{
fprintf (dump_file, "------------BB %d---------\n", bb->index);
dump_tm_memopt_set ("STORE_LOCAL", STORE_LOCAL (bb));
@@ -3368,7 +3366,7 @@ tm_memopt_compute_antin (basic_block bb)
static void
tm_memopt_compute_available (struct tm_region *region,
- VEC (basic_block, heap) *blocks)
+ vec<basic_block> blocks)
{
edge e;
basic_block *worklist, *qin, *qout, *qend, bb;
@@ -3379,12 +3377,12 @@ tm_memopt_compute_available (struct tm_region *region,
/* Allocate a worklist array/queue. Entries are only added to the
list if they were not already on the list. So the size is
bounded by the number of basic blocks in the region. */
- qlen = VEC_length (basic_block, blocks) - 1;
+ qlen = blocks.length () - 1;
qin = qout = worklist =
XNEWVEC (basic_block, qlen);
/* Put every block in the region on the worklist. */
- for (i = 0; VEC_iterate (basic_block, blocks, i, bb); ++i)
+ for (i = 0; blocks.iterate (i, &bb); ++i)
{
/* Seed AVAIL_OUT with the LOCAL set. */
bitmap_ior_into (STORE_AVAIL_OUT (bb), STORE_LOCAL (bb));
@@ -3456,7 +3454,7 @@ tm_memopt_compute_available (struct tm_region *region,
static void
tm_memopt_compute_antic (struct tm_region *region,
- VEC (basic_block, heap) *blocks)
+ vec<basic_block> blocks)
{
edge e;
basic_block *worklist, *qin, *qout, *qend, bb;
@@ -3467,12 +3465,11 @@ tm_memopt_compute_antic (struct tm_region *region,
/* Allocate a worklist array/queue. Entries are only added to the
list if they were not already on the list. So the size is
bounded by the number of basic blocks in the region. */
- qin = qout = worklist =
- XNEWVEC (basic_block, VEC_length (basic_block, blocks));
+ qin = qout = worklist = XNEWVEC (basic_block, blocks.length ());
- for (qlen = 0, i = VEC_length (basic_block, blocks) - 1; i >= 0; --i)
+ for (qlen = 0, i = blocks.length () - 1; i >= 0; --i)
{
- bb = VEC_index (basic_block, blocks, i);
+ bb = blocks[i];
/* Seed ANTIC_OUT with the LOCAL set. */
bitmap_ior_into (STORE_ANTIC_OUT (bb), STORE_LOCAL (bb));
@@ -3586,13 +3583,13 @@ tm_memopt_transform_stmt (unsigned int offset,
basic blocks in BLOCKS. */
static void
-tm_memopt_transform_blocks (VEC (basic_block, heap) *blocks)
+tm_memopt_transform_blocks (vec<basic_block> blocks)
{
size_t i;
basic_block bb;
gimple_stmt_iterator gsi;
- for (i = 0; VEC_iterate (basic_block, blocks, i, bb); ++i)
+ for (i = 0; blocks.iterate (i, &bb); ++i)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
@@ -3655,24 +3652,24 @@ tm_memopt_init_sets (void)
/* Free sets computed for each BB. */
static void
-tm_memopt_free_sets (VEC (basic_block, heap) *blocks)
+tm_memopt_free_sets (vec<basic_block> blocks)
{
size_t i;
basic_block bb;
- for (i = 0; VEC_iterate (basic_block, blocks, i, bb); ++i)
+ for (i = 0; blocks.iterate (i, &bb); ++i)
bb->aux = NULL;
}
/* Clear the visited bit for every basic block in BLOCKS. */
static void
-tm_memopt_clear_visited (VEC (basic_block, heap) *blocks)
+tm_memopt_clear_visited (vec<basic_block> blocks)
{
size_t i;
basic_block bb;
- for (i = 0; VEC_iterate (basic_block, blocks, i, bb); ++i)
+ for (i = 0; blocks.iterate (i, &bb); ++i)
BB_VISITED_P (bb) = false;
}
@@ -3684,7 +3681,7 @@ static unsigned int
execute_tm_memopt (void)
{
struct tm_region *region;
- VEC (basic_block, heap) *bbs;
+ vec<basic_block> bbs;
tm_memopt_value_id = 0;
tm_memopt_value_numbers = htab_create (10, tm_memop_hash, tm_memop_eq, free);
@@ -3705,7 +3702,7 @@ execute_tm_memopt (void)
false);
/* Collect all the memory operations. */
- for (i = 0; VEC_iterate (basic_block, bbs, i, bb); ++i)
+ for (i = 0; bbs.iterate (i, &bb); ++i)
{
bb->aux = tm_memopt_init_sets ();
tm_memopt_accumulate_memops (bb);
@@ -3719,7 +3716,7 @@ execute_tm_memopt (void)
tm_memopt_transform_blocks (bbs);
tm_memopt_free_sets (bbs);
- VEC_free (basic_block, heap, bbs);
+ bbs.release ();
bitmap_obstack_release (&tm_memopt_obstack);
htab_empty (tm_memopt_value_numbers);
}
@@ -3837,7 +3834,7 @@ struct tm_ipa_cg_data
bool want_irr_scan_normal;
};
-typedef VEC (cgraph_node_p, heap) *cgraph_node_queue;
+typedef vec<cgraph_node_ptr> cgraph_node_queue;
/* Return the ipa data associated with NODE, allocating zeroed memory
if necessary. TRAVERSE_ALIASES is true if we must traverse aliases
@@ -3874,7 +3871,7 @@ maybe_push_queue (struct cgraph_node *node,
if (!*in_queue_p)
{
*in_queue_p = true;
- VEC_safe_push (cgraph_node_p, heap, *queue_p, node);
+ queue_p->safe_push (node);
}
}
@@ -3887,15 +3884,14 @@ maybe_push_queue (struct cgraph_node *node,
static void
ipa_uninstrument_transaction (struct tm_region *region,
- VEC (basic_block, heap) *queue)
+ vec<basic_block> queue)
{
gimple transaction = region->transaction_stmt;
basic_block transaction_bb = gimple_bb (transaction);
- int n = VEC_length (basic_block, queue);
+ int n = queue.length ();
basic_block *new_bbs = XNEWVEC (basic_block, n);
- copy_bbs (VEC_address (basic_block, queue), n, new_bbs,
- NULL, 0, NULL, NULL, transaction_bb);
+ copy_bbs (queue.address (), n, new_bbs, NULL, 0, NULL, NULL, transaction_bb);
edge e = make_edge (transaction_bb, new_bbs[0], EDGE_TM_UNINSTRUMENTED);
add_phi_args_after_copy (new_bbs, n, e);
@@ -3961,7 +3957,7 @@ ipa_tm_scan_calls_transaction (struct tm_ipa_cg_data *d,
for (r = all_tm_regions; r; r = r->next)
{
- VEC (basic_block, heap) *bbs;
+ vec<basic_block> bbs;
basic_block bb;
unsigned i;
@@ -3971,10 +3967,10 @@ ipa_tm_scan_calls_transaction (struct tm_ipa_cg_data *d,
// Generate the uninstrumented code path for this transaction.
ipa_uninstrument_transaction (r, bbs);
- FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
+ FOR_EACH_VEC_ELT (bbs, i, bb)
ipa_tm_scan_calls_block (callees_p, bb, false);
- VEC_free (basic_block, heap, bbs);
+ bbs.release ();
}
// ??? copy_bbs should maintain cgraph edges for the blocks as it is
@@ -4138,7 +4134,7 @@ ipa_tm_scan_irr_block (basic_block bb)
scanning past OLD_IRR or EXIT_BLOCKS. */
static bool
-ipa_tm_scan_irr_blocks (VEC (basic_block, heap) **pqueue, bitmap new_irr,
+ipa_tm_scan_irr_blocks (vec<basic_block> *pqueue, bitmap new_irr,
bitmap old_irr, bitmap exit_blocks)
{
bool any_new_irr = false;
@@ -4148,7 +4144,7 @@ ipa_tm_scan_irr_blocks (VEC (basic_block, heap) **pqueue, bitmap new_irr,
do
{
- basic_block bb = VEC_pop (basic_block, *pqueue);
+ basic_block bb = pqueue->pop ();
/* Don't re-scan blocks we know already are irrevocable. */
if (old_irr && bitmap_bit_p (old_irr, bb->index))
@@ -4165,11 +4161,11 @@ ipa_tm_scan_irr_blocks (VEC (basic_block, heap) **pqueue, bitmap new_irr,
if (!bitmap_bit_p (visited_blocks, e->dest->index))
{
bitmap_set_bit (visited_blocks, e->dest->index);
- VEC_safe_push (basic_block, heap, *pqueue, e->dest);
+ pqueue->safe_push (e->dest);
}
}
}
- while (!VEC_empty (basic_block, *pqueue));
+ while (!pqueue->is_empty ());
BITMAP_FREE (visited_blocks);
@@ -4186,7 +4182,7 @@ static void
ipa_tm_propagate_irr (basic_block entry_block, bitmap new_irr,
bitmap old_irr, bitmap exit_blocks)
{
- VEC (basic_block, heap) *bbs;
+ vec<basic_block> bbs;
bitmap all_region_blocks;
/* If this block is in the old set, no need to rescan. */
@@ -4198,7 +4194,7 @@ ipa_tm_propagate_irr (basic_block entry_block, bitmap new_irr,
all_region_blocks, false);
do
{
- basic_block bb = VEC_pop (basic_block, bbs);
+ basic_block bb = bbs.pop ();
bool this_irr = bitmap_bit_p (new_irr, bb->index);
bool all_son_irr = false;
edge_iterator ei;
@@ -4245,10 +4241,10 @@ ipa_tm_propagate_irr (basic_block entry_block, bitmap new_irr,
}
}
}
- while (!VEC_empty (basic_block, bbs));
+ while (!bbs.is_empty ());
BITMAP_FREE (all_region_blocks);
- VEC_free (basic_block, heap, bbs);
+ bbs.release ();
}
static void
@@ -4297,7 +4293,7 @@ ipa_tm_scan_irr_function (struct cgraph_node *node, bool for_clone)
{
struct tm_ipa_cg_data *d;
bitmap new_irr, old_irr;
- VEC (basic_block, heap) *queue;
+ vec<basic_block> queue;
bool ret = false;
/* Builtin operators (operator new, and such). */
@@ -4309,14 +4305,14 @@ ipa_tm_scan_irr_function (struct cgraph_node *node, bool for_clone)
calculate_dominance_info (CDI_DOMINATORS);
d = get_cg_data (&node, true);
- queue = VEC_alloc (basic_block, heap, 10);
+ queue.create (10);
new_irr = BITMAP_ALLOC (&tm_obstack);
/* Scan each tm region, propagating irrevocable status through the tree. */
if (for_clone)
{
old_irr = d->irrevocable_blocks_clone;
- VEC_quick_push (basic_block, queue, single_succ (ENTRY_BLOCK_PTR));
+ queue.quick_push (single_succ (ENTRY_BLOCK_PTR));
if (ipa_tm_scan_irr_blocks (&queue, new_irr, old_irr, NULL))
{
ipa_tm_propagate_irr (single_succ (ENTRY_BLOCK_PTR), new_irr,
@@ -4331,7 +4327,7 @@ ipa_tm_scan_irr_function (struct cgraph_node *node, bool for_clone)
old_irr = d->irrevocable_blocks_normal;
for (region = d->all_tm_regions; region; region = region->next)
{
- VEC_quick_push (basic_block, queue, region->entry_block);
+ queue.quick_push (region->entry_block);
if (ipa_tm_scan_irr_blocks (&queue, new_irr, old_irr,
region->exit_blocks))
ipa_tm_propagate_irr (region->entry_block, new_irr, old_irr,
@@ -4374,7 +4370,7 @@ ipa_tm_scan_irr_function (struct cgraph_node *node, bool for_clone)
else
BITMAP_FREE (new_irr);
- VEC_free (basic_block, heap, queue);
+ queue.release ();
pop_cfun ();
return ret;
@@ -4476,7 +4472,7 @@ ipa_tm_diagnose_transaction (struct cgraph_node *node,
}
else
{
- VEC (basic_block, heap) *bbs;
+ vec<basic_block> bbs;
gimple_stmt_iterator gsi;
basic_block bb;
size_t i;
@@ -4484,7 +4480,7 @@ ipa_tm_diagnose_transaction (struct cgraph_node *node,
bbs = get_tm_region_blocks (r->entry_block, r->exit_blocks,
r->irr_blocks, NULL, false);
- for (i = 0; VEC_iterate (basic_block, bbs, i, bb); ++i)
+ for (i = 0; bbs.iterate (i, &bb); ++i)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
@@ -4525,7 +4521,7 @@ ipa_tm_diagnose_transaction (struct cgraph_node *node,
"atomic transaction", fndecl);
}
- VEC_free (basic_block, heap, bbs);
+ bbs.release ();
}
}
@@ -4682,7 +4678,9 @@ ipa_tm_create_version (struct cgraph_node *old_node)
if (DECL_ONE_ONLY (new_decl))
DECL_COMDAT_GROUP (new_decl) = tm_mangle (DECL_COMDAT_GROUP (old_decl));
- new_node = cgraph_copy_node_for_versioning (old_node, new_decl, NULL, NULL);
+ new_node = cgraph_copy_node_for_versioning (old_node, new_decl,
+ vec<cgraph_edge_p>(),
+ NULL);
new_node->symbol.externally_visible = old_node->symbol.externally_visible;
new_node->lowered = true;
new_node->tm_clone = 1;
@@ -4699,8 +4697,9 @@ ipa_tm_create_version (struct cgraph_node *old_node)
DECL_WEAK (new_decl) = 0;
}
- tree_function_versioning (old_decl, new_decl, NULL, false, NULL, false,
- NULL, NULL);
+ tree_function_versioning (old_decl, new_decl,
+ NULL, false, NULL,
+ false, NULL, NULL);
}
record_tm_clone_pair (old_decl, new_decl);
@@ -4970,13 +4969,13 @@ ipa_tm_transform_calls (struct cgraph_node *node, struct tm_region *region,
bool need_ssa_rename = false;
edge e;
edge_iterator ei;
- VEC(basic_block, heap) *queue = NULL;
+ vec<basic_block> queue = vec<basic_block>();
bitmap visited_blocks = BITMAP_ALLOC (NULL);
- VEC_safe_push (basic_block, heap, queue, bb);
+ queue.safe_push (bb);
do
{
- bb = VEC_pop (basic_block, queue);
+ bb = queue.pop ();
need_ssa_rename |=
ipa_tm_transform_calls_1 (node, region, bb, irr_blocks);
@@ -4991,12 +4990,12 @@ ipa_tm_transform_calls (struct cgraph_node *node, struct tm_region *region,
if (!bitmap_bit_p (visited_blocks, e->dest->index))
{
bitmap_set_bit (visited_blocks, e->dest->index);
- VEC_safe_push (basic_block, heap, queue, e->dest);
+ queue.safe_push (e->dest);
}
}
- while (!VEC_empty (basic_block, queue));
+ while (!queue.is_empty ());
- VEC_free (basic_block, heap, queue);
+ queue.release ();
BITMAP_FREE (visited_blocks);
return need_ssa_rename;
@@ -5073,9 +5072,9 @@ ipa_tm_transform_clone (struct cgraph_node *node)
static unsigned int
ipa_tm_execute (void)
{
- cgraph_node_queue tm_callees = NULL;
+ cgraph_node_queue tm_callees = cgraph_node_queue();
/* List of functions that will go irrevocable. */
- cgraph_node_queue irr_worklist = NULL;
+ cgraph_node_queue irr_worklist = cgraph_node_queue();
struct cgraph_node *node;
struct tm_ipa_cg_data *d;
@@ -5138,9 +5137,9 @@ ipa_tm_execute (void)
/* For every local function on the callee list, scan as if we will be
creating a transactional clone, queueing all new functions we find
along the way. */
- for (i = 0; i < VEC_length (cgraph_node_p, tm_callees); ++i)
+ for (i = 0; i < tm_callees.length (); ++i)
{
- node = VEC_index (cgraph_node_p, tm_callees, i);
+ node = tm_callees[i];
a = cgraph_function_body_availability (node);
d = get_cg_data (&node, true);
@@ -5180,19 +5179,19 @@ ipa_tm_execute (void)
}
/* Iterate scans until no more work to be done. Prefer not to use
- VEC_pop because the worklist tends to follow a breadth-first
+ vec::pop because the worklist tends to follow a breadth-first
search of the callgraph, which should allow convergance with a
minimum number of scans. But we also don't want the worklist
array to grow without bound, so we shift the array up periodically. */
- for (i = 0; i < VEC_length (cgraph_node_p, irr_worklist); ++i)
+ for (i = 0; i < irr_worklist.length (); ++i)
{
- if (i > 256 && i == VEC_length (cgraph_node_p, irr_worklist) / 8)
+ if (i > 256 && i == irr_worklist.length () / 8)
{
- VEC_block_remove (cgraph_node_p, irr_worklist, 0, i);
+ irr_worklist.block_remove (0, i);
i = 0;
}
- node = VEC_index (cgraph_node_p, irr_worklist, i);
+ node = irr_worklist[i];
d = get_cg_data (&node, true);
d->in_worklist = false;
@@ -5207,10 +5206,10 @@ ipa_tm_execute (void)
/* For every function on the callee list, collect the tm_may_enter_irr
bit on the node. */
- VEC_truncate (cgraph_node_p, irr_worklist, 0);
- for (i = 0; i < VEC_length (cgraph_node_p, tm_callees); ++i)
+ irr_worklist.truncate (0);
+ for (i = 0; i < tm_callees.length (); ++i)
{
- node = VEC_index (cgraph_node_p, tm_callees, i);
+ node = tm_callees[i];
if (ipa_tm_mayenterirr_function (node))
{
d = get_cg_data (&node, true);
@@ -5220,20 +5219,20 @@ ipa_tm_execute (void)
}
/* Propagate the tm_may_enter_irr bit to callers until stable. */
- for (i = 0; i < VEC_length (cgraph_node_p, irr_worklist); ++i)
+ for (i = 0; i < irr_worklist.length (); ++i)
{
struct cgraph_node *caller;
struct cgraph_edge *e;
struct ipa_ref *ref;
unsigned j;
- if (i > 256 && i == VEC_length (cgraph_node_p, irr_worklist) / 8)
+ if (i > 256 && i == irr_worklist.length () / 8)
{
- VEC_block_remove (cgraph_node_p, irr_worklist, 0, i);
+ irr_worklist.block_remove (0, i);
i = 0;
}
- node = VEC_index (cgraph_node_p, irr_worklist, i);
+ node = irr_worklist[i];
d = get_cg_data (&node, true);
d->in_worklist = false;
node->local.tm_may_enter_irr = true;
@@ -5280,11 +5279,11 @@ ipa_tm_execute (void)
/* Create clones. Do those that are not irrevocable and have a
positive call count. Do those publicly visible functions that
the user directed us to clone. */
- for (i = 0; i < VEC_length (cgraph_node_p, tm_callees); ++i)
+ for (i = 0; i < tm_callees.length (); ++i)
{
bool doit = false;
- node = VEC_index (cgraph_node_p, tm_callees, i);
+ node = tm_callees[i];
if (node->same_body_alias)
continue;
@@ -5304,9 +5303,9 @@ ipa_tm_execute (void)
}
/* Redirect calls to the new clones, and insert irrevocable marks. */
- for (i = 0; i < VEC_length (cgraph_node_p, tm_callees); ++i)
+ for (i = 0; i < tm_callees.length (); ++i)
{
- node = VEC_index (cgraph_node_p, tm_callees, i);
+ node = tm_callees[i];
if (node->analyzed)
{
d = get_cg_data (&node, true);
@@ -5324,8 +5323,8 @@ ipa_tm_execute (void)
}
/* Free and clear all data structures. */
- VEC_free (cgraph_node_p, heap, tm_callees);
- VEC_free (cgraph_node_p, heap, irr_worklist);
+ tm_callees.release ();
+ irr_worklist.release ();
bitmap_obstack_release (&tm_obstack);
free_original_copy_tables ();
diff --git a/gcc/tree-browser.c b/gcc/tree-browser.c
index dab7286670c..f05cbf73629 100644
--- a/gcc/tree-browser.c
+++ b/gcc/tree-browser.c
@@ -103,7 +103,7 @@ void browse_tree (tree);
/* Static variables. */
static htab_t TB_up_ht;
-static VEC(tree,gc) *TB_history_stack;
+static vec<tree, va_gc> *TB_history_stack;
static int TB_verbose = 1;
@@ -121,7 +121,7 @@ browse_tree (tree begin)
fprintf (TB_OUT_FILE, "\nTree Browser\n");
#define TB_SET_HEAD(N) do { \
- VEC_safe_push (tree, gc, TB_history_stack, N); \
+ vec_safe_push (TB_history_stack, N); \
head = N; \
if (TB_verbose) \
if (head) \
@@ -871,10 +871,10 @@ find_node_with_code (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
static tree
TB_history_prev (void)
{
- if (!VEC_empty (tree, TB_history_stack))
+ if (!vec_safe_is_empty (TB_history_stack))
{
- tree last = VEC_last (tree, TB_history_stack);
- VEC_pop (tree, TB_history_stack);
+ tree last = TB_history_stack->last ();
+ TB_history_stack->pop ();
return last;
}
return NULL_TREE;
diff --git a/gcc/tree-call-cdce.c b/gcc/tree-call-cdce.c
index 12357eb1503..42f47523e25 100644
--- a/gcc/tree-call-cdce.c
+++ b/gcc/tree-call-cdce.c
@@ -318,7 +318,7 @@ gen_one_condition (tree arg, int lbub,
enum tree_code tcode,
const char *temp_name1,
const char *temp_name2,
- VEC (gimple, heap) *conds,
+ vec<gimple> conds,
unsigned *nconds)
{
tree lbub_real_cst, lbub_cst, float_type;
@@ -343,9 +343,9 @@ gen_one_condition (tree arg, int lbub,
gimple_assign_set_lhs (stmt2, tempcn);
stmt3 = gimple_build_cond_from_tree (tempcn, NULL_TREE, NULL_TREE);
- VEC_quick_push (gimple, conds, stmt1);
- VEC_quick_push (gimple, conds, stmt2);
- VEC_quick_push (gimple, conds, stmt3);
+ conds.quick_push (stmt1);
+ conds.quick_push (stmt2);
+ conds.quick_push (stmt3);
(*nconds)++;
}
@@ -360,7 +360,7 @@ gen_one_condition (tree arg, int lbub,
static void
gen_conditions_for_domain (tree arg, inp_domain domain,
- VEC (gimple, heap) *conds,
+ vec<gimple> conds,
unsigned *nconds)
{
if (domain.has_lb)
@@ -374,7 +374,7 @@ gen_conditions_for_domain (tree arg, inp_domain domain,
{
/* Now push a separator. */
if (domain.has_lb)
- VEC_quick_push (gimple, conds, NULL);
+ conds.quick_push (NULL);
gen_one_condition (arg, domain.ub,
(domain.is_ub_inclusive
@@ -403,7 +403,7 @@ gen_conditions_for_domain (tree arg, inp_domain domain,
static void
gen_conditions_for_pow_cst_base (tree base, tree expn,
- VEC (gimple, heap) *conds,
+ vec<gimple> conds,
unsigned *nconds)
{
inp_domain exp_domain;
@@ -439,7 +439,7 @@ gen_conditions_for_pow_cst_base (tree base, tree expn,
static void
gen_conditions_for_pow_int_base (tree base, tree expn,
- VEC (gimple, heap) *conds,
+ vec<gimple> conds,
unsigned *nconds)
{
gimple base_def;
@@ -496,7 +496,7 @@ gen_conditions_for_pow_int_base (tree base, tree expn,
type is integer. */
/* Push a separator. */
- VEC_quick_push (gimple, conds, NULL);
+ conds.quick_push (NULL);
temp = create_tmp_var (int_type, "DCE_COND1");
cst0 = build_int_cst (int_type, 0);
@@ -505,15 +505,15 @@ gen_conditions_for_pow_int_base (tree base, tree expn,
gimple_assign_set_lhs (stmt1, tempn);
stmt2 = gimple_build_cond (LE_EXPR, tempn, cst0, NULL_TREE, NULL_TREE);
- VEC_quick_push (gimple, conds, stmt1);
- VEC_quick_push (gimple, conds, stmt2);
+ conds.quick_push (stmt1);
+ conds.quick_push (stmt2);
(*nconds)++;
}
/* Method to generate conditional statements for guarding conditionally
dead calls to pow. One or more statements can be generated for
each logical condition. Statement groups of different conditions
- are separated by a NULL tree and they are stored in the VEC
+ are separated by a NULL tree and they are stored in the vec
conds. The number of logical conditions are stored in *nconds.
See C99 standard, 7.12.7.4:2, for description of pow (x, y).
@@ -528,7 +528,7 @@ gen_conditions_for_pow_int_base (tree base, tree expn,
and *NCONDS is the number of logical conditions. */
static void
-gen_conditions_for_pow (gimple pow_call, VEC (gimple, heap) *conds,
+gen_conditions_for_pow (gimple pow_call, vec<gimple> conds,
unsigned *nconds)
{
tree base, expn;
@@ -664,15 +664,15 @@ get_no_error_domain (enum built_in_function fnc)
condition are separated by NULL tree in the vector. */
static void
-gen_shrink_wrap_conditions (gimple bi_call, VEC (gimple, heap) *conds,
+gen_shrink_wrap_conditions (gimple bi_call, vec<gimple> conds,
unsigned int *nconds)
{
gimple call;
tree fn;
enum built_in_function fnc;
- gcc_assert (nconds && conds);
- gcc_assert (VEC_length (gimple, conds) == 0);
+ gcc_assert (nconds && conds.exists ());
+ gcc_assert (conds.length () == 0);
gcc_assert (is_gimple_call (bi_call));
call = bi_call;
@@ -711,7 +711,7 @@ shrink_wrap_one_built_in_call (gimple bi_call)
basic_block bi_call_bb, join_tgt_bb, guard_bb, guard_bb0;
edge join_tgt_in_edge_from_call, join_tgt_in_edge_fall_thru;
edge bi_call_in_edge0, guard_bb_in_edge;
- VEC (gimple, heap) *conds;
+ vec<gimple> conds;
unsigned tn_cond_stmts, nconds;
unsigned ci;
gimple cond_expr = NULL;
@@ -719,7 +719,7 @@ shrink_wrap_one_built_in_call (gimple bi_call)
tree bi_call_label_decl;
gimple bi_call_label;
- conds = VEC_alloc (gimple, heap, 12);
+ conds.create (12);
gen_shrink_wrap_conditions (bi_call, conds, &nconds);
/* This can happen if the condition generator decides
@@ -743,12 +743,12 @@ shrink_wrap_one_built_in_call (gimple bi_call)
/* Now it is time to insert the first conditional expression
into bi_call_bb and split this bb so that bi_call is
shrink-wrapped. */
- tn_cond_stmts = VEC_length (gimple, conds);
+ tn_cond_stmts = conds.length ();
cond_expr = NULL;
- cond_expr_start = VEC_index (gimple, conds, 0);
+ cond_expr_start = conds[0];
for (ci = 0; ci < tn_cond_stmts; ci++)
{
- gimple c = VEC_index (gimple, conds, ci);
+ gimple c = conds[ci];
gcc_assert (c || ci != 0);
if (!c)
break;
@@ -789,10 +789,10 @@ shrink_wrap_one_built_in_call (gimple bi_call)
edge bi_call_in_edge;
gimple_stmt_iterator guard_bsi = gsi_for_stmt (cond_expr_start);
ci0 = ci;
- cond_expr_start = VEC_index (gimple, conds, ci0);
+ cond_expr_start = conds[ci0];
for (; ci < tn_cond_stmts; ci++)
{
- gimple c = VEC_index (gimple, conds, ci);
+ gimple c = conds[ci];
gcc_assert (c || ci != ci0);
if (!c)
break;
@@ -817,7 +817,7 @@ shrink_wrap_one_built_in_call (gimple bi_call)
guard_bb_in_edge->count = guard_bb->count - bi_call_in_edge->count;
}
- VEC_free (gimple, heap, conds);
+ conds.release ();
if (dump_file && (dump_flags & TDF_DETAILS))
{
location_t loc;
@@ -835,18 +835,18 @@ shrink_wrap_one_built_in_call (gimple bi_call)
wrapping transformation. */
static bool
-shrink_wrap_conditional_dead_built_in_calls (VEC (gimple, heap) *calls)
+shrink_wrap_conditional_dead_built_in_calls (vec<gimple> calls)
{
bool changed = false;
unsigned i = 0;
- unsigned n = VEC_length (gimple, calls);
+ unsigned n = calls.length ();
if (n == 0)
return false;
for (; i < n ; i++)
{
- gimple bi_call = VEC_index (gimple, calls, i);
+ gimple bi_call = calls[i];
changed |= shrink_wrap_one_built_in_call (bi_call);
}
@@ -861,7 +861,7 @@ tree_call_cdce (void)
basic_block bb;
gimple_stmt_iterator i;
bool something_changed = false;
- VEC (gimple, heap) *cond_dead_built_in_calls = NULL;
+ vec<gimple> cond_dead_built_in_calls = vec<gimple>();
FOR_EACH_BB (bb)
{
/* Collect dead call candidates. */
@@ -877,20 +877,20 @@ tree_call_cdce (void)
print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
fprintf (dump_file, "\n");
}
- if (cond_dead_built_in_calls == NULL)
- cond_dead_built_in_calls = VEC_alloc (gimple, heap, 64);
- VEC_safe_push (gimple, heap, cond_dead_built_in_calls, stmt);
+ if (!cond_dead_built_in_calls.exists ())
+ cond_dead_built_in_calls.create (64);
+ cond_dead_built_in_calls.safe_push (stmt);
}
}
}
- if (cond_dead_built_in_calls == NULL)
+ if (!cond_dead_built_in_calls.exists ())
return 0;
something_changed
= shrink_wrap_conditional_dead_built_in_calls (cond_dead_built_in_calls);
- VEC_free (gimple, heap, cond_dead_built_in_calls);
+ cond_dead_built_in_calls.release ();
if (something_changed)
{
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index 5f74646ea87..eaf59eae800 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -132,17 +132,13 @@ init_empty_tree_cfg_for_function (struct function *fn)
profile_status_for_function (fn) = PROFILE_ABSENT;
n_basic_blocks_for_function (fn) = NUM_FIXED_BLOCKS;
last_basic_block_for_function (fn) = NUM_FIXED_BLOCKS;
- basic_block_info_for_function (fn)
- = VEC_alloc (basic_block, gc, initial_cfg_capacity);
- VEC_safe_grow_cleared (basic_block, gc,
- basic_block_info_for_function (fn),
+ vec_alloc (basic_block_info_for_function (fn), initial_cfg_capacity);
+ vec_safe_grow_cleared (basic_block_info_for_function (fn),
initial_cfg_capacity);
/* Build a mapping of labels to their associated blocks. */
- label_to_block_map_for_function (fn)
- = VEC_alloc (basic_block, gc, initial_cfg_capacity);
- VEC_safe_grow_cleared (basic_block, gc,
- label_to_block_map_for_function (fn),
+ vec_alloc (label_to_block_map_for_function (fn), initial_cfg_capacity);
+ vec_safe_grow_cleared (label_to_block_map_for_function (fn),
initial_cfg_capacity);
SET_BASIC_BLOCK_FOR_FUNCTION (fn, ENTRY_BLOCK,
@@ -195,8 +191,8 @@ build_gimple_cfg (gimple_seq seq)
create_empty_bb (ENTRY_BLOCK_PTR);
/* Adjust the size of the array. */
- if (VEC_length (basic_block, basic_block_info) < (size_t) n_basic_blocks)
- VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
+ if (basic_block_info->length () < (size_t) n_basic_blocks)
+ vec_safe_grow_cleared (basic_block_info, n_basic_blocks);
/* To speed up statement iterator walks, we first purge dead labels. */
cleanup_dead_labels ();
@@ -440,10 +436,10 @@ create_bb (void *h, void *e, basic_block after)
link_block (bb, after);
/* Grow the basic block array if needed. */
- if ((size_t) last_basic_block == VEC_length (basic_block, basic_block_info))
+ if ((size_t) last_basic_block == basic_block_info->length ())
{
size_t new_size = last_basic_block + (last_basic_block + 3) / 4;
- VEC_safe_grow_cleared (basic_block, gc, basic_block_info, new_size);
+ vec_safe_grow_cleared (basic_block_info, new_size);
}
/* Add the newly created block to the array. */
@@ -971,10 +967,9 @@ label_to_block_fn (struct function *ifun, tree dest)
gsi_insert_before (&gsi, stmt, GSI_NEW_STMT);
uid = LABEL_DECL_UID (dest);
}
- if (VEC_length (basic_block, ifun->cfg->x_label_to_block_map)
- <= (unsigned int) uid)
+ if (vec_safe_length (ifun->cfg->x_label_to_block_map) <= (unsigned int) uid)
return NULL;
- return VEC_index (basic_block, ifun->cfg->x_label_to_block_map, uid);
+ return (*ifun->cfg->x_label_to_block_map)[uid];
}
/* Create edges for an abnormal goto statement at block BB. If FOR_CALL
@@ -1104,7 +1099,7 @@ cleanup_dead_labels_eh (void)
if (cfun->eh == NULL)
return;
- for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+ for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
if (lp && lp->post_landing_pad)
{
lab = main_block_label (lp->post_landing_pad);
@@ -2426,7 +2421,7 @@ stmt_ends_bb_p (gimple t)
void
delete_tree_cfg_annotations (void)
{
- label_to_block_map = NULL;
+ vec_free (label_to_block_map);
}
@@ -2510,7 +2505,7 @@ last_and_only_stmt (basic_block bb)
static void
reinstall_phi_args (edge new_edge, edge old_edge)
{
- edge_var_map_vector v;
+ edge_var_map_vector *v;
edge_var_map *vm;
int i;
gimple_stmt_iterator phis;
@@ -2520,7 +2515,7 @@ reinstall_phi_args (edge new_edge, edge old_edge)
return;
for (i = 0, phis = gsi_start_phis (new_edge->dest);
- VEC_iterate (edge_var_map, v, i, vm) && !gsi_end_p (phis);
+ v->iterate (i, &vm) && !gsi_end_p (phis);
i++, gsi_next (&phis))
{
gimple phi = gsi_stmt (phis);
@@ -4294,9 +4289,7 @@ verify_gimple_label (gimple stmt)
uid = LABEL_DECL_UID (decl);
if (cfun->cfg
- && (uid == -1
- || VEC_index (basic_block,
- label_to_block_map, uid) != gimple_bb (stmt)))
+ && (uid == -1 || (*label_to_block_map)[uid] != gimple_bb (stmt)))
{
error ("incorrect entry in label_to_block_map");
err |= true;
@@ -5636,7 +5629,7 @@ gimple_duplicate_sese_region (edge entry, edge exit,
bool free_region_copy = false, copying_header = false;
struct loop *loop = entry->dest->loop_father;
edge exit_copy;
- VEC (basic_block, heap) *doms;
+ vec<basic_block> doms;
edge redirected;
int total_freq = 0, entry_freq = 0;
gcov_type total_count = 0, entry_count = 0;
@@ -5686,7 +5679,7 @@ gimple_duplicate_sese_region (edge entry, edge exit,
/* Record blocks outside the region that are dominated by something
inside. */
- doms = NULL;
+ doms.create (0);
initialize_original_copy_tables ();
doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
@@ -5745,9 +5738,9 @@ gimple_duplicate_sese_region (edge entry, edge exit,
region, but was dominated by something inside needs recounting as
well. */
set_immediate_dominator (CDI_DOMINATORS, entry->dest, entry->src);
- VEC_safe_push (basic_block, heap, doms, get_bb_original (entry->dest));
+ doms.safe_push (get_bb_original (entry->dest));
iterate_fix_dominators (CDI_DOMINATORS, doms, false);
- VEC_free (basic_block, heap, doms);
+ doms.release ();
/* Add the other PHI node arguments. */
add_phi_args_after_copy (region_copy, n_region, NULL);
@@ -5812,7 +5805,7 @@ gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNU
struct loop *loop = exit->dest->loop_father;
struct loop *orig_loop = entry->dest->loop_father;
basic_block switch_bb, entry_bb, nentry_bb;
- VEC (basic_block, heap) *doms;
+ vec<basic_block> doms;
int total_freq = 0, exit_freq = 0;
gcov_type total_count = 0, exit_count = 0;
edge exits[2], nexits[2], e;
@@ -5952,7 +5945,7 @@ gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNU
/* Anything that is outside of the region, but was dominated by something
inside needs to update dominance info. */
iterate_fix_dominators (CDI_DOMINATORS, doms, false);
- VEC_free (basic_block, heap, doms);
+ doms.release ();
/* Update the SSA web. */
update_ssa (TODO_update_ssa);
@@ -5969,7 +5962,7 @@ gimple_duplicate_sese_tail (edge entry ATTRIBUTE_UNUSED, edge exit ATTRIBUTE_UNU
void
gather_blocks_in_sese_region (basic_block entry, basic_block exit,
- VEC(basic_block,heap) **bbs_p)
+ vec<basic_block> *bbs_p)
{
basic_block son;
@@ -5977,7 +5970,7 @@ gather_blocks_in_sese_region (basic_block entry, basic_block exit,
son;
son = next_dom_son (CDI_DOMINATORS, son))
{
- VEC_safe_push (basic_block, heap, *bbs_p, son);
+ bbs_p->safe_push (son);
if (son != exit)
gather_blocks_in_sese_region (son, exit, bbs_p);
}
@@ -6296,7 +6289,7 @@ move_block_to_fn (struct function *dest_cfun, basic_block bb,
}
/* Remove BB from the original basic block array. */
- VEC_replace (basic_block, cfun->cfg->x_basic_block_info, bb->index, NULL);
+ (*cfun->cfg->x_basic_block_info)[bb->index] = NULL;
cfun->cfg->x_n_basic_blocks--;
/* Grow DEST_CFUN's basic block array if needed. */
@@ -6305,16 +6298,14 @@ move_block_to_fn (struct function *dest_cfun, basic_block bb,
if (bb->index >= cfg->x_last_basic_block)
cfg->x_last_basic_block = bb->index + 1;
- old_len = VEC_length (basic_block, cfg->x_basic_block_info);
+ old_len = vec_safe_length (cfg->x_basic_block_info);
if ((unsigned) cfg->x_last_basic_block >= old_len)
{
new_len = cfg->x_last_basic_block + (cfg->x_last_basic_block + 3) / 4;
- VEC_safe_grow_cleared (basic_block, gc, cfg->x_basic_block_info,
- new_len);
+ vec_safe_grow_cleared (cfg->x_basic_block_info, new_len);
}
- VEC_replace (basic_block, cfg->x_basic_block_info,
- bb->index, bb);
+ (*cfg->x_basic_block_info)[bb->index] = bb;
/* Remap the variables in phi nodes. */
for (si = gsi_start_phis (bb); !gsi_end_p (si); )
@@ -6378,16 +6369,15 @@ move_block_to_fn (struct function *dest_cfun, basic_block bb,
gcc_assert (uid > -1);
- old_len = VEC_length (basic_block, cfg->x_label_to_block_map);
+ old_len = vec_safe_length (cfg->x_label_to_block_map);
if (old_len <= (unsigned) uid)
{
new_len = 3 * uid / 2 + 1;
- VEC_safe_grow_cleared (basic_block, gc,
- cfg->x_label_to_block_map, new_len);
+ vec_safe_grow_cleared (cfg->x_label_to_block_map, new_len);
}
- VEC_replace (basic_block, cfg->x_label_to_block_map, uid, bb);
- VEC_replace (basic_block, cfun->cfg->x_label_to_block_map, uid, NULL);
+ (*cfg->x_label_to_block_map)[uid] = bb;
+ (*cfun->cfg->x_label_to_block_map)[uid] = NULL;
gcc_assert (DECL_CONTEXT (label) == dest_cfun->decl);
@@ -6539,7 +6529,7 @@ basic_block
move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
basic_block exit_bb, tree orig_block)
{
- VEC(basic_block,heap) *bbs, *dom_bbs;
+ vec<basic_block> bbs, dom_bbs;
basic_block dom_entry = get_immediate_dominator (CDI_DOMINATORS, entry_bb);
basic_block after, bb, *entry_pred, *exit_succ, abb;
struct function *saved_cfun = cfun;
@@ -6561,15 +6551,15 @@ move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
/* Collect all the blocks in the region. Manually add ENTRY_BB
because it won't be added by dfs_enumerate_from. */
- bbs = NULL;
- VEC_safe_push (basic_block, heap, bbs, entry_bb);
+ bbs.create (0);
+ bbs.safe_push (entry_bb);
gather_blocks_in_sese_region (entry_bb, exit_bb, &bbs);
/* The blocks that used to be dominated by something in BBS will now be
dominated by the new block. */
dom_bbs = get_dominated_by_region (CDI_DOMINATORS,
- VEC_address (basic_block, bbs),
- VEC_length (basic_block, bbs));
+ bbs.address (),
+ bbs.length ());
/* Detach ENTRY_BB and EXIT_BB from CFUN->CFG. We need to remember
the predecessor edges to ENTRY_BB and the successor edges to
@@ -6624,7 +6614,7 @@ move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
{
eh_region region = NULL;
- FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
+ FOR_EACH_VEC_ELT (bbs, i, bb)
region = find_outermost_region_in_block (saved_cfun, bb, region);
init_eh_for_function ();
@@ -6639,7 +6629,7 @@ move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
pop_cfun ();
/* Move blocks from BBS into DEST_CFUN. */
- gcc_assert (VEC_length (basic_block, bbs) >= 2);
+ gcc_assert (bbs.length () >= 2);
after = dest_cfun->cfg->x_entry_block_ptr;
vars_map = pointer_map_create ();
@@ -6653,7 +6643,7 @@ move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
d.eh_map = eh_map;
d.remap_decls_p = true;
- FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
+ FOR_EACH_VEC_ELT (bbs, i, bb)
{
/* No need to update edge counts on the last block. It has
already been updated earlier when we detached the region from
@@ -6718,9 +6708,9 @@ move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
}
set_immediate_dominator (CDI_DOMINATORS, bb, dom_entry);
- FOR_EACH_VEC_ELT (basic_block, dom_bbs, i, abb)
+ FOR_EACH_VEC_ELT (dom_bbs, i, abb)
set_immediate_dominator (CDI_DOMINATORS, abb, bb);
- VEC_free (basic_block, heap, dom_bbs);
+ dom_bbs.release ();
if (exit_bb)
{
@@ -6731,7 +6721,7 @@ move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
free (entry_prob);
free (entry_flag);
free (entry_pred);
- VEC_free (basic_block, heap, bbs);
+ bbs.release ();
return bb;
}
@@ -6791,7 +6781,7 @@ dump_function_to_file (tree fndecl, FILE *file, int flags)
ignore_topmost_bind = true;
fprintf (file, "{\n");
- if (!VEC_empty (tree, fun->local_decls))
+ if (!vec_safe_is_empty (fun->local_decls))
FOR_EACH_LOCAL_DECL (fun, ix, var)
{
print_generic_decl (file, var, flags);
@@ -6818,7 +6808,8 @@ dump_function_to_file (tree fndecl, FILE *file, int flags)
}
}
- if (fun && fun->decl == fndecl && fun->cfg
+ if (fun && fun->decl == fndecl
+ && fun->cfg
&& basic_block_info_for_function (fun))
{
/* If the CFG has been built, emit a CFG-based dump. */
@@ -7278,8 +7269,8 @@ gimple_flow_call_edges_add (sbitmap blocks)
void
remove_edge_and_dominated_blocks (edge e)
{
- VEC (basic_block, heap) *bbs_to_remove = NULL;
- VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
+ vec<basic_block> bbs_to_remove = vec<basic_block>();
+ vec<basic_block> bbs_to_fix_dom = vec<basic_block>();
bitmap df, df_idom;
edge f;
edge_iterator ei;
@@ -7331,7 +7322,7 @@ remove_edge_and_dominated_blocks (edge e)
else
{
bbs_to_remove = get_all_dominated_blocks (CDI_DOMINATORS, e->dest);
- FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
+ FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
{
FOR_EACH_EDGE (f, ei, bb->succs)
{
@@ -7339,7 +7330,7 @@ remove_edge_and_dominated_blocks (edge e)
bitmap_set_bit (df, f->dest->index);
}
}
- FOR_EACH_VEC_ELT (basic_block, bbs_to_remove, i, bb)
+ FOR_EACH_VEC_ELT (bbs_to_remove, i, bb)
bitmap_clear_bit (df, bb->index);
EXECUTE_IF_SET_IN_BITMAP (df, 0, i, bi)
@@ -7366,8 +7357,8 @@ remove_edge_and_dominated_blocks (edge e)
released DEFs into debug stmts. See
eliminate_unnecessary_stmts() in tree-ssa-dce.c for more
details. */
- for (i = VEC_length (basic_block, bbs_to_remove); i-- > 0; )
- delete_basic_block (VEC_index (basic_block, bbs_to_remove, i));
+ for (i = bbs_to_remove.length (); i-- > 0; )
+ delete_basic_block (bbs_to_remove[i]);
}
/* Update the dominance information. The immediate dominator may change only
@@ -7386,15 +7377,15 @@ remove_edge_and_dominated_blocks (edge e)
for (dbb = first_dom_son (CDI_DOMINATORS, bb);
dbb;
dbb = next_dom_son (CDI_DOMINATORS, dbb))
- VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dbb);
+ bbs_to_fix_dom.safe_push (dbb);
}
iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
BITMAP_FREE (df);
BITMAP_FREE (df_idom);
- VEC_free (basic_block, heap, bbs_to_remove);
- VEC_free (basic_block, heap, bbs_to_fix_dom);
+ bbs_to_remove.release ();
+ bbs_to_fix_dom.release ();
}
/* Purge dead EH edges from basic block BB. */
diff --git a/gcc/tree-cfgcleanup.c b/gcc/tree-cfgcleanup.c
index 412033d17a2..f838e809982 100644
--- a/gcc/tree-cfgcleanup.c
+++ b/gcc/tree-cfgcleanup.c
@@ -50,7 +50,7 @@ bitmap cfgcleanup_altered_bbs;
/* Remove any fallthru edge from EV. Return true if an edge was removed. */
static bool
-remove_fallthru_edge (VEC(edge,gc) *ev)
+remove_fallthru_edge (vec<edge, va_gc> *ev)
{
edge_iterator ei;
edge e;
@@ -558,9 +558,9 @@ split_bbs_on_noreturn_calls (void)
/* Detect cases where a mid-block call is now known not to return. */
if (cfun->gimple_df)
- while (VEC_length (gimple, MODIFIED_NORETURN_CALLS (cfun)))
+ while (vec_safe_length (MODIFIED_NORETURN_CALLS (cfun)))
{
- stmt = VEC_pop (gimple, MODIFIED_NORETURN_CALLS (cfun));
+ stmt = MODIFIED_NORETURN_CALLS (cfun)->pop ();
bb = gimple_bb (stmt);
/* BB might be deleted at this point, so verify first
BB is present in the cfg. */
@@ -810,7 +810,7 @@ remove_forwarder_block_with_phi (basic_block bb)
if (TREE_CODE (def) == SSA_NAME)
{
- edge_var_map_vector head;
+ edge_var_map_vector *head;
edge_var_map *vm;
size_t i;
@@ -818,7 +818,7 @@ remove_forwarder_block_with_phi (basic_block bb)
redirection, replace it with the PHI argument that used
to be on E. */
head = redirect_edge_var_map_vector (e);
- FOR_EACH_VEC_ELT (edge_var_map, head, i, vm)
+ FOR_EACH_VEC_ELT (*head, i, vm)
{
tree old_arg = redirect_edge_var_map_result (vm);
tree new_arg = redirect_edge_var_map_def (vm);
diff --git a/gcc/tree-chrec.c b/gcc/tree-chrec.c
index 309d4375d51..c9c3f7f0275 100644
--- a/gcc/tree-chrec.c
+++ b/gcc/tree-chrec.c
@@ -634,12 +634,12 @@ chrec_apply (unsigned var,
expression, calls chrec_apply when the expression is not NULL. */
tree
-chrec_apply_map (tree chrec, VEC (tree, heap) *iv_map)
+chrec_apply_map (tree chrec, vec<tree> iv_map)
{
int i;
tree expr;
- FOR_EACH_VEC_ELT (tree, iv_map, i, expr)
+ FOR_EACH_VEC_ELT (iv_map, i, expr)
if (expr)
chrec = chrec_apply (i, chrec, expr);
diff --git a/gcc/tree-chrec.h b/gcc/tree-chrec.h
index 83678026752..6d61d5fa8ea 100644
--- a/gcc/tree-chrec.h
+++ b/gcc/tree-chrec.h
@@ -64,7 +64,7 @@ extern tree chrec_convert_aggressive (tree, tree);
/* Operations. */
extern tree chrec_apply (unsigned, tree, tree);
-extern tree chrec_apply_map (tree, VEC (tree, heap) *);
+extern tree chrec_apply_map (tree, vec<tree> );
extern tree chrec_replace_initial_condition (tree, tree);
extern tree initial_condition (tree);
extern tree initial_condition_in_loop_num (tree, unsigned);
diff --git a/gcc/tree-complex.c b/gcc/tree-complex.c
index 26bb25a19d9..4b278a728ce 100644
--- a/gcc/tree-complex.c
+++ b/gcc/tree-complex.c
@@ -49,17 +49,15 @@ typedef int complex_lattice_t;
#define PAIR(a, b) ((a) << 2 | (b))
-DEF_VEC_I(complex_lattice_t);
-DEF_VEC_ALLOC_I(complex_lattice_t, heap);
-static VEC(complex_lattice_t, heap) *complex_lattice_values;
+static vec<complex_lattice_t> complex_lattice_values;
/* For each complex variable, a pair of variables for the components exists in
the hashtable. */
static htab_t complex_variable_components;
/* For each complex SSA_NAME, a pair of ssa names for the components. */
-static VEC(tree, heap) *complex_ssa_name_components;
+static vec<tree> complex_ssa_name_components;
/* Lookup UID in the complex_variable_components hashtable and return the
associated tree. */
@@ -143,8 +141,7 @@ find_lattice_value (tree t)
switch (TREE_CODE (t))
{
case SSA_NAME:
- return VEC_index (complex_lattice_t, complex_lattice_values,
- SSA_NAME_VERSION (t));
+ return complex_lattice_values[SSA_NAME_VERSION (t)];
case COMPLEX_CST:
real = TREE_REALPART (t);
@@ -177,8 +174,7 @@ init_parameter_lattice_values (void)
for (parm = DECL_ARGUMENTS (cfun->decl); parm ; parm = DECL_CHAIN (parm))
if (is_complex_reg (parm)
&& (ssa_name = ssa_default_def (cfun, parm)) != NULL_TREE)
- VEC_replace (complex_lattice_t, complex_lattice_values,
- SSA_NAME_VERSION (ssa_name), VARYING);
+ complex_lattice_values[SSA_NAME_VERSION (ssa_name)] = VARYING;
}
/* Initialize simulation state for each statement. Return false if we
@@ -311,7 +307,7 @@ complex_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
*result_p = lhs;
ver = SSA_NAME_VERSION (lhs);
- old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);
+ old_l = complex_lattice_values[ver];
switch (gimple_expr_code (stmt))
{
@@ -380,7 +376,7 @@ complex_visit_stmt (gimple stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
if (new_l == old_l)
return SSA_PROP_NOT_INTERESTING;
- VEC_replace (complex_lattice_t, complex_lattice_values, ver, new_l);
+ complex_lattice_values[ver] = new_l;
return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}
@@ -406,12 +402,12 @@ complex_visit_phi (gimple phi)
new_l |= find_lattice_value (gimple_phi_arg_def (phi, i));
ver = SSA_NAME_VERSION (lhs);
- old_l = VEC_index (complex_lattice_t, complex_lattice_values, ver);
+ old_l = complex_lattice_values[ver];
if (new_l == old_l)
return SSA_PROP_NOT_INTERESTING;
- VEC_replace (complex_lattice_t, complex_lattice_values, ver, new_l);
+ complex_lattice_values[ver] = new_l;
return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
}
@@ -485,7 +481,7 @@ get_component_ssa_name (tree ssa_name, bool imag_p)
}
ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
- ret = VEC_index (tree, complex_ssa_name_components, ssa_name_index);
+ ret = complex_ssa_name_components[ssa_name_index];
if (ret == NULL)
{
if (SSA_NAME_VAR (ssa_name))
@@ -505,7 +501,7 @@ get_component_ssa_name (tree ssa_name, bool imag_p)
set_ssa_default_def (cfun, SSA_NAME_VAR (ret), ret);
}
- VEC_replace (tree, complex_ssa_name_components, ssa_name_index, ret);
+ complex_ssa_name_components[ssa_name_index] = ret;
}
return ret;
@@ -534,7 +530,7 @@ set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
This is fine. Now we should create an initialization for the value
we created earlier. */
ssa_name_index = SSA_NAME_VERSION (ssa_name) * 2 + imag_p;
- comp = VEC_index (tree, complex_ssa_name_components, ssa_name_index);
+ comp = complex_ssa_name_components[ssa_name_index];
if (comp)
;
@@ -544,7 +540,7 @@ set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
else if (is_gimple_min_invariant (value)
&& !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
{
- VEC_replace (tree, complex_ssa_name_components, ssa_name_index, value);
+ complex_ssa_name_components[ssa_name_index] = value;
return NULL;
}
else if (TREE_CODE (value) == SSA_NAME
@@ -560,7 +556,7 @@ set_component_ssa_name (tree ssa_name, bool imag_p, tree value)
replace_ssa_name_symbol (value, comp);
}
- VEC_replace (tree, complex_ssa_name_components, ssa_name_index, value);
+ complex_ssa_name_components[ssa_name_index] = value;
return NULL;
}
@@ -1567,9 +1563,8 @@ tree_lower_complex (void)
if (!init_dont_simulate_again ())
return 0;
- complex_lattice_values = VEC_alloc (complex_lattice_t, heap, num_ssa_names);
- VEC_safe_grow_cleared (complex_lattice_t, heap,
- complex_lattice_values, num_ssa_names);
+ complex_lattice_values.create (num_ssa_names);
+ complex_lattice_values.safe_grow_cleared (num_ssa_names);
init_parameter_lattice_values ();
ssa_propagate (complex_visit_stmt, complex_visit_phi);
@@ -1577,9 +1572,8 @@ tree_lower_complex (void)
complex_variable_components = htab_create (10, int_tree_map_hash,
int_tree_map_eq, free);
- complex_ssa_name_components = VEC_alloc (tree, heap, 2*num_ssa_names);
- VEC_safe_grow_cleared (tree, heap, complex_ssa_name_components,
- 2 * num_ssa_names);
+ complex_ssa_name_components.create (2 * num_ssa_names);
+ complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names);
update_parameter_components ();
@@ -1598,8 +1592,8 @@ tree_lower_complex (void)
gsi_commit_edge_inserts ();
htab_delete (complex_variable_components);
- VEC_free (tree, heap, complex_ssa_name_components);
- VEC_free (complex_lattice_t, heap, complex_lattice_values);
+ complex_ssa_name_components.release ();
+ complex_lattice_values.release ();
return 0;
}
diff --git a/gcc/tree-data-ref.c b/gcc/tree-data-ref.c
index 0d647d7c5ea..458362c5d0a 100644
--- a/gcc/tree-data-ref.c
+++ b/gcc/tree-data-ref.c
@@ -141,19 +141,19 @@ int_divides_p (int a, int b)
/* Dump into FILE all the data references from DATAREFS. */
static void
-dump_data_references (FILE *file, VEC (data_reference_p, heap) *datarefs)
+dump_data_references (FILE *file, vec<data_reference_p> datarefs)
{
unsigned int i;
struct data_reference *dr;
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
dump_data_reference (file, dr);
}
/* Dump into STDERR all the data references from DATAREFS. */
DEBUG_FUNCTION void
-debug_data_references (VEC (data_reference_p, heap) *datarefs)
+debug_data_references (vec<data_reference_p> datarefs)
{
dump_data_references (stderr, datarefs);
}
@@ -199,8 +199,8 @@ dump_affine_function (FILE *outf, affine_fn fn)
unsigned i;
tree coef;
- print_generic_expr (outf, VEC_index (tree, fn, 0), TDF_SLIM);
- for (i = 1; VEC_iterate (tree, fn, i, coef); i++)
+ print_generic_expr (outf, fn[0], TDF_SLIM);
+ for (i = 1; fn.iterate (i, &coef); i++)
{
fprintf (outf, " + ");
print_generic_expr (outf, coef, TDF_SLIM);
@@ -311,13 +311,13 @@ print_direction_vector (FILE *outf,
/* Print a vector of direction vectors. */
static void
-print_dir_vectors (FILE *outf, VEC (lambda_vector, heap) *dir_vects,
+print_dir_vectors (FILE *outf, vec<lambda_vector> dir_vects,
int length)
{
unsigned j;
lambda_vector v;
- FOR_EACH_VEC_ELT (lambda_vector, dir_vects, j, v)
+ FOR_EACH_VEC_ELT (dir_vects, j, v)
print_direction_vector (outf, v, length);
}
@@ -336,13 +336,13 @@ print_lambda_vector (FILE * outfile, lambda_vector vector, int n)
/* Print a vector of distance vectors. */
static void
-print_dist_vectors (FILE *outf, VEC (lambda_vector, heap) *dist_vects,
+print_dist_vectors (FILE *outf, vec<lambda_vector> dist_vects,
int length)
{
unsigned j;
lambda_vector v;
- FOR_EACH_VEC_ELT (lambda_vector, dist_vects, j, v)
+ FOR_EACH_VEC_ELT (dist_vects, j, v)
print_lambda_vector (outf, v, length);
}
@@ -399,7 +399,7 @@ dump_data_dependence_relation (FILE *outf,
fprintf (outf, " inner loop index: %d\n", DDR_INNER_LOOP (ddr));
fprintf (outf, " loop nest: (");
- FOR_EACH_VEC_ELT (loop_p, DDR_LOOP_NEST (ddr), i, loopi)
+ FOR_EACH_VEC_ELT (DDR_LOOP_NEST (ddr), i, loopi)
fprintf (outf, "%d ", loopi->num);
fprintf (outf, ")\n");
@@ -433,19 +433,19 @@ debug_data_dependence_relation (struct data_dependence_relation *ddr)
void
dump_data_dependence_relations (FILE *file,
- VEC (ddr_p, heap) *ddrs)
+ vec<ddr_p> ddrs)
{
unsigned int i;
struct data_dependence_relation *ddr;
- FOR_EACH_VEC_ELT (ddr_p, ddrs, i, ddr)
+ FOR_EACH_VEC_ELT (ddrs, i, ddr)
dump_data_dependence_relation (file, ddr);
}
/* Dump to STDERR all the dependence relations from DDRS. */
DEBUG_FUNCTION void
-debug_data_dependence_relations (VEC (ddr_p, heap) *ddrs)
+debug_data_dependence_relations (vec<ddr_p> ddrs)
{
dump_data_dependence_relations (stderr, ddrs);
}
@@ -456,23 +456,23 @@ debug_data_dependence_relations (VEC (ddr_p, heap) *ddrs)
considered nest. */
static void
-dump_dist_dir_vectors (FILE *file, VEC (ddr_p, heap) *ddrs)
+dump_dist_dir_vectors (FILE *file, vec<ddr_p> ddrs)
{
unsigned int i, j;
struct data_dependence_relation *ddr;
lambda_vector v;
- FOR_EACH_VEC_ELT (ddr_p, ddrs, i, ddr)
+ FOR_EACH_VEC_ELT (ddrs, i, ddr)
if (DDR_ARE_DEPENDENT (ddr) == NULL_TREE && DDR_AFFINE_P (ddr))
{
- FOR_EACH_VEC_ELT (lambda_vector, DDR_DIST_VECTS (ddr), j, v)
+ FOR_EACH_VEC_ELT (DDR_DIST_VECTS (ddr), j, v)
{
fprintf (file, "DISTANCE_V (");
print_lambda_vector (file, v, DDR_NB_LOOPS (ddr));
fprintf (file, ")\n");
}
- FOR_EACH_VEC_ELT (lambda_vector, DDR_DIR_VECTS (ddr), j, v)
+ FOR_EACH_VEC_ELT (DDR_DIR_VECTS (ddr), j, v)
{
fprintf (file, "DIRECTION_V (");
print_direction_vector (file, v, DDR_NB_LOOPS (ddr));
@@ -486,19 +486,19 @@ dump_dist_dir_vectors (FILE *file, VEC (ddr_p, heap) *ddrs)
/* Dumps the data dependence relations DDRS in FILE. */
static void
-dump_ddrs (FILE *file, VEC (ddr_p, heap) *ddrs)
+dump_ddrs (FILE *file, vec<ddr_p> ddrs)
{
unsigned int i;
struct data_dependence_relation *ddr;
- FOR_EACH_VEC_ELT (ddr_p, ddrs, i, ddr)
+ FOR_EACH_VEC_ELT (ddrs, i, ddr)
dump_data_dependence_relation (file, ddr);
fprintf (file, "\n\n");
}
DEBUG_FUNCTION void
-debug_ddrs (VEC (ddr_p, heap) *ddrs)
+debug_ddrs (vec<ddr_p> ddrs)
{
dump_ddrs (stderr, ddrs);
}
@@ -820,7 +820,7 @@ dr_analyze_innermost (struct data_reference *dr, struct loop *nest)
static void
dr_analyze_indices (struct data_reference *dr, loop_p nest, loop_p loop)
{
- VEC (tree, heap) *access_fns = NULL;
+ vec<tree> access_fns = vec<tree>();
tree ref, op;
tree base, off, access_fn;
basic_block before_loop;
@@ -830,7 +830,7 @@ dr_analyze_indices (struct data_reference *dr, loop_p nest, loop_p loop)
if (!nest)
{
DR_BASE_OBJECT (dr) = DR_REF (dr);
- DR_ACCESS_FNS (dr) = NULL;
+ DR_ACCESS_FNS (dr).create (0);
return;
}
@@ -843,12 +843,12 @@ dr_analyze_indices (struct data_reference *dr, loop_p nest, loop_p loop)
if (TREE_CODE (ref) == REALPART_EXPR)
{
ref = TREE_OPERAND (ref, 0);
- VEC_safe_push (tree, heap, access_fns, integer_zero_node);
+ access_fns.safe_push (integer_zero_node);
}
else if (TREE_CODE (ref) == IMAGPART_EXPR)
{
ref = TREE_OPERAND (ref, 0);
- VEC_safe_push (tree, heap, access_fns, integer_one_node);
+ access_fns.safe_push (integer_one_node);
}
/* Analyze access functions of dimensions we know to be independent. */
@@ -859,7 +859,7 @@ dr_analyze_indices (struct data_reference *dr, loop_p nest, loop_p loop)
op = TREE_OPERAND (ref, 1);
access_fn = analyze_scalar_evolution (loop, op);
access_fn = instantiate_scev (before_loop, loop, access_fn);
- VEC_safe_push (tree, heap, access_fns, access_fn);
+ access_fns.safe_push (access_fn);
}
else if (TREE_CODE (ref) == COMPONENT_REF
&& TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
@@ -873,7 +873,7 @@ dr_analyze_indices (struct data_reference *dr, loop_p nest, loop_p loop)
fold_convert (bitsizetype, off),
bitsize_int (BITS_PER_UNIT)),
DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1)));
- VEC_safe_push (tree, heap, access_fns, off);
+ access_fns.safe_push (off);
}
else
/* If we have an unhandled component we could not translate
@@ -920,7 +920,7 @@ dr_analyze_indices (struct data_reference *dr, loop_p nest, loop_p loop)
MEM_REF, TREE_TYPE (ref),
base, memoff);
DR_UNCONSTRAINED_BASE (dr) = true;
- VEC_safe_push (tree, heap, access_fns, access_fn);
+ access_fns.safe_push (access_fn);
}
}
else if (DECL_P (ref))
@@ -957,7 +957,7 @@ dr_analyze_alias (struct data_reference *dr)
void
free_data_ref (data_reference_p dr)
{
- VEC_free (tree, heap, DR_ACCESS_FNS (dr));
+ DR_ACCESS_FNS (dr).release ();
free (dr);
}
@@ -1062,14 +1062,13 @@ dr_equal_offsets_p (struct data_reference *dra,
static bool
affine_function_equal_p (affine_fn fna, affine_fn fnb)
{
- unsigned i, n = VEC_length (tree, fna);
+ unsigned i, n = fna.length ();
- if (n != VEC_length (tree, fnb))
+ if (n != fnb.length ())
return false;
for (i = 0; i < n; i++)
- if (!operand_equal_p (VEC_index (tree, fna, i),
- VEC_index (tree, fnb, i), 0))
+ if (!operand_equal_p (fna[i], fnb[i], 0))
return false;
return true;
@@ -1085,13 +1084,13 @@ common_affine_function (conflict_function *cf)
affine_fn comm;
if (!CF_NONTRIVIAL_P (cf))
- return NULL;
+ return affine_fn();
comm = cf->fns[0];
for (i = 1; i < cf->n; i++)
if (!affine_function_equal_p (comm, cf->fns[i]))
- return NULL;
+ return affine_fn();
return comm;
}
@@ -1101,7 +1100,7 @@ common_affine_function (conflict_function *cf)
static tree
affine_function_base (affine_fn fn)
{
- return VEC_index (tree, fn, 0);
+ return fn[0];
}
/* Returns true if FN is a constant. */
@@ -1112,7 +1111,7 @@ affine_function_constant_p (affine_fn fn)
unsigned i;
tree coef;
- for (i = 1; VEC_iterate (tree, fn, i, coef); i++)
+ for (i = 1; fn.iterate (i, &coef); i++)
if (!integer_zerop (coef))
return false;
@@ -1150,36 +1149,30 @@ affine_fn_op (enum tree_code op, affine_fn fna, affine_fn fnb)
affine_fn ret;
tree coef;
- if (VEC_length (tree, fnb) > VEC_length (tree, fna))
+ if (fnb.length () > fna.length ())
{
- n = VEC_length (tree, fna);
- m = VEC_length (tree, fnb);
+ n = fna.length ();
+ m = fnb.length ();
}
else
{
- n = VEC_length (tree, fnb);
- m = VEC_length (tree, fna);
+ n = fnb.length ();
+ m = fna.length ();
}
- ret = VEC_alloc (tree, heap, m);
+ ret.create (m);
for (i = 0; i < n; i++)
{
- tree type = signed_type_for_types (TREE_TYPE (VEC_index (tree, fna, i)),
- TREE_TYPE (VEC_index (tree, fnb, i)));
-
- VEC_quick_push (tree, ret,
- fold_build2 (op, type,
- VEC_index (tree, fna, i),
- VEC_index (tree, fnb, i)));
+ tree type = signed_type_for_types (TREE_TYPE (fna[i]),
+ TREE_TYPE (fnb[i]));
+ ret.quick_push (fold_build2 (op, type, fna[i], fnb[i]));
}
- for (; VEC_iterate (tree, fna, i, coef); i++)
- VEC_quick_push (tree, ret,
- fold_build2 (op, signed_type_for (TREE_TYPE (coef)),
+ for (; fna.iterate (i, &coef); i++)
+ ret.quick_push (fold_build2 (op, signed_type_for (TREE_TYPE (coef)),
coef, integer_zero_node));
- for (; VEC_iterate (tree, fnb, i, coef); i++)
- VEC_quick_push (tree, ret,
- fold_build2 (op, signed_type_for (TREE_TYPE (coef)),
+ for (; fnb.iterate (i, &coef); i++)
+ ret.quick_push (fold_build2 (op, signed_type_for (TREE_TYPE (coef)),
integer_zero_node, coef));
return ret;
@@ -1206,7 +1199,7 @@ affine_fn_minus (affine_fn fna, affine_fn fnb)
static void
affine_fn_free (affine_fn fn)
{
- VEC_free (tree, heap, fn);
+ fn.release ();
}
/* Determine for each subscript in the data dependence relation DDR
@@ -1232,7 +1225,7 @@ compute_subscript_distance (struct data_dependence_relation *ddr)
fn_a = common_affine_function (cf_a);
fn_b = common_affine_function (cf_b);
- if (!fn_a || !fn_b)
+ if (!fn_a.exists () || !fn_b.exists ())
{
SUB_DISTANCE (subscript) = chrec_dont_know;
return;
@@ -1368,7 +1361,7 @@ dr_may_alias_p (const struct data_reference *a, const struct data_reference *b,
struct data_dependence_relation *
initialize_data_dependence_relation (struct data_reference *a,
struct data_reference *b,
- VEC (loop_p, heap) *loop_nest)
+ vec<loop_p> loop_nest)
{
struct data_dependence_relation *res;
unsigned int i;
@@ -1376,11 +1369,11 @@ initialize_data_dependence_relation (struct data_reference *a,
res = XNEW (struct data_dependence_relation);
DDR_A (res) = a;
DDR_B (res) = b;
- DDR_LOOP_NEST (res) = NULL;
+ DDR_LOOP_NEST (res).create (0);
DDR_REVERSED_P (res) = false;
- DDR_SUBSCRIPTS (res) = NULL;
- DDR_DIR_VECTS (res) = NULL;
- DDR_DIST_VECTS (res) = NULL;
+ DDR_SUBSCRIPTS (res).create (0);
+ DDR_DIR_VECTS (res).create (0);
+ DDR_DIST_VECTS (res).create (0);
if (a == NULL || b == NULL)
{
@@ -1389,7 +1382,7 @@ initialize_data_dependence_relation (struct data_reference *a,
}
/* If the data references do not alias, then they are independent. */
- if (!dr_may_alias_p (a, b, loop_nest != NULL))
+ if (!dr_may_alias_p (a, b, loop_nest.exists ()))
{
DDR_ARE_DEPENDENT (res) = chrec_known;
return res;
@@ -1398,8 +1391,8 @@ initialize_data_dependence_relation (struct data_reference *a,
/* The case where the references are exactly the same. */
if (operand_equal_p (DR_REF (a), DR_REF (b), 0))
{
- if (loop_nest
- && !object_address_invariant_in_loop_p (VEC_index (loop_p, loop_nest, 0),
+ if (loop_nest.exists ()
+ && !object_address_invariant_in_loop_p (loop_nest[0],
DR_BASE_OBJECT (a)))
{
DDR_ARE_DEPENDENT (res) = chrec_dont_know;
@@ -1407,7 +1400,7 @@ initialize_data_dependence_relation (struct data_reference *a,
}
DDR_AFFINE_P (res) = true;
DDR_ARE_DEPENDENT (res) = NULL_TREE;
- DDR_SUBSCRIPTS (res) = VEC_alloc (subscript_p, heap, DR_NUM_DIMENSIONS (a));
+ DDR_SUBSCRIPTS (res).create (DR_NUM_DIMENSIONS (a));
DDR_LOOP_NEST (res) = loop_nest;
DDR_INNER_LOOP (res) = 0;
DDR_SELF_REFERENCE (res) = true;
@@ -1420,7 +1413,7 @@ initialize_data_dependence_relation (struct data_reference *a,
SUB_CONFLICTS_IN_B (subscript) = conflict_fn_not_known ();
SUB_LAST_CONFLICT (subscript) = chrec_dont_know;
SUB_DISTANCE (subscript) = chrec_dont_know;
- VEC_safe_push (subscript_p, heap, DDR_SUBSCRIPTS (res), subscript);
+ DDR_SUBSCRIPTS (res).safe_push (subscript);
}
return res;
}
@@ -1436,8 +1429,8 @@ initialize_data_dependence_relation (struct data_reference *a,
/* If the base of the object is not invariant in the loop nest, we cannot
analyze it. TODO -- in fact, it would suffice to record that there may
be arbitrary dependences in the loops where the base object varies. */
- if (loop_nest
- && !object_address_invariant_in_loop_p (VEC_index (loop_p, loop_nest, 0),
+ if (loop_nest.exists ()
+ && !object_address_invariant_in_loop_p (loop_nest[0],
DR_BASE_OBJECT (a)))
{
DDR_ARE_DEPENDENT (res) = chrec_dont_know;
@@ -1455,7 +1448,7 @@ initialize_data_dependence_relation (struct data_reference *a,
DDR_AFFINE_P (res) = true;
DDR_ARE_DEPENDENT (res) = NULL_TREE;
- DDR_SUBSCRIPTS (res) = VEC_alloc (subscript_p, heap, DR_NUM_DIMENSIONS (a));
+ DDR_SUBSCRIPTS (res).create (DR_NUM_DIMENSIONS (a));
DDR_LOOP_NEST (res) = loop_nest;
DDR_INNER_LOOP (res) = 0;
DDR_SELF_REFERENCE (res) = false;
@@ -1469,7 +1462,7 @@ initialize_data_dependence_relation (struct data_reference *a,
SUB_CONFLICTS_IN_B (subscript) = conflict_fn_not_known ();
SUB_LAST_CONFLICT (subscript) = chrec_dont_know;
SUB_DISTANCE (subscript) = chrec_dont_know;
- VEC_safe_push (subscript_p, heap, DDR_SUBSCRIPTS (res), subscript);
+ DDR_SUBSCRIPTS (res).safe_push (subscript);
}
return res;
@@ -1493,18 +1486,18 @@ free_conflict_function (conflict_function *f)
/* Frees memory used by SUBSCRIPTS. */
static void
-free_subscripts (VEC (subscript_p, heap) *subscripts)
+free_subscripts (vec<subscript_p> subscripts)
{
unsigned i;
subscript_p s;
- FOR_EACH_VEC_ELT (subscript_p, subscripts, i, s)
+ FOR_EACH_VEC_ELT (subscripts, i, s)
{
free_conflict_function (s->conflicting_iterations_in_a);
free_conflict_function (s->conflicting_iterations_in_b);
free (s);
}
- VEC_free (subscript_p, heap, subscripts);
+ subscripts.release ();
}
/* Set DDR_ARE_DEPENDENT to CHREC and finalize the subscript overlap
@@ -1523,7 +1516,7 @@ finalize_ddr_dependent (struct data_dependence_relation *ddr,
DDR_ARE_DEPENDENT (ddr) = chrec;
free_subscripts (DDR_SUBSCRIPTS (ddr));
- DDR_SUBSCRIPTS (ddr) = NULL;
+ DDR_SUBSCRIPTS (ddr).create (0);
}
/* The dependence relation DDR cannot be represented by a distance
@@ -1614,8 +1607,9 @@ conflict_fn (unsigned n, ...)
static affine_fn
affine_fn_cst (tree cst)
{
- affine_fn fn = VEC_alloc (tree, heap, 1);
- VEC_quick_push (tree, fn, cst);
+ affine_fn fn;
+ fn.create (1);
+ fn.quick_push (cst);
return fn;
}
@@ -1624,14 +1618,15 @@ affine_fn_cst (tree cst)
static affine_fn
affine_fn_univar (tree cst, unsigned dim, tree coef)
{
- affine_fn fn = VEC_alloc (tree, heap, dim + 1);
+ affine_fn fn;
+ fn.create (dim + 1);
unsigned i;
gcc_assert (dim > 0);
- VEC_quick_push (tree, fn, cst);
+ fn.quick_push (cst);
for (i = 1; i < dim; i++)
- VEC_quick_push (tree, fn, integer_zero_node);
- VEC_quick_push (tree, fn, coef);
+ fn.quick_push (integer_zero_node);
+ fn.quick_push (coef);
return fn;
}
@@ -3012,11 +3007,11 @@ save_dist_v (struct data_dependence_relation *ddr, lambda_vector dist_v)
unsigned i;
lambda_vector v;
- FOR_EACH_VEC_ELT (lambda_vector, DDR_DIST_VECTS (ddr), i, v)
+ FOR_EACH_VEC_ELT (DDR_DIST_VECTS (ddr), i, v)
if (lambda_vector_equal (v, dist_v, DDR_NB_LOOPS (ddr)))
return;
- VEC_safe_push (lambda_vector, heap, DDR_DIST_VECTS (ddr), dist_v);
+ DDR_DIST_VECTS (ddr).safe_push (dist_v);
}
/* Helper function for uniquely inserting direction vectors. */
@@ -3027,11 +3022,11 @@ save_dir_v (struct data_dependence_relation *ddr, lambda_vector dir_v)
unsigned i;
lambda_vector v;
- FOR_EACH_VEC_ELT (lambda_vector, DDR_DIR_VECTS (ddr), i, v)
+ FOR_EACH_VEC_ELT (DDR_DIR_VECTS (ddr), i, v)
if (lambda_vector_equal (v, dir_v, DDR_NB_LOOPS (ddr)))
return;
- VEC_safe_push (lambda_vector, heap, DDR_DIR_VECTS (ddr), dir_v);
+ DDR_DIR_VECTS (ddr).safe_push (dir_v);
}
/* Add a distance of 1 on all the loops outer than INDEX. If we
@@ -3480,7 +3475,7 @@ build_classic_dir_vector (struct data_dependence_relation *ddr)
unsigned i, j;
lambda_vector dist_v;
- FOR_EACH_VEC_ELT (lambda_vector, DDR_DIST_VECTS (ddr), i, dist_v)
+ FOR_EACH_VEC_ELT (DDR_DIST_VECTS (ddr), i, dist_v)
{
lambda_vector dir_v = lambda_vector_new (DDR_NB_LOOPS (ddr));
@@ -3505,8 +3500,7 @@ subscript_dependence_tester_1 (struct data_dependence_relation *ddr,
struct subscript *subscript;
tree res = NULL_TREE;
- for (i = 0; VEC_iterate (subscript_p, DDR_SUBSCRIPTS (ddr), i, subscript);
- i++)
+ for (i = 0; DDR_SUBSCRIPTS (ddr).iterate (i, &subscript); i++)
{
conflict_function *overlaps_a, *overlaps_b;
@@ -3583,10 +3577,10 @@ access_functions_are_affine_or_constant_p (const struct data_reference *a,
const struct loop *loop_nest)
{
unsigned int i;
- VEC(tree,heap) *fns = DR_ACCESS_FNS (a);
+ vec<tree> fns = DR_ACCESS_FNS (a);
tree t;
- FOR_EACH_VEC_ELT (tree, fns, i, t)
+ FOR_EACH_VEC_ELT (fns, i, t)
if (!evolution_function_is_invariant_p (t, loop_nest->num)
&& !evolution_function_is_affine_multivariate_p (t, loop_nest->num))
return false;
@@ -3674,7 +3668,7 @@ omega_extract_distance_vectors (omega_pb pb,
problem that we have initialized until now. On top of this we
add new constraints. */
for (i = 0; i <= DDR_INNER_LOOP (ddr)
- && VEC_iterate (loop_p, DDR_LOOP_NEST (ddr), i, loopi); i++)
+ && DDR_LOOP_NEST (ddr).iterate (i, &loopi); i++)
{
int dist = 0;
omega_pb copy = omega_alloc_problem (2 * DDR_NB_LOOPS (ddr),
@@ -3683,8 +3677,7 @@ omega_extract_distance_vectors (omega_pb pb,
omega_copy_problem (copy, pb);
/* For all the outer loops "loop_j", add "dj = 0". */
- for (j = 0;
- j < i && VEC_iterate (loop_p, DDR_LOOP_NEST (ddr), j, loopj); j++)
+ for (j = 0; j < i && DDR_LOOP_NEST (ddr).iterate (j, &loopj); j++)
{
eq = omega_add_zero_eq (copy, omega_black);
copy->eqs[eq].coef[j + 1] = 1;
@@ -3713,8 +3706,7 @@ omega_extract_distance_vectors (omega_pb pb,
{
/* Reinitialize problem... */
omega_copy_problem (copy, pb);
- for (j = 0;
- j < i && VEC_iterate (loop_p, DDR_LOOP_NEST (ddr), j, loopj); j++)
+ for (j = 0; j < i && DDR_LOOP_NEST (ddr).iterate (j, &loopj); j++)
{
eq = omega_add_zero_eq (copy, omega_black);
copy->eqs[eq].coef[j + 1] = 1;
@@ -3858,7 +3850,7 @@ init_omega_for_ddr_1 (struct data_reference *dra, struct data_reference *drb,
- coef[nb_loops + 1, 2*nb_loops] are the loop variables: "loop_x".
*/
for (i = 0; i <= DDR_INNER_LOOP (ddr)
- && VEC_iterate (loop_p, DDR_LOOP_NEST (ddr), i, loopi); i++)
+ && DDR_LOOP_NEST (ddr).iterate (i, &loopi); i++)
{
HOST_WIDE_INT nbi = max_stmt_executions_int (loopi);
@@ -4033,8 +4025,8 @@ init_omega_for_ddr (struct data_dependence_relation *ddr,
static bool
ddr_consistent_p (FILE *file,
struct data_dependence_relation *ddr,
- VEC (lambda_vector, heap) *dist_vects,
- VEC (lambda_vector, heap) *dir_vects)
+ vec<lambda_vector> dist_vects,
+ vec<lambda_vector> dir_vects)
{
unsigned int i, j;
@@ -4042,15 +4034,15 @@ ddr_consistent_p (FILE *file,
if (dump_file && (dump_flags & TDF_DETAILS))
file = dump_file;
- if (VEC_length (lambda_vector, dist_vects) != DDR_NUM_DIST_VECTS (ddr))
+ if (dist_vects.length () != DDR_NUM_DIST_VECTS (ddr))
{
lambda_vector b_dist_v;
fprintf (file, "\n(Number of distance vectors differ: Banerjee has %d, Omega has %d.\n",
- VEC_length (lambda_vector, dist_vects),
+ dist_vects.length (),
DDR_NUM_DIST_VECTS (ddr));
fprintf (file, "Banerjee dist vectors:\n");
- FOR_EACH_VEC_ELT (lambda_vector, dist_vects, i, b_dist_v)
+ FOR_EACH_VEC_ELT (dist_vects, i, b_dist_v)
print_lambda_vector (file, b_dist_v, DDR_NB_LOOPS (ddr));
fprintf (file, "Omega dist vectors:\n");
@@ -4064,10 +4056,10 @@ ddr_consistent_p (FILE *file,
return false;
}
- if (VEC_length (lambda_vector, dir_vects) != DDR_NUM_DIR_VECTS (ddr))
+ if (dir_vects.length () != DDR_NUM_DIR_VECTS (ddr))
{
fprintf (file, "\n(Number of direction vectors differ: Banerjee has %d, Omega has %d.)\n",
- VEC_length (lambda_vector, dir_vects),
+ dir_vects.length (),
DDR_NUM_DIR_VECTS (ddr));
return false;
}
@@ -4079,11 +4071,11 @@ ddr_consistent_p (FILE *file,
/* Distance vectors are not ordered in the same way in the DDR
and in the DIST_VECTS: search for a matching vector. */
- FOR_EACH_VEC_ELT (lambda_vector, dist_vects, j, a_dist_v)
+ FOR_EACH_VEC_ELT (dist_vects, j, a_dist_v)
if (lambda_vector_equal (a_dist_v, b_dist_v, DDR_NB_LOOPS (ddr)))
break;
- if (j == VEC_length (lambda_vector, dist_vects))
+ if (j == dist_vects.length ())
{
fprintf (file, "\n(Dist vectors from the first dependence analyzer:\n");
print_dist_vectors (file, dist_vects, DDR_NB_LOOPS (ddr));
@@ -4102,11 +4094,11 @@ ddr_consistent_p (FILE *file,
/* Direction vectors are not ordered in the same way in the DDR
and in the DIR_VECTS: search for a matching vector. */
- FOR_EACH_VEC_ELT (lambda_vector, dir_vects, j, a_dir_v)
+ FOR_EACH_VEC_ELT (dir_vects, j, a_dir_v)
if (lambda_vector_equal (a_dir_v, b_dir_v, DDR_NB_LOOPS (ddr)))
break;
- if (j == VEC_length (lambda_vector, dist_vects))
+ if (j == dir_vects.length ())
{
fprintf (file, "\n(Dir vectors from the first dependence analyzer:\n");
print_dir_vectors (file, dir_vects, DDR_NB_LOOPS (ddr));
@@ -4168,15 +4160,15 @@ compute_affine_dependence (struct data_dependence_relation *ddr,
if (DDR_ARE_DEPENDENT (ddr) == NULL_TREE)
{
bool maybe_dependent;
- VEC (lambda_vector, heap) *dir_vects, *dist_vects;
+ vec<lambda_vector> dir_vects, dist_vects;
/* Save the result of the first DD analyzer. */
dist_vects = DDR_DIST_VECTS (ddr);
dir_vects = DDR_DIR_VECTS (ddr);
/* Reset the information. */
- DDR_DIST_VECTS (ddr) = NULL;
- DDR_DIR_VECTS (ddr) = NULL;
+ DDR_DIST_VECTS (ddr).create (0);
+ DDR_DIR_VECTS (ddr).create (0);
/* Compute the same information using Omega. */
if (!init_omega_for_ddr (ddr, &maybe_dependent))
@@ -4236,16 +4228,16 @@ compute_affine_dependence (struct data_dependence_relation *ddr,
is small enough to be handled. */
bool
-compute_all_dependences (VEC (data_reference_p, heap) *datarefs,
- VEC (ddr_p, heap) **dependence_relations,
- VEC (loop_p, heap) *loop_nest,
+compute_all_dependences (vec<data_reference_p> datarefs,
+ vec<ddr_p> *dependence_relations,
+ vec<loop_p> loop_nest,
bool compute_self_and_rr)
{
struct data_dependence_relation *ddr;
struct data_reference *a, *b;
unsigned int i, j;
- if ((int) VEC_length (data_reference_p, datarefs)
+ if ((int) datarefs.length ()
> PARAM_VALUE (PARAM_LOOP_MAX_DATAREFS_FOR_DATADEPS))
{
struct data_dependence_relation *ddr;
@@ -4253,27 +4245,27 @@ compute_all_dependences (VEC (data_reference_p, heap) *datarefs,
/* Insert a single relation into dependence_relations:
chrec_dont_know. */
ddr = initialize_data_dependence_relation (NULL, NULL, loop_nest);
- VEC_safe_push (ddr_p, heap, *dependence_relations, ddr);
+ dependence_relations->safe_push (ddr);
return false;
}
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, a)
- for (j = i + 1; VEC_iterate (data_reference_p, datarefs, j, b); j++)
+ FOR_EACH_VEC_ELT (datarefs, i, a)
+ for (j = i + 1; datarefs.iterate (j, &b); j++)
if (DR_IS_WRITE (a) || DR_IS_WRITE (b) || compute_self_and_rr)
{
ddr = initialize_data_dependence_relation (a, b, loop_nest);
- VEC_safe_push (ddr_p, heap, *dependence_relations, ddr);
- if (loop_nest)
- compute_affine_dependence (ddr, VEC_index (loop_p, loop_nest, 0));
+ dependence_relations->safe_push (ddr);
+ if (loop_nest.exists ())
+ compute_affine_dependence (ddr, loop_nest[0]);
}
if (compute_self_and_rr)
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, a)
+ FOR_EACH_VEC_ELT (datarefs, i, a)
{
ddr = initialize_data_dependence_relation (a, a, loop_nest);
- VEC_safe_push (ddr_p, heap, *dependence_relations, ddr);
- if (loop_nest)
- compute_affine_dependence (ddr, VEC_index (loop_p, loop_nest, 0));
+ dependence_relations->safe_push (ddr);
+ if (loop_nest.exists ())
+ compute_affine_dependence (ddr, loop_nest[0]);
}
return true;
@@ -4290,21 +4282,19 @@ typedef struct data_ref_loc_d
bool is_read;
} data_ref_loc;
-DEF_VEC_O (data_ref_loc);
-DEF_VEC_ALLOC_O (data_ref_loc, heap);
/* Stores the locations of memory references in STMT to REFERENCES. Returns
true if STMT clobbers memory, false otherwise. */
static bool
-get_references_in_stmt (gimple stmt, VEC (data_ref_loc, heap) **references)
+get_references_in_stmt (gimple stmt, vec<data_ref_loc> *references)
{
bool clobbers_memory = false;
data_ref_loc ref;
tree *op0, *op1;
enum gimple_code stmt_code = gimple_code (stmt);
- *references = NULL;
+ references->create (0);
/* ASM_EXPR and CALL_EXPR may embed arbitrary side effects.
As we cannot model data-references to not spelled out
@@ -4331,7 +4321,7 @@ get_references_in_stmt (gimple stmt, VEC (data_ref_loc, heap) **references)
{
ref.pos = op1;
ref.is_read = true;
- VEC_safe_push (data_ref_loc, heap, *references, ref);
+ references->safe_push (ref);
}
}
else if (stmt_code == GIMPLE_CALL)
@@ -4349,7 +4339,7 @@ get_references_in_stmt (gimple stmt, VEC (data_ref_loc, heap) **references)
{
ref.pos = op1;
ref.is_read = true;
- VEC_safe_push (data_ref_loc, heap, *references, ref);
+ references->safe_push (ref);
}
}
}
@@ -4362,7 +4352,7 @@ get_references_in_stmt (gimple stmt, VEC (data_ref_loc, heap) **references)
{
ref.pos = op0;
ref.is_read = false;
- VEC_safe_push (data_ref_loc, heap, *references, ref);
+ references->safe_push (ref);
}
return clobbers_memory;
}
@@ -4373,28 +4363,28 @@ get_references_in_stmt (gimple stmt, VEC (data_ref_loc, heap) **references)
bool
find_data_references_in_stmt (struct loop *nest, gimple stmt,
- VEC (data_reference_p, heap) **datarefs)
+ vec<data_reference_p> *datarefs)
{
unsigned i;
- VEC (data_ref_loc, heap) *references;
+ vec<data_ref_loc> references;
data_ref_loc *ref;
bool ret = true;
data_reference_p dr;
if (get_references_in_stmt (stmt, &references))
{
- VEC_free (data_ref_loc, heap, references);
+ references.release ();
return false;
}
- FOR_EACH_VEC_ELT (data_ref_loc, references, i, ref)
+ FOR_EACH_VEC_ELT (references, i, ref)
{
dr = create_data_ref (nest, loop_containing_stmt (stmt),
*ref->pos, stmt, ref->is_read);
gcc_assert (dr != NULL);
- VEC_safe_push (data_reference_p, heap, *datarefs, dr);
+ datarefs->safe_push (dr);
}
- VEC_free (data_ref_loc, heap, references);
+ references.release ();
return ret;
}
@@ -4406,28 +4396,28 @@ find_data_references_in_stmt (struct loop *nest, gimple stmt,
bool
graphite_find_data_references_in_stmt (loop_p nest, loop_p loop, gimple stmt,
- VEC (data_reference_p, heap) **datarefs)
+ vec<data_reference_p> *datarefs)
{
unsigned i;
- VEC (data_ref_loc, heap) *references;
+ vec<data_ref_loc> references;
data_ref_loc *ref;
bool ret = true;
data_reference_p dr;
if (get_references_in_stmt (stmt, &references))
{
- VEC_free (data_ref_loc, heap, references);
+ references.release ();
return false;
}
- FOR_EACH_VEC_ELT (data_ref_loc, references, i, ref)
+ FOR_EACH_VEC_ELT (references, i, ref)
{
dr = create_data_ref (nest, loop, *ref->pos, stmt, ref->is_read);
gcc_assert (dr != NULL);
- VEC_safe_push (data_reference_p, heap, *datarefs, dr);
+ datarefs->safe_push (dr);
}
- VEC_free (data_ref_loc, heap, references);
+ references.release ();
return ret;
}
@@ -4437,7 +4427,7 @@ graphite_find_data_references_in_stmt (loop_p nest, loop_p loop, gimple stmt,
tree
find_data_references_in_bb (struct loop *loop, basic_block bb,
- VEC (data_reference_p, heap) **datarefs)
+ vec<data_reference_p> *datarefs)
{
gimple_stmt_iterator bsi;
@@ -4449,7 +4439,7 @@ find_data_references_in_bb (struct loop *loop, basic_block bb,
{
struct data_reference *res;
res = XCNEW (struct data_reference);
- VEC_safe_push (data_reference_p, heap, *datarefs, res);
+ datarefs->safe_push (res);
return chrec_dont_know;
}
@@ -4467,7 +4457,7 @@ find_data_references_in_bb (struct loop *loop, basic_block bb,
static tree
find_data_references_in_loop (struct loop *loop,
- VEC (data_reference_p, heap) **datarefs)
+ vec<data_reference_p> *datarefs)
{
basic_block bb, *bbs;
unsigned int i;
@@ -4492,7 +4482,7 @@ find_data_references_in_loop (struct loop *loop,
/* Recursive helper function. */
static bool
-find_loop_nest_1 (struct loop *loop, VEC (loop_p, heap) **loop_nest)
+find_loop_nest_1 (struct loop *loop, vec<loop_p> *loop_nest)
{
/* Inner loops of the nest should not contain siblings. Example:
when there are two consecutive loops,
@@ -4511,7 +4501,7 @@ find_loop_nest_1 (struct loop *loop, VEC (loop_p, heap) **loop_nest)
if (loop->next)
return false;
- VEC_safe_push (loop_p, heap, *loop_nest, loop);
+ loop_nest->safe_push (loop);
if (loop->inner)
return find_loop_nest_1 (loop->inner, loop_nest);
return true;
@@ -4523,9 +4513,9 @@ find_loop_nest_1 (struct loop *loop, VEC (loop_p, heap) **loop_nest)
appear in the classic distance vector. */
bool
-find_loop_nest (struct loop *loop, VEC (loop_p, heap) **loop_nest)
+find_loop_nest (struct loop *loop, vec<loop_p> *loop_nest)
{
- VEC_safe_push (loop_p, heap, *loop_nest, loop);
+ loop_nest->safe_push (loop);
if (loop->inner)
return find_loop_nest_1 (loop->inner, loop_nest);
return true;
@@ -4541,9 +4531,9 @@ find_loop_nest (struct loop *loop, VEC (loop_p, heap) **loop_nest)
bool
compute_data_dependences_for_loop (struct loop *loop,
bool compute_self_and_read_read_dependences,
- VEC (loop_p, heap) **loop_nest,
- VEC (data_reference_p, heap) **datarefs,
- VEC (ddr_p, heap) **dependence_relations)
+ vec<loop_p> *loop_nest,
+ vec<data_reference_p> *datarefs,
+ vec<ddr_p> *dependence_relations)
{
bool res = true;
@@ -4619,13 +4609,14 @@ compute_data_dependences_for_loop (struct loop *loop,
bool
compute_data_dependences_for_bb (basic_block bb,
bool compute_self_and_read_read_dependences,
- VEC (data_reference_p, heap) **datarefs,
- VEC (ddr_p, heap) **dependence_relations)
+ vec<data_reference_p> *datarefs,
+ vec<ddr_p> *dependence_relations)
{
if (find_data_references_in_bb (NULL, bb, datarefs) == chrec_dont_know)
return false;
- return compute_all_dependences (*datarefs, dependence_relations, NULL,
+ return compute_all_dependences (*datarefs, dependence_relations,
+ vec<loop_p> (),
compute_self_and_read_read_dependences);
}
@@ -4655,11 +4646,12 @@ analyze_all_data_dependences (struct loop *loop)
{
unsigned int i;
int nb_data_refs = 10;
- VEC (data_reference_p, heap) *datarefs =
- VEC_alloc (data_reference_p, heap, nb_data_refs);
- VEC (ddr_p, heap) *dependence_relations =
- VEC_alloc (ddr_p, heap, nb_data_refs * nb_data_refs);
- VEC (loop_p, heap) *loop_nest = VEC_alloc (loop_p, heap, 3);
+ vec<data_reference_p> datarefs;
+ datarefs.create (nb_data_refs);
+ vec<ddr_p> dependence_relations;
+ dependence_relations.create (nb_data_refs * nb_data_refs);
+ vec<loop_p> loop_nest;
+ loop_nest.create (3);
/* Compute DDs on the whole function. */
compute_data_dependences_for_loop (loop, false, &loop_nest, &datarefs,
@@ -4680,7 +4672,7 @@ analyze_all_data_dependences (struct loop *loop)
unsigned nb_chrec_relations = 0;
struct data_dependence_relation *ddr;
- FOR_EACH_VEC_ELT (ddr_p, dependence_relations, i, ddr)
+ FOR_EACH_VEC_ELT (dependence_relations, i, ddr)
{
if (chrec_contains_undetermined (DDR_ARE_DEPENDENT (ddr)))
nb_top_relations++;
@@ -4696,7 +4688,7 @@ analyze_all_data_dependences (struct loop *loop)
}
}
- VEC_free (loop_p, heap, loop_nest);
+ loop_nest.release ();
free_dependence_relations (dependence_relations);
free_data_refs (datarefs);
}
@@ -4722,12 +4714,10 @@ free_dependence_relation (struct data_dependence_relation *ddr)
if (ddr == NULL)
return;
- if (DDR_SUBSCRIPTS (ddr))
+ if (DDR_SUBSCRIPTS (ddr).exists ())
free_subscripts (DDR_SUBSCRIPTS (ddr));
- if (DDR_DIST_VECTS (ddr))
- VEC_free (lambda_vector, heap, DDR_DIST_VECTS (ddr));
- if (DDR_DIR_VECTS (ddr))
- VEC_free (lambda_vector, heap, DDR_DIR_VECTS (ddr));
+ DDR_DIST_VECTS (ddr).release ();
+ DDR_DIR_VECTS (ddr).release ();
free (ddr);
}
@@ -4736,29 +4726,29 @@ free_dependence_relation (struct data_dependence_relation *ddr)
DEPENDENCE_RELATIONS. */
void
-free_dependence_relations (VEC (ddr_p, heap) *dependence_relations)
+free_dependence_relations (vec<ddr_p> dependence_relations)
{
unsigned int i;
struct data_dependence_relation *ddr;
- FOR_EACH_VEC_ELT (ddr_p, dependence_relations, i, ddr)
+ FOR_EACH_VEC_ELT (dependence_relations, i, ddr)
if (ddr)
free_dependence_relation (ddr);
- VEC_free (ddr_p, heap, dependence_relations);
+ dependence_relations.release ();
}
/* Free the memory used by the data references from DATAREFS. */
void
-free_data_refs (VEC (data_reference_p, heap) *datarefs)
+free_data_refs (vec<data_reference_p> datarefs)
{
unsigned int i;
struct data_reference *dr;
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
free_data_ref (dr);
- VEC_free (data_reference_p, heap, datarefs);
+ datarefs.release ();
}
@@ -5007,14 +4997,14 @@ create_rdg_edges_for_scalar (struct graph *rdg, tree def, int idef)
/* Creates the edges of the reduced dependence graph RDG. */
static void
-create_rdg_edges (struct graph *rdg, VEC (ddr_p, heap) *ddrs)
+create_rdg_edges (struct graph *rdg, vec<ddr_p> ddrs)
{
int i;
struct data_dependence_relation *ddr;
def_operand_p def_p;
ssa_op_iter iter;
- FOR_EACH_VEC_ELT (ddr_p, ddrs, i, ddr)
+ FOR_EACH_VEC_ELT (ddrs, i, ddr)
if (DDR_ARE_DEPENDENT (ddr) == NULL_TREE)
create_rdg_edge_for_ddr (rdg, ddr);
@@ -5027,14 +5017,14 @@ create_rdg_edges (struct graph *rdg, VEC (ddr_p, heap) *ddrs)
/* Build the vertices of the reduced dependence graph RDG. */
static void
-create_rdg_vertices (struct graph *rdg, VEC (gimple, heap) *stmts, loop_p loop)
+create_rdg_vertices (struct graph *rdg, vec<gimple> stmts, loop_p loop)
{
int i, j;
gimple stmt;
- FOR_EACH_VEC_ELT (gimple, stmts, i, stmt)
+ FOR_EACH_VEC_ELT (stmts, i, stmt)
{
- VEC (data_ref_loc, heap) *references;
+ vec<data_ref_loc> references;
data_ref_loc *ref;
struct vertex *v = &(rdg->vertices[i]);
@@ -5043,14 +5033,14 @@ create_rdg_vertices (struct graph *rdg, VEC (gimple, heap) *stmts, loop_p loop)
v->data = XNEW (struct rdg_vertex);
RDGV_STMT (v) = stmt;
- RDGV_DATAREFS (v) = NULL;
+ RDGV_DATAREFS (v).create (0);
RDGV_HAS_MEM_WRITE (v) = false;
RDGV_HAS_MEM_READS (v) = false;
if (gimple_code (stmt) == GIMPLE_PHI)
continue;
get_references_in_stmt (stmt, &references);
- FOR_EACH_VEC_ELT (data_ref_loc, references, j, ref)
+ FOR_EACH_VEC_ELT (references, j, ref)
{
data_reference_p dr;
if (!ref->is_read)
@@ -5060,9 +5050,9 @@ create_rdg_vertices (struct graph *rdg, VEC (gimple, heap) *stmts, loop_p loop)
dr = create_data_ref (loop, loop_containing_stmt (stmt),
*ref->pos, stmt, ref->is_read);
if (dr)
- VEC_safe_push (data_reference_p, heap, RDGV_DATAREFS (v), dr);
+ RDGV_DATAREFS (v).safe_push (dr);
}
- VEC_free (data_ref_loc, heap, references);
+ references.release ();
}
}
@@ -5073,7 +5063,7 @@ create_rdg_vertices (struct graph *rdg, VEC (gimple, heap) *stmts, loop_p loop)
identifying statements. */
static void
-stmts_from_loop (struct loop *loop, VEC (gimple, heap) **stmts)
+stmts_from_loop (struct loop *loop, vec<gimple> *stmts)
{
unsigned int i;
basic_block *bbs = get_loop_body_in_dom_order (loop);
@@ -5085,13 +5075,13 @@ stmts_from_loop (struct loop *loop, VEC (gimple, heap) **stmts)
gimple stmt;
for (bsi = gsi_start_phis (bb); !gsi_end_p (bsi); gsi_next (&bsi))
- VEC_safe_push (gimple, heap, *stmts, gsi_stmt (bsi));
+ stmts->safe_push (gsi_stmt (bsi));
for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
{
stmt = gsi_stmt (bsi);
if (gimple_code (stmt) != GIMPLE_LABEL && !is_gimple_debug (stmt))
- VEC_safe_push (gimple, heap, *stmts, stmt);
+ stmts->safe_push (stmt);
}
}
@@ -5101,12 +5091,12 @@ stmts_from_loop (struct loop *loop, VEC (gimple, heap) **stmts)
/* Returns true when all the dependences are computable. */
static bool
-known_dependences_p (VEC (ddr_p, heap) *dependence_relations)
+known_dependences_p (vec<ddr_p> dependence_relations)
{
ddr_p ddr;
unsigned int i;
- FOR_EACH_VEC_ELT (ddr_p, dependence_relations, i, ddr)
+ FOR_EACH_VEC_ELT (dependence_relations, i, ddr)
if (DDR_ARE_DEPENDENT (ddr) == chrec_dont_know)
return false;
@@ -5130,9 +5120,9 @@ build_empty_rdg (int n_stmts)
struct graph *
build_rdg (struct loop *loop,
- VEC (loop_p, heap) **loop_nest,
- VEC (ddr_p, heap) **dependence_relations,
- VEC (data_reference_p, heap) **datarefs)
+ vec<loop_p> *loop_nest,
+ vec<ddr_p> *dependence_relations,
+ vec<data_reference_p> *datarefs)
{
struct graph *rdg = NULL;
@@ -5140,12 +5130,13 @@ build_rdg (struct loop *loop,
dependence_relations)
&& known_dependences_p (*dependence_relations))
{
- VEC (gimple, heap) *stmts = VEC_alloc (gimple, heap, 10);
+ vec<gimple> stmts;
+ stmts.create (10);
stmts_from_loop (loop, &stmts);
- rdg = build_empty_rdg (VEC_length (gimple, stmts));
+ rdg = build_empty_rdg (stmts.length ());
create_rdg_vertices (rdg, stmts, loop);
create_rdg_edges (rdg, *dependence_relations);
- VEC_free (gimple, heap, stmts);
+ stmts.release ();
}
return rdg;
diff --git a/gcc/tree-data-ref.h b/gcc/tree-data-ref.h
index b00a4f752b0..83929f402cd 100644
--- a/gcc/tree-data-ref.h
+++ b/gcc/tree-data-ref.h
@@ -81,7 +81,7 @@ struct indices
tree base_object;
/* A list of chrecs. Access functions of the indices. */
- VEC(tree,heap) *access_fns;
+ vec<tree> access_fns;
/* Whether BASE_OBJECT is an access representing the whole object
or whether the access could not be constrained. */
@@ -100,9 +100,6 @@ struct dr_alias
and scalar multiplication. In this vector space, an element is a list of
integers. */
typedef int *lambda_vector;
-DEF_VEC_P(lambda_vector);
-DEF_VEC_ALLOC_P(lambda_vector,heap);
-DEF_VEC_ALLOC_P(lambda_vector,gc);
/* An integer matrix. A matrix consists of m vectors of length n (IE
all vectors are the same length). */
@@ -138,20 +135,20 @@ typedef lambda_vector *lambda_matrix;
*/
struct access_matrix
{
- VEC (loop_p, heap) *loop_nest;
+ vec<loop_p> loop_nest;
int nb_induction_vars;
- VEC (tree, heap) *parameters;
- VEC (lambda_vector, gc) *matrix;
+ vec<tree> parameters;
+ vec<lambda_vector, va_gc> *matrix;
};
#define AM_LOOP_NEST(M) (M)->loop_nest
#define AM_NB_INDUCTION_VARS(M) (M)->nb_induction_vars
#define AM_PARAMETERS(M) (M)->parameters
#define AM_MATRIX(M) (M)->matrix
-#define AM_NB_PARAMETERS(M) (VEC_length (tree, AM_PARAMETERS(M)))
+#define AM_NB_PARAMETERS(M) (AM_PARAMETERS(M)).length ()
#define AM_CONST_COLUMN_INDEX(M) (AM_NB_INDUCTION_VARS (M) + AM_NB_PARAMETERS (M))
#define AM_NB_COLUMNS(M) (AM_NB_INDUCTION_VARS (M) + AM_NB_PARAMETERS (M) + 1)
-#define AM_GET_SUBSCRIPT_ACCESS_VECTOR(M, I) VEC_index (lambda_vector, AM_MATRIX (M), I)
+#define AM_GET_SUBSCRIPT_ACCESS_VECTOR(M, I) AM_MATRIX (M)[I]
#define AM_GET_ACCESS_MATRIX_ELEMENT(M, I, J) AM_GET_SUBSCRIPT_ACCESS_VECTOR (M, I)[J]
/* Return the column in the access matrix of LOOP_NUM. */
@@ -162,7 +159,7 @@ am_vector_index_for_loop (struct access_matrix *access_matrix, int loop_num)
int i;
loop_p l;
- for (i = 0; VEC_iterate (loop_p, AM_LOOP_NEST (access_matrix), i, l); i++)
+ for (i = 0; AM_LOOP_NEST (access_matrix).iterate (i, &l); i++)
if (l->num == loop_num)
return i;
@@ -201,8 +198,8 @@ struct data_reference
#define DR_BASE_OBJECT(DR) (DR)->indices.base_object
#define DR_UNCONSTRAINED_BASE(DR) (DR)->indices.unconstrained_base
#define DR_ACCESS_FNS(DR) (DR)->indices.access_fns
-#define DR_ACCESS_FN(DR, I) VEC_index (tree, DR_ACCESS_FNS (DR), I)
-#define DR_NUM_DIMENSIONS(DR) VEC_length (tree, DR_ACCESS_FNS (DR))
+#define DR_ACCESS_FN(DR, I) DR_ACCESS_FNS (DR)[I]
+#define DR_NUM_DIMENSIONS(DR) DR_ACCESS_FNS (DR).length ()
#define DR_IS_READ(DR) (DR)->is_read
#define DR_IS_WRITE(DR) (!DR_IS_READ (DR))
#define DR_BASE_ADDRESS(DR) (DR)->innermost.base_address
@@ -214,8 +211,6 @@ struct data_reference
#define DR_ACCESS_MATRIX(DR) (DR)->access_matrix
typedef struct data_reference *data_reference_p;
-DEF_VEC_P(data_reference_p);
-DEF_VEC_ALLOC_P (data_reference_p, heap);
enum data_dependence_direction {
dir_positive,
@@ -243,7 +238,7 @@ enum data_dependence_direction {
#define CF_NOT_KNOWN_P(CF) ((CF)->n == NOT_KNOWN)
#define CF_NO_DEPENDENCE_P(CF) ((CF)->n == NO_DEPENDENCE)
-typedef VEC (tree, heap) *affine_fn;
+typedef vec<tree> affine_fn;
typedef struct
{
@@ -277,8 +272,6 @@ struct subscript
};
typedef struct subscript *subscript_p;
-DEF_VEC_P(subscript_p);
-DEF_VEC_ALLOC_P (subscript_p, heap);
#define SUB_CONFLICTS_IN_A(SUB) SUB->conflicting_iterations_in_a
#define SUB_CONFLICTS_IN_B(SUB) SUB->conflicting_iterations_in_b
@@ -310,16 +303,16 @@ struct data_dependence_relation
/* For each subscript in the dependence test, there is an element in
this array. This is the attribute that labels the edge A->B of
the data_dependence_relation. */
- VEC (subscript_p, heap) *subscripts;
+ vec<subscript_p> subscripts;
/* The analyzed loop nest. */
- VEC (loop_p, heap) *loop_nest;
+ vec<loop_p> loop_nest;
/* The classic direction vector. */
- VEC (lambda_vector, heap) *dir_vects;
+ vec<lambda_vector> dir_vects;
/* The classic distance vector. */
- VEC (lambda_vector, heap) *dist_vects;
+ vec<lambda_vector> dist_vects;
/* An index in loop_nest for the innermost loop that varies for
this data dependence relation. */
@@ -338,72 +331,70 @@ struct data_dependence_relation
};
typedef struct data_dependence_relation *ddr_p;
-DEF_VEC_P(ddr_p);
-DEF_VEC_ALLOC_P(ddr_p,heap);
#define DDR_A(DDR) DDR->a
#define DDR_B(DDR) DDR->b
#define DDR_AFFINE_P(DDR) DDR->affine_p
#define DDR_ARE_DEPENDENT(DDR) DDR->are_dependent
#define DDR_SUBSCRIPTS(DDR) DDR->subscripts
-#define DDR_SUBSCRIPT(DDR, I) VEC_index (subscript_p, DDR_SUBSCRIPTS (DDR), I)
-#define DDR_NUM_SUBSCRIPTS(DDR) VEC_length (subscript_p, DDR_SUBSCRIPTS (DDR))
+#define DDR_SUBSCRIPT(DDR, I) DDR_SUBSCRIPTS (DDR)[I]
+#define DDR_NUM_SUBSCRIPTS(DDR) DDR_SUBSCRIPTS (DDR).length ()
#define DDR_LOOP_NEST(DDR) DDR->loop_nest
/* The size of the direction/distance vectors: the number of loops in
the loop nest. */
-#define DDR_NB_LOOPS(DDR) (VEC_length (loop_p, DDR_LOOP_NEST (DDR)))
+#define DDR_NB_LOOPS(DDR) (DDR_LOOP_NEST (DDR).length ())
#define DDR_INNER_LOOP(DDR) DDR->inner_loop
#define DDR_SELF_REFERENCE(DDR) DDR->self_reference_p
#define DDR_DIST_VECTS(DDR) ((DDR)->dist_vects)
#define DDR_DIR_VECTS(DDR) ((DDR)->dir_vects)
#define DDR_NUM_DIST_VECTS(DDR) \
- (VEC_length (lambda_vector, DDR_DIST_VECTS (DDR)))
+ (DDR_DIST_VECTS (DDR).length ())
#define DDR_NUM_DIR_VECTS(DDR) \
- (VEC_length (lambda_vector, DDR_DIR_VECTS (DDR)))
+ (DDR_DIR_VECTS (DDR).length ())
#define DDR_DIR_VECT(DDR, I) \
- VEC_index (lambda_vector, DDR_DIR_VECTS (DDR), I)
+ DDR_DIR_VECTS (DDR)[I]
#define DDR_DIST_VECT(DDR, I) \
- VEC_index (lambda_vector, DDR_DIST_VECTS (DDR), I)
+ DDR_DIST_VECTS (DDR)[I]
#define DDR_REVERSED_P(DDR) DDR->reversed_p
bool dr_analyze_innermost (struct data_reference *, struct loop *);
extern bool compute_data_dependences_for_loop (struct loop *, bool,
- VEC (loop_p, heap) **,
- VEC (data_reference_p, heap) **,
- VEC (ddr_p, heap) **);
+ vec<loop_p> *,
+ vec<data_reference_p> *,
+ vec<ddr_p> *);
extern bool compute_data_dependences_for_bb (basic_block, bool,
- VEC (data_reference_p, heap) **,
- VEC (ddr_p, heap) **);
-extern void debug_ddrs (VEC (ddr_p, heap) *);
+ vec<data_reference_p> *,
+ vec<ddr_p> *);
+extern void debug_ddrs (vec<ddr_p> );
extern void dump_data_reference (FILE *, struct data_reference *);
extern void debug_data_reference (struct data_reference *);
-extern void debug_data_references (VEC (data_reference_p, heap) *);
+extern void debug_data_references (vec<data_reference_p> );
extern void debug_data_dependence_relation (struct data_dependence_relation *);
-extern void dump_data_dependence_relations (FILE *, VEC (ddr_p, heap) *);
-extern void debug_data_dependence_relations (VEC (ddr_p, heap) *);
+extern void dump_data_dependence_relations (FILE *, vec<ddr_p> );
+extern void debug_data_dependence_relations (vec<ddr_p> );
extern void free_dependence_relation (struct data_dependence_relation *);
-extern void free_dependence_relations (VEC (ddr_p, heap) *);
+extern void free_dependence_relations (vec<ddr_p> );
extern void free_data_ref (data_reference_p);
-extern void free_data_refs (VEC (data_reference_p, heap) *);
+extern void free_data_refs (vec<data_reference_p> );
extern bool find_data_references_in_stmt (struct loop *, gimple,
- VEC (data_reference_p, heap) **);
+ vec<data_reference_p> *);
extern bool graphite_find_data_references_in_stmt (loop_p, loop_p, gimple,
- VEC (data_reference_p, heap) **);
+ vec<data_reference_p> *);
struct data_reference *create_data_ref (loop_p, loop_p, tree, gimple, bool);
-extern bool find_loop_nest (struct loop *, VEC (loop_p, heap) **);
+extern bool find_loop_nest (struct loop *, vec<loop_p> *);
extern struct data_dependence_relation *initialize_data_dependence_relation
- (struct data_reference *, struct data_reference *, VEC (loop_p, heap) *);
+ (struct data_reference *, struct data_reference *, vec<loop_p>);
extern void compute_affine_dependence (struct data_dependence_relation *,
loop_p);
extern void compute_self_dependence (struct data_dependence_relation *);
-extern bool compute_all_dependences (VEC (data_reference_p, heap) *,
- VEC (ddr_p, heap) **, VEC (loop_p, heap) *,
- bool);
+extern bool compute_all_dependences (vec<data_reference_p> ,
+ vec<ddr_p> *,
+ vec<loop_p>, bool);
extern tree find_data_references_in_bb (struct loop *, basic_block,
- VEC (data_reference_p, heap) **);
+ vec<data_reference_p> *);
extern bool dr_may_alias_p (const struct data_reference *,
const struct data_reference *, bool);
@@ -473,12 +464,12 @@ ddr_is_anti_dependent (ddr_p ddr)
/* Return true when DEPENDENCE_RELATIONS contains an anti-dependence. */
static inline bool
-ddrs_have_anti_deps (VEC (ddr_p, heap) *dependence_relations)
+ddrs_have_anti_deps (vec<ddr_p> dependence_relations)
{
unsigned i;
ddr_p ddr;
- for (i = 0; VEC_iterate (ddr_p, dependence_relations, i, ddr); i++)
+ for (i = 0; dependence_relations.iterate (i, &ddr); i++)
if (ddr_is_anti_dependent (ddr))
return true;
@@ -509,7 +500,7 @@ ddr_dependence_level (ddr_p ddr)
unsigned vector;
unsigned level = 0;
- if (DDR_DIST_VECTS (ddr))
+ if (DDR_DIST_VECTS (ddr).exists ())
level = dependence_level (DDR_DIST_VECT (ddr, 0), DDR_NB_LOOPS (ddr));
for (vector = 1; vector < DDR_NUM_DIST_VECTS (ddr); vector++)
@@ -527,7 +518,7 @@ typedef struct rdg_vertex
gimple stmt;
/* Vector of data-references in this statement. */
- VEC(data_reference_p, heap) *datarefs;
+ vec<data_reference_p> datarefs;
/* True when the statement contains a write to memory. */
bool has_mem_write;
@@ -589,21 +580,21 @@ typedef struct rdg_edge
#define RDGE_RELATION(E) ((struct rdg_edge *) ((E)->data))->relation
struct graph *build_rdg (struct loop *,
- VEC (loop_p, heap) **,
- VEC (ddr_p, heap) **,
- VEC (data_reference_p, heap) **);
+ vec<loop_p> *,
+ vec<ddr_p> *,
+ vec<data_reference_p> *);
struct graph *build_empty_rdg (int);
void free_rdg (struct graph *);
/* Return the index of the variable VAR in the LOOP_NEST array. */
static inline int
-index_in_loop_nest (int var, VEC (loop_p, heap) *loop_nest)
+index_in_loop_nest (int var, vec<loop_p> loop_nest)
{
struct loop *loopi;
int var_index;
- for (var_index = 0; VEC_iterate (loop_p, loop_nest, var_index, loopi);
+ for (var_index = 0; loop_nest.iterate (var_index, &loopi);
var_index++)
if (loopi->num == var)
break;
@@ -641,14 +632,10 @@ void split_constant_offset (tree , tree *, tree *);
typedef struct rdg_component
{
int num;
- VEC (int, heap) *vertices;
+ vec<int> vertices;
} *rdgc;
-DEF_VEC_P (rdgc);
-DEF_VEC_ALLOC_P (rdgc, heap);
-DEF_VEC_P (bitmap);
-DEF_VEC_ALLOC_P (bitmap, heap);
/* Compute the greatest common divisor of a VECTOR of SIZE numbers. */
diff --git a/gcc/tree-dfa.c b/gcc/tree-dfa.c
index 423923fb66a..3bba8d3f8e2 100644
--- a/gcc/tree-dfa.c
+++ b/gcc/tree-dfa.c
@@ -700,8 +700,6 @@ struct GTY(()) numbered_tree_d
};
typedef struct numbered_tree_d numbered_tree;
-DEF_VEC_O (numbered_tree);
-DEF_VEC_ALLOC_O (numbered_tree, heap);
/* Compare two declarations references by their DECL_UID / sequence number.
Called via qsort. */
@@ -722,14 +720,14 @@ static tree
dump_enumerated_decls_push (tree *tp, int *walk_subtrees, void *data)
{
struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
- VEC (numbered_tree, heap) **list = (VEC (numbered_tree, heap) **) &wi->info;
+ vec<numbered_tree> *list = (vec<numbered_tree> *) wi->info;
numbered_tree nt;
if (!DECL_P (*tp))
return NULL_TREE;
nt.t = *tp;
- nt.num = VEC_length (numbered_tree, *list);
- VEC_safe_push (numbered_tree, heap, *list, nt);
+ nt.num = list->length ();
+ list->safe_push (nt);
*walk_subtrees = 0;
return NULL_TREE;
}
@@ -747,10 +745,11 @@ dump_enumerated_decls (FILE *file, int flags)
{
basic_block bb;
struct walk_stmt_info wi;
- VEC (numbered_tree, heap) *decl_list = VEC_alloc (numbered_tree, heap, 40);
+ vec<numbered_tree> decl_list;
+ decl_list.create (40);
memset (&wi, '\0', sizeof (wi));
- wi.info = (void*) decl_list;
+ wi.info = (void *) &decl_list;
FOR_EACH_BB (bb)
{
gimple_stmt_iterator gsi;
@@ -759,9 +758,8 @@ dump_enumerated_decls (FILE *file, int flags)
if (!is_gimple_debug (gsi_stmt (gsi)))
walk_gimple_stmt (&gsi, NULL, dump_enumerated_decls_push, &wi);
}
- decl_list = (VEC (numbered_tree, heap) *) wi.info;
- VEC_qsort (numbered_tree, decl_list, compare_decls_by_uid);
- if (VEC_length (numbered_tree, decl_list))
+ decl_list.qsort (compare_decls_by_uid);
+ if (decl_list.length ())
{
unsigned ix;
numbered_tree *ntp;
@@ -769,7 +767,7 @@ dump_enumerated_decls (FILE *file, int flags)
fprintf (file, "Declarations used by %s, sorted by DECL_UID:\n",
current_function_name ());
- FOR_EACH_VEC_ELT (numbered_tree, decl_list, ix, ntp)
+ FOR_EACH_VEC_ELT (decl_list, ix, ntp)
{
if (ntp->t == last)
continue;
@@ -779,6 +777,5 @@ dump_enumerated_decls (FILE *file, int flags)
last = ntp->t;
}
}
- VEC_free (numbered_tree, heap, decl_list);
+ decl_list.release ();
}
-
diff --git a/gcc/tree-diagnostic.c b/gcc/tree-diagnostic.c
index 3eab7ec3e5d..788b118c3e8 100644
--- a/gcc/tree-diagnostic.c
+++ b/gcc/tree-diagnostic.c
@@ -61,8 +61,6 @@ typedef struct
source_location where;
} loc_map_pair;
-DEF_VEC_O (loc_map_pair);
-DEF_VEC_ALLOC_O (loc_map_pair, heap);
/* Unwind the different macro expansions that lead to the token which
location is WHERE and emit diagnostics showing the resulting
@@ -106,7 +104,7 @@ maybe_unwind_expanded_macro_loc (diagnostic_context *context,
source_location where)
{
const struct line_map *map;
- VEC(loc_map_pair,heap) *loc_vec = NULL;
+ vec<loc_map_pair> loc_vec = vec<loc_map_pair>();
unsigned ix;
loc_map_pair loc, *iter;
@@ -127,7 +125,7 @@ maybe_unwind_expanded_macro_loc (diagnostic_context *context,
loc.where = where;
loc.map = map;
- VEC_safe_push (loc_map_pair, heap, loc_vec, loc);
+ loc_vec.safe_push (loc);
/* WHERE is the location of a token inside the expansion of a
macro. MAP is the map holding the locations of that macro
@@ -148,7 +146,7 @@ maybe_unwind_expanded_macro_loc (diagnostic_context *context,
expand_location_to_spelling_point (diagnostic->location).line;
if (!LINEMAP_SYSP (map))
- FOR_EACH_VEC_ELT (loc_map_pair, loc_vec, ix, iter)
+ FOR_EACH_VEC_ELT (loc_vec, ix, iter)
{
/* Sometimes, in the unwound macro expansion trace, we want to
print a part of the context that shows where, in the
@@ -223,7 +221,7 @@ maybe_unwind_expanded_macro_loc (diagnostic_context *context,
linemap_map_get_macro_name (iter->map));
}
- VEC_free (loc_map_pair, heap, loc_vec);
+ loc_vec.release ();
}
/* This is a diagnostic finalizer implementation that is aware of
diff --git a/gcc/tree-dump.c b/gcc/tree-dump.c
index a1511ea7eea..f06ff68f5da 100644
--- a/gcc/tree-dump.c
+++ b/gcc/tree-dump.c
@@ -290,7 +290,7 @@ dequeue_and_dump (dump_info_p di)
{
unsigned ix;
tree base;
- VEC(tree,gc) *accesses = BINFO_BASE_ACCESSES (t);
+ vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (t);
dump_child ("type", BINFO_TYPE (t));
@@ -300,8 +300,7 @@ dequeue_and_dump (dump_info_p di)
dump_int (di, "bases", BINFO_N_BASE_BINFOS (t));
for (ix = 0; BINFO_BASE_ITERATE (t, ix, base); ix++)
{
- tree access = (accesses ? VEC_index (tree, accesses, ix)
- : access_public_node);
+ tree access = (accesses ? (*accesses)[ix] : access_public_node);
const char *string = NULL;
if (access == access_public_node)
@@ -649,8 +648,7 @@ dequeue_and_dump (dump_info_p di)
{
unsigned HOST_WIDE_INT cnt;
tree index, value;
- dump_int (di, "lngt", VEC_length (constructor_elt,
- CONSTRUCTOR_ELTS (t)));
+ dump_int (di, "lngt", vec_safe_length (CONSTRUCTOR_ELTS (t)));
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), cnt, index, value)
{
dump_child ("idx", index);
diff --git a/gcc/tree-eh.c b/gcc/tree-eh.c
index 9056243cd4b..922fcfe454b 100644
--- a/gcc/tree-eh.c
+++ b/gcc/tree-eh.c
@@ -378,7 +378,7 @@ struct leh_tf_state
struct pointer_map_t *goto_queue_map;
/* The set of unique labels seen as entries in the goto queue. */
- VEC(tree,heap) *dest_array;
+ vec<tree> dest_array;
/* A label to be added at the end of the completed transformed
sequence. It will be set if may_fallthru was true *at one time*,
@@ -613,20 +613,20 @@ record_in_goto_queue_label (struct leh_tf_state *tf, treemple stmt, tree label,
if (!outside_finally_tree (temp, tf->try_finally_expr))
return;
- if (! tf->dest_array)
+ if (! tf->dest_array.exists ())
{
- tf->dest_array = VEC_alloc (tree, heap, 10);
- VEC_quick_push (tree, tf->dest_array, label);
+ tf->dest_array.create (10);
+ tf->dest_array.quick_push (label);
index = 0;
}
else
{
- int n = VEC_length (tree, tf->dest_array);
+ int n = tf->dest_array.length ();
for (index = 0; index < n; ++index)
- if (VEC_index (tree, tf->dest_array, index) == label)
+ if (tf->dest_array[index] == label)
break;
if (index == n)
- VEC_safe_push (tree, heap, tf->dest_array, label);
+ tf->dest_array.safe_push (label);
}
/* In the case of a GOTO we want to record the destination label,
@@ -753,7 +753,7 @@ do_goto_redirection (struct goto_queue_node *q, tree finlab, gimple_seq mod,
gcc_assert (q->is_label);
- q->cont_stmt = gimple_build_goto (VEC_index (tree, tf->dest_array, q->index));
+ q->cont_stmt = gimple_build_goto (tf->dest_array[q->index]);
if (mod)
gimple_seq_add_seq (&q->repl_stmt, mod);
@@ -1177,7 +1177,7 @@ lower_try_finally_onedest (struct leh_state *state, struct leh_tf_state *tf)
do_goto_redirection (q, finally_label, NULL, tf);
replace_goto_queue (tf);
- if (VEC_index (tree, tf->dest_array, 0) == tf->fallthru_label)
+ if (tf->dest_array[0] == tf->fallthru_label)
{
/* Reachable by goto to fallthru label only. Redirect it
to the new label (already created, sadly), and do not
@@ -1256,7 +1256,7 @@ lower_try_finally_copy (struct leh_state *state, struct leh_tf_state *tf)
tree label;
} *labels;
- return_index = VEC_length (tree, tf->dest_array);
+ return_index = tf->dest_array.length ();
labels = XCNEWVEC (struct labels_s, return_index + 1);
q = tf->goto_queue;
@@ -1335,7 +1335,7 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
int return_index, eh_index, fallthru_index;
int nlabels, ndests, j, last_case_index;
tree last_case;
- VEC (tree,heap) *case_label_vec;
+ vec<tree> case_label_vec;
gimple_seq switch_body = NULL;
gimple x, eh_else;
tree tmp;
@@ -1362,7 +1362,7 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
lower_eh_constructs_1 (state, &finally);
/* Prepare for switch statement generation. */
- nlabels = VEC_length (tree, tf->dest_array);
+ nlabels = tf->dest_array.length ();
return_index = nlabels;
eh_index = return_index + tf->may_return;
fallthru_index = eh_index + (tf->may_throw && !eh_else);
@@ -1371,10 +1371,10 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
finally_tmp = create_tmp_var (integer_type_node, "finally_tmp");
finally_label = create_artificial_label (finally_loc);
- /* We use VEC_quick_push on case_label_vec throughout this function,
+ /* We use vec::quick_push on case_label_vec throughout this function,
 since we know the size in advance and allocate precisely as much
space as needed. */
- case_label_vec = VEC_alloc (tree, heap, ndests);
+ case_label_vec.create (ndests);
last_case = NULL;
last_case_index = 0;
@@ -1392,7 +1392,7 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
tmp = build_int_cst (integer_type_node, fallthru_index);
last_case = build_case_label (tmp, NULL,
create_artificial_label (tf_loc));
- VEC_quick_push (tree, case_label_vec, last_case);
+ case_label_vec.quick_push (last_case);
last_case_index++;
x = gimple_build_label (CASE_LABEL (last_case));
@@ -1435,7 +1435,7 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
tmp = build_int_cst (integer_type_node, eh_index);
last_case = build_case_label (tmp, NULL,
create_artificial_label (tf_loc));
- VEC_quick_push (tree, case_label_vec, last_case);
+ case_label_vec.quick_push (last_case);
last_case_index++;
x = gimple_build_label (CASE_LABEL (last_case));
@@ -1479,8 +1479,7 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
}
case_index = j + q->index;
- if (VEC_length (tree, case_label_vec) <= case_index
- || !VEC_index (tree, case_label_vec, case_index))
+ if (case_label_vec.length () <= case_index || !case_label_vec[case_index])
{
tree case_lab;
void **slot;
@@ -1493,7 +1492,7 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
cont_map = pointer_map_create ();
slot = pointer_map_insert (cont_map, case_lab);
*slot = q->cont_stmt;
- VEC_quick_push (tree, case_label_vec, case_lab);
+ case_label_vec.quick_push (case_lab);
}
}
for (j = last_case_index; j < last_case_index + nlabels; j++)
@@ -1501,7 +1500,7 @@ lower_try_finally_switch (struct leh_state *state, struct leh_tf_state *tf)
gimple cont_stmt;
void **slot;
- last_case = VEC_index (tree, case_label_vec, j);
+ last_case = case_label_vec[j];
gcc_assert (last_case);
gcc_assert (cont_map);
@@ -1666,7 +1665,7 @@ lower_try_finally (struct leh_state *state, gimple tp)
how many destinations are reached by the finally block. Use this to
determine how we process the finally block itself. */
- ndests = VEC_length (tree, this_tf.dest_array);
+ ndests = this_tf.dest_array.length ();
ndests += this_tf.may_fallthru;
ndests += this_tf.may_return;
ndests += this_tf.may_throw;
@@ -1701,7 +1700,7 @@ lower_try_finally (struct leh_state *state, gimple tp)
gimple_seq_add_stmt (&this_tf.top_p_seq, x);
}
- VEC_free (tree, heap, this_tf.dest_array);
+ this_tf.dest_array.release ();
free (this_tf.goto_queue);
if (this_tf.goto_queue_map)
pointer_map_destroy (this_tf.goto_queue_map);
@@ -2811,7 +2810,7 @@ maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
{
eh_landing_pad old_lp, new_lp;
- old_lp = VEC_index (eh_landing_pad, old_fun->eh->lp_array, old_lp_nr);
+ old_lp = (*old_fun->eh->lp_array)[old_lp_nr];
slot = pointer_map_contains (map, old_lp);
new_lp = (eh_landing_pad) *slot;
new_lp_nr = new_lp->index;
@@ -2820,7 +2819,7 @@ maybe_duplicate_eh_stmt_fn (struct function *new_fun, gimple new_stmt,
{
eh_region old_r, new_r;
- old_r = VEC_index (eh_region, old_fun->eh->region_array, -old_lp_nr);
+ old_r = (*old_fun->eh->region_array)[-old_lp_nr];
slot = pointer_map_contains (map, old_r);
new_r = (eh_region) *slot;
new_lp_nr = -new_r->index;
@@ -3333,7 +3332,7 @@ lower_eh_dispatch (basic_block src, gimple stmt)
{
case ERT_TRY:
{
- VEC (tree, heap) *labels = NULL;
+ vec<tree> labels = vec<tree>();
tree default_label = NULL;
eh_catch c;
edge_iterator ei;
@@ -3369,7 +3368,7 @@ lower_eh_dispatch (basic_block src, gimple stmt)
{
tree t = build_case_label (TREE_VALUE (flt_node),
NULL, lab);
- VEC_safe_push (tree, heap, labels, t);
+ labels.safe_push (t);
pointer_set_insert (seen_values, TREE_VALUE (flt_node));
have_label = true;
}
@@ -3400,7 +3399,7 @@ lower_eh_dispatch (basic_block src, gimple stmt)
/* Don't generate a switch if there's only a default case.
This is common in the form of try { A; } catch (...) { B; }. */
- if (labels == NULL)
+ if (!labels.exists ())
{
e = single_succ_edge (src);
e->flags |= EDGE_FALLTHRU;
@@ -3422,7 +3421,7 @@ lower_eh_dispatch (basic_block src, gimple stmt)
x = gimple_build_switch (filter, default_label, labels);
gsi_insert_before (&gsi, x, GSI_SAME_STMT);
- VEC_free (tree, heap, labels);
+ labels.release ();
}
pointer_set_destroy (seen_values);
}
@@ -3533,9 +3532,8 @@ remove_unreachable_handlers (void)
basic_block bb;
int lp_nr, r_nr;
- r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
- lp_reachable
- = sbitmap_alloc (VEC_length (eh_landing_pad, cfun->eh->lp_array));
+ r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
+ lp_reachable = sbitmap_alloc (cfun->eh->lp_array->length ());
bitmap_clear (r_reachable);
bitmap_clear (lp_reachable);
@@ -3588,7 +3586,7 @@ remove_unreachable_handlers (void)
}
for (r_nr = 1;
- VEC_iterate (eh_region, cfun->eh->region_array, r_nr, region); ++r_nr)
+ vec_safe_iterate (cfun->eh->region_array, r_nr, &region); ++r_nr)
if (region && !bitmap_bit_p (r_reachable, r_nr))
{
if (dump_file)
@@ -3597,7 +3595,7 @@ remove_unreachable_handlers (void)
}
for (lp_nr = 1;
- VEC_iterate (eh_landing_pad, cfun->eh->lp_array, lp_nr, lp); ++lp_nr)
+ vec_safe_iterate (cfun->eh->lp_array, lp_nr, &lp); ++lp_nr)
if (lp && !bitmap_bit_p (lp_reachable, lp_nr))
{
if (dump_file)
@@ -3632,7 +3630,7 @@ maybe_remove_unreachable_handlers (void)
if (cfun->eh == NULL)
return;
- for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+ for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
if (lp && lp->post_landing_pad)
{
if (label_to_block (lp->post_landing_pad) == NULL)
@@ -3655,7 +3653,7 @@ remove_unreachable_handlers_no_lp (void)
sbitmap r_reachable;
basic_block bb;
- r_reachable = sbitmap_alloc (VEC_length (eh_region, cfun->eh->region_array));
+ r_reachable = sbitmap_alloc (cfun->eh->region_array->length ());
bitmap_clear (r_reachable);
FOR_EACH_BB (bb)
@@ -3676,7 +3674,7 @@ remove_unreachable_handlers_no_lp (void)
}
}
- for (i = 1; VEC_iterate (eh_region, cfun->eh->region_array, i, r); ++i)
+ for (i = 1; cfun->eh->region_array->iterate (i, &r); ++i)
if (r && r->landing_pads == NULL && r->type != ERT_MUST_NOT_THROW
&& !bitmap_bit_p (r_reachable, i))
{
@@ -3802,7 +3800,7 @@ unsplit_all_eh (void)
eh_landing_pad lp;
int i;
- for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+ for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
if (lp)
changed |= unsplit_eh (lp);
@@ -4252,7 +4250,7 @@ cleanup_all_empty_eh (void)
eh_landing_pad lp;
int i;
- for (i = 1; VEC_iterate (eh_landing_pad, cfun->eh->lp_array, i, lp); ++i)
+ for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
if (lp)
changed |= cleanup_empty_eh (lp);
diff --git a/gcc/tree-emutls.c b/gcc/tree-emutls.c
index 7b058b11771..93270b861c2 100644
--- a/gcc/tree-emutls.c
+++ b/gcc/tree-emutls.c
@@ -59,11 +59,11 @@ along with GCC; see the file COPYING3. If not see
the index of a TLS variable equals the index of its control variable in
the other vector. */
static varpool_node_set tls_vars;
-static VEC(varpool_node_ptr, heap) *control_vars;
+static vec<varpool_node_ptr> control_vars;
/* For the current basic block, an SSA_NAME that has computed the address
of the TLS variable at the corresponding index. */
-static VEC(tree, heap) *access_vars;
+static vec<tree> access_vars;
/* The type of the control structure, shared with the emutls.c runtime. */
static tree emutls_object_type;
@@ -148,30 +148,31 @@ default_emutls_var_fields (tree type, tree *name ATTRIBUTE_UNUSED)
tree
default_emutls_var_init (tree to, tree decl, tree proxy)
{
- VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 4);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, 4);
constructor_elt elt;
tree type = TREE_TYPE (to);
tree field = TYPE_FIELDS (type);
elt.index = field;
elt.value = fold_convert (TREE_TYPE (field), DECL_SIZE_UNIT (decl));
- VEC_quick_push (constructor_elt, v, elt);
+ v->quick_push (elt);
field = DECL_CHAIN (field);
elt.index = field;
elt.value = build_int_cst (TREE_TYPE (field),
DECL_ALIGN_UNIT (decl));
- VEC_quick_push (constructor_elt, v, elt);
+ v->quick_push (elt);
field = DECL_CHAIN (field);
elt.index = field;
elt.value = null_pointer_node;
- VEC_quick_push (constructor_elt, v, elt);
+ v->quick_push (elt);
field = DECL_CHAIN (field);
elt.index = field;
elt.value = proxy;
- VEC_quick_push (constructor_elt, v, elt);
+ v->quick_push (elt);
return build_constructor (type, v);
}
@@ -365,7 +366,7 @@ emutls_decl (tree decl)
unsigned int i;
i = emutls_index (decl);
- var = VEC_index (varpool_node_ptr, control_vars, i);
+ var = control_vars[i];
return var->symbol.decl;
}
@@ -420,14 +421,14 @@ gen_emutls_addr (tree decl, struct lower_emutls_data *d)
/* Compute the address of the TLS variable with help from runtime. */
index = emutls_index (decl);
- addr = VEC_index (tree, access_vars, index);
+ addr = access_vars[index];
if (addr == NULL)
{
struct varpool_node *cvar;
tree cdecl;
gimple x;
- cvar = VEC_index (varpool_node_ptr, control_vars, index);
+ cvar = control_vars[index];
cdecl = cvar->symbol.decl;
TREE_ADDRESSABLE (cdecl) = 1;
@@ -448,7 +449,7 @@ gen_emutls_addr (tree decl, struct lower_emutls_data *d)
ipa_record_reference ((symtab_node)d->cfun_node, (symtab_node)cvar, IPA_REF_ADDR, x);
/* Record this ssa_name for possible use later in the basic block. */
- VEC_replace (tree, access_vars, index, addr);
+ access_vars[index] = addr;
}
return addr;
@@ -606,8 +607,8 @@ lower_emutls_phi_arg (gimple phi, unsigned int i, struct lower_emutls_data *d)
static inline void
clear_access_vars (void)
{
- memset (VEC_address (tree, access_vars), 0,
- VEC_length (tree, access_vars) * sizeof(tree));
+ memset (access_vars.address (), 0,
+ access_vars.length () * sizeof(tree));
}
/* Lower the entire function NODE. */
@@ -703,7 +704,7 @@ create_emultls_var (struct varpool_node *var, void *data)
cdecl = new_emutls_decl (var->symbol.decl, var->alias_of);
cvar = varpool_get_node (cdecl);
- VEC_quick_push (varpool_node_ptr, control_vars, cvar);
+ control_vars.quick_push (cvar);
if (!var->alias)
{
@@ -749,7 +750,7 @@ ipa_lower_emutls (void)
}
/* If we found no TLS variables, then there is no further work to do. */
- if (tls_vars->nodes == NULL)
+ if (!tls_vars->nodes.exists ())
{
tls_vars = NULL;
if (dump_file)
@@ -758,15 +759,15 @@ ipa_lower_emutls (void)
}
/* Allocate the on-the-side arrays that share indicies with the TLS vars. */
- n_tls = VEC_length (varpool_node_ptr, tls_vars->nodes);
- control_vars = VEC_alloc (varpool_node_ptr, heap, n_tls);
- access_vars = VEC_alloc (tree, heap, n_tls);
- VEC_safe_grow (tree, heap, access_vars, n_tls);
+ n_tls = tls_vars->nodes.length ();
+ control_vars.create (n_tls);
+ access_vars.create (n_tls);
+ access_vars.safe_grow_cleared (n_tls);
/* Create the control variables for each TLS variable. */
- FOR_EACH_VEC_ELT (varpool_node_ptr, tls_vars->nodes, i, var)
+ FOR_EACH_VEC_ELT (tls_vars->nodes, i, var)
{
- var = VEC_index (varpool_node_ptr, tls_vars->nodes, i);
+ var = tls_vars->nodes[i];
if (var->alias && !var->alias_of)
any_aliases = true;
@@ -778,7 +779,7 @@ ipa_lower_emutls (void)
if (any_aliases)
{
alias_pair *p;
- FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
+ FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
if (DECL_THREAD_LOCAL_P (p->decl))
{
p->decl = emutls_decl (p->decl);
@@ -795,8 +796,8 @@ ipa_lower_emutls (void)
if (ctor_body)
cgraph_build_static_cdtor ('I', ctor_body, DEFAULT_INIT_PRIORITY);
- VEC_free (varpool_node_ptr, heap, control_vars);
- VEC_free (tree, heap, access_vars);
+ control_vars.release ();
+ access_vars.release ();
free_varpool_node_set (tls_vars);
return TODO_ggc_collect | TODO_verify_all;
diff --git a/gcc/tree-flow.h b/gcc/tree-flow.h
index 7c27d96381c..81c5bb3377b 100644
--- a/gcc/tree-flow.h
+++ b/gcc/tree-flow.h
@@ -50,10 +50,10 @@ struct GTY(()) gimple_df {
indirect call has been turned into a noreturn call. When this
happens, all the instructions after the call are no longer
reachable and must be deleted as dead. */
- VEC(gimple,gc) *modified_noreturn_calls;
+ vec<gimple, va_gc> *modified_noreturn_calls;
/* Array of all SSA_NAMEs used in the function. */
- VEC(tree,gc) *ssa_names;
+ vec<tree, va_gc> *ssa_names;
/* Artificial variable used for the virtual operand FUD chain. */
tree vop;
@@ -66,7 +66,7 @@ struct GTY(()) gimple_df {
struct pointer_map_t * GTY((skip(""))) decls_to_pointers;
/* Free list of SSA_NAMEs. */
- VEC(tree,gc) *free_ssanames;
+ vec<tree, va_gc> *free_ssanames;
/* Hashtable holding definition for symbol. If this field is not NULL, it
means that the first reference to this variable in the function is a
@@ -289,8 +289,8 @@ extern int int_tree_map_eq (const void *, const void *);
extern unsigned int uid_decl_map_hash (const void *);
extern int uid_decl_map_eq (const void *, const void *);
-#define num_ssa_names (VEC_length (tree, cfun->gimple_df->ssa_names))
-#define ssa_name(i) (VEC_index (tree, cfun->gimple_df->ssa_names, (i)))
+#define num_ssa_names (vec_safe_length (cfun->gimple_df->ssa_names))
+#define ssa_name(i) ((*cfun->gimple_df->ssa_names)[(i)])
/* Macros for showing usage statistics. */
#define SCALE(x) ((unsigned long) ((x) < 1024*10 \
@@ -334,7 +334,7 @@ struct omp_region
/* If this is a combined parallel+workshare region, this is a list
of additional arguments needed by the combined parallel+workshare
library call. */
- VEC(tree,gc) *ws_args;
+ vec<tree, va_gc> *ws_args;
/* The code for the omp directive of this region. */
enum gimple_code type;
@@ -402,7 +402,7 @@ extern bool gimple_duplicate_sese_region (edge, edge, basic_block *, unsigned,
extern bool gimple_duplicate_sese_tail (edge, edge, basic_block *, unsigned,
basic_block *);
extern void gather_blocks_in_sese_region (basic_block entry, basic_block exit,
- VEC(basic_block,heap) **bbs_p);
+ vec<basic_block> *bbs_p);
extern void add_phi_args_after_copy_bb (basic_block);
extern void add_phi_args_after_copy (basic_block *, unsigned, edge);
extern bool gimple_purge_dead_eh_edges (basic_block);
@@ -480,17 +480,15 @@ struct _edge_var_map {
};
typedef struct _edge_var_map edge_var_map;
-DEF_VEC_O(edge_var_map);
-DEF_VEC_ALLOC_O(edge_var_map, heap);
/* A vector of var maps. */
-typedef VEC(edge_var_map, heap) *edge_var_map_vector;
+typedef vec<edge_var_map> edge_var_map_vector;
extern void init_tree_ssa (struct function *);
extern void redirect_edge_var_map_add (edge, tree, tree, source_location);
extern void redirect_edge_var_map_clear (edge);
extern void redirect_edge_var_map_dup (edge, edge);
-extern edge_var_map_vector redirect_edge_var_map_vector (edge);
+extern edge_var_map_vector *redirect_edge_var_map_vector (edge);
extern void redirect_edge_var_map_destroy (void);
extern edge ssa_redirect_edge (edge, basic_block);
@@ -654,7 +652,7 @@ basic_block ip_end_pos (struct loop *);
basic_block ip_normal_pos (struct loop *);
bool gimple_duplicate_loop_to_header_edge (struct loop *, edge,
unsigned int, sbitmap,
- edge, VEC (edge, heap) **,
+ edge, vec<edge> *,
int);
struct loop *slpeel_tree_duplicate_loop_to_edge_cfg (struct loop *, edge);
void rename_variables_in_loop (struct loop *);
@@ -680,15 +678,15 @@ void mark_virtual_phi_result_for_renaming (gimple);
/* In tree-ssa-threadedge.c */
extern void threadedge_initialize_values (void);
extern void threadedge_finalize_values (void);
-extern VEC(tree,heap) *ssa_name_values;
+extern vec<tree> ssa_name_values;
#define SSA_NAME_VALUE(x) \
- (SSA_NAME_VERSION(x) < VEC_length(tree, ssa_name_values) \
- ? VEC_index(tree, ssa_name_values, SSA_NAME_VERSION(x)) \
+ (SSA_NAME_VERSION(x) < ssa_name_values.length () \
+ ? ssa_name_values[SSA_NAME_VERSION(x)] \
: NULL_TREE)
extern void set_ssa_name_value (tree, tree);
extern bool potentially_threadable_block (basic_block);
extern void thread_across_edge (gimple, edge, bool,
- VEC(tree, heap) **, tree (*) (gimple, gimple));
+ vec<tree> *, tree (*) (gimple, gimple));
extern void propagate_threaded_block_debug_into (basic_block, basic_block);
/* In tree-ssa-loop-im.c */
diff --git a/gcc/tree-if-conv.c b/gcc/tree-if-conv.c
index 0afbcde91c2..b7e28048398 100644
--- a/gcc/tree-if-conv.c
+++ b/gcc/tree-if-conv.c
@@ -519,13 +519,13 @@ struct ifc_dr {
static bool
memrefs_read_or_written_unconditionally (gimple stmt,
- VEC (data_reference_p, heap) *drs)
+ vec<data_reference_p> drs)
{
int i, j;
data_reference_p a, b;
tree ca = bb_predicate (gimple_bb (stmt));
- for (i = 0; VEC_iterate (data_reference_p, drs, i, a); i++)
+ for (i = 0; drs.iterate (i, &a); i++)
if (DR_STMT (a) == stmt)
{
bool found = false;
@@ -537,7 +537,7 @@ memrefs_read_or_written_unconditionally (gimple stmt,
if (x == 1)
continue;
- for (j = 0; VEC_iterate (data_reference_p, drs, j, b); j++)
+ for (j = 0; drs.iterate (j, &b); j++)
{
tree ref_base_a = DR_REF (a);
tree ref_base_b = DR_REF (b);
@@ -591,13 +591,13 @@ memrefs_read_or_written_unconditionally (gimple stmt,
static bool
write_memrefs_written_at_least_once (gimple stmt,
- VEC (data_reference_p, heap) *drs)
+ vec<data_reference_p> drs)
{
int i, j;
data_reference_p a, b;
tree ca = bb_predicate (gimple_bb (stmt));
- for (i = 0; VEC_iterate (data_reference_p, drs, i, a); i++)
+ for (i = 0; drs.iterate (i, &a); i++)
if (DR_STMT (a) == stmt
&& DR_IS_WRITE (a))
{
@@ -610,7 +610,7 @@ write_memrefs_written_at_least_once (gimple stmt,
if (x == 1)
continue;
- for (j = 0; VEC_iterate (data_reference_p, drs, j, b); j++)
+ for (j = 0; drs.iterate (j, &b); j++)
if (DR_STMT (b) != stmt
&& DR_IS_WRITE (b)
&& same_data_refs_base_objects (a, b))
@@ -658,7 +658,7 @@ write_memrefs_written_at_least_once (gimple stmt,
iteration unconditionally. */
static bool
-ifcvt_memrefs_wont_trap (gimple stmt, VEC (data_reference_p, heap) *refs)
+ifcvt_memrefs_wont_trap (gimple stmt, vec<data_reference_p> refs)
{
return write_memrefs_written_at_least_once (stmt, refs)
&& memrefs_read_or_written_unconditionally (stmt, refs);
@@ -669,7 +669,7 @@ ifcvt_memrefs_wont_trap (gimple stmt, VEC (data_reference_p, heap) *refs)
not trap in the innermost loop containing STMT. */
static bool
-ifcvt_could_trap_p (gimple stmt, VEC (data_reference_p, heap) *refs)
+ifcvt_could_trap_p (gimple stmt, vec<data_reference_p> refs)
{
if (gimple_vuse (stmt)
&& !gimple_could_trap_p_1 (stmt, false, false)
@@ -688,7 +688,7 @@ ifcvt_could_trap_p (gimple stmt, VEC (data_reference_p, heap) *refs)
static bool
if_convertible_gimple_assign_stmt_p (gimple stmt,
- VEC (data_reference_p, heap) *refs)
+ vec<data_reference_p> refs)
{
tree lhs = gimple_assign_lhs (stmt);
basic_block bb;
@@ -756,7 +756,7 @@ if_convertible_gimple_assign_stmt_p (gimple stmt,
- it is a GIMPLE_LABEL or a GIMPLE_COND. */
static bool
-if_convertible_stmt_p (gimple stmt, VEC (data_reference_p, heap) *refs)
+if_convertible_stmt_p (gimple stmt, vec<data_reference_p> refs)
{
switch (gimple_code (stmt))
{
@@ -1070,9 +1070,9 @@ predicate_bbs (loop_p loop)
static bool
if_convertible_loop_p_1 (struct loop *loop,
- VEC (loop_p, heap) **loop_nest,
- VEC (data_reference_p, heap) **refs,
- VEC (ddr_p, heap) **ddrs)
+ vec<loop_p> *loop_nest,
+ vec<data_reference_p> *refs,
+ vec<ddr_p> *ddrs)
{
bool res;
unsigned int i;
@@ -1115,7 +1115,7 @@ if_convertible_loop_p_1 (struct loop *loop,
{
data_reference_p dr;
- for (i = 0; VEC_iterate (data_reference_p, *refs, i, dr); i++)
+ for (i = 0; refs->iterate (i, &dr); i++)
{
dr->aux = XNEW (struct ifc_dr);
DR_WRITTEN_AT_LEAST_ONCE (dr) = -1;
@@ -1159,9 +1159,9 @@ if_convertible_loop_p (struct loop *loop)
edge e;
edge_iterator ei;
bool res = false;
- VEC (data_reference_p, heap) *refs;
- VEC (ddr_p, heap) *ddrs;
- VEC (loop_p, heap) *loop_nest;
+ vec<data_reference_p> refs;
+ vec<ddr_p> ddrs;
+ vec<loop_p> loop_nest;
/* Handle only innermost loop. */
if (!loop || loop->inner)
@@ -1193,9 +1193,9 @@ if_convertible_loop_p (struct loop *loop)
if (loop_exit_edge_p (loop, e))
return false;
- refs = VEC_alloc (data_reference_p, heap, 5);
- ddrs = VEC_alloc (ddr_p, heap, 25);
- loop_nest = VEC_alloc (loop_p, heap, 3);
+ refs.create (5);
+ ddrs.create (25);
+ loop_nest.create (3);
res = if_convertible_loop_p_1 (loop, &loop_nest, &refs, &ddrs);
if (flag_tree_loop_if_convert_stores)
@@ -1203,11 +1203,11 @@ if_convertible_loop_p (struct loop *loop)
data_reference_p dr;
unsigned int i;
- for (i = 0; VEC_iterate (data_reference_p, refs, i, dr); i++)
+ for (i = 0; refs.iterate (i, &dr); i++)
free (dr->aux);
}
- VEC_free (loop_p, heap, loop_nest);
+ loop_nest.release ();
free_data_refs (refs);
free_dependence_relations (ddrs);
return res;
diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index 69a664dbfcc..a06d7b92a3c 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -536,7 +536,8 @@ can_be_nonlocal (tree decl, copy_body_data *id)
}
static tree
-remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
+remap_decls (tree decls, vec<tree, va_gc> *nonlocalized_list,
+ copy_body_data *id)
{
tree old_var;
tree new_decls = NULL_TREE;
@@ -556,7 +557,7 @@ remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
&& !DECL_IGNORED_P (old_var)
&& nonlocalized_list)
- VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
+ vec_safe_push (nonlocalized_list, old_var);
continue;
}
@@ -574,7 +575,7 @@ remap_decls (tree decls, VEC(tree,gc) **nonlocalized_list, copy_body_data *id)
if ((!optimize || debug_info_level > DINFO_LEVEL_TERSE)
&& !DECL_IGNORED_P (old_var)
&& nonlocalized_list)
- VEC_safe_push (tree, gc, *nonlocalized_list, old_var);
+ vec_safe_push (nonlocalized_list, old_var);
}
else
{
@@ -616,12 +617,12 @@ remap_block (tree *block, copy_body_data *id)
BLOCK_ABSTRACT_ORIGIN (new_block) = old_block;
BLOCK_SOURCE_LOCATION (new_block) = BLOCK_SOURCE_LOCATION (old_block);
BLOCK_NONLOCALIZED_VARS (new_block)
- = VEC_copy (tree, gc, BLOCK_NONLOCALIZED_VARS (old_block));
+ = vec_safe_copy (BLOCK_NONLOCALIZED_VARS (old_block));
*block = new_block;
/* Remap its variables. */
BLOCK_VARS (new_block) = remap_decls (BLOCK_VARS (old_block),
- &BLOCK_NONLOCALIZED_VARS (new_block),
+ BLOCK_NONLOCALIZED_VARS (new_block),
id);
if (id->transform_lang_insert_block)
@@ -1385,7 +1386,7 @@ remap_gimple_stmt (gimple stmt, copy_body_data *id)
copy = gimple_build_debug_bind (gimple_debug_bind_get_var (stmt),
gimple_debug_bind_get_value (stmt),
stmt);
- VEC_safe_push (gimple, heap, id->debug_stmts, copy);
+ id->debug_stmts.safe_push (copy);
return copy;
}
if (gimple_debug_source_bind_p (stmt))
@@ -1393,7 +1394,7 @@ remap_gimple_stmt (gimple stmt, copy_body_data *id)
copy = gimple_build_debug_source_bind
(gimple_debug_source_bind_get_var (stmt),
gimple_debug_source_bind_get_value (stmt), stmt);
- VEC_safe_push (gimple, heap, id->debug_stmts, copy);
+ id->debug_stmts.safe_push (copy);
return copy;
}
@@ -1585,7 +1586,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
all arguments corresponding to ... in the caller. */
tree p;
gimple new_call;
- VEC(tree, heap) *argarray;
+ vec<tree> argarray;
size_t nargs = gimple_call_num_args (id->gimple_call);
size_t n;
@@ -1594,16 +1595,16 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
/* Create the new array of arguments. */
n = nargs + gimple_call_num_args (stmt);
- argarray = VEC_alloc (tree, heap, n);
- VEC_safe_grow (tree, heap, argarray, n);
+ argarray.create (n);
+ argarray.safe_grow_cleared (n);
/* Copy all the arguments before '...' */
- memcpy (VEC_address (tree, argarray),
+ memcpy (argarray.address (),
gimple_call_arg_ptr (stmt, 0),
gimple_call_num_args (stmt) * sizeof (tree));
/* Append the arguments passed in '...' */
- memcpy (VEC_address(tree, argarray) + gimple_call_num_args (stmt),
+ memcpy (argarray.address () + gimple_call_num_args (stmt),
gimple_call_arg_ptr (id->gimple_call, 0)
+ (gimple_call_num_args (id->gimple_call) - nargs),
nargs * sizeof (tree));
@@ -1611,7 +1612,7 @@ copy_bb (copy_body_data *id, basic_block bb, int frequency_scale,
new_call = gimple_build_call_vec (gimple_call_fn (stmt),
argarray);
- VEC_free (tree, heap, argarray);
+ argarray.release ();
/* Copy all GIMPLE_CALL flags, location and block, except
GF_CALL_VA_ARG_PACK. */
@@ -2178,7 +2179,7 @@ maybe_move_debug_stmts_to_successors (copy_body_data *id, basic_block new_bb)
else
gcc_unreachable ();
gsi_insert_before (&dsi, new_stmt, GSI_SAME_STMT);
- VEC_safe_push (gimple, heap, id->debug_stmts, new_stmt);
+ id->debug_stmts.safe_push (new_stmt);
gsi_prev (&ssi);
}
}
@@ -2383,16 +2384,15 @@ copy_debug_stmt (gimple stmt, copy_body_data *id)
&& TREE_CODE (t) == PARM_DECL
&& id->gimple_call)
{
- VEC(tree, gc) **debug_args = decl_debug_args_lookup (id->src_fn);
+ vec<tree, va_gc> **debug_args = decl_debug_args_lookup (id->src_fn);
unsigned int i;
if (debug_args != NULL)
{
- for (i = 0; i < VEC_length (tree, *debug_args); i += 2)
- if (VEC_index (tree, *debug_args, i) == DECL_ORIGIN (t)
- && TREE_CODE (VEC_index (tree, *debug_args, i + 1))
- == DEBUG_EXPR_DECL)
+ for (i = 0; i < vec_safe_length (*debug_args); i += 2)
+ if ((**debug_args)[i] == DECL_ORIGIN (t)
+ && TREE_CODE ((**debug_args)[i + 1]) == DEBUG_EXPR_DECL)
{
- t = VEC_index (tree, *debug_args, i + 1);
+ t = (**debug_args)[i + 1];
stmt->gsbase.subcode = GIMPLE_DEBUG_BIND;
gimple_debug_bind_set_value (stmt, t);
break;
@@ -2417,13 +2417,13 @@ copy_debug_stmts (copy_body_data *id)
size_t i;
gimple stmt;
- if (!id->debug_stmts)
+ if (!id->debug_stmts.exists ())
return;
- FOR_EACH_VEC_ELT (gimple, id->debug_stmts, i, stmt)
+ FOR_EACH_VEC_ELT (id->debug_stmts, i, stmt)
copy_debug_stmt (stmt, id);
- VEC_free (gimple, heap, id->debug_stmts);
+ id->debug_stmts.release ();
}
/* Make a copy of the body of SRC_FN so that it can be inserted inline in
@@ -4318,7 +4318,7 @@ optimize_inline_calls (tree fn)
fold_marked_statements (last, id.statements_to_fold);
pointer_set_destroy (id.statements_to_fold);
- gcc_assert (!id.debug_stmts);
+ gcc_assert (!id.debug_stmts.exists ());
/* If we didn't inline into the function there is nothing to do. */
if (!inlined_p)
@@ -4398,8 +4398,7 @@ copy_tree_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
if (flag_mudflap && mf_marked_p (*tp))
mf_mark (new_tree);
- CONSTRUCTOR_ELTS (new_tree) = VEC_copy (constructor_elt, gc,
- CONSTRUCTOR_ELTS (*tp));
+ CONSTRUCTOR_ELTS (new_tree) = vec_safe_copy (CONSTRUCTOR_ELTS (*tp));
*tp = new_tree;
}
else if (code == STATEMENT_LIST)
@@ -4681,7 +4680,8 @@ replace_locals_stmt (gimple_stmt_iterator *gsip,
/* This will remap a lot of the same decls again, but this should be
harmless. */
if (gimple_bind_vars (stmt))
- gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt), NULL, id));
+ gimple_bind_set_vars (stmt, remap_decls (gimple_bind_vars (stmt),
+ NULL, id));
}
/* Keep iterating. */
@@ -5048,10 +5048,10 @@ update_clone_info (copy_body_data * id)
if (node->clone.tree_map)
{
unsigned int i;
- for (i = 0; i < VEC_length (ipa_replace_map_p, node->clone.tree_map); i++)
+ for (i = 0; i < vec_safe_length (node->clone.tree_map); i++)
{
struct ipa_replace_map *replace_info;
- replace_info = VEC_index (ipa_replace_map_p, node->clone.tree_map, i);
+ replace_info = (*node->clone.tree_map)[i];
walk_tree (&replace_info->old_tree, copy_tree_body_r, id, NULL);
walk_tree (&replace_info->new_tree, copy_tree_body_r, id, NULL);
}
@@ -5087,7 +5087,7 @@ update_clone_info (copy_body_data * id)
*/
void
tree_function_versioning (tree old_decl, tree new_decl,
- VEC(ipa_replace_map_p,gc)* tree_map,
+ vec<ipa_replace_map_p, va_gc> *tree_map,
bool update_clones, bitmap args_to_skip,
bool skip_return, bitmap blocks_to_copy,
basic_block new_entry)
@@ -5099,7 +5099,8 @@ tree_function_versioning (tree old_decl, tree new_decl,
unsigned i;
struct ipa_replace_map *replace_info;
basic_block old_entry_block, bb;
- VEC (gimple, heap) *init_stmts = VEC_alloc (gimple, heap, 10);
+ vec<gimple> init_stmts;
+ init_stmts.create (10);
tree vars = NULL_TREE;
gcc_assert (TREE_CODE (old_decl) == FUNCTION_DECL
@@ -5114,14 +5115,14 @@ tree_function_versioning (tree old_decl, tree new_decl,
/* Copy over debug args. */
if (DECL_HAS_DEBUG_ARGS_P (old_decl))
{
- VEC(tree, gc) **new_debug_args, **old_debug_args;
+ vec<tree, va_gc> **new_debug_args, **old_debug_args;
gcc_checking_assert (decl_debug_args_lookup (new_decl) == NULL);
DECL_HAS_DEBUG_ARGS_P (new_decl) = 0;
old_debug_args = decl_debug_args_lookup (old_decl);
if (old_debug_args)
{
new_debug_args = decl_debug_args_insert (new_decl);
- *new_debug_args = VEC_copy (tree, gc, *old_debug_args);
+ *new_debug_args = vec_safe_copy (*old_debug_args);
}
}
@@ -5148,19 +5149,17 @@ tree_function_versioning (tree old_decl, tree new_decl,
id.src_node = old_version_node;
id.dst_node = new_version_node;
id.src_cfun = DECL_STRUCT_FUNCTION (old_decl);
- if (id.src_node->ipa_transforms_to_apply)
+ if (id.src_node->ipa_transforms_to_apply.exists ())
{
- VEC(ipa_opt_pass,heap) * old_transforms_to_apply = id.dst_node->ipa_transforms_to_apply;
+ vec<ipa_opt_pass> old_transforms_to_apply
+ = id.dst_node->ipa_transforms_to_apply;
unsigned int i;
- id.dst_node->ipa_transforms_to_apply = VEC_copy (ipa_opt_pass, heap,
- id.src_node->ipa_transforms_to_apply);
- for (i = 0; i < VEC_length (ipa_opt_pass, old_transforms_to_apply); i++)
- VEC_safe_push (ipa_opt_pass, heap, id.dst_node->ipa_transforms_to_apply,
- VEC_index (ipa_opt_pass,
- old_transforms_to_apply,
- i));
- VEC_free (ipa_opt_pass, heap, old_transforms_to_apply);
+ id.dst_node->ipa_transforms_to_apply
+ = id.src_node->ipa_transforms_to_apply.copy ();
+ for (i = 0; i < old_transforms_to_apply.length (); i++)
+ id.dst_node->ipa_transforms_to_apply.safe_push (old_transforms_to_apply[i]);
+ old_transforms_to_apply.release ();
}
id.copy_decl = copy_decl_no_change;
@@ -5186,10 +5185,10 @@ tree_function_versioning (tree old_decl, tree new_decl,
/* If there's a tree_map, prepare for substitution. */
if (tree_map)
- for (i = 0; i < VEC_length (ipa_replace_map_p, tree_map); i++)
+ for (i = 0; i < tree_map->length (); i++)
{
gimple init;
- replace_info = VEC_index (ipa_replace_map_p, tree_map, i);
+ replace_info = (*tree_map)[i];
if (replace_info->replace_p)
{
tree op = replace_info->new_tree;
@@ -5214,7 +5213,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
NULL,
&vars);
if (init)
- VEC_safe_push (gimple, heap, init_stmts, init);
+ init_stmts.safe_push (init);
}
}
/* Copy the function's arguments. */
@@ -5228,7 +5227,7 @@ tree_function_versioning (tree old_decl, tree new_decl,
declare_inline_vars (DECL_INITIAL (new_decl), vars);
- if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))
+ if (!vec_safe_is_empty (DECL_STRUCT_FUNCTION (old_decl)->local_decls))
/* Add local vars. */
add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id);
@@ -5270,8 +5269,8 @@ tree_function_versioning (tree old_decl, tree new_decl,
debug stmts doesn't affect BB count, which may in the end cause
codegen differences. */
bb = split_edge (single_succ_edge (ENTRY_BLOCK_PTR));
- while (VEC_length (gimple, init_stmts))
- insert_init_stmt (&id, bb, VEC_pop (gimple, init_stmts));
+ while (init_stmts.length ())
+ insert_init_stmt (&id, bb, init_stmts.pop ());
update_clone_info (&id);
/* Remap the nonlocal_goto_save_area, if any. */
@@ -5326,8 +5325,8 @@ tree_function_versioning (tree old_decl, tree new_decl,
free_dominance_info (CDI_DOMINATORS);
free_dominance_info (CDI_POST_DOMINATORS);
- gcc_assert (!id.debug_stmts);
- VEC_free (gimple, heap, init_stmts);
+ gcc_assert (!id.debug_stmts.exists ());
+ init_stmts.release ();
pop_cfun ();
return;
}
diff --git a/gcc/tree-inline.h b/gcc/tree-inline.h
index b156e82a517..f6585fbf2b5 100644
--- a/gcc/tree-inline.h
+++ b/gcc/tree-inline.h
@@ -22,8 +22,6 @@ along with GCC; see the file COPYING3. If not see
#ifndef GCC_TREE_INLINE_H
#define GCC_TREE_INLINE_H
-#include "vecir.h" /* For VEC(gimple,heap). */
-
struct cgraph_edge;
/* Indicate the desired behavior wrt call graph edges. We can either
@@ -119,7 +117,7 @@ typedef struct copy_body_data
basic_block entry_bb;
/* Debug statements that need processing. */
- VEC(gimple,heap) *debug_stmts;
+ vec<gimple> debug_stmts;
/* A map from local declarations in the inlined function to
equivalents in the function into which it is being inlined, where
diff --git a/gcc/tree-into-ssa.c b/gcc/tree-into-ssa.c
index c5789663f8a..2a389895d3d 100644
--- a/gcc/tree-into-ssa.c
+++ b/gcc/tree-into-ssa.c
@@ -39,7 +39,6 @@ along with GCC; see the file COPYING3. If not see
#include "cfgloop.h"
#include "domwalk.h"
#include "params.h"
-#include "vecprim.h"
#include "diagnostic-core.h"
@@ -84,7 +83,7 @@ typedef struct def_blocks_d *def_blocks_p;
- A NULL node at the top entry is used to mark the last slot
associated with the current block. */
-static VEC(tree,heap) *block_defs_stack;
+static vec<tree> block_defs_stack;
/* Set of existing SSA names being replaced by update_ssa. */
@@ -102,10 +101,10 @@ sbitmap interesting_blocks;
released after we finish updating the SSA web. */
static bitmap names_to_release;
-/* VEC of VECs of PHIs to rewrite in a basic block. Element I corresponds
+/* vec of vec of PHIs to rewrite in a basic block. Element I corresponds
the to basic block with index I. Allocated once per compilation, *not*
released between different functions. */
-static VEC(gimple_vec, heap) *phis_to_rewrite;
+static vec<gimple_vec> phis_to_rewrite;
/* The bitmap of non-NULL elements of PHIS_TO_REWRITE. */
static bitmap blocks_with_phis_to_rewrite;
@@ -161,8 +160,6 @@ struct var_info_d
/* The information associated with decls. */
typedef struct var_info_d *var_info_p;
-DEF_VEC_P(var_info_p);
-DEF_VEC_ALLOC_P(var_info_p,heap);
/* Each entry in VAR_INFOS contains an element of type STRUCT
VAR_INFO_D. */
@@ -186,10 +183,8 @@ struct ssa_name_info
/* The information associated with names. */
typedef struct ssa_name_info *ssa_name_info_p;
-DEF_VEC_P (ssa_name_info_p);
-DEF_VEC_ALLOC_P (ssa_name_info_p, heap);
-static VEC(ssa_name_info_p, heap) *info_for_ssa_name;
+static vec<ssa_name_info_p> info_for_ssa_name;
static unsigned current_info_for_ssa_name_age;
static bitmap_obstack update_ssa_obstack;
@@ -235,7 +230,7 @@ extern void debug_currdefs (void);
/* The set of symbols we ought to re-write into SSA form in update_ssa. */
static bitmap symbols_to_rename_set;
-static VEC(tree,heap) *symbols_to_rename;
+static vec<tree> symbols_to_rename;
/* Mark SYM for renaming. */
@@ -245,7 +240,7 @@ mark_for_renaming (tree sym)
if (!symbols_to_rename_set)
symbols_to_rename_set = BITMAP_ALLOC (NULL);
if (bitmap_set_bit (symbols_to_rename_set, DECL_UID (sym)))
- VEC_safe_push (tree, heap, symbols_to_rename, sym);
+ symbols_to_rename.safe_push (sym);
}
/* Return true if SYM is marked for renaming. */
@@ -309,22 +304,21 @@ static inline ssa_name_info_p
get_ssa_name_ann (tree name)
{
unsigned ver = SSA_NAME_VERSION (name);
- unsigned len = VEC_length (ssa_name_info_p, info_for_ssa_name);
+ unsigned len = info_for_ssa_name.length ();
struct ssa_name_info *info;
/* Re-allocate the vector at most once per update/into-SSA. */
if (ver >= len)
- VEC_safe_grow_cleared (ssa_name_info_p, heap,
- info_for_ssa_name, num_ssa_names);
+ info_for_ssa_name.safe_grow_cleared (num_ssa_names);
/* But allocate infos lazily. */
- info = VEC_index (ssa_name_info_p, info_for_ssa_name, ver);
+ info = info_for_ssa_name[ver];
if (!info)
{
info = XCNEW (struct ssa_name_info);
info->age = current_info_for_ssa_name_age;
info->info.need_phi_state = NEED_PHI_STATE_UNKNOWN;
- VEC_replace (ssa_name_info_p, info_for_ssa_name, ver, info);
+ info_for_ssa_name[ver] = info;
}
if (info->age < current_info_for_ssa_name_age)
@@ -734,7 +728,7 @@ find_dfsnum_interval (struct dom_dfsnum *defs, unsigned n, unsigned s)
static void
prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
{
- VEC(int, heap) *worklist;
+ vec<int> worklist;
bitmap_iterator bi;
unsigned i, b, p, u, top;
bitmap live_phis;
@@ -806,8 +800,8 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
dfs_out numbers, increase the dfs number by one (so that it corresponds
to the start of the following interval, not to the end of the current
one). We use WORKLIST as a stack. */
- worklist = VEC_alloc (int, heap, n_defs + 1);
- VEC_quick_push (int, worklist, 1);
+ worklist.create (n_defs + 1);
+ worklist.quick_push (1);
top = 1;
n_defs = 1;
for (i = 1; i < adef; i++)
@@ -817,8 +811,8 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
{
/* This is a closing element. Interval corresponding to the top
of the stack after removing it follows. */
- VEC_pop (int, worklist);
- top = VEC_index (int, worklist, VEC_length (int, worklist) - 1);
+ worklist.pop ();
+ top = worklist[worklist.length () - 1];
defs[n_defs].bb_index = top;
defs[n_defs].dfs_num = defs[i].dfs_num + 1;
}
@@ -828,7 +822,7 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
it to the correct position. */
defs[n_defs].bb_index = defs[i].bb_index;
defs[n_defs].dfs_num = defs[i].dfs_num;
- VEC_quick_push (int, worklist, b);
+ worklist.quick_push (b);
top = b;
}
@@ -839,19 +833,19 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
else
n_defs++;
}
- VEC_pop (int, worklist);
- gcc_assert (VEC_empty (int, worklist));
+ worklist.pop ();
+ gcc_assert (worklist.is_empty ());
/* Now process the uses. */
live_phis = BITMAP_ALLOC (NULL);
EXECUTE_IF_SET_IN_BITMAP (uses, 0, i, bi)
{
- VEC_safe_push (int, heap, worklist, i);
+ worklist.safe_push (i);
}
- while (!VEC_empty (int, worklist))
+ while (!worklist.is_empty ())
{
- b = VEC_pop (int, worklist);
+ b = worklist.pop ();
if (b == ENTRY_BLOCK)
continue;
@@ -889,11 +883,11 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
continue;
bitmap_set_bit (uses, u);
- VEC_safe_push (int, heap, worklist, u);
+ worklist.safe_push (u);
}
}
- VEC_free (int, heap, worklist);
+ worklist.release ();
bitmap_copy (phis, live_phis);
BITMAP_FREE (live_phis);
free (defs);
@@ -932,15 +926,14 @@ mark_phi_for_rewrite (basic_block bb, gimple phi)
bitmap_set_bit (blocks_with_phis_to_rewrite, idx);
n = (unsigned) last_basic_block + 1;
- if (VEC_length (gimple_vec, phis_to_rewrite) < n)
- VEC_safe_grow_cleared (gimple_vec, heap, phis_to_rewrite, n);
+ if (phis_to_rewrite.length () < n)
+ phis_to_rewrite.safe_grow_cleared (n);
- phis = VEC_index (gimple_vec, phis_to_rewrite, idx);
- if (!phis)
- phis = VEC_alloc (gimple, heap, 10);
+ phis = phis_to_rewrite[idx];
+ phis.reserve (10);
- VEC_safe_push (gimple, heap, phis, phi);
- VEC_replace (gimple_vec, phis_to_rewrite, idx, phis);
+ phis.safe_push (phi);
+ phis_to_rewrite[idx] = phis;
}
/* Insert PHI nodes for variable VAR using the iterated dominance
@@ -1049,27 +1042,27 @@ insert_phi_nodes (bitmap_head *dfs)
htab_iterator hi;
unsigned i;
var_info_p info;
- VEC(var_info_p,heap) *vars;
+ vec<var_info_p> vars;
timevar_push (TV_TREE_INSERT_PHI_NODES);
- vars = VEC_alloc (var_info_p, heap, htab_elements (var_infos));
+ vars.create (htab_elements (var_infos));
FOR_EACH_HTAB_ELEMENT (var_infos, info, var_info_p, hi)
if (info->info.need_phi_state != NEED_PHI_STATE_NO)
- VEC_quick_push (var_info_p, vars, info);
+ vars.quick_push (info);
/* Do two stages to avoid code generation differences for UID
differences but no UID ordering differences. */
- VEC_qsort (var_info_p, vars, insert_phi_nodes_compare_var_infos);
+ vars.qsort (insert_phi_nodes_compare_var_infos);
- FOR_EACH_VEC_ELT (var_info_p, vars, i, info)
+ FOR_EACH_VEC_ELT (vars, i, info)
{
bitmap idf = compute_idf (info->info.def_blocks.def_blocks, dfs);
insert_phi_nodes_for (info->var, idf, false);
BITMAP_FREE (idf);
}
- VEC_free(var_info_p, heap, vars);
+ vars.release ();
timevar_pop (TV_TREE_INSERT_PHI_NODES);
}
@@ -1105,7 +1098,7 @@ register_new_def (tree def, tree sym)
in the stack so that we know which symbol is being defined by
this SSA name when we unwind the stack. */
if (currdef && !is_gimple_reg (sym))
- VEC_safe_push (tree, heap, block_defs_stack, sym);
+ block_defs_stack.safe_push (sym);
/* Push the current reaching definition into BLOCK_DEFS_STACK. This
stack is later used by the dominator tree callbacks to restore
@@ -1113,7 +1106,7 @@ register_new_def (tree def, tree sym)
block after a recursive visit to all its immediately dominated
blocks. If there is no current reaching definition, then just
record the underlying _DECL node. */
- VEC_safe_push (tree, heap, block_defs_stack, currdef ? currdef : sym);
+ block_defs_stack.safe_push (currdef ? currdef : sym);
/* Set the current reaching definition for SYM to be DEF. */
info->current_def = def;
@@ -1388,7 +1381,7 @@ rewrite_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
fprintf (dump_file, "\n\nRenaming block #%d\n\n", bb->index);
/* Mark the unwind point for this block. */
- VEC_safe_push (tree, heap, block_defs_stack, NULL_TREE);
+ block_defs_stack.safe_push (NULL_TREE);
/* Step 1. Register new definitions for every PHI node in the block.
Conceptually, all the PHI nodes are executed in parallel and each PHI
@@ -1423,9 +1416,9 @@ rewrite_leave_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
basic_block bb ATTRIBUTE_UNUSED)
{
/* Restore CURRDEFS to its original state. */
- while (VEC_length (tree, block_defs_stack) > 0)
+ while (block_defs_stack.length () > 0)
{
- tree tmp = VEC_pop (tree, block_defs_stack);
+ tree tmp = block_defs_stack.pop ();
tree saved_def, var;
if (tmp == NULL_TREE)
@@ -1443,7 +1436,7 @@ rewrite_leave_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
saved_def = tmp;
var = SSA_NAME_VAR (saved_def);
if (!is_gimple_reg (var))
- var = VEC_pop (tree, block_defs_stack);
+ var = block_defs_stack.pop ();
}
else
{
@@ -1511,11 +1504,11 @@ dump_defs_stack (FILE *file, int n)
i = 1;
fprintf (file, "Level %d (current level)\n", i);
- for (j = (int) VEC_length (tree, block_defs_stack) - 1; j >= 0; j--)
+ for (j = (int) block_defs_stack.length () - 1; j >= 0; j--)
{
tree name, var;
- name = VEC_index (tree, block_defs_stack, j);
+ name = block_defs_stack[j];
if (name == NULL_TREE)
{
i++;
@@ -1536,7 +1529,7 @@ dump_defs_stack (FILE *file, int n)
if (!is_gimple_reg (var))
{
j--;
- var = VEC_index (tree, block_defs_stack, j);
+ var = block_defs_stack[j];
}
}
@@ -1572,11 +1565,11 @@ dump_currdefs (FILE *file)
unsigned i;
tree var;
- if (VEC_empty (tree, symbols_to_rename))
+ if (symbols_to_rename.is_empty ())
return;
fprintf (file, "\n\nCurrent reaching definitions\n\n");
- FOR_EACH_VEC_ELT (tree, symbols_to_rename, i, var)
+ FOR_EACH_VEC_ELT (symbols_to_rename, i, var)
{
common_info_p info = get_common_info (var);
fprintf (file, "CURRDEF (");
@@ -1732,9 +1725,9 @@ register_new_update_single (tree new_name, tree old_name)
restore the reaching definitions for all the variables
defined in the block after a recursive visit to all its
immediately dominated blocks. */
- VEC_reserve (tree, heap, block_defs_stack, 2);
- VEC_quick_push (tree, block_defs_stack, currdef);
- VEC_quick_push (tree, block_defs_stack, old_name);
+ block_defs_stack.reserve (2);
+ block_defs_stack.quick_push (currdef);
+ block_defs_stack.quick_push (old_name);
/* Set the current reaching definition for OLD_NAME to be
NEW_NAME. */
@@ -1988,8 +1981,8 @@ rewrite_update_phi_arguments (basic_block bb)
if (!bitmap_bit_p (blocks_with_phis_to_rewrite, e->dest->index))
continue;
- phis = VEC_index (gimple_vec, phis_to_rewrite, e->dest->index);
- FOR_EACH_VEC_ELT (gimple, phis, i, phi)
+ phis = phis_to_rewrite[e->dest->index];
+ FOR_EACH_VEC_ELT (phis, i, phi)
{
tree arg, lhs_sym, reaching_def = NULL;
use_operand_p arg_p;
@@ -2068,7 +2061,7 @@ rewrite_update_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
bb->index);
/* Mark the unwind point for this block. */
- VEC_safe_push (tree, heap, block_defs_stack, NULL_TREE);
+ block_defs_stack.safe_push (NULL_TREE);
if (!bitmap_bit_p (blocks_to_update, bb->index))
return;
@@ -2135,9 +2128,9 @@ static void
rewrite_update_leave_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
basic_block bb ATTRIBUTE_UNUSED)
{
- while (VEC_length (tree, block_defs_stack) > 0)
+ while (block_defs_stack.length () > 0)
{
- tree var = VEC_pop (tree, block_defs_stack);
+ tree var = block_defs_stack.pop ();
tree saved_def;
/* NULL indicates the unwind stop point for this block (see
@@ -2145,7 +2138,7 @@ rewrite_update_leave_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
if (var == NULL)
return;
- saved_def = VEC_pop (tree, block_defs_stack);
+ saved_def = block_defs_stack.pop ();
get_common_info (var)->current_def = saved_def;
}
}
@@ -2191,7 +2184,7 @@ rewrite_blocks (basic_block entry, enum rewrite_mode what)
else
gcc_unreachable ();
- block_defs_stack = VEC_alloc (tree, heap, 10);
+ block_defs_stack.create (10);
/* Initialize the dominator walker. */
init_walk_dominator_tree (&walk_data);
@@ -2211,7 +2204,7 @@ rewrite_blocks (basic_block entry, enum rewrite_mode what)
dump_tree_ssa_stats (dump_file);
}
- VEC_free (tree, heap, block_defs_stack);
+ block_defs_stack.release ();
timevar_pop (TV_TREE_SSA_REWRITE_BLOCKS);
}
@@ -2288,7 +2281,7 @@ init_ssa_renamer (void)
/* Allocate memory for the DEF_BLOCKS hash table. */
gcc_assert (var_infos == NULL);
- var_infos = htab_create (VEC_length (tree, cfun->local_decls),
+ var_infos = htab_create (vec_safe_length (cfun->local_decls),
var_info_hash, var_info_eq, free);
bitmap_obstack_initialize (&update_ssa_obstack);
@@ -2808,7 +2801,7 @@ delete_update_ssa (void)
BITMAP_FREE (symbols_to_rename_set);
symbols_to_rename_set = NULL;
- VEC_free (tree, heap, symbols_to_rename);
+ symbols_to_rename.release ();
if (names_to_release)
{
@@ -2824,10 +2817,9 @@ delete_update_ssa (void)
if (blocks_with_phis_to_rewrite)
EXECUTE_IF_SET_IN_BITMAP (blocks_with_phis_to_rewrite, 0, i, bi)
{
- gimple_vec phis = VEC_index (gimple_vec, phis_to_rewrite, i);
-
- VEC_free (gimple, heap, phis);
- VEC_replace (gimple_vec, phis_to_rewrite, i, NULL);
+ gimple_vec phis = phis_to_rewrite[i];
+ phis.release ();
+ phis_to_rewrite[i].create (0);
}
BITMAP_FREE (blocks_with_phis_to_rewrite);
@@ -3141,8 +3133,8 @@ update_ssa (unsigned update_flags)
gcc_assert (update_ssa_initialized_fn == cfun);
blocks_with_phis_to_rewrite = BITMAP_ALLOC (NULL);
- if (!phis_to_rewrite)
- phis_to_rewrite = VEC_alloc (gimple_vec, heap, last_basic_block + 1);
+ if (!phis_to_rewrite.exists ())
+ phis_to_rewrite.create (last_basic_block + 1);
blocks_to_update = BITMAP_ALLOC (NULL);
/* Ensure that the dominance information is up-to-date. */
@@ -3248,7 +3240,7 @@ update_ssa (unsigned update_flags)
sbitmap_free (tmp);
}
- FOR_EACH_VEC_ELT (tree, symbols_to_rename, i, sym)
+ FOR_EACH_VEC_ELT (symbols_to_rename, i, sym)
insert_updated_phi_nodes_for (sym, dfs, blocks_to_update,
update_flags);
@@ -3269,7 +3261,7 @@ update_ssa (unsigned update_flags)
EXECUTE_IF_SET_IN_BITMAP (old_ssa_names, 0, i, sbi)
get_ssa_name_ann (ssa_name (i))->info.current_def = NULL_TREE;
- FOR_EACH_VEC_ELT (tree, symbols_to_rename, i, sym)
+ FOR_EACH_VEC_ELT (symbols_to_rename, i, sym)
get_var_info (sym)->info.current_def = NULL_TREE;
/* Now start the renaming process at START_BB. */
diff --git a/gcc/tree-iterator.c b/gcc/tree-iterator.c
index e4175fd46ec..7de885a10cd 100644
--- a/gcc/tree-iterator.c
+++ b/gcc/tree-iterator.c
@@ -31,15 +31,15 @@ along with GCC; see the file COPYING3. If not see
/* This is a cache of STATEMENT_LIST nodes. We create and destroy them
fairly often during gimplification. */
-static GTY ((deletable (""))) VEC(tree,gc) *stmt_list_cache;
+static GTY ((deletable (""))) vec<tree, va_gc> *stmt_list_cache;
tree
alloc_stmt_list (void)
{
tree list;
- if (!VEC_empty (tree, stmt_list_cache))
+ if (!vec_safe_is_empty (stmt_list_cache))
{
- list = VEC_pop (tree, stmt_list_cache);
+ list = stmt_list_cache->pop ();
memset (list, 0, sizeof(struct tree_base));
TREE_SET_CODE (list, STATEMENT_LIST);
}
@@ -54,7 +54,7 @@ free_stmt_list (tree t)
{
gcc_assert (!STATEMENT_LIST_HEAD (t));
gcc_assert (!STATEMENT_LIST_TAIL (t));
- VEC_safe_push (tree, gc, stmt_list_cache, t);
+ vec_safe_push (stmt_list_cache, t);
}
/* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
diff --git a/gcc/tree-loop-distribution.c b/gcc/tree-loop-distribution.c
index 0ade107bd59..a0a766b8100 100644
--- a/gcc/tree-loop-distribution.c
+++ b/gcc/tree-loop-distribution.c
@@ -64,8 +64,6 @@ typedef struct partition_s
data_reference_p secondary_dr;
} *partition_t;
-DEF_VEC_P (partition_t);
-DEF_VEC_ALLOC_P (partition_t, heap);
/* Allocate and initialize a partition from BITMAP. */
@@ -618,11 +616,12 @@ mark_nodes_having_upstream_mem_writes (struct graph *rdg)
if (!bitmap_bit_p (seen, v))
{
unsigned i;
- VEC (int, heap) *nodes = VEC_alloc (int, heap, 3);
+ vec<int> nodes;
+ nodes.create (3);
graphds_dfs (rdg, &v, 1, &nodes, false, NULL);
- FOR_EACH_VEC_ELT (int, nodes, i, x)
+ FOR_EACH_VEC_ELT (nodes, i, x)
{
if (!bitmap_set_bit (seen, x))
continue;
@@ -638,7 +637,7 @@ mark_nodes_having_upstream_mem_writes (struct graph *rdg)
}
}
- VEC_free (int, heap, nodes);
+ nodes.release ();
}
}
@@ -749,7 +748,8 @@ rdg_flag_vertex_and_dependent (struct graph *rdg, int v, partition_t partition,
bitmap loops, bitmap processed)
{
unsigned i;
- VEC (int, heap) *nodes = VEC_alloc (int, heap, 3);
+ vec<int> nodes;
+ nodes.create (3);
int x;
bitmap_set_bit (processed, v);
@@ -757,32 +757,32 @@ rdg_flag_vertex_and_dependent (struct graph *rdg, int v, partition_t partition,
graphds_dfs (rdg, &v, 1, &nodes, false, remaining_stmts);
rdg_flag_vertex (rdg, v, partition, loops);
- FOR_EACH_VEC_ELT (int, nodes, i, x)
+ FOR_EACH_VEC_ELT (nodes, i, x)
if (!already_processed_vertex_p (processed, x))
rdg_flag_vertex_and_dependent (rdg, x, partition, loops, processed);
- VEC_free (int, heap, nodes);
+ nodes.release ();
}
/* Initialize CONDS with all the condition statements from the basic
blocks of LOOP. */
static void
-collect_condition_stmts (struct loop *loop, VEC (gimple, heap) **conds)
+collect_condition_stmts (struct loop *loop, vec<gimple> *conds)
{
unsigned i;
edge e;
- VEC (edge, heap) *exits = get_loop_exit_edges (loop);
+ vec<edge> exits = get_loop_exit_edges (loop);
- FOR_EACH_VEC_ELT (edge, exits, i, e)
+ FOR_EACH_VEC_ELT (exits, i, e)
{
gimple cond = last_stmt (e->src);
if (cond)
- VEC_safe_push (gimple, heap, *conds, cond);
+ conds->safe_push (cond);
}
- VEC_free (edge, heap, exits);
+ exits.release ();
}
/* Add to PARTITION all the exit condition statements for LOOPS
@@ -795,14 +795,15 @@ rdg_flag_loop_exits (struct graph *rdg, bitmap loops, partition_t partition,
{
unsigned i;
bitmap_iterator bi;
- VEC (gimple, heap) *conds = VEC_alloc (gimple, heap, 3);
+ vec<gimple> conds;
+ conds.create (3);
EXECUTE_IF_SET_IN_BITMAP (loops, 0, i, bi)
collect_condition_stmts (get_loop (i), &conds);
- while (!VEC_empty (gimple, conds))
+ while (!conds.is_empty ())
{
- gimple cond = VEC_pop (gimple, conds);
+ gimple cond = conds.pop ();
int v = rdg_vertex_for_stmt (rdg, cond);
bitmap new_loops = BITMAP_ALLOC (NULL);
@@ -816,7 +817,7 @@ rdg_flag_loop_exits (struct graph *rdg, bitmap loops, partition_t partition,
BITMAP_FREE (new_loops);
}
- VEC_free (gimple, heap, conds);
+ conds.release ();
}
/* Returns a bitmap in which all the statements needed for computing
@@ -831,7 +832,7 @@ build_rdg_partition_for_component (struct graph *rdg, rdgc c)
bitmap loops = BITMAP_ALLOC (NULL);
bitmap processed = BITMAP_ALLOC (NULL);
- FOR_EACH_VEC_ELT (int, c->vertices, i, v)
+ FOR_EACH_VEC_ELT (c->vertices, i, v)
if (!already_processed_vertex_p (processed, v))
rdg_flag_vertex_and_dependent (rdg, v, partition, loops, processed);
@@ -845,39 +846,42 @@ build_rdg_partition_for_component (struct graph *rdg, rdgc c)
/* Free memory for COMPONENTS. */
static void
-free_rdg_components (VEC (rdgc, heap) *components)
+free_rdg_components (vec<rdgc> components)
{
int i;
rdgc x;
- FOR_EACH_VEC_ELT (rdgc, components, i, x)
+ FOR_EACH_VEC_ELT (components, i, x)
{
- VEC_free (int, heap, x->vertices);
+ x->vertices.release ();
free (x);
}
- VEC_free (rdgc, heap, components);
+ components.release ();
}
/* Build the COMPONENTS vector with the strongly connected components
of RDG in which the STARTING_VERTICES occur. */
static void
-rdg_build_components (struct graph *rdg, VEC (int, heap) *starting_vertices,
- VEC (rdgc, heap) **components)
+rdg_build_components (struct graph *rdg, vec<int> starting_vertices,
+ vec<rdgc> *components)
{
int i, v;
bitmap saved_components = BITMAP_ALLOC (NULL);
int n_components = graphds_scc (rdg, NULL);
- VEC (int, heap) **all_components = XNEWVEC (VEC (int, heap) *, n_components);
+ /* ??? Macros cannot process template types with more than one
+ argument, so we need this typedef. */
+ typedef vec<int> vec_int_heap;
+ vec<int> *all_components = XNEWVEC (vec_int_heap, n_components);
for (i = 0; i < n_components; i++)
- all_components[i] = VEC_alloc (int, heap, 3);
+ all_components[i].create (3);
for (i = 0; i < rdg->n_vertices; i++)
- VEC_safe_push (int, heap, all_components[rdg->vertices[i].component], i);
+ all_components[rdg->vertices[i].component].safe_push (i);
- FOR_EACH_VEC_ELT (int, starting_vertices, i, v)
+ FOR_EACH_VEC_ELT (starting_vertices, i, v)
{
int c = rdg->vertices[v].component;
@@ -887,13 +891,13 @@ rdg_build_components (struct graph *rdg, VEC (int, heap) *starting_vertices,
x->num = c;
x->vertices = all_components[c];
- VEC_safe_push (rdgc, heap, *components, x);
+ components->safe_push (x);
}
}
for (i = 0; i < n_components; i++)
if (!bitmap_bit_p (saved_components, i))
- VEC_free (int, heap, all_components[i]);
+ all_components[i].release ();
free (all_components);
BITMAP_FREE (saved_components);
@@ -962,8 +966,7 @@ classify_partition (loop_p loop, struct graph *rdg, partition_t partition)
return;
/* But exactly one store and/or load. */
- for (j = 0;
- VEC_iterate (data_reference_p, RDG_DATAREFS (rdg, i), j, dr); ++j)
+ for (j = 0; RDG_DATAREFS (rdg, i).iterate (j, &dr); ++j)
{
if (DR_IS_READ (dr))
{
@@ -1017,16 +1020,16 @@ classify_partition (loop_p loop, struct graph *rdg, partition_t partition)
return;
/* Now check that if there is a dependence this dependence is
of a suitable form for memmove. */
- VEC(loop_p, heap) *loops = NULL;
+ vec<loop_p> loops = vec<loop_p>();
ddr_p ddr;
- VEC_safe_push (loop_p, heap, loops, loop);
+ loops.safe_push (loop);
ddr = initialize_data_dependence_relation (single_load, single_store,
loops);
compute_affine_dependence (ddr, loop);
if (DDR_ARE_DEPENDENT (ddr) == chrec_dont_know)
{
free_dependence_relation (ddr);
- VEC_free (loop_p, heap, loops);
+ loops.release ();
return;
}
if (DDR_ARE_DEPENDENT (ddr) != chrec_known)
@@ -1034,24 +1037,24 @@ classify_partition (loop_p loop, struct graph *rdg, partition_t partition)
if (DDR_NUM_DIST_VECTS (ddr) == 0)
{
free_dependence_relation (ddr);
- VEC_free (loop_p, heap, loops);
+ loops.release ();
return;
}
lambda_vector dist_v;
- FOR_EACH_VEC_ELT (lambda_vector, DDR_DIST_VECTS (ddr), i, dist_v)
+ FOR_EACH_VEC_ELT (DDR_DIST_VECTS (ddr), i, dist_v)
{
int dist = dist_v[index_in_loop_nest (loop->num,
DDR_LOOP_NEST (ddr))];
if (dist > 0 && !DDR_REVERSED_P (ddr))
{
free_dependence_relation (ddr);
- VEC_free (loop_p, heap, loops);
+ loops.release ();
return;
}
}
}
free_dependence_relation (ddr);
- VEC_free (loop_p, heap, loops);
+ loops.release ();
partition->kind = PKIND_MEMCPY;
partition->main_dr = single_store;
partition->secondary_dr = single_load;
@@ -1099,12 +1102,11 @@ similar_memory_accesses (struct graph *rdg, partition_t partition1,
if (RDG_MEM_WRITE_STMT (rdg, j)
|| RDG_MEM_READS_STMT (rdg, j))
{
- FOR_EACH_VEC_ELT (data_reference_p, RDG_DATAREFS (rdg, i), k, ref1)
+ FOR_EACH_VEC_ELT (RDG_DATAREFS (rdg, i), k, ref1)
{
tree base1 = ref_base_address (ref1);
if (base1)
- FOR_EACH_VEC_ELT (data_reference_p,
- RDG_DATAREFS (rdg, j), l, ref2)
+ FOR_EACH_VEC_ELT (RDG_DATAREFS (rdg, j), l, ref2)
if (base1 == ref_base_address (ref2))
return true;
}
@@ -1118,18 +1120,18 @@ similar_memory_accesses (struct graph *rdg, partition_t partition1,
distributed in different loops. */
static void
-rdg_build_partitions (struct graph *rdg, VEC (rdgc, heap) *components,
- VEC (int, heap) **other_stores,
- VEC (partition_t, heap) **partitions, bitmap processed)
+rdg_build_partitions (struct graph *rdg, vec<rdgc> components,
+ vec<int> *other_stores,
+ vec<partition_t> *partitions, bitmap processed)
{
int i;
rdgc x;
partition_t partition = partition_alloc (NULL);
- FOR_EACH_VEC_ELT (rdgc, components, i, x)
+ FOR_EACH_VEC_ELT (components, i, x)
{
partition_t np;
- int v = VEC_index (int, x->vertices, 0);
+ int v = x->vertices[0];
if (bitmap_bit_p (processed, v))
continue;
@@ -1148,7 +1150,7 @@ rdg_build_partitions (struct graph *rdg, VEC (rdgc, heap) *components,
dump_bitmap (dump_file, partition->stmts);
}
- VEC_safe_push (partition_t, heap, *partitions, partition);
+ partitions->safe_push (partition);
partition = partition_alloc (NULL);
}
}
@@ -1159,26 +1161,28 @@ rdg_build_partitions (struct graph *rdg, VEC (rdgc, heap) *components,
for (i = 0; i < rdg->n_vertices; i++)
if (!bitmap_bit_p (processed, i)
&& rdg_defs_used_in_other_loops_p (rdg, i))
- VEC_safe_push (int, heap, *other_stores, i);
+ other_stores->safe_push (i);
/* If there are still statements left in the OTHER_STORES array,
create other components and partitions with these stores and
their dependences. */
- if (VEC_length (int, *other_stores) > 0)
+ if (other_stores->length () > 0)
{
- VEC (rdgc, heap) *comps = VEC_alloc (rdgc, heap, 3);
- VEC (int, heap) *foo = VEC_alloc (int, heap, 3);
+ vec<rdgc> comps;
+ comps.create (3);
+ vec<int> foo;
+ foo.create (3);
rdg_build_components (rdg, *other_stores, &comps);
rdg_build_partitions (rdg, comps, &foo, partitions, processed);
- VEC_free (int, heap, foo);
+ foo.release ();
free_rdg_components (comps);
}
/* If there is something left in the last partition, save it. */
if (bitmap_count_bits (partition->stmts) > 0)
- VEC_safe_push (partition_t, heap, *partitions, partition);
+ partitions->safe_push (partition);
else
partition_free (partition);
}
@@ -1186,20 +1190,20 @@ rdg_build_partitions (struct graph *rdg, VEC (rdgc, heap) *components,
/* Dump to FILE the PARTITIONS. */
static void
-dump_rdg_partitions (FILE *file, VEC (partition_t, heap) *partitions)
+dump_rdg_partitions (FILE *file, vec<partition_t> partitions)
{
int i;
partition_t partition;
- FOR_EACH_VEC_ELT (partition_t, partitions, i, partition)
+ FOR_EACH_VEC_ELT (partitions, i, partition)
debug_bitmap_file (file, partition->stmts);
}
/* Debug PARTITIONS. */
-extern void debug_rdg_partitions (VEC (partition_t, heap) *);
+extern void debug_rdg_partitions (vec<partition_t> );
DEBUG_FUNCTION void
-debug_rdg_partitions (VEC (partition_t, heap) *partitions)
+debug_rdg_partitions (vec<partition_t> partitions)
{
dump_rdg_partitions (stderr, partitions);
}
@@ -1249,13 +1253,14 @@ number_of_rw_in_partition (struct graph *rdg, partition_t partition)
write operations of RDG. */
static bool
-partition_contains_all_rw (struct graph *rdg, VEC (partition_t, heap) *partitions)
+partition_contains_all_rw (struct graph *rdg,
+ vec<partition_t> partitions)
{
int i;
partition_t partition;
int nrw = number_of_rw_in_rdg (rdg);
- FOR_EACH_VEC_ELT (partition_t, partitions, i, partition)
+ FOR_EACH_VEC_ELT (partitions, i, partition)
if (nrw == number_of_rw_in_partition (rdg, partition))
return true;
@@ -1267,12 +1272,15 @@ partition_contains_all_rw (struct graph *rdg, VEC (partition_t, heap) *partition
static int
ldist_gen (struct loop *loop, struct graph *rdg,
- VEC (int, heap) *starting_vertices)
+ vec<int> starting_vertices)
{
int i, nbp;
- VEC (rdgc, heap) *components = VEC_alloc (rdgc, heap, 3);
- VEC (partition_t, heap) *partitions = VEC_alloc (partition_t, heap, 3);
- VEC (int, heap) *other_stores = VEC_alloc (int, heap, 3);
+ vec<rdgc> components;
+ components.create (3);
+ vec<partition_t> partitions;
+ partitions.create (3);
+ vec<int> other_stores;
+ other_stores.create (3);
partition_t partition;
bitmap processed = BITMAP_ALLOC (NULL);
bool any_builtin;
@@ -1292,7 +1300,7 @@ ldist_gen (struct loop *loop, struct graph *rdg,
unsigned j;
bool found = false;
- FOR_EACH_VEC_ELT (int, starting_vertices, j, v)
+ FOR_EACH_VEC_ELT (starting_vertices, j, v)
if (i == v)
{
found = true;
@@ -1300,7 +1308,7 @@ ldist_gen (struct loop *loop, struct graph *rdg,
}
if (!found)
- VEC_safe_push (int, heap, other_stores, i);
+ other_stores.safe_push (i);
}
}
@@ -1311,7 +1319,7 @@ ldist_gen (struct loop *loop, struct graph *rdg,
BITMAP_FREE (processed);
any_builtin = false;
- FOR_EACH_VEC_ELT (partition_t, partitions, i, partition)
+ FOR_EACH_VEC_ELT (partitions, i, partition)
{
classify_partition (loop, rdg, partition);
any_builtin |= partition_builtin_p (partition);
@@ -1334,31 +1342,31 @@ ldist_gen (struct loop *loop, struct graph *rdg,
i = 0;
do
{
- for (; VEC_iterate (partition_t, partitions, i, into); ++i)
+ for (; partitions.iterate (i, &into); ++i)
if (!partition_builtin_p (into))
break;
- for (++i; VEC_iterate (partition_t, partitions, i, partition); ++i)
+ for (++i; partitions.iterate (i, &partition); ++i)
if (!partition_builtin_p (partition))
{
bitmap_ior_into (into->stmts, partition->stmts);
- VEC_ordered_remove (partition_t, partitions, i);
+ partitions.ordered_remove (i);
i--;
}
else
break;
}
- while ((unsigned) i < VEC_length (partition_t, partitions));
+ while ((unsigned) i < partitions.length ());
}
else
{
partition_t into;
int j;
- for (i = 0; VEC_iterate (partition_t, partitions, i, into); ++i)
+ for (i = 0; partitions.iterate (i, &into); ++i)
{
if (partition_builtin_p (into))
continue;
for (j = i + 1;
- VEC_iterate (partition_t, partitions, j, partition); ++j)
+ partitions.iterate (j, &partition); ++j)
{
if (!partition_builtin_p (partition)
/* ??? The following is horribly inefficient,
@@ -1375,19 +1383,17 @@ ldist_gen (struct loop *loop, struct graph *rdg,
"memory accesses\n");
}
bitmap_ior_into (into->stmts, partition->stmts);
- VEC_ordered_remove (partition_t, partitions, j);
+ partitions.ordered_remove (j);
j--;
}
}
}
}
- nbp = VEC_length (partition_t, partitions);
+ nbp = partitions.length ();
if (nbp == 0
- || (nbp == 1
- && !partition_builtin_p (VEC_index (partition_t, partitions, 0)))
- || (nbp > 1
- && partition_contains_all_rw (rdg, partitions)))
+ || (nbp == 1 && !partition_builtin_p (partitions[0]))
+ || (nbp > 1 && partition_contains_all_rw (rdg, partitions)))
{
nbp = 0;
goto ldist_done;
@@ -1396,7 +1402,7 @@ ldist_gen (struct loop *loop, struct graph *rdg,
if (dump_file && (dump_flags & TDF_DETAILS))
dump_rdg_partitions (dump_file, partitions);
- FOR_EACH_VEC_ELT (partition_t, partitions, i, partition)
+ FOR_EACH_VEC_ELT (partitions, i, partition)
generate_code_for_partition (loop, partition, i < nbp - 1);
ldist_done:
@@ -1404,11 +1410,11 @@ ldist_gen (struct loop *loop, struct graph *rdg,
BITMAP_FREE (remaining_stmts);
BITMAP_FREE (upstream_mem_writes);
- FOR_EACH_VEC_ELT (partition_t, partitions, i, partition)
+ FOR_EACH_VEC_ELT (partitions, i, partition)
partition_free (partition);
- VEC_free (int, heap, other_stores);
- VEC_free (partition_t, heap, partitions);
+ other_stores.release ();
+ partitions.release ();
free_rdg_components (components);
return nbp;
}
@@ -1420,20 +1426,20 @@ ldist_gen (struct loop *loop, struct graph *rdg,
Returns the number of distributed loops. */
static int
-distribute_loop (struct loop *loop, VEC (gimple, heap) *stmts)
+distribute_loop (struct loop *loop, vec<gimple> stmts)
{
int res = 0;
struct graph *rdg;
gimple s;
unsigned i;
- VEC (int, heap) *vertices;
- VEC (ddr_p, heap) *dependence_relations;
- VEC (data_reference_p, heap) *datarefs;
- VEC (loop_p, heap) *loop_nest;
-
- datarefs = VEC_alloc (data_reference_p, heap, 10);
- dependence_relations = VEC_alloc (ddr_p, heap, 100);
- loop_nest = VEC_alloc (loop_p, heap, 3);
+ vec<int> vertices;
+ vec<ddr_p> dependence_relations;
+ vec<data_reference_p> datarefs;
+ vec<loop_p> loop_nest;
+
+ datarefs.create (10);
+ dependence_relations.create (100);
+ loop_nest.create (3);
rdg = build_rdg (loop, &loop_nest, &dependence_relations, &datarefs);
if (!rdg)
@@ -1445,22 +1451,22 @@ distribute_loop (struct loop *loop, VEC (gimple, heap) *stmts)
free_dependence_relations (dependence_relations);
free_data_refs (datarefs);
- VEC_free (loop_p, heap, loop_nest);
+ loop_nest.release ();
return res;
}
- vertices = VEC_alloc (int, heap, 3);
+ vertices.create (3);
if (dump_file && (dump_flags & TDF_DETAILS))
dump_rdg (dump_file, rdg);
- FOR_EACH_VEC_ELT (gimple, stmts, i, s)
+ FOR_EACH_VEC_ELT (stmts, i, s)
{
int v = rdg_vertex_for_stmt (rdg, s);
if (v >= 0)
{
- VEC_safe_push (int, heap, vertices, v);
+ vertices.safe_push (v);
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file,
@@ -1469,11 +1475,11 @@ distribute_loop (struct loop *loop, VEC (gimple, heap) *stmts)
}
res = ldist_gen (loop, rdg, vertices);
- VEC_free (int, heap, vertices);
+ vertices.release ();
free_rdg (rdg);
free_dependence_relations (dependence_relations);
free_data_refs (datarefs);
- VEC_free (loop_p, heap, loop_nest);
+ loop_nest.release ();
return res;
}
@@ -1500,7 +1506,7 @@ tree_loop_distribution (void)
walking to innermost loops. */
FOR_EACH_LOOP (li, loop, LI_ONLY_INNERMOST)
{
- VEC (gimple, heap) *work_list = NULL;
+ vec<gimple> work_list = vec<gimple>();
basic_block *bbs;
int num = loop->num;
int nb_generated_loops = 0;
@@ -1534,12 +1540,12 @@ tree_loop_distribution (void)
|| is_gimple_reg (gimple_assign_lhs (stmt)))
continue;
- VEC_safe_push (gimple, heap, work_list, stmt);
+ work_list.safe_push (stmt);
}
}
free (bbs);
- if (VEC_length (gimple, work_list) > 0)
+ if (work_list.length () > 0)
nb_generated_loops = distribute_loop (loop, work_list);
if (nb_generated_loops > 0)
@@ -1554,7 +1560,7 @@ tree_loop_distribution (void)
fprintf (dump_file, "Loop %d is the same.\n", num);
}
- VEC_free (gimple, heap, work_list);
+ work_list.release ();
}
if (changed)
diff --git a/gcc/tree-mudflap.c b/gcc/tree-mudflap.c
index 53fba71df39..9b9c549a19b 100644
--- a/gcc/tree-mudflap.c
+++ b/gcc/tree-mudflap.c
@@ -1223,7 +1223,7 @@ mf_marked_p (tree t)
delayed until program finish time. If they're still incomplete by
then, warnings are emitted. */
-static GTY (()) VEC(tree,gc) *deferred_static_decls;
+static GTY (()) vec<tree, va_gc> *deferred_static_decls;
/* A list of statements for calling __mf_register() at startup time. */
static GTY (()) tree enqueued_call_stmt_chain;
@@ -1260,7 +1260,7 @@ mudflap_enqueue_decl (tree obj)
if (DECL_P (obj) && DECL_EXTERNAL (obj) && mf_artificial (obj))
return;
- VEC_safe_push (tree, gc, deferred_static_decls, obj);
+ vec_safe_push (deferred_static_decls, obj);
}
@@ -1315,7 +1315,7 @@ mudflap_finish_file (void)
{
size_t i;
tree obj;
- FOR_EACH_VEC_ELT (tree, deferred_static_decls, i, obj)
+ FOR_EACH_VEC_ELT (*deferred_static_decls, i, obj)
{
gcc_assert (DECL_P (obj));
@@ -1342,7 +1342,7 @@ mudflap_finish_file (void)
mf_varname_tree (obj));
}
- VEC_truncate (tree, deferred_static_decls, 0);
+ deferred_static_decls->truncate (0);
}
/* Append all the enqueued registration calls. */
diff --git a/gcc/tree-optimize.c b/gcc/tree-optimize.c
index a918101a4db..e44756d120c 100644
--- a/gcc/tree-optimize.c
+++ b/gcc/tree-optimize.c
@@ -206,10 +206,7 @@ execute_fixup_cfg (void)
/* We just processed all calls. */
if (cfun->gimple_df)
- {
- VEC_free (gimple, gc, MODIFIED_NORETURN_CALLS (cfun));
- MODIFIED_NORETURN_CALLS (cfun) = NULL;
- }
+ vec_free (MODIFIED_NORETURN_CALLS (cfun));
/* Dump a textual representation of the flowgraph. */
if (dump_file)
diff --git a/gcc/tree-outof-ssa.c b/gcc/tree-outof-ssa.c
index 444ea329790..5119b1d6fcf 100644
--- a/gcc/tree-outof-ssa.c
+++ b/gcc/tree-outof-ssa.c
@@ -38,8 +38,6 @@ along with GCC; see the file COPYING3. If not see
#include "expr.h"
-DEF_VEC_I(source_location);
-DEF_VEC_ALLOC_I(source_location,heap);
/* Used to hold all the components required to do SSA PHI elimination.
The node and pred/succ list is a simple linear list of nodes and
@@ -67,19 +65,19 @@ typedef struct _elim_graph {
int size;
/* List of nodes in the elimination graph. */
- VEC(int,heap) *nodes;
+ vec<int> nodes;
/* The predecessor and successor edge list. */
- VEC(int,heap) *edge_list;
+ vec<int> edge_list;
/* Source locus on each edge */
- VEC(source_location,heap) *edge_locus;
+ vec<source_location> edge_locus;
/* Visited vector. */
sbitmap visited;
/* Stack for visited nodes. */
- VEC(int,heap) *stack;
+ vec<int> stack;
/* The variable partition map. */
var_map map;
@@ -88,11 +86,11 @@ typedef struct _elim_graph {
edge e;
/* List of constant copies to emit. These are pushed on in pairs. */
- VEC(int,heap) *const_dests;
- VEC(tree,heap) *const_copies;
+ vec<int> const_dests;
+ vec<tree> const_copies;
/* Source locations for any constant copies. */
- VEC(source_location,heap) *copy_locus;
+ vec<source_location> copy_locus;
} *elim_graph;
@@ -338,13 +336,13 @@ new_elim_graph (int size)
{
elim_graph g = (elim_graph) xmalloc (sizeof (struct _elim_graph));
- g->nodes = VEC_alloc (int, heap, 30);
- g->const_dests = VEC_alloc (int, heap, 20);
- g->const_copies = VEC_alloc (tree, heap, 20);
- g->copy_locus = VEC_alloc (source_location, heap, 10);
- g->edge_list = VEC_alloc (int, heap, 20);
- g->edge_locus = VEC_alloc (source_location, heap, 10);
- g->stack = VEC_alloc (int, heap, 30);
+ g->nodes.create (30);
+ g->const_dests.create (20);
+ g->const_copies.create (20);
+ g->copy_locus.create (10);
+ g->edge_list.create (20);
+ g->edge_locus.create (10);
+ g->stack.create (30);
g->visited = sbitmap_alloc (size);
@@ -357,9 +355,9 @@ new_elim_graph (int size)
static inline void
clear_elim_graph (elim_graph g)
{
- VEC_truncate (int, g->nodes, 0);
- VEC_truncate (int, g->edge_list, 0);
- VEC_truncate (source_location, g->edge_locus, 0);
+ g->nodes.truncate (0);
+ g->edge_list.truncate (0);
+ g->edge_locus.truncate (0);
}
@@ -369,13 +367,13 @@ static inline void
delete_elim_graph (elim_graph g)
{
sbitmap_free (g->visited);
- VEC_free (int, heap, g->stack);
- VEC_free (int, heap, g->edge_list);
- VEC_free (tree, heap, g->const_copies);
- VEC_free (int, heap, g->const_dests);
- VEC_free (int, heap, g->nodes);
- VEC_free (source_location, heap, g->copy_locus);
- VEC_free (source_location, heap, g->edge_locus);
+ g->stack.release ();
+ g->edge_list.release ();
+ g->const_copies.release ();
+ g->const_dests.release ();
+ g->nodes.release ();
+ g->copy_locus.release ();
+ g->edge_locus.release ();
free (g);
}
@@ -386,7 +384,7 @@ delete_elim_graph (elim_graph g)
static inline int
elim_graph_size (elim_graph g)
{
- return VEC_length (int, g->nodes);
+ return g->nodes.length ();
}
@@ -398,10 +396,10 @@ elim_graph_add_node (elim_graph g, int node)
int x;
int t;
- FOR_EACH_VEC_ELT (int, g->nodes, x, t)
+ FOR_EACH_VEC_ELT (g->nodes, x, t)
if (t == node)
return;
- VEC_safe_push (int, heap, g->nodes, node);
+ g->nodes.safe_push (node);
}
@@ -410,9 +408,9 @@ elim_graph_add_node (elim_graph g, int node)
static inline void
elim_graph_add_edge (elim_graph g, int pred, int succ, source_location locus)
{
- VEC_safe_push (int, heap, g->edge_list, pred);
- VEC_safe_push (int, heap, g->edge_list, succ);
- VEC_safe_push (source_location, heap, g->edge_locus, locus);
+ g->edge_list.safe_push (pred);
+ g->edge_list.safe_push (succ);
+ g->edge_locus.safe_push (locus);
}
@@ -424,14 +422,14 @@ elim_graph_remove_succ_edge (elim_graph g, int node, source_location *locus)
{
int y;
unsigned x;
- for (x = 0; x < VEC_length (int, g->edge_list); x += 2)
- if (VEC_index (int, g->edge_list, x) == node)
+ for (x = 0; x < g->edge_list.length (); x += 2)
+ if (g->edge_list[x] == node)
{
- VEC_replace (int, g->edge_list, x, -1);
- y = VEC_index (int, g->edge_list, x + 1);
- VEC_replace (int, g->edge_list, x + 1, -1);
- *locus = VEC_index (source_location, g->edge_locus, x / 2);
- VEC_replace (source_location, g->edge_locus, x / 2, UNKNOWN_LOCATION);
+ g->edge_list[x] = -1;
+ y = g->edge_list[x + 1];
+ g->edge_list[x + 1] = -1;
+ *locus = g->edge_locus[x / 2];
+ g->edge_locus[x / 2] = UNKNOWN_LOCATION;
return y;
}
*locus = UNKNOWN_LOCATION;
@@ -447,14 +445,13 @@ elim_graph_remove_succ_edge (elim_graph g, int node, source_location *locus)
do { \
unsigned x_; \
int y_; \
- for (x_ = 0; x_ < VEC_length (int, (GRAPH)->edge_list); x_ += 2) \
+ for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2) \
{ \
- y_ = VEC_index (int, (GRAPH)->edge_list, x_); \
+ y_ = (GRAPH)->edge_list[x_]; \
if (y_ != (NODE)) \
continue; \
- (void) ((VAR) = VEC_index (int, (GRAPH)->edge_list, x_ + 1)); \
- (void) ((LOCUS) = VEC_index (source_location, \
- (GRAPH)->edge_locus, x_ / 2)); \
+ (void) ((VAR) = (GRAPH)->edge_list[x_ + 1]); \
+ (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]); \
CODE; \
} \
} while (0)
@@ -468,14 +465,13 @@ do { \
do { \
unsigned x_; \
int y_; \
- for (x_ = 0; x_ < VEC_length (int, (GRAPH)->edge_list); x_ += 2) \
+ for (x_ = 0; x_ < (GRAPH)->edge_list.length (); x_ += 2) \
{ \
- y_ = VEC_index (int, (GRAPH)->edge_list, x_ + 1); \
+ y_ = (GRAPH)->edge_list[x_ + 1]; \
if (y_ != (NODE)) \
continue; \
- (void) ((VAR) = VEC_index (int, (GRAPH)->edge_list, x_)); \
- (void) ((LOCUS) = VEC_index (source_location, \
- (GRAPH)->edge_locus, x_ / 2)); \
+ (void) ((VAR) = (GRAPH)->edge_list[x_]); \
+ (void) ((LOCUS) = (GRAPH)->edge_locus[x_ / 2]); \
CODE; \
} \
} while (0)
@@ -524,9 +520,9 @@ eliminate_build (elim_graph g)
{
/* Save constant copies until all other copies have been emitted
on this edge. */
- VEC_safe_push (int, heap, g->const_dests, p0);
- VEC_safe_push (tree, heap, g->const_copies, Ti);
- VEC_safe_push (source_location, heap, g->copy_locus, locus);
+ g->const_dests.safe_push (p0);
+ g->const_copies.safe_push (Ti);
+ g->copy_locus.safe_push (locus);
}
else
{
@@ -556,7 +552,7 @@ elim_forward (elim_graph g, int T)
if (!bitmap_bit_p (g->visited, S))
elim_forward (g, S);
});
- VEC_safe_push (int, heap, g->stack, T);
+ g->stack.safe_push (T);
}
@@ -655,8 +651,8 @@ eliminate_phi (edge e, elim_graph g)
{
int x;
- gcc_assert (VEC_length (tree, g->const_copies) == 0);
- gcc_assert (VEC_length (source_location, g->copy_locus) == 0);
+ gcc_assert (g->const_copies.length () == 0);
+ gcc_assert (g->copy_locus.length () == 0);
/* Abnormal edges already have everything coalesced. */
if (e->flags & EDGE_ABNORMAL)
@@ -671,33 +667,33 @@ eliminate_phi (edge e, elim_graph g)
int part;
bitmap_clear (g->visited);
- VEC_truncate (int, g->stack, 0);
+ g->stack.truncate (0);
- FOR_EACH_VEC_ELT (int, g->nodes, x, part)
+ FOR_EACH_VEC_ELT (g->nodes, x, part)
{
if (!bitmap_bit_p (g->visited, part))
elim_forward (g, part);
}
bitmap_clear (g->visited);
- while (VEC_length (int, g->stack) > 0)
+ while (g->stack.length () > 0)
{
- x = VEC_pop (int, g->stack);
+ x = g->stack.pop ();
if (!bitmap_bit_p (g->visited, x))
elim_create (g, x);
}
}
/* If there are any pending constant copies, issue them now. */
- while (VEC_length (tree, g->const_copies) > 0)
+ while (g->const_copies.length () > 0)
{
int dest;
tree src;
source_location locus;
- src = VEC_pop (tree, g->const_copies);
- dest = VEC_pop (int, g->const_dests);
- locus = VEC_pop (source_location, g->copy_locus);
+ src = g->const_copies.pop ();
+ dest = g->const_dests.pop ();
+ locus = g->copy_locus.pop ();
insert_value_copy_on_edge (e, dest, src, locus);
}
}
diff --git a/gcc/tree-parloops.c b/gcc/tree-parloops.c
index a3b981ec0a5..4a691acdd18 100644
--- a/gcc/tree-parloops.c
+++ b/gcc/tree-parloops.c
@@ -304,7 +304,7 @@ lambda_matrix_vector_mult (lambda_matrix matrix, int m, int n,
static bool
lambda_transform_legal_p (lambda_trans_matrix trans,
int nb_loops,
- VEC (ddr_p, heap) *dependence_relations)
+ vec<ddr_p> dependence_relations)
{
unsigned int i, j;
lambda_vector distres;
@@ -314,10 +314,10 @@ lambda_transform_legal_p (lambda_trans_matrix trans,
&& LTM_ROWSIZE (trans) == nb_loops);
/* When there are no dependences, the transformation is correct. */
- if (VEC_length (ddr_p, dependence_relations) == 0)
+ if (dependence_relations.length () == 0)
return true;
- ddr = VEC_index (ddr_p, dependence_relations, 0);
+ ddr = dependence_relations[0];
if (ddr == NULL)
return true;
@@ -329,7 +329,7 @@ lambda_transform_legal_p (lambda_trans_matrix trans,
distres = lambda_vector_new (nb_loops);
/* For each distance vector in the dependence graph. */
- FOR_EACH_VEC_ELT (ddr_p, dependence_relations, i, ddr)
+ FOR_EACH_VEC_ELT (dependence_relations, i, ddr)
{
/* Don't care about relations for which we know that there is no
dependence, nor about read-read (aka. output-dependences):
@@ -367,9 +367,9 @@ lambda_transform_legal_p (lambda_trans_matrix trans,
static bool
loop_parallel_p (struct loop *loop, struct obstack * parloop_obstack)
{
- VEC (loop_p, heap) *loop_nest;
- VEC (ddr_p, heap) *dependence_relations;
- VEC (data_reference_p, heap) *datarefs;
+ vec<loop_p> loop_nest;
+ vec<ddr_p> dependence_relations;
+ vec<data_reference_p> datarefs;
lambda_trans_matrix trans;
bool ret = false;
@@ -384,9 +384,9 @@ loop_parallel_p (struct loop *loop, struct obstack * parloop_obstack)
/* Check for problems with dependences. If the loop can be reversed,
the iterations are independent. */
- datarefs = VEC_alloc (data_reference_p, heap, 10);
- dependence_relations = VEC_alloc (ddr_p, heap, 10 * 10);
- loop_nest = VEC_alloc (loop_p, heap, 3);
+ datarefs.create (10);
+ dependence_relations.create (10 * 10);
+ loop_nest.create (3);
if (! compute_data_dependences_for_loop (loop, true, &loop_nest, &datarefs,
&dependence_relations))
{
@@ -412,7 +412,7 @@ loop_parallel_p (struct loop *loop, struct obstack * parloop_obstack)
" FAILED: data dependencies exist across iterations\n");
end:
- VEC_free (loop_p, heap, loop_nest);
+ loop_nest.release ();
free_dependence_relations (dependence_relations);
free_data_refs (datarefs);
@@ -705,7 +705,8 @@ static void
eliminate_local_variables (edge entry, edge exit)
{
basic_block bb;
- VEC (basic_block, heap) *body = VEC_alloc (basic_block, heap, 3);
+ vec<basic_block> body;
+ body.create (3);
unsigned i;
gimple_stmt_iterator gsi;
bool has_debug_stmt = false;
@@ -716,7 +717,7 @@ eliminate_local_variables (edge entry, edge exit)
gather_blocks_in_sese_region (entry_bb, exit_bb, &body);
- FOR_EACH_VEC_ELT (basic_block, body, i, bb)
+ FOR_EACH_VEC_ELT (body, i, bb)
if (bb != entry_bb && bb != exit_bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
if (is_gimple_debug (gsi_stmt (gsi)))
@@ -728,14 +729,14 @@ eliminate_local_variables (edge entry, edge exit)
eliminate_local_variables_stmt (entry, &gsi, decl_address);
if (has_debug_stmt)
- FOR_EACH_VEC_ELT (basic_block, body, i, bb)
+ FOR_EACH_VEC_ELT (body, i, bb)
if (bb != entry_bb && bb != exit_bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
if (gimple_debug_bind_p (gsi_stmt (gsi)))
eliminate_local_variables_stmt (entry, &gsi, decl_address);
htab_delete (decl_address);
- VEC_free (basic_block, heap, body);
+ body.release ();
}
/* Returns true if expression EXPR is not defined between ENTRY and
@@ -1259,7 +1260,8 @@ separate_decls_in_region (edge entry, edge exit, htab_t reduction_list,
tree type, type_name, nvar;
gimple_stmt_iterator gsi;
struct clsn_data clsn_data;
- VEC (basic_block, heap) *body = VEC_alloc (basic_block, heap, 3);
+ vec<basic_block> body;
+ body.create (3);
basic_block bb;
basic_block entry_bb = bb1;
basic_block exit_bb = exit->dest;
@@ -1268,7 +1270,7 @@ separate_decls_in_region (edge entry, edge exit, htab_t reduction_list,
entry = single_succ_edge (entry_bb);
gather_blocks_in_sese_region (entry_bb, exit_bb, &body);
- FOR_EACH_VEC_ELT (basic_block, body, i, bb)
+ FOR_EACH_VEC_ELT (body, i, bb)
{
if (bb != entry_bb && bb != exit_bb)
{
@@ -1296,7 +1298,7 @@ separate_decls_in_region (edge entry, edge exit, htab_t reduction_list,
and discard those for which we know there's nothing we can
do. */
if (has_debug_stmt)
- FOR_EACH_VEC_ELT (basic_block, body, i, bb)
+ FOR_EACH_VEC_ELT (body, i, bb)
if (bb != entry_bb && bb != exit_bb)
{
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
@@ -1317,7 +1319,7 @@ separate_decls_in_region (edge entry, edge exit, htab_t reduction_list,
}
}
- VEC_free (basic_block, heap, body);
+ body.release ();
if (htab_elements (name_copies) == 0 && htab_elements (reduction_list) == 0)
{
diff --git a/gcc/tree-phinodes.c b/gcc/tree-phinodes.c
index 4bb10e59ac7..2c269647ddf 100644
--- a/gcc/tree-phinodes.c
+++ b/gcc/tree-phinodes.c
@@ -72,7 +72,7 @@ along with GCC; see the file COPYING3. If not see
the -2 on all the calculations below. */
#define NUM_BUCKETS 10
-static GTY ((deletable (""))) VEC(gimple,gc) *free_phinodes[NUM_BUCKETS - 2];
+static GTY ((deletable (""))) vec<gimple, va_gc> *free_phinodes[NUM_BUCKETS - 2];
static unsigned long free_phinode_count;
static int ideal_phi_node_len (int);
@@ -108,13 +108,12 @@ allocate_phi_node (size_t len)
/* If our free list has an element, then use it. */
if (bucket < NUM_BUCKETS - 2
- && gimple_phi_capacity (VEC_index (gimple, free_phinodes[bucket], 0))
- >= len)
+ && gimple_phi_capacity ((*free_phinodes[bucket])[0]) >= len)
{
free_phinode_count--;
- phi = VEC_pop (gimple, free_phinodes[bucket]);
- if (VEC_empty (gimple, free_phinodes[bucket]))
- VEC_free (gimple, gc, free_phinodes[bucket]);
+ phi = free_phinodes[bucket]->pop ();
+ if (free_phinodes[bucket]->is_empty ())
+ vec_free (free_phinodes[bucket]);
if (GATHER_STATISTICS)
phi_nodes_reused++;
}
@@ -229,7 +228,7 @@ release_phi_node (gimple phi)
bucket = len > NUM_BUCKETS - 1 ? NUM_BUCKETS - 1 : len;
bucket -= 2;
- VEC_safe_push (gimple, gc, free_phinodes[bucket], phi);
+ vec_safe_push (free_phinodes[bucket], phi);
free_phinode_count++;
}
diff --git a/gcc/tree-predcom.c b/gcc/tree-predcom.c
index ba61c5b04e0..b1dce08e017 100644
--- a/gcc/tree-predcom.c
+++ b/gcc/tree-predcom.c
@@ -239,8 +239,6 @@ typedef struct dref_d
unsigned always_accessed : 1;
} *dref;
-DEF_VEC_P (dref);
-DEF_VEC_ALLOC_P (dref, heap);
/* Type of the chain of the references. */
@@ -273,16 +271,16 @@ typedef struct chain
struct chain *ch1, *ch2;
/* The references in the chain. */
- VEC(dref,heap) *refs;
+ vec<dref> refs;
/* The maximum distance of the reference in the chain from the root. */
unsigned length;
/* The variables used to copy the value throughout iterations. */
- VEC(tree,heap) *vars;
+ vec<tree> vars;
/* Initializers for the variables. */
- VEC(tree,heap) *inits;
+ vec<tree> inits;
/* True if there is a use of a variable with the maximal distance
that comes after the root in the loop. */
@@ -295,8 +293,6 @@ typedef struct chain
unsigned combined : 1;
} *chain_p;
-DEF_VEC_P (chain_p);
-DEF_VEC_ALLOC_P (chain_p, heap);
/* Describes the knowledge about the step of the memory references in
the component. */
@@ -318,7 +314,7 @@ enum ref_step_type
struct component
{
/* The references in the component. */
- VEC(dref,heap) *refs;
+ vec<dref> refs;
/* What we know about the step of the references in the component. */
enum ref_step_type comp_step;
@@ -416,10 +412,10 @@ dump_chain (FILE *file, chain_p chain)
fprintf (file, "\n");
}
- if (chain->vars)
+ if (chain->vars.exists ())
{
fprintf (file, " vars");
- FOR_EACH_VEC_ELT (tree, chain->vars, i, var)
+ FOR_EACH_VEC_ELT (chain->vars, i, var)
{
fprintf (file, " ");
print_generic_expr (file, var, TDF_SLIM);
@@ -427,10 +423,10 @@ dump_chain (FILE *file, chain_p chain)
fprintf (file, "\n");
}
- if (chain->inits)
+ if (chain->inits.exists ())
{
fprintf (file, " inits");
- FOR_EACH_VEC_ELT (tree, chain->inits, i, var)
+ FOR_EACH_VEC_ELT (chain->inits, i, var)
{
fprintf (file, " ");
print_generic_expr (file, var, TDF_SLIM);
@@ -439,7 +435,7 @@ dump_chain (FILE *file, chain_p chain)
}
fprintf (file, " references:\n");
- FOR_EACH_VEC_ELT (dref, chain->refs, i, a)
+ FOR_EACH_VEC_ELT (chain->refs, i, a)
dump_dref (file, a);
fprintf (file, "\n");
@@ -447,14 +443,14 @@ dump_chain (FILE *file, chain_p chain)
/* Dumps CHAINS to FILE. */
-extern void dump_chains (FILE *, VEC (chain_p, heap) *);
+extern void dump_chains (FILE *, vec<chain_p> );
void
-dump_chains (FILE *file, VEC (chain_p, heap) *chains)
+dump_chains (FILE *file, vec<chain_p> chains)
{
chain_p chain;
unsigned i;
- FOR_EACH_VEC_ELT (chain_p, chains, i, chain)
+ FOR_EACH_VEC_ELT (chains, i, chain)
dump_chain (file, chain);
}
@@ -469,7 +465,7 @@ dump_component (FILE *file, struct component *comp)
fprintf (file, "Component%s:\n",
comp->comp_step == RS_INVARIANT ? " (invariant)" : "");
- FOR_EACH_VEC_ELT (dref, comp->refs, i, a)
+ FOR_EACH_VEC_ELT (comp->refs, i, a)
dump_dref (file, a);
fprintf (file, "\n");
}
@@ -497,12 +493,12 @@ release_chain (chain_p chain)
if (chain == NULL)
return;
- FOR_EACH_VEC_ELT (dref, chain->refs, i, ref)
+ FOR_EACH_VEC_ELT (chain->refs, i, ref)
free (ref);
- VEC_free (dref, heap, chain->refs);
- VEC_free (tree, heap, chain->vars);
- VEC_free (tree, heap, chain->inits);
+ chain->refs.release ();
+ chain->vars.release ();
+ chain->inits.release ();
free (chain);
}
@@ -510,14 +506,14 @@ release_chain (chain_p chain)
/* Frees CHAINS. */
static void
-release_chains (VEC (chain_p, heap) *chains)
+release_chains (vec<chain_p> chains)
{
unsigned i;
chain_p chain;
- FOR_EACH_VEC_ELT (chain_p, chains, i, chain)
+ FOR_EACH_VEC_ELT (chains, i, chain)
release_chain (chain);
- VEC_free (chain_p, heap, chains);
+ chains.release ();
}
/* Frees a component COMP. */
@@ -525,7 +521,7 @@ release_chains (VEC (chain_p, heap) *chains)
static void
release_component (struct component *comp)
{
- VEC_free (dref, heap, comp->refs);
+ comp->refs.release ();
free (comp);
}
@@ -679,13 +675,13 @@ static basic_block
last_always_executed_block (struct loop *loop)
{
unsigned i;
- VEC (edge, heap) *exits = get_loop_exit_edges (loop);
+ vec<edge> exits = get_loop_exit_edges (loop);
edge ex;
basic_block last = loop->latch;
- FOR_EACH_VEC_ELT (edge, exits, i, ex)
+ FOR_EACH_VEC_ELT (exits, i, ex)
last = nearest_common_dominator (CDI_DOMINATORS, last, ex->src);
- VEC_free (edge, heap, exits);
+ exits.release ();
return last;
}
@@ -694,10 +690,10 @@ last_always_executed_block (struct loop *loop)
static struct component *
split_data_refs_to_components (struct loop *loop,
- VEC (data_reference_p, heap) *datarefs,
- VEC (ddr_p, heap) *depends)
+ vec<data_reference_p> datarefs,
+ vec<ddr_p> depends)
{
- unsigned i, n = VEC_length (data_reference_p, datarefs);
+ unsigned i, n = datarefs.length ();
unsigned ca, ia, ib, bad;
unsigned *comp_father = XNEWVEC (unsigned, n + 1);
unsigned *comp_size = XNEWVEC (unsigned, n + 1);
@@ -708,7 +704,7 @@ split_data_refs_to_components (struct loop *loop,
dref dataref;
basic_block last_always_executed = last_always_executed_block (loop);
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
{
if (!DR_REF (dr))
{
@@ -725,7 +721,7 @@ split_data_refs_to_components (struct loop *loop,
comp_father[n] = n;
comp_size[n] = 1;
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
{
enum ref_step_type dummy;
@@ -736,7 +732,7 @@ split_data_refs_to_components (struct loop *loop,
}
}
- FOR_EACH_VEC_ELT (ddr_p, depends, i, ddr)
+ FOR_EACH_VEC_ELT (depends, i, ddr)
{
double_int dummy_off;
@@ -763,7 +759,7 @@ split_data_refs_to_components (struct loop *loop,
comps = XCNEWVEC (struct component *, n);
bad = component_of (comp_father, n);
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
{
ia = (unsigned) (size_t) dr->aux;
ca = component_of (comp_father, ia);
@@ -774,7 +770,7 @@ split_data_refs_to_components (struct loop *loop,
if (!comp)
{
comp = XCNEW (struct component);
- comp->refs = VEC_alloc (dref, heap, comp_size[ca]);
+ comp->refs.create (comp_size[ca]);
comps[ca] = comp;
}
@@ -787,8 +783,8 @@ split_data_refs_to_components (struct loop *loop,
dataref->always_accessed
= dominated_by_p (CDI_DOMINATORS, last_always_executed,
gimple_bb (dataref->stmt));
- dataref->pos = VEC_length (dref, comp->refs);
- VEC_quick_push (dref, comp->refs, dataref);
+ dataref->pos = comp->refs.length ();
+ comp->refs.quick_push (dataref);
}
for (i = 0; i < n; i++)
@@ -820,7 +816,7 @@ suitable_component_p (struct loop *loop, struct component *comp)
basic_block ba, bp = loop->header;
bool ok, has_write = false;
- FOR_EACH_VEC_ELT (dref, comp->refs, i, a)
+ FOR_EACH_VEC_ELT (comp->refs, i, a)
{
ba = gimple_bb (a->stmt);
@@ -834,12 +830,12 @@ suitable_component_p (struct loop *loop, struct component *comp)
has_write = true;
}
- first = VEC_index (dref, comp->refs, 0);
+ first = comp->refs[0];
ok = suitable_reference_p (first->ref, &comp->comp_step);
gcc_assert (ok);
first->offset = double_int_zero;
- for (i = 1; VEC_iterate (dref, comp->refs, i, a); i++)
+ for (i = 1; comp->refs.iterate (i, &a); i++)
{
if (!determine_offset (first->ref, a->ref, &a->offset))
return false;
@@ -884,7 +880,7 @@ filter_suitable_components (struct loop *loop, struct component *comps)
unsigned i;
*comp = act->next;
- FOR_EACH_VEC_ELT (dref, act->refs, i, ref)
+ FOR_EACH_VEC_ELT (act->refs, i, ref)
free (ref);
release_component (act);
}
@@ -914,7 +910,7 @@ order_drefs (const void *a, const void *b)
static inline dref
get_chain_root (chain_p chain)
{
- return VEC_index (dref, chain->refs, 0);
+ return chain->refs[0];
}
/* Adds REF to the chain CHAIN. */
@@ -934,7 +930,7 @@ add_ref_to_chain (chain_p chain, dref ref)
}
gcc_assert (dist.fits_uhwi ());
- VEC_safe_push (dref, heap, chain->refs, ref);
+ chain->refs.safe_push (ref);
ref->distance = dist.to_uhwi ();
@@ -964,9 +960,9 @@ make_invariant_chain (struct component *comp)
chain->all_always_accessed = true;
- FOR_EACH_VEC_ELT (dref, comp->refs, i, ref)
+ FOR_EACH_VEC_ELT (comp->refs, i, ref)
{
- VEC_safe_push (dref, heap, chain->refs, ref);
+ chain->refs.safe_push (ref);
chain->all_always_accessed &= ref->always_accessed;
}
@@ -982,7 +978,7 @@ make_rooted_chain (dref ref)
chain->type = DR_IS_READ (ref->ref) ? CT_LOAD : CT_STORE_LOAD;
- VEC_safe_push (dref, heap, chain->refs, ref);
+ chain->refs.safe_push (ref);
chain->all_always_accessed = ref->always_accessed;
ref->distance = 0;
@@ -995,7 +991,7 @@ make_rooted_chain (dref ref)
static bool
nontrivial_chain_p (chain_p chain)
{
- return chain != NULL && VEC_length (dref, chain->refs) > 1;
+ return chain != NULL && chain->refs.length () > 1;
}
/* Returns the ssa name that contains the value of REF, or NULL_TREE if there
@@ -1136,10 +1132,10 @@ insert_looparound_copy (chain_p chain, dref ref, gimple phi)
nw->distance = ref->distance + 1;
nw->always_accessed = 1;
- FOR_EACH_VEC_ELT (dref, chain->refs, i, aref)
+ FOR_EACH_VEC_ELT (chain->refs, i, aref)
if (aref->distance >= nw->distance)
break;
- VEC_safe_insert (dref, heap, chain->refs, i, nw);
+ chain->refs.safe_insert (i, nw);
if (nw->distance > chain->length)
{
@@ -1160,7 +1156,7 @@ add_looparound_copies (struct loop *loop, chain_p chain)
dref ref, root = get_chain_root (chain);
gimple phi;
- FOR_EACH_VEC_ELT (dref, chain->refs, i, ref)
+ FOR_EACH_VEC_ELT (chain->refs, i, ref)
{
phi = find_looparound_phi (loop, ref, root);
if (!phi)
@@ -1178,7 +1174,7 @@ add_looparound_copies (struct loop *loop, chain_p chain)
static void
determine_roots_comp (struct loop *loop,
struct component *comp,
- VEC (chain_p, heap) **chains)
+ vec<chain_p> *chains)
{
unsigned i;
dref a;
@@ -1189,13 +1185,13 @@ determine_roots_comp (struct loop *loop,
if (comp->comp_step == RS_INVARIANT)
{
chain = make_invariant_chain (comp);
- VEC_safe_push (chain_p, heap, *chains, chain);
+ chains->safe_push (chain);
return;
}
- VEC_qsort (dref, comp->refs, order_drefs);
+ comp->refs.qsort (order_drefs);
- FOR_EACH_VEC_ELT (dref, comp->refs, i, a)
+ FOR_EACH_VEC_ELT (comp->refs, i, a)
{
if (!chain || DR_IS_WRITE (a->ref)
|| double_int::from_uhwi (MAX_DISTANCE).ule (a->offset - last_ofs))
@@ -1203,7 +1199,7 @@ determine_roots_comp (struct loop *loop,
if (nontrivial_chain_p (chain))
{
add_looparound_copies (loop, chain);
- VEC_safe_push (chain_p, heap, *chains, chain);
+ chains->safe_push (chain);
}
else
release_chain (chain);
@@ -1218,7 +1214,7 @@ determine_roots_comp (struct loop *loop,
if (nontrivial_chain_p (chain))
{
add_looparound_copies (loop, chain);
- VEC_safe_push (chain_p, heap, *chains, chain);
+ chains->safe_push (chain);
}
else
release_chain (chain);
@@ -1229,7 +1225,7 @@ determine_roots_comp (struct loop *loop,
static void
determine_roots (struct loop *loop,
- struct component *comps, VEC (chain_p, heap) **chains)
+ struct component *comps, vec<chain_p> *chains)
{
struct component *comp;
@@ -1428,7 +1424,7 @@ get_init_expr (chain_p chain, unsigned index)
return fold_build2 (chain->op, chain->rslt_type, e1, e2);
}
else
- return VEC_index (tree, chain->inits, index);
+ return chain->inits[index];
}
/* Returns a new temporary variable used for the I-th variable carrying
@@ -1465,7 +1461,7 @@ initialize_root_vars (struct loop *loop, chain_p chain, bitmap tmp_vars)
since this is an nonempty chain, reuse_first cannot be true. */
gcc_assert (n > 0 || !reuse_first);
- chain->vars = VEC_alloc (tree, heap, n + 1);
+ chain->vars.create (n + 1);
if (chain->type == CT_COMBINATION)
ref = gimple_assign_lhs (root->stmt);
@@ -1475,18 +1471,18 @@ initialize_root_vars (struct loop *loop, chain_p chain, bitmap tmp_vars)
for (i = 0; i < n + (reuse_first ? 0 : 1); i++)
{
var = predcom_tmp_var (ref, i, tmp_vars);
- VEC_quick_push (tree, chain->vars, var);
+ chain->vars.quick_push (var);
}
if (reuse_first)
- VEC_quick_push (tree, chain->vars, VEC_index (tree, chain->vars, 0));
+ chain->vars.quick_push (chain->vars[0]);
- FOR_EACH_VEC_ELT (tree, chain->vars, i, var)
- VEC_replace (tree, chain->vars, i, make_ssa_name (var, NULL));
+ FOR_EACH_VEC_ELT (chain->vars, i, var)
+ chain->vars[i] = make_ssa_name (var, NULL);
for (i = 0; i < n; i++)
{
- var = VEC_index (tree, chain->vars, i);
- next = VEC_index (tree, chain->vars, i + 1);
+ var = chain->vars[i];
+ next = chain->vars[i + 1];
init = get_init_expr (chain, i);
init = force_gimple_operand (init, &stmts, true, NULL_TREE);
@@ -1512,7 +1508,7 @@ initialize_root (struct loop *loop, chain_p chain, bitmap tmp_vars)
initialize_root_vars (loop, chain, tmp_vars);
replace_ref_with (root->stmt,
- VEC_index (tree, chain->vars, chain->length),
+ chain->vars[chain->length],
true, in_lhs);
}
@@ -1525,7 +1521,7 @@ initialize_root (struct loop *loop, chain_p chain, bitmap tmp_vars)
static void
initialize_root_vars_lm (struct loop *loop, dref root, bool written,
- VEC(tree, heap) **vars, VEC(tree, heap) *inits,
+ vec<tree> *vars, vec<tree> inits,
bitmap tmp_vars)
{
unsigned i;
@@ -1536,18 +1532,18 @@ initialize_root_vars_lm (struct loop *loop, dref root, bool written,
/* Find the initializer for the variable, and check that it cannot
trap. */
- init = VEC_index (tree, inits, 0);
+ init = inits[0];
- *vars = VEC_alloc (tree, heap, written ? 2 : 1);
+ vars->create (written ? 2 : 1);
var = predcom_tmp_var (ref, 0, tmp_vars);
- VEC_quick_push (tree, *vars, var);
+ vars->quick_push (var);
if (written)
- VEC_quick_push (tree, *vars, VEC_index (tree, *vars, 0));
+ vars->quick_push ((*vars)[0]);
- FOR_EACH_VEC_ELT (tree, *vars, i, var)
- VEC_replace (tree, *vars, i, make_ssa_name (var, NULL));
+ FOR_EACH_VEC_ELT (*vars, i, var)
+ (*vars)[i] = make_ssa_name (var, NULL);
- var = VEC_index (tree, *vars, 0);
+ var = (*vars)[0];
init = force_gimple_operand (init, &stmts, written, NULL_TREE);
if (stmts)
@@ -1555,7 +1551,7 @@ initialize_root_vars_lm (struct loop *loop, dref root, bool written,
if (written)
{
- next = VEC_index (tree, *vars, 1);
+ next = (*vars)[1];
phi = create_phi_node (var, loop->header);
add_phi_arg (phi, init, entry, UNKNOWN_LOCATION);
add_phi_arg (phi, next, latch, UNKNOWN_LOCATION);
@@ -1574,26 +1570,26 @@ initialize_root_vars_lm (struct loop *loop, dref root, bool written,
static void
execute_load_motion (struct loop *loop, chain_p chain, bitmap tmp_vars)
{
- VEC (tree, heap) *vars;
+ vec<tree> vars;
dref a;
unsigned n_writes = 0, ridx, i;
tree var;
gcc_assert (chain->type == CT_INVARIANT);
gcc_assert (!chain->combined);
- FOR_EACH_VEC_ELT (dref, chain->refs, i, a)
+ FOR_EACH_VEC_ELT (chain->refs, i, a)
if (DR_IS_WRITE (a->ref))
n_writes++;
/* If there are no reads in the loop, there is nothing to do. */
- if (n_writes == VEC_length (dref, chain->refs))
+ if (n_writes == chain->refs.length ())
return;
initialize_root_vars_lm (loop, get_chain_root (chain), n_writes > 0,
&vars, chain->inits, tmp_vars);
ridx = 0;
- FOR_EACH_VEC_ELT (dref, chain->refs, i, a)
+ FOR_EACH_VEC_ELT (chain->refs, i, a)
{
bool is_read = DR_IS_READ (a->ref);
@@ -1602,19 +1598,19 @@ execute_load_motion (struct loop *loop, chain_p chain, bitmap tmp_vars)
n_writes--;
if (n_writes)
{
- var = VEC_index (tree, vars, 0);
+ var = vars[0];
var = make_ssa_name (SSA_NAME_VAR (var), NULL);
- VEC_replace (tree, vars, 0, var);
+ vars[0] = var;
}
else
ridx = 1;
}
- replace_ref_with (a->stmt, VEC_index (tree, vars, ridx),
+ replace_ref_with (a->stmt, vars[ridx],
!is_read, !is_read);
}
- VEC_free (tree, heap, vars);
+ vars.release ();
}
/* Returns the single statement in that NAME is used, excepting
@@ -1719,7 +1715,7 @@ execute_pred_commoning_chain (struct loop *loop, chain_p chain,
{
/* For combined chains, just remove the statements that are used to
compute the values of the expression (except for the root one). */
- for (i = 1; VEC_iterate (dref, chain->refs, i, a); i++)
+ for (i = 1; chain->refs.iterate (i, &a); i++)
remove_stmt (a->stmt);
}
else
@@ -1728,9 +1724,9 @@ execute_pred_commoning_chain (struct loop *loop, chain_p chain,
and replace the uses of the original references by these
variables. */
initialize_root (loop, chain, tmp_vars);
- for (i = 1; VEC_iterate (dref, chain->refs, i, a); i++)
+ for (i = 1; chain->refs.iterate (i, &a); i++)
{
- var = VEC_index (tree, chain->vars, chain->length - a->distance);
+ var = chain->vars[chain->length - a->distance];
replace_ref_with (a->stmt, var, false, false);
}
}
@@ -1741,13 +1737,13 @@ execute_pred_commoning_chain (struct loop *loop, chain_p chain,
optimized. */
static unsigned
-determine_unroll_factor (VEC (chain_p, heap) *chains)
+determine_unroll_factor (vec<chain_p> chains)
{
chain_p chain;
unsigned factor = 1, af, nfactor, i;
unsigned max = PARAM_VALUE (PARAM_MAX_UNROLL_TIMES);
- FOR_EACH_VEC_ELT (chain_p, chains, i, chain)
+ FOR_EACH_VEC_ELT (chains, i, chain)
{
if (chain->type == CT_INVARIANT || chain->combined)
continue;
@@ -1770,13 +1766,13 @@ determine_unroll_factor (VEC (chain_p, heap) *chains)
Uids of the newly created temporary variables are marked in TMP_VARS. */
static void
-execute_pred_commoning (struct loop *loop, VEC (chain_p, heap) *chains,
+execute_pred_commoning (struct loop *loop, vec<chain_p> chains,
bitmap tmp_vars)
{
chain_p chain;
unsigned i;
- FOR_EACH_VEC_ELT (chain_p, chains, i, chain)
+ FOR_EACH_VEC_ELT (chains, i, chain)
{
if (chain->type == CT_INVARIANT)
execute_load_motion (loop, chain, tmp_vars);
@@ -1791,14 +1787,14 @@ execute_pred_commoning (struct loop *loop, VEC (chain_p, heap) *chains,
phi node, record the ssa name that is defined by it. */
static void
-replace_phis_by_defined_names (VEC (chain_p, heap) *chains)
+replace_phis_by_defined_names (vec<chain_p> chains)
{
chain_p chain;
dref a;
unsigned i, j;
- FOR_EACH_VEC_ELT (chain_p, chains, i, chain)
- FOR_EACH_VEC_ELT (dref, chain->refs, j, a)
+ FOR_EACH_VEC_ELT (chains, i, chain)
+ FOR_EACH_VEC_ELT (chain->refs, j, a)
{
if (gimple_code (a->stmt) == GIMPLE_PHI)
{
@@ -1812,14 +1808,14 @@ replace_phis_by_defined_names (VEC (chain_p, heap) *chains)
NULL, use it to set the stmt field. */
static void
-replace_names_by_phis (VEC (chain_p, heap) *chains)
+replace_names_by_phis (vec<chain_p> chains)
{
chain_p chain;
dref a;
unsigned i, j;
- FOR_EACH_VEC_ELT (chain_p, chains, i, chain)
- FOR_EACH_VEC_ELT (dref, chain->refs, j, a)
+ FOR_EACH_VEC_ELT (chains, i, chain)
+ FOR_EACH_VEC_ELT (chain->refs, j, a)
if (a->stmt == NULL)
{
a->stmt = SSA_NAME_DEF_STMT (a->name_defined_by_phi);
@@ -1833,7 +1829,7 @@ replace_names_by_phis (VEC (chain_p, heap) *chains)
struct epcc_data
{
- VEC (chain_p, heap) *chains;
+ vec<chain_p> chains;
bitmap tmp_vars;
};
@@ -2242,11 +2238,11 @@ combine_chains (chain_p ch1, chain_p ch2)
if (ch1->length != ch2->length)
return NULL;
- if (VEC_length (dref, ch1->refs) != VEC_length (dref, ch2->refs))
+ if (ch1->refs.length () != ch2->refs.length ())
return NULL;
- for (i = 0; (VEC_iterate (dref, ch1->refs, i, r1)
- && VEC_iterate (dref, ch2->refs, i, r2)); i++)
+ for (i = 0; (ch1->refs.iterate (i, &r1)
+ && ch2->refs.iterate (i, &r2)); i++)
{
if (r1->distance != r2->distance)
return NULL;
@@ -2270,19 +2266,19 @@ combine_chains (chain_p ch1, chain_p ch2)
new_chain->rslt_type = rslt_type;
new_chain->length = ch1->length;
- for (i = 0; (VEC_iterate (dref, ch1->refs, i, r1)
- && VEC_iterate (dref, ch2->refs, i, r2)); i++)
+ for (i = 0; (ch1->refs.iterate (i, &r1)
+ && ch2->refs.iterate (i, &r2)); i++)
{
nw = XCNEW (struct dref_d);
nw->stmt = stmt_combining_refs (r1, r2);
nw->distance = r1->distance;
- VEC_safe_push (dref, heap, new_chain->refs, nw);
+ new_chain->refs.safe_push (nw);
}
new_chain->has_max_use_after = false;
root_stmt = get_chain_root (new_chain)->stmt;
- for (i = 1; VEC_iterate (dref, new_chain->refs, i, nw); i++)
+ for (i = 1; new_chain->refs.iterate (i, &nw); i++)
{
if (nw->distance == new_chain->length
&& !stmt_dominates_stmt_p (nw->stmt, root_stmt))
@@ -2300,23 +2296,23 @@ combine_chains (chain_p ch1, chain_p ch2)
/* Try to combine the CHAINS. */
static void
-try_combine_chains (VEC (chain_p, heap) **chains)
+try_combine_chains (vec<chain_p> *chains)
{
unsigned i, j;
chain_p ch1, ch2, cch;
- VEC (chain_p, heap) *worklist = NULL;
+ vec<chain_p> worklist = vec<chain_p>();
- FOR_EACH_VEC_ELT (chain_p, *chains, i, ch1)
+ FOR_EACH_VEC_ELT (*chains, i, ch1)
if (chain_can_be_combined_p (ch1))
- VEC_safe_push (chain_p, heap, worklist, ch1);
+ worklist.safe_push (ch1);
- while (!VEC_empty (chain_p, worklist))
+ while (!worklist.is_empty ())
{
- ch1 = VEC_pop (chain_p, worklist);
+ ch1 = worklist.pop ();
if (!chain_can_be_combined_p (ch1))
continue;
- FOR_EACH_VEC_ELT (chain_p, *chains, j, ch2)
+ FOR_EACH_VEC_ELT (*chains, j, ch2)
{
if (!chain_can_be_combined_p (ch2))
continue;
@@ -2324,14 +2320,14 @@ try_combine_chains (VEC (chain_p, heap) **chains)
cch = combine_chains (ch1, ch2);
if (cch)
{
- VEC_safe_push (chain_p, heap, worklist, cch);
- VEC_safe_push (chain_p, heap, *chains, cch);
+ worklist.safe_push (cch);
+ chains->safe_push (cch);
break;
}
}
}
- VEC_free (chain_p, heap, worklist);
+ worklist.release ();
}
/* Prepare initializers for CHAIN in LOOP. Returns false if this is
@@ -2349,25 +2345,25 @@ prepare_initializers_chain (struct loop *loop, chain_p chain)
/* Find the initializers for the variables, and check that they cannot
trap. */
- chain->inits = VEC_alloc (tree, heap, n);
+ chain->inits.create (n);
for (i = 0; i < n; i++)
- VEC_quick_push (tree, chain->inits, NULL_TREE);
+ chain->inits.quick_push (NULL_TREE);
/* If we have replaced some looparound phi nodes, use their initializers
instead of creating our own. */
- FOR_EACH_VEC_ELT (dref, chain->refs, i, laref)
+ FOR_EACH_VEC_ELT (chain->refs, i, laref)
{
if (gimple_code (laref->stmt) != GIMPLE_PHI)
continue;
gcc_assert (laref->distance > 0);
- VEC_replace (tree, chain->inits, n - laref->distance,
- PHI_ARG_DEF_FROM_EDGE (laref->stmt, entry));
+ chain->inits[n - laref->distance]
+ = PHI_ARG_DEF_FROM_EDGE (laref->stmt, entry);
}
for (i = 0; i < n; i++)
{
- if (VEC_index (tree, chain->inits, i) != NULL_TREE)
+ if (chain->inits[i] != NULL_TREE)
continue;
init = ref_at_iteration (loop, DR_REF (dr), (int) i - n);
@@ -2381,7 +2377,7 @@ prepare_initializers_chain (struct loop *loop, chain_p chain)
if (stmts)
gsi_insert_seq_on_edge_immediate (entry, stmts);
- VEC_replace (tree, chain->inits, i, init);
+ chain->inits[i] = init;
}
return true;
@@ -2391,20 +2387,20 @@ prepare_initializers_chain (struct loop *loop, chain_p chain)
be used because the initializers might trap. */
static void
-prepare_initializers (struct loop *loop, VEC (chain_p, heap) *chains)
+prepare_initializers (struct loop *loop, vec<chain_p> chains)
{
chain_p chain;
unsigned i;
- for (i = 0; i < VEC_length (chain_p, chains); )
+ for (i = 0; i < chains.length (); )
{
- chain = VEC_index (chain_p, chains, i);
+ chain = chains[i];
if (prepare_initializers_chain (loop, chain))
i++;
else
{
release_chain (chain);
- VEC_unordered_remove (chain_p, chains, i);
+ chains.unordered_remove (i);
}
}
}
@@ -2415,11 +2411,11 @@ prepare_initializers (struct loop *loop, VEC (chain_p, heap) *chains)
static bool
tree_predictive_commoning_loop (struct loop *loop)
{
- VEC (loop_p, heap) *loop_nest;
- VEC (data_reference_p, heap) *datarefs;
- VEC (ddr_p, heap) *dependences;
+ vec<loop_p> loop_nest;
+ vec<data_reference_p> datarefs;
+ vec<ddr_p> dependences;
struct component *components;
- VEC (chain_p, heap) *chains = NULL;
+ vec<chain_p> chains = vec<chain_p>();
unsigned unroll_factor;
struct tree_niter_desc desc;
bool unroll = false;
@@ -2431,15 +2427,15 @@ tree_predictive_commoning_loop (struct loop *loop)
/* Find the data references and split them into components according to their
dependence relations. */
- datarefs = VEC_alloc (data_reference_p, heap, 10);
- dependences = VEC_alloc (ddr_p, heap, 10);
- loop_nest = VEC_alloc (loop_p, heap, 3);
+ datarefs.create (10);
+ dependences.create (10);
+ loop_nest.create (3);
if (! compute_data_dependences_for_loop (loop, true, &loop_nest, &datarefs,
&dependences))
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Cannot analyze data dependencies\n");
- VEC_free (loop_p, heap, loop_nest);
+ loop_nest.release ();
free_data_refs (datarefs);
free_dependence_relations (dependences);
return false;
@@ -2449,7 +2445,7 @@ tree_predictive_commoning_loop (struct loop *loop)
dump_data_dependence_relations (dump_file, dependences);
components = split_data_refs_to_components (loop, datarefs, dependences);
- VEC_free (loop_p, heap, loop_nest);
+ loop_nest.release ();
free_dependence_relations (dependences);
if (!components)
{
@@ -2471,7 +2467,7 @@ tree_predictive_commoning_loop (struct loop *loop)
determine_roots (loop, components, &chains);
release_components (components);
- if (!chains)
+ if (!chains.exists ())
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file,
diff --git a/gcc/tree-pretty-print.c b/gcc/tree-pretty-print.c
index a92b6d0d253..755e3639ed3 100644
--- a/gcc/tree-pretty-print.c
+++ b/gcc/tree-pretty-print.c
@@ -546,13 +546,13 @@ dump_block_node (pretty_printer *buffer, tree block, int spc, int flags)
newline_and_indent (buffer, spc + 2);
}
- if (VEC_length (tree, BLOCK_NONLOCALIZED_VARS (block)) > 0)
+ if (vec_safe_length (BLOCK_NONLOCALIZED_VARS (block)) > 0)
{
unsigned i;
- VEC(tree,gc) *nlv = BLOCK_NONLOCALIZED_VARS (block);
+ vec<tree, va_gc> *nlv = BLOCK_NONLOCALIZED_VARS (block);
pp_string (buffer, "NONLOCALIZED_VARS: ");
- FOR_EACH_VEC_ELT (tree, nlv, i, t)
+ FOR_EACH_VEC_ELT (*nlv, i, t)
{
dump_generic_node (buffer, t, 0, flags, false);
pp_string (buffer, " ");
@@ -1359,7 +1359,7 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
dump_decl_name (buffer, val, flags);
else
dump_generic_node (buffer, val, spc, flags, false);
- if (ix != VEC_length (constructor_elt, CONSTRUCTOR_ELTS (node)) - 1)
+ if (ix != vec_safe_length (CONSTRUCTOR_ELTS (node)) - 1)
{
pp_character (buffer, ',');
pp_space (buffer);
diff --git a/gcc/tree-scalar-evolution.c b/gcc/tree-scalar-evolution.c
index 325654bac1b..d19154265e3 100644
--- a/gcc/tree-scalar-evolution.c
+++ b/gcc/tree-scalar-evolution.c
@@ -873,7 +873,7 @@ get_loop_exit_condition (const struct loop *loop)
static void
get_exit_conditions_rec (struct loop *loop,
- VEC(gimple,heap) **exit_conditions)
+ vec<gimple> *exit_conditions)
{
if (!loop)
return;
@@ -887,7 +887,7 @@ get_exit_conditions_rec (struct loop *loop,
gimple loop_condition = get_loop_exit_condition (loop);
if (loop_condition)
- VEC_safe_push (gimple, heap, *exit_conditions, loop_condition);
+ exit_conditions->safe_push (loop_condition);
}
}
@@ -895,7 +895,7 @@ get_exit_conditions_rec (struct loop *loop,
initializes the EXIT_CONDITIONS array. */
static void
-select_loops_exit_conditions (VEC(gimple,heap) **exit_conditions)
+select_loops_exit_conditions (vec<gimple> *exit_conditions)
{
struct loop *function_body = current_loops->tree_root;
@@ -2866,14 +2866,14 @@ number_of_exit_cond_executions (struct loop *loop)
from the EXIT_CONDITIONS array. */
static void
-number_of_iterations_for_all_loops (VEC(gimple,heap) **exit_conditions)
+number_of_iterations_for_all_loops (vec<gimple> *exit_conditions)
{
unsigned int i;
unsigned nb_chrec_dont_know_loops = 0;
unsigned nb_static_loops = 0;
gimple cond;
- FOR_EACH_VEC_ELT (gimple, *exit_conditions, i, cond)
+ FOR_EACH_VEC_ELT (*exit_conditions, i, cond)
{
tree res = number_of_latch_executions (loop_containing_stmt (cond));
if (chrec_contains_undetermined (res))
@@ -3018,7 +3018,7 @@ gather_chrec_stats (tree chrec, struct chrec_stats *stats)
index. This allows the parallelization of the loop. */
static void
-analyze_scalar_evolution_for_all_loop_phi_nodes (VEC(gimple,heap) **exit_conditions)
+analyze_scalar_evolution_for_all_loop_phi_nodes (vec<gimple> *exit_conditions)
{
unsigned int i;
struct chrec_stats stats;
@@ -3027,7 +3027,7 @@ analyze_scalar_evolution_for_all_loop_phi_nodes (VEC(gimple,heap) **exit_conditi
reset_chrecs_counters (&stats);
- FOR_EACH_VEC_ELT (gimple, *exit_conditions, i, cond)
+ FOR_EACH_VEC_ELT (*exit_conditions, i, cond)
{
struct loop *loop;
basic_block bb;
@@ -3238,16 +3238,16 @@ simple_iv (struct loop *wrto_loop, struct loop *use_loop, tree op,
void
scev_analysis (void)
{
- VEC(gimple,heap) *exit_conditions;
+ vec<gimple> exit_conditions;
- exit_conditions = VEC_alloc (gimple, heap, 37);
+ exit_conditions.create (37);
select_loops_exit_conditions (&exit_conditions);
if (dump_file && (dump_flags & TDF_STATS))
analyze_scalar_evolution_for_all_loop_phi_nodes (&exit_conditions);
number_of_iterations_for_all_loops (&exit_conditions);
- VEC_free (gimple, heap, exit_conditions);
+ exit_conditions.release ();
}
/* Finalize the scalar evolution analysis. */
diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c
index 8dd5cb497c2..b5606482629 100644
--- a/gcc/tree-sra.c
+++ b/gcc/tree-sra.c
@@ -250,8 +250,6 @@ struct access
typedef struct access *access_p;
-DEF_VEC_P (access_p);
-DEF_VEC_ALLOC_P (access_p, heap);
/* Alloc pool for allocating access structures. */
static alloc_pool access_pool;
@@ -268,7 +266,7 @@ struct assign_link
/* Alloc pool for allocating assign link structures. */
static alloc_pool link_pool;
-/* Base (tree) -> Vector (VEC(access_p,heap) *) map. */
+/* Base (tree) -> Vector (vec<access_p> *) map. */
static struct pointer_map_t *base_access_vec;
/* Set of candidates. */
@@ -468,7 +466,7 @@ access_has_replacements_p (struct access *acc)
/* Return a vector of pointers to accesses for the variable given in BASE or
NULL if there is none. */
-static VEC (access_p, heap) *
+static vec<access_p> *
get_base_access_vector (tree base)
{
void **slot;
@@ -477,7 +475,7 @@ get_base_access_vector (tree base)
if (!slot)
return NULL;
else
- return *(VEC (access_p, heap) **) slot;
+ return *(vec<access_p> **) slot;
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
@@ -504,13 +502,13 @@ find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
static struct access *
get_first_repr_for_decl (tree base)
{
- VEC (access_p, heap) *access_vec;
+ vec<access_p> *access_vec;
access_vec = get_base_access_vector (base);
if (!access_vec)
return NULL;
- return VEC_index (access_p, access_vec, 0);
+ return (*access_vec)[0];
}
/* Find an access representative for the variable BASE and given OFFSET and
@@ -613,7 +611,7 @@ static void
sra_initialize (void)
{
candidate_bitmap = BITMAP_ALLOC (NULL);
- candidates = htab_create (VEC_length (tree, cfun->local_decls) / 2,
+ candidates = htab_create (vec_safe_length (cfun->local_decls) / 2,
uid_decl_map_hash, uid_decl_map_eq, NULL);
should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
@@ -633,10 +631,8 @@ static bool
delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
void *data ATTRIBUTE_UNUSED)
{
- VEC (access_p, heap) *access_vec;
- access_vec = (VEC (access_p, heap) *) *value;
- VEC_free (access_p, heap, access_vec);
-
+ vec<access_p> *access_vec = (vec<access_p> *) *value;
+ vec_free (access_vec);
return true;
}
@@ -800,7 +796,7 @@ mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
- VEC (access_p, heap) *vec;
+ vec<access_p> *v;
struct access *access;
void **slot;
@@ -812,14 +808,14 @@ create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
slot = pointer_map_contains (base_access_vec, base);
if (slot)
- vec = (VEC (access_p, heap) *) *slot;
+ v = (vec<access_p> *) *slot;
else
- vec = VEC_alloc (access_p, heap, 32);
+ vec_alloc (v, 32);
- VEC_safe_push (access_p, heap, vec, access);
+ v->safe_push (access);
- *((struct VEC (access_p,heap) **)
- pointer_map_insert (base_access_vec, base)) = vec;
+ *((vec<access_p> **)
+ pointer_map_insert (base_access_vec, base)) = v;
return access;
}
@@ -1789,22 +1785,22 @@ sort_and_splice_var_accesses (tree var)
{
int i, j, access_count;
struct access *res, **prev_acc_ptr = &res;
- VEC (access_p, heap) *access_vec;
+ vec<access_p> *access_vec;
bool first = true;
HOST_WIDE_INT low = -1, high = 0;
access_vec = get_base_access_vector (var);
if (!access_vec)
return NULL;
- access_count = VEC_length (access_p, access_vec);
+ access_count = access_vec->length ();
/* Sort by <OFFSET, SIZE>. */
- VEC_qsort (access_p, access_vec, compare_access_positions);
+ access_vec->qsort (compare_access_positions);
i = 0;
while (i < access_count)
{
- struct access *access = VEC_index (access_p, access_vec, i);
+ struct access *access = (*access_vec)[i];
bool grp_write = access->write;
bool grp_read = !access->write;
bool grp_scalar_write = access->write
@@ -1834,7 +1830,7 @@ sort_and_splice_var_accesses (tree var)
j = i + 1;
while (j < access_count)
{
- struct access *ac2 = VEC_index (access_p, access_vec, j);
+ struct access *ac2 = (*access_vec)[j];
if (ac2->offset != access->offset || ac2->size != access->size)
break;
if (ac2->write)
@@ -1889,7 +1885,7 @@ sort_and_splice_var_accesses (tree var)
prev_acc_ptr = &access->next_grp;
}
- gcc_assert (res == VEC_index (access_p, access_vec, 0));
+ gcc_assert (res == (*access_vec)[0]);
return res;
}
@@ -2951,8 +2947,7 @@ sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
}
loc = gimple_location (*stmt);
- if (VEC_length (constructor_elt,
- CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
+ if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
{
/* I have never seen this code path trigger but if it can happen the
following should handle it gracefully. */
@@ -3353,7 +3348,7 @@ initialize_parameter_reductions (void)
parm;
parm = DECL_CHAIN (parm))
{
- VEC (access_p, heap) *access_vec;
+ vec<access_p> *access_vec;
struct access *access;
if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
@@ -3362,7 +3357,7 @@ initialize_parameter_reductions (void)
if (!access_vec)
continue;
- for (access = VEC_index (access_p, access_vec, 0);
+ for (access = (*access_vec)[0];
access;
access = access->next_grp)
generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
@@ -3668,7 +3663,7 @@ mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
current function. */
static void
-analyze_modified_params (VEC (access_p, heap) *representatives)
+analyze_modified_params (vec<access_p> representatives)
{
int i;
@@ -3676,7 +3671,7 @@ analyze_modified_params (VEC (access_p, heap) *representatives)
{
struct access *repr;
- for (repr = VEC_index (access_p, representatives, i);
+ for (repr = representatives[i];
repr;
repr = repr->next_grp)
{
@@ -3715,25 +3710,25 @@ analyze_modified_params (VEC (access_p, heap) *representatives)
static void
propagate_dereference_distances (void)
{
- VEC (basic_block, heap) *queue;
+ vec<basic_block> queue;
basic_block bb;
- queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
- VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
+ queue.create (last_basic_block_for_function (cfun));
+ queue.quick_push (ENTRY_BLOCK_PTR);
FOR_EACH_BB (bb)
{
- VEC_quick_push (basic_block, queue, bb);
+ queue.quick_push (bb);
bb->aux = bb;
}
- while (!VEC_empty (basic_block, queue))
+ while (!queue.is_empty ())
{
edge_iterator ei;
edge e;
bool change = false;
int i;
- bb = VEC_pop (basic_block, queue);
+ bb = queue.pop ();
bb->aux = NULL;
if (bitmap_bit_p (final_bbs, bb->index))
@@ -3775,11 +3770,11 @@ propagate_dereference_distances (void)
continue;
e->src->aux = e->src;
- VEC_quick_push (basic_block, queue, e->src);
+ queue.quick_push (e->src);
}
}
- VEC_free (basic_block, heap, queue);
+ queue.release ();
}
/* Dump a dereferences TABLE with heading STR to file F. */
@@ -3826,7 +3821,7 @@ dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
distances of each representative of a (fraction of a) parameter. */
static void
-analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
+analyze_caller_dereference_legality (vec<access_p> representatives)
{
int i;
@@ -3844,7 +3839,7 @@ analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
for (i = 0; i < func_param_count; i++)
{
- struct access *repr = VEC_index (access_p, representatives, i);
+ struct access *repr = representatives[i];
int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
if (!repr || no_accesses_p (repr))
@@ -3871,19 +3866,19 @@ unmodified_by_ref_scalar_representative (tree parm)
{
int i, access_count;
struct access *repr;
- VEC (access_p, heap) *access_vec;
+ vec<access_p> *access_vec;
access_vec = get_base_access_vector (parm);
gcc_assert (access_vec);
- repr = VEC_index (access_p, access_vec, 0);
+ repr = (*access_vec)[0];
if (repr->write)
return NULL;
repr->group_representative = repr;
- access_count = VEC_length (access_p, access_vec);
+ access_count = access_vec->length ();
for (i = 1; i < access_count; i++)
{
- struct access *access = VEC_index (access_p, access_vec, i);
+ struct access *access = (*access_vec)[i];
if (access->write)
return NULL;
access->group_representative = repr;
@@ -3930,14 +3925,14 @@ splice_param_accesses (tree parm, bool *ro_grp)
int i, j, access_count, group_count;
int agg_size, total_size = 0;
struct access *access, *res, **prev_acc_ptr = &res;
- VEC (access_p, heap) *access_vec;
+ vec<access_p> *access_vec;
access_vec = get_base_access_vector (parm);
if (!access_vec)
return &no_accesses_representant;
- access_count = VEC_length (access_p, access_vec);
+ access_count = access_vec->length ();
- VEC_qsort (access_p, access_vec, compare_access_positions);
+ access_vec->qsort (compare_access_positions);
i = 0;
total_size = 0;
@@ -3946,7 +3941,7 @@ splice_param_accesses (tree parm, bool *ro_grp)
{
bool modification;
tree a1_alias_type;
- access = VEC_index (access_p, access_vec, i);
+ access = (*access_vec)[i];
modification = access->write;
if (access_precludes_ipa_sra_p (access))
return NULL;
@@ -3959,7 +3954,7 @@ splice_param_accesses (tree parm, bool *ro_grp)
j = i + 1;
while (j < access_count)
{
- struct access *ac2 = VEC_index (access_p, access_vec, j);
+ struct access *ac2 = (*access_vec)[j];
if (ac2->offset != access->offset)
{
/* All or nothing law for parameters. */
@@ -4097,13 +4092,13 @@ enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
IPA-SRA. Return result based on what representatives have been found. */
static enum ipa_splicing_result
-splice_all_param_accesses (VEC (access_p, heap) **representatives)
+splice_all_param_accesses (vec<access_p> &representatives)
{
enum ipa_splicing_result result = NO_GOOD_ACCESS;
tree parm;
struct access *repr;
- *representatives = VEC_alloc (access_p, heap, func_param_count);
+ representatives.create (func_param_count);
for (parm = DECL_ARGUMENTS (current_function_decl);
parm;
@@ -4111,8 +4106,7 @@ splice_all_param_accesses (VEC (access_p, heap) **representatives)
{
if (is_unused_scalar_param (parm))
{
- VEC_quick_push (access_p, *representatives,
- &no_accesses_representant);
+ representatives.quick_push (&no_accesses_representant);
if (result == NO_GOOD_ACCESS)
result = UNUSED_PARAMS;
}
@@ -4121,7 +4115,7 @@ splice_all_param_accesses (VEC (access_p, heap) **representatives)
&& bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
{
repr = unmodified_by_ref_scalar_representative (parm);
- VEC_quick_push (access_p, *representatives, repr);
+ representatives.quick_push (repr);
if (repr)
result = UNMODIF_BY_REF_ACCESSES;
}
@@ -4129,7 +4123,7 @@ splice_all_param_accesses (VEC (access_p, heap) **representatives)
{
bool ro_grp = false;
repr = splice_param_accesses (parm, &ro_grp);
- VEC_quick_push (access_p, *representatives, repr);
+ representatives.quick_push (repr);
if (repr && !no_accesses_p (repr))
{
@@ -4147,13 +4141,12 @@ splice_all_param_accesses (VEC (access_p, heap) **representatives)
result = UNUSED_PARAMS;
}
else
- VEC_quick_push (access_p, *representatives, NULL);
+ representatives.quick_push (NULL);
}
if (result == NO_GOOD_ACCESS)
{
- VEC_free (access_p, heap, *representatives);
- *representatives = NULL;
+ representatives.release ();
return NO_GOOD_ACCESS;
}
@@ -4163,13 +4156,13 @@ splice_all_param_accesses (VEC (access_p, heap) **representatives)
/* Return the index of BASE in PARMS. Abort if it is not found. */
static inline int
-get_param_index (tree base, VEC(tree, heap) *parms)
+get_param_index (tree base, vec<tree> parms)
{
int i, len;
- len = VEC_length (tree, parms);
+ len = parms.length ();
for (i = 0; i < len; i++)
- if (VEC_index (tree, parms, i) == base)
+ if (parms[i] == base)
return i;
gcc_unreachable ();
}
@@ -4180,21 +4173,21 @@ get_param_index (tree base, VEC(tree, heap) *parms)
final number of adjustments. */
static ipa_parm_adjustment_vec
-turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
+turn_representatives_into_adjustments (vec<access_p> representatives,
int adjustments_count)
{
- VEC (tree, heap) *parms;
+ vec<tree> parms;
ipa_parm_adjustment_vec adjustments;
tree parm;
int i;
gcc_assert (adjustments_count > 0);
parms = ipa_get_vector_of_formal_parms (current_function_decl);
- adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
+ adjustments.create (adjustments_count);
parm = DECL_ARGUMENTS (current_function_decl);
for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
{
- struct access *repr = VEC_index (access_p, representatives, i);
+ struct access *repr = representatives[i];
if (!repr || no_accesses_p (repr))
{
@@ -4207,7 +4200,7 @@ turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
adj.copy_param = 1;
else
adj.remove_param = 1;
- VEC_quick_push (ipa_parm_adjustment_t, adjustments, adj);
+ adjustments.quick_push (adj);
}
else
{
@@ -4226,11 +4219,11 @@ turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
&& (repr->grp_maybe_modified
|| repr->grp_not_necessarilly_dereferenced));
- VEC_quick_push (ipa_parm_adjustment_t, adjustments, adj);
+ adjustments.quick_push (adj);
}
}
}
- VEC_free (tree, heap, parms);
+ parms.release ();
return adjustments;
}
@@ -4243,12 +4236,12 @@ analyze_all_param_acesses (void)
enum ipa_splicing_result repr_state;
bool proceed = false;
int i, adjustments_count = 0;
- VEC (access_p, heap) *representatives;
+ vec<access_p> representatives;
ipa_parm_adjustment_vec adjustments;
- repr_state = splice_all_param_accesses (&representatives);
+ repr_state = splice_all_param_accesses (representatives);
if (repr_state == NO_GOOD_ACCESS)
- return NULL;
+ return ipa_parm_adjustment_vec();
/* If there are any parameters passed by reference which are not modified
directly, we need to check whether they can be modified indirectly. */
@@ -4260,7 +4253,7 @@ analyze_all_param_acesses (void)
for (i = 0; i < func_param_count; i++)
{
- struct access *repr = VEC_index (access_p, representatives, i);
+ struct access *repr = representatives[i];
if (repr && !no_accesses_p (repr))
{
@@ -4269,7 +4262,7 @@ analyze_all_param_acesses (void)
adjustments_count++;
if (repr->grp_not_necessarilly_dereferenced
|| repr->grp_maybe_modified)
- VEC_replace (access_p, representatives, i, NULL);
+ representatives[i] = NULL;
else
{
proceed = true;
@@ -4282,7 +4275,7 @@ analyze_all_param_acesses (void)
if (new_components == 0)
{
- VEC_replace (access_p, representatives, i, NULL);
+ representatives[i] = NULL;
adjustments_count++;
}
else
@@ -4312,9 +4305,9 @@ analyze_all_param_acesses (void)
adjustments = turn_representatives_into_adjustments (representatives,
adjustments_count);
else
- adjustments = NULL;
+ adjustments = ipa_parm_adjustment_vec();
- VEC_free (access_p, heap, representatives);
+ representatives.release ();
return adjustments;
}
@@ -4350,12 +4343,12 @@ get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
int i, len;
- len = VEC_length (ipa_parm_adjustment_t, adjustments);
+ len = adjustments.length ();
for (i = 0; i < len; i++)
{
struct ipa_parm_adjustment *adj;
- adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &adjustments[i];
if (!adj->copy_param && adj->base == base)
return adj;
}
@@ -4436,7 +4429,7 @@ sra_ipa_modify_expr (tree *expr, bool convert,
HOST_WIDE_INT offset, size, max_size;
tree base, src;
- len = VEC_length (ipa_parm_adjustment_t, adjustments);
+ len = adjustments.length ();
if (TREE_CODE (*expr) == BIT_FIELD_REF
|| TREE_CODE (*expr) == IMAGPART_EXPR
@@ -4462,7 +4455,7 @@ sra_ipa_modify_expr (tree *expr, bool convert,
for (i = 0; i < len; i++)
{
- adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &adjustments[i];
if (adj->base == base &&
(adj->offset == offset || adj->remove_param))
@@ -4532,7 +4525,8 @@ sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
else
- *rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
+ *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
+ NULL);
}
else
new_rhs = fold_build1_loc (gimple_location (stmt),
@@ -4660,7 +4654,7 @@ sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
gsip = &gsi;
}
- len = VEC_length (ipa_parm_adjustment_t, adjustments);
+ len = adjustments.length ();
for (i = 0; i < len; i++)
{
struct ipa_parm_adjustment *adj;
@@ -4669,7 +4663,7 @@ sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
tree name, vexpr, copy = NULL_TREE;
use_operand_p use_p;
- adj = &VEC_index (ipa_parm_adjustment_t, adjustments, i);
+ adj = &adjustments[i];
if (adj->copy_param || !is_gimple_reg (adj->base))
continue;
name = ssa_default_def (cfun, adj->base);
@@ -4758,7 +4752,7 @@ static bool
convert_callers_for_node (struct cgraph_node *node,
void *data)
{
- ipa_parm_adjustment_vec adjustments = (ipa_parm_adjustment_vec)data;
+ ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
bitmap recomputed_callers = BITMAP_ALLOC (NULL);
struct cgraph_edge *cs;
@@ -4772,7 +4766,7 @@ convert_callers_for_node (struct cgraph_node *node,
xstrdup (cgraph_node_name (cs->caller)),
xstrdup (cgraph_node_name (cs->callee)));
- ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
+ ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
pop_cfun ();
}
@@ -4795,7 +4789,7 @@ convert_callers (struct cgraph_node *node, tree old_decl,
basic_block this_block;
cgraph_for_node_and_aliases (node, convert_callers_for_node,
- adjustments, false);
+ &adjustments, false);
if (!encountered_recursive_call)
return;
@@ -4832,15 +4826,16 @@ modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
struct cgraph_node *new_node;
bool cfg_changed;
- VEC (cgraph_edge_p, heap) * redirect_callers = collect_callers_of_node (node);
+ vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);
rebuild_cgraph_edges ();
free_dominance_info (CDI_DOMINATORS);
pop_cfun ();
- new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL,
- false, NULL, NULL, "isra");
- VEC_free (cgraph_edge_p, heap, redirect_callers);
+ new_node = cgraph_function_versioning (node, redirect_callers,
+ NULL,
+ NULL, false, NULL, NULL, "isra");
+ redirect_callers.release ();
push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
@@ -4968,7 +4963,7 @@ ipa_early_sra (void)
}
adjustments = analyze_all_param_acesses ();
- if (!adjustments)
+ if (!adjustments.exists ())
goto out;
if (dump_file)
ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
@@ -4977,7 +4972,7 @@ ipa_early_sra (void)
ret = TODO_update_ssa | TODO_cleanup_cfg;
else
ret = TODO_update_ssa;
- VEC_free (ipa_parm_adjustment_t, heap, adjustments);
+ adjustments.release ();
statistics_counter_event (cfun, "Unused parameters deleted",
sra_stats.deleted_unused_parameters);
diff --git a/gcc/tree-ssa-address.c b/gcc/tree-ssa-address.c
index c5b8ff1056b..3b1e068515a 100644
--- a/gcc/tree-ssa-address.c
+++ b/gcc/tree-ssa-address.c
@@ -79,14 +79,12 @@ typedef struct GTY (()) mem_addr_template {
be filled in. */
} mem_addr_template;
-DEF_VEC_O (mem_addr_template);
-DEF_VEC_ALLOC_O (mem_addr_template, gc);
/* The templates. Each of the low five bits of the index corresponds to one
component of TARGET_MEM_REF being present, while the high bits identify
the address space. See TEMPL_IDX. */
-static GTY(()) VEC (mem_addr_template, gc) *mem_addr_template_list;
+static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
(((int) (AS) << 5) \
@@ -209,14 +207,11 @@ addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
unsigned int templ_index
= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);
- if (templ_index
- >= VEC_length (mem_addr_template, mem_addr_template_list))
- VEC_safe_grow_cleared (mem_addr_template, gc, mem_addr_template_list,
- templ_index + 1);
+ if (templ_index >= vec_safe_length (mem_addr_template_list))
+ vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);
/* Reuse the templates for addresses, so that we do not waste memory. */
- templ = &VEC_index (mem_addr_template, mem_addr_template_list,
- templ_index);
+ templ = &(*mem_addr_template_list)[templ_index];
if (!templ->ref)
{
sym = (addr->symbol ?
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index b045da27eec..0b9b090a37c 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -40,7 +40,6 @@ along with GCC; see the file COPYING3. If not see
#include "params.h"
#include "vec.h"
#include "bitmap.h"
-#include "vecprim.h"
#include "pointer-set.h"
#include "alloc-pool.h"
#include "tree-ssa-alias.h"
diff --git a/gcc/tree-ssa-ccp.c b/gcc/tree-ssa-ccp.c
index 7f0c407caa8..4e565448d5b 100644
--- a/gcc/tree-ssa-ccp.c
+++ b/gcc/tree-ssa-ccp.c
@@ -1686,7 +1686,8 @@ insert_clobber_before_stack_restore (tree saved_val, tree var,
FOR_EACH_IMM_USE_STMT (stmt, iter, saved_val)
if (gimple_call_builtin_p (stmt, BUILT_IN_STACK_RESTORE))
{
- clobber = build_constructor (TREE_TYPE (var), NULL);
+ clobber = build_constructor (TREE_TYPE (var),
+ NULL);
TREE_THIS_VOLATILE (clobber) = 1;
clobber_stmt = gimple_build_assign (var, clobber);
diff --git a/gcc/tree-ssa-coalesce.c b/gcc/tree-ssa-coalesce.c
index f0d66ccb5f1..35774a7b5b2 100644
--- a/gcc/tree-ssa-coalesce.c
+++ b/gcc/tree-ssa-coalesce.c
@@ -505,7 +505,7 @@ dump_coalesce_list (FILE *f, coalesce_list_p cl)
typedef struct ssa_conflicts_d
{
bitmap_obstack obstack; /* A place to allocate our bitmaps. */
- VEC(bitmap, heap)* conflicts;
+ vec<bitmap> conflicts;
} * ssa_conflicts_p;
/* Return an empty new conflict graph for SIZE elements. */
@@ -517,8 +517,8 @@ ssa_conflicts_new (unsigned size)
ptr = XNEW (struct ssa_conflicts_d);
bitmap_obstack_initialize (&ptr->obstack);
- ptr->conflicts = VEC_alloc (bitmap, heap, size);
- VEC_safe_grow_cleared (bitmap, heap, ptr->conflicts, size);
+ ptr->conflicts.create (size);
+ ptr->conflicts.safe_grow_cleared (size);
return ptr;
}
@@ -529,7 +529,7 @@ static inline void
ssa_conflicts_delete (ssa_conflicts_p ptr)
{
bitmap_obstack_release (&ptr->obstack);
- VEC_free (bitmap, heap, ptr->conflicts);
+ ptr->conflicts.release ();
free (ptr);
}
@@ -539,8 +539,8 @@ ssa_conflicts_delete (ssa_conflicts_p ptr)
static inline bool
ssa_conflicts_test_p (ssa_conflicts_p ptr, unsigned x, unsigned y)
{
- bitmap bx = VEC_index (bitmap, ptr->conflicts, x);
- bitmap by = VEC_index (bitmap, ptr->conflicts, y);
+ bitmap bx = ptr->conflicts[x];
+ bitmap by = ptr->conflicts[y];
gcc_checking_assert (x != y);
@@ -557,10 +557,10 @@ ssa_conflicts_test_p (ssa_conflicts_p ptr, unsigned x, unsigned y)
static inline void
ssa_conflicts_add_one (ssa_conflicts_p ptr, unsigned x, unsigned y)
{
- bitmap bx = VEC_index (bitmap, ptr->conflicts, x);
+ bitmap bx = ptr->conflicts[x];
/* If there are no conflicts yet, allocate the bitmap and set bit. */
if (! bx)
- bx = VEC_index (bitmap, ptr->conflicts, x) = BITMAP_ALLOC (&ptr->obstack);
+ bx = ptr->conflicts[x] = BITMAP_ALLOC (&ptr->obstack);
bitmap_set_bit (bx, y);
}
@@ -583,8 +583,8 @@ ssa_conflicts_merge (ssa_conflicts_p ptr, unsigned x, unsigned y)
{
unsigned z;
bitmap_iterator bi;
- bitmap bx = VEC_index (bitmap, ptr->conflicts, x);
- bitmap by = VEC_index (bitmap, ptr->conflicts, y);
+ bitmap bx = ptr->conflicts[x];
+ bitmap by = ptr->conflicts[y];
gcc_checking_assert (x != y);
if (! by)
@@ -595,7 +595,7 @@ ssa_conflicts_merge (ssa_conflicts_p ptr, unsigned x, unsigned y)
conflict. */
EXECUTE_IF_SET_IN_BITMAP (by, 0, z, bi)
{
- bitmap bz = VEC_index (bitmap, ptr->conflicts, z);
+ bitmap bz = ptr->conflicts[z];
if (bz)
bitmap_set_bit (bz, x);
}
@@ -605,13 +605,13 @@ ssa_conflicts_merge (ssa_conflicts_p ptr, unsigned x, unsigned y)
/* If X has conflicts, add Y's to X. */
bitmap_ior_into (bx, by);
BITMAP_FREE (by);
- VEC_replace (bitmap, ptr->conflicts, y, NULL);
+ ptr->conflicts[y] = NULL;
}
else
{
/* If X has no conflicts, simply use Y's. */
- VEC_replace (bitmap, ptr->conflicts, x, by);
- VEC_replace (bitmap, ptr->conflicts, y, NULL);
+ ptr->conflicts[x] = by;
+ ptr->conflicts[y] = NULL;
}
}
@@ -626,7 +626,7 @@ ssa_conflicts_dump (FILE *file, ssa_conflicts_p ptr)
fprintf (file, "\nConflict graph:\n");
- FOR_EACH_VEC_ELT (bitmap, ptr->conflicts, x, b)
+ FOR_EACH_VEC_ELT (ptr->conflicts, x, b)
if (b)
{
fprintf (file, "%d: ", x);
diff --git a/gcc/tree-ssa-dce.c b/gcc/tree-ssa-dce.c
index af4c387c23e..2cfe9539568 100644
--- a/gcc/tree-ssa-dce.c
+++ b/gcc/tree-ssa-dce.c
@@ -68,7 +68,7 @@ static struct stmt_stats
#define STMT_NECESSARY GF_PLF_1
-static VEC(gimple,heap) *worklist;
+static vec<gimple> worklist;
/* Vector indicating an SSA name has already been processed and marked
as necessary. */
@@ -212,7 +212,7 @@ mark_stmt_necessary (gimple stmt, bool add_to_worklist)
gimple_set_plf (stmt, STMT_NECESSARY, true);
if (add_to_worklist)
- VEC_safe_push (gimple, heap, worklist, stmt);
+ worklist.safe_push (stmt);
if (bb_contains_live_stmts && !is_gimple_debug (stmt))
bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
}
@@ -255,7 +255,7 @@ mark_operand_necessary (tree op)
gimple_set_plf (stmt, STMT_NECESSARY, true);
if (bb_contains_live_stmts)
bitmap_set_bit (bb_contains_live_stmts, gimple_bb (stmt)->index);
- VEC_safe_push (gimple, heap, worklist, stmt);
+ worklist.safe_push (stmt);
}
@@ -694,10 +694,10 @@ propagate_necessity (struct edge_list *el)
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "\nProcessing worklist:\n");
- while (VEC_length (gimple, worklist) > 0)
+ while (worklist.length () > 0)
{
/* Take STMT from worklist. */
- stmt = VEC_pop (gimple, worklist);
+ stmt = worklist.pop ();
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -1256,7 +1256,7 @@ eliminate_unnecessary_stmts (void)
gimple_stmt_iterator gsi, psi;
gimple stmt;
tree call;
- VEC (basic_block, heap) *h;
+ vec<basic_block> h;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "\nEliminating unnecessary statements:\n");
@@ -1288,9 +1288,9 @@ eliminate_unnecessary_stmts (void)
gcc_assert (dom_info_available_p (CDI_DOMINATORS));
h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));
- while (VEC_length (basic_block, h))
+ while (h.length ())
{
- bb = VEC_pop (basic_block, h);
+ bb = h.pop ();
/* Remove dead statements. */
for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
@@ -1371,7 +1371,7 @@ eliminate_unnecessary_stmts (void)
}
}
- VEC_free (basic_block, heap, h);
+ h.release ();
/* Since we don't track liveness of virtual PHI nodes, it is possible that we
rendered some PHI nodes unreachable while they are still in use.
@@ -1424,9 +1424,9 @@ eliminate_unnecessary_stmts (void)
{
h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
- while (VEC_length (basic_block, h))
+ while (h.length ())
{
- bb = VEC_pop (basic_block, h);
+ bb = h.pop ();
prev_bb = bb->prev_bb;
/* Rearrangements to the CFG may have failed
to update the dominators tree, so that
@@ -1437,7 +1437,7 @@ eliminate_unnecessary_stmts (void)
delete_basic_block (bb);
}
- VEC_free (basic_block, heap, h);
+ h.release ();
}
}
}
@@ -1497,7 +1497,7 @@ tree_dce_init (bool aggressive)
processed = sbitmap_alloc (num_ssa_names + 1);
bitmap_clear (processed);
- worklist = VEC_alloc (gimple, heap, 64);
+ worklist.create (64);
cfg_altered = false;
}
@@ -1522,7 +1522,7 @@ tree_dce_done (bool aggressive)
sbitmap_free (processed);
- VEC_free (gimple, heap, worklist);
+ worklist.release ();
}
/* Main routine to eliminate dead code.
diff --git a/gcc/tree-ssa-dom.c b/gcc/tree-ssa-dom.c
index 7d015b7837c..0c2158c6e5e 100644
--- a/gcc/tree-ssa-dom.c
+++ b/gcc/tree-ssa-dom.c
@@ -75,8 +75,6 @@ typedef struct cond_equivalence_s
tree value;
} cond_equivalence;
-DEF_VEC_O(cond_equivalence);
-DEF_VEC_ALLOC_O(cond_equivalence,heap);
/* Structure for recording edge equivalences as well as any pending
edge redirections during the dominator optimizer.
@@ -101,7 +99,7 @@ struct edge_info
/* Traversing an edge may also indicate one or more particular conditions
are true or false. */
- VEC(cond_equivalence, heap) *cond_equivalences;
+ vec<cond_equivalence> cond_equivalences;
};
/* Hash table with expressions made available during the renaming process.
@@ -119,10 +117,8 @@ static htab_t avail_exprs;
remove the expressions from the global hash table until we hit the
marker. */
typedef struct expr_hash_elt * expr_hash_elt_t;
-DEF_VEC_P(expr_hash_elt_t);
-DEF_VEC_ALLOC_P(expr_hash_elt_t,heap);
-static VEC(expr_hash_elt_t,heap) *avail_exprs_stack;
+static vec<expr_hash_elt_t> avail_exprs_stack;
/* Structure for entries in the expression hash table. */
@@ -149,7 +145,7 @@ struct expr_hash_elt
A NULL entry is used to mark the end of pairs which need to be
restored during finalization of this block. */
-static VEC(tree,heap) *const_and_copies_stack;
+static vec<tree> const_and_copies_stack;
/* Track whether or not we have changed the control flow graph. */
static bool cfg_altered;
@@ -705,8 +701,7 @@ free_all_edge_infos (void)
if (edge_info)
{
- if (edge_info->cond_equivalences)
- VEC_free (cond_equivalence, heap, edge_info->cond_equivalences);
+ edge_info->cond_equivalences.release ();
free (edge_info);
e->aux = NULL;
}
@@ -729,8 +724,8 @@ tree_ssa_dominator_optimize (void)
/* Create our hash tables. */
avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free_expr_hash_elt);
- avail_exprs_stack = VEC_alloc (expr_hash_elt_t, heap, 20);
- const_and_copies_stack = VEC_alloc (tree, heap, 20);
+ avail_exprs_stack.create (20);
+ const_and_copies_stack.create (20);
need_eh_cleanup = BITMAP_ALLOC (NULL);
/* Setup callbacks for the generic dominator tree walker. */
@@ -844,12 +839,12 @@ tree_ssa_dominator_optimize (void)
/* Free asserted bitmaps and stacks. */
BITMAP_FREE (need_eh_cleanup);
- VEC_free (expr_hash_elt_t, heap, avail_exprs_stack);
- VEC_free (tree, heap, const_and_copies_stack);
+ avail_exprs_stack.release ();
+ const_and_copies_stack.release ();
/* Free the value-handle array. */
threadedge_finalize_values ();
- ssa_name_values = NULL;
+ ssa_name_values.release ();
return 0;
}
@@ -938,9 +933,9 @@ static void
remove_local_expressions_from_table (void)
{
/* Remove all the expressions made available in this block. */
- while (VEC_length (expr_hash_elt_t, avail_exprs_stack) > 0)
+ while (avail_exprs_stack.length () > 0)
{
- expr_hash_elt_t victim = VEC_pop (expr_hash_elt_t, avail_exprs_stack);
+ expr_hash_elt_t victim = avail_exprs_stack.pop ();
void **slot;
if (victim == NULL)
@@ -969,11 +964,11 @@ remove_local_expressions_from_table (void)
static void
restore_vars_to_original_value (void)
{
- while (VEC_length (tree, const_and_copies_stack) > 0)
+ while (const_and_copies_stack.length () > 0)
{
tree prev_value, dest;
- dest = VEC_pop (tree, const_and_copies_stack);
+ dest = const_and_copies_stack.pop ();
if (dest == NULL)
break;
@@ -987,7 +982,7 @@ restore_vars_to_original_value (void)
fprintf (dump_file, "\n");
}
- prev_value = VEC_pop (tree, const_and_copies_stack);
+ prev_value = const_and_copies_stack.pop ();
set_ssa_name_value (dest, prev_value);
}
}
@@ -1141,8 +1136,7 @@ record_equivalences_from_incoming_edge (basic_block bb)
if (lhs)
record_equality (lhs, rhs);
- for (i = 0; VEC_iterate (cond_equivalence,
- edge_info->cond_equivalences, i, eq); ++i)
+ for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
record_cond (eq);
}
}
@@ -1210,7 +1204,7 @@ record_cond (cond_equivalence *p)
print_expr_hash_elt (dump_file, element);
}
- VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element);
+ avail_exprs_stack.safe_push (element);
}
else
free_expr_hash_elt (element);
@@ -1222,7 +1216,7 @@ record_cond (cond_equivalence *p)
static void
build_and_record_new_cond (enum tree_code code,
tree op0, tree op1,
- VEC(cond_equivalence, heap) **p)
+ vec<cond_equivalence> *p)
{
cond_equivalence c;
struct hashable_expr *cond = &c.cond;
@@ -1236,7 +1230,7 @@ build_and_record_new_cond (enum tree_code code,
cond->ops.binary.opnd1 = op1;
c.value = boolean_true_node;
- VEC_safe_push (cond_equivalence, heap, *p, c);
+ p->safe_push (c);
}
/* Record that COND is true and INVERTED is false into the edge information
@@ -1343,7 +1337,7 @@ record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
two slots. */
initialize_expr_from_cond (cond, &c.cond);
c.value = boolean_true_node;
- VEC_safe_push (cond_equivalence, heap, edge_info->cond_equivalences, c);
+ edge_info->cond_equivalences.safe_push (c);
/* It is possible for INVERTED to be the negation of a comparison,
and not a valid RHS or GIMPLE_COND condition. This happens because
@@ -1352,7 +1346,7 @@ record_conditions (struct edge_info *edge_info, tree cond, tree inverted)
obey the trichotomy law. */
initialize_expr_from_cond (inverted, &c.cond);
c.value = boolean_false_node;
- VEC_safe_push (cond_equivalence, heap, edge_info->cond_equivalences, c);
+ edge_info->cond_equivalences.safe_push (c);
}
/* A helper function for record_const_or_copy and record_equality.
@@ -1372,9 +1366,9 @@ record_const_or_copy_1 (tree x, tree y, tree prev_x)
fprintf (dump_file, "\n");
}
- VEC_reserve (tree, heap, const_and_copies_stack, 2);
- VEC_quick_push (tree, const_and_copies_stack, prev_x);
- VEC_quick_push (tree, const_and_copies_stack, x);
+ const_and_copies_stack.reserve (2);
+ const_and_copies_stack.quick_push (prev_x);
+ const_and_copies_stack.quick_push (x);
}
/* Return the loop depth of the basic block of the defining statement of X.
@@ -1744,9 +1738,8 @@ dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
/* Push a marker on the stacks of local information so that we know how
far to unwind when we finalize this block. */
- VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack,
- (expr_hash_elt_t)NULL);
- VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
+ avail_exprs_stack.safe_push (NULL);
+ const_and_copies_stack.safe_push (NULL_TREE);
record_equivalences_from_incoming_edge (bb);
@@ -1756,8 +1749,7 @@ dom_opt_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
/* Create equivalences from redundant PHIs. PHIs are only truly
redundant when they exist in the same block, so push another
marker and unwind right afterwards. */
- VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack,
- (expr_hash_elt_t)NULL);
+ avail_exprs_stack.safe_push (NULL);
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
eliminate_redundant_computations (&gsi);
remove_local_expressions_from_table ();
@@ -1789,7 +1781,7 @@ dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
{
/* Push a marker on the stack, which thread_across_edge expects
and will remove. */
- VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
+ const_and_copies_stack.safe_push (NULL_TREE);
dom_thread_across_edge (walk_data, single_succ_edge (bb));
}
else if ((last = last_stmt (bb))
@@ -1812,9 +1804,8 @@ dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
/* Push a marker onto the available expression stack so that we
unwind any expressions related to the TRUE arm before processing
the false arm below. */
- VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack,
- (expr_hash_elt_t)NULL);
- VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
+ avail_exprs_stack.safe_push (NULL);
+ const_and_copies_stack.safe_push (NULL_TREE);
edge_info = (struct edge_info *) true_edge->aux;
@@ -1832,8 +1823,7 @@ dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
/* If we have 0 = COND or 1 = COND equivalences, record them
into our expression hash tables. */
- for (i = 0; VEC_iterate (cond_equivalence,
- edge_info->cond_equivalences, i, eq); ++i)
+ for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
record_cond (eq);
}
@@ -1850,7 +1840,7 @@ dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
struct edge_info *edge_info;
unsigned int i;
- VEC_safe_push (tree, heap, const_and_copies_stack, NULL_TREE);
+ const_and_copies_stack.safe_push (NULL_TREE);
edge_info = (struct edge_info *) false_edge->aux;
/* If we have info associated with this edge, record it into
@@ -1867,8 +1857,7 @@ dom_opt_leave_block (struct dom_walk_data *walk_data, basic_block bb)
/* If we have 0 = COND or 1 = COND equivalences, record them
into our expression hash tables. */
- for (i = 0; VEC_iterate (cond_equivalence,
- edge_info->cond_equivalences, i, eq); ++i)
+ for (i = 0; edge_info->cond_equivalences.iterate (i, &eq); ++i)
record_cond (eq);
}
@@ -2431,7 +2420,7 @@ lookup_avail_expr (gimple stmt, bool insert)
print_expr_hash_elt (dump_file, element2);
}
- VEC_safe_push (expr_hash_elt_t, heap, avail_exprs_stack, element2);
+ avail_exprs_stack.safe_push (element2);
return NULL_TREE;
}
else
diff --git a/gcc/tree-ssa-forwprop.c b/gcc/tree-ssa-forwprop.c
index 246fc842ebe..5c612d075db 100644
--- a/gcc/tree-ssa-forwprop.c
+++ b/gcc/tree-ssa-forwprop.c
@@ -1189,13 +1189,14 @@ static void
simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
{
unsigned int branch_num = gimple_switch_num_labels (stmt);
- VEC(tree, heap) *labels = VEC_alloc (tree, heap, branch_num);
+ vec<tree> labels;
+ labels.create (branch_num);
unsigned int i, len;
/* Collect the existing case labels in a VEC, and preprocess it as if
we are gimplifying a GENERIC SWITCH_EXPR. */
for (i = 1; i < branch_num; i++)
- VEC_quick_push (tree, labels, gimple_switch_label (stmt, i));
+ labels.quick_push (gimple_switch_label (stmt, i));
preprocess_case_label_vec_for_gimple (labels, index_type, NULL);
/* If any labels were removed, replace the existing case labels
@@ -1203,7 +1204,7 @@ simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
Note that the type updates were done in-place on the case labels,
so we only have to replace the case labels in the GIMPLE_SWITCH
if the number of labels changed. */
- len = VEC_length (tree, labels);
+ len = labels.length ();
if (len < branch_num - 1)
{
bitmap target_blocks;
@@ -1219,12 +1220,12 @@ simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
label = CASE_LABEL (gimple_switch_default_label (stmt));
elt = build_case_label (build_int_cst (index_type, 0), NULL, label);
- VEC_quick_push (tree, labels, elt);
+ labels.quick_push (elt);
len = 1;
}
- for (i = 0; i < VEC_length (tree, labels); i++)
- gimple_switch_set_label (stmt, i + 1, VEC_index (tree, labels, i));
+ for (i = 0; i < labels.length (); i++)
+ gimple_switch_set_label (stmt, i + 1, labels[i]);
for (i++ ; i < branch_num; i++)
gimple_switch_set_label (stmt, i, NULL_TREE);
gimple_switch_set_num_labels (stmt, len + 1);
@@ -1251,7 +1252,7 @@ simplify_gimple_switch_label_vec (gimple stmt, tree index_type)
BITMAP_FREE (target_blocks);
}
- VEC_free (tree, heap, labels);
+ labels.release ();
}
/* STMT is a SWITCH_EXPR for which we attempt to find equivalent forms of
@@ -2817,7 +2818,7 @@ simplify_vector_constructor (gimple_stmt_iterator *gsi)
sel = XALLOCAVEC (unsigned char, nelts);
orig = NULL;
maybe_ident = true;
- FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (op), i, elt)
+ FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (op), i, elt)
{
tree ref, op1;
diff --git a/gcc/tree-ssa-live.c b/gcc/tree-ssa-live.c
index 157f2db221f..878caeb823b 100644
--- a/gcc/tree-ssa-live.c
+++ b/gcc/tree-ssa-live.c
@@ -859,10 +859,10 @@ remove_unused_locals (void)
cfun->has_local_explicit_reg_vars = false;
/* Remove unmarked local and global vars from local_decls. */
- num = VEC_length (tree, cfun->local_decls);
+ num = vec_safe_length (cfun->local_decls);
for (srcidx = 0, dstidx = 0; srcidx < num; srcidx++)
{
- var = VEC_index (tree, cfun->local_decls, srcidx);
+ var = (*cfun->local_decls)[srcidx];
if (TREE_CODE (var) == VAR_DECL)
{
if (!is_used_p (var))
@@ -886,11 +886,11 @@ remove_unused_locals (void)
cfun->has_local_explicit_reg_vars = true;
if (srcidx != dstidx)
- VEC_replace (tree, cfun->local_decls, dstidx, var);
+ (*cfun->local_decls)[dstidx] = var;
dstidx++;
}
if (dstidx != num)
- VEC_truncate (tree, cfun->local_decls, dstidx);
+ cfun->local_decls->truncate (dstidx);
remove_unused_scope_block_p (DECL_INITIAL (current_function_decl));
clear_unused_block_pointer ();
diff --git a/gcc/tree-ssa-live.h b/gcc/tree-ssa-live.h
index 3d39cb6d443..f1ee1f421f3 100644
--- a/gcc/tree-ssa-live.h
+++ b/gcc/tree-ssa-live.h
@@ -24,9 +24,6 @@ along with GCC; see the file COPYING3. If not see
#define _TREE_SSA_LIVE_H 1
#include "partition.h"
-#include "vecprim.h"
-
-
/* Used to create the variable mapping when we go out of SSA form.
diff --git a/gcc/tree-ssa-loop-im.c b/gcc/tree-ssa-loop-im.c
index 4274457d4dc..7f4d045d275 100644
--- a/gcc/tree-ssa-loop-im.c
+++ b/gcc/tree-ssa-loop-im.c
@@ -105,18 +105,14 @@ typedef struct mem_ref_loc
gimple stmt; /* The statement in that it occurs. */
} *mem_ref_loc_p;
-DEF_VEC_P(mem_ref_loc_p);
-DEF_VEC_ALLOC_P(mem_ref_loc_p, heap);
/* The list of memory reference locations in a loop. */
typedef struct mem_ref_locs
{
- VEC (mem_ref_loc_p, heap) *locs;
+ vec<mem_ref_loc_p> locs;
} *mem_ref_locs_p;
-DEF_VEC_P(mem_ref_locs_p);
-DEF_VEC_ALLOC_P(mem_ref_locs_p, heap);
/* Description of a memory reference. */
@@ -128,7 +124,7 @@ typedef struct mem_ref
hashval_t hash; /* Its hash value. */
bitmap stored; /* The set of loops in that this memory location
is stored to. */
- VEC (mem_ref_locs_p, heap) *accesses_in_loop;
+ vec<mem_ref_locs_p> accesses_in_loop;
/* The locations of the accesses. Vector
indexed by the loop number. */
@@ -149,14 +145,8 @@ typedef struct mem_ref
bitmap dep_ref; /* The complement of INDEP_REF. */
} *mem_ref_p;
-DEF_VEC_P(mem_ref_p);
-DEF_VEC_ALLOC_P(mem_ref_p, heap);
-DEF_VEC_P(bitmap);
-DEF_VEC_ALLOC_P(bitmap, heap);
-DEF_VEC_P(htab_t);
-DEF_VEC_ALLOC_P(htab_t, heap);
/* Description of memory accesses in loops. */
@@ -166,18 +156,18 @@ static struct
htab_t refs;
/* The list of memory references. */
- VEC (mem_ref_p, heap) *refs_list;
+ vec<mem_ref_p> refs_list;
/* The set of memory references accessed in each loop. */
- VEC (bitmap, heap) *refs_in_loop;
+ vec<bitmap> refs_in_loop;
/* The set of memory references accessed in each loop, including
subloops. */
- VEC (bitmap, heap) *all_refs_in_loop;
+ vec<bitmap> all_refs_in_loop;
/* The set of memory references stored in each loop, including
subloops. */
- VEC (bitmap, heap) *all_refs_stored_in_loop;
+ vec<bitmap> all_refs_stored_in_loop;
/* Cache for expanding memory addresses. */
struct pointer_map_t *ttae_cache;
@@ -1473,9 +1463,9 @@ free_mem_ref_locs (mem_ref_locs_p accs)
if (!accs)
return;
- FOR_EACH_VEC_ELT (mem_ref_loc_p, accs->locs, i, loc)
+ FOR_EACH_VEC_ELT (accs->locs, i, loc)
free (loc);
- VEC_free (mem_ref_loc_p, heap, accs->locs);
+ accs->locs.release ();
free (accs);
}
@@ -1487,9 +1477,9 @@ memref_free (struct mem_ref *mem)
unsigned i;
mem_ref_locs_p accs;
- FOR_EACH_VEC_ELT (mem_ref_locs_p, mem->accesses_in_loop, i, accs)
+ FOR_EACH_VEC_ELT (mem->accesses_in_loop, i, accs)
free_mem_ref_locs (accs);
- VEC_free (mem_ref_locs_p, heap, mem->accesses_in_loop);
+ mem->accesses_in_loop.release ();
free (mem);
}
@@ -1509,7 +1499,7 @@ mem_ref_alloc (tree mem, unsigned hash, unsigned id)
ref->dep_loop = BITMAP_ALLOC (&lim_bitmap_obstack);
ref->indep_ref = BITMAP_ALLOC (&lim_bitmap_obstack);
ref->dep_ref = BITMAP_ALLOC (&lim_bitmap_obstack);
- ref->accesses_in_loop = NULL;
+ ref->accesses_in_loop.create (0);
return ref;
}
@@ -1520,7 +1510,7 @@ static mem_ref_locs_p
mem_ref_locs_alloc (void)
{
mem_ref_locs_p accs = XNEW (struct mem_ref_locs);
- accs->locs = NULL;
+ accs->locs.create (0);
return accs;
}
@@ -1532,23 +1522,22 @@ record_mem_ref_loc (mem_ref_p ref, struct loop *loop, gimple stmt, tree *loc)
{
mem_ref_loc_p aref = XNEW (struct mem_ref_loc);
mem_ref_locs_p accs;
- bitmap ril = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
+ bitmap ril = memory_accesses.refs_in_loop[loop->num];
- if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
+ if (ref->accesses_in_loop.length ()
<= (unsigned) loop->num)
- VEC_safe_grow_cleared (mem_ref_locs_p, heap, ref->accesses_in_loop,
- loop->num + 1);
- accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
+ ref->accesses_in_loop.safe_grow_cleared (loop->num + 1);
+ accs = ref->accesses_in_loop[loop->num];
if (!accs)
{
accs = mem_ref_locs_alloc ();
- VEC_replace (mem_ref_locs_p, ref->accesses_in_loop, loop->num, accs);
+ ref->accesses_in_loop[loop->num] = accs;
}
aref->stmt = stmt;
aref->ref = loc;
- VEC_safe_push (mem_ref_loc_p, heap, accs->locs, aref);
+ accs->locs.safe_push (aref);
bitmap_set_bit (ril, ref->id);
}
@@ -1585,9 +1574,9 @@ gather_mem_refs_stmt (struct loop *loop, gimple stmt)
mem = simple_mem_ref_in_stmt (stmt, &is_stored);
if (!mem)
{
- id = VEC_length (mem_ref_p, memory_accesses.refs_list);
+ id = memory_accesses.refs_list.length ();
ref = mem_ref_alloc (error_mark_node, 0, id);
- VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
+ memory_accesses.refs_list.safe_push (ref);
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Unanalyzed memory reference %u: ", id);
@@ -1609,9 +1598,9 @@ gather_mem_refs_stmt (struct loop *loop, gimple stmt)
}
else
{
- id = VEC_length (mem_ref_p, memory_accesses.refs_list);
+ id = memory_accesses.refs_list.length ();
ref = mem_ref_alloc (*mem, hash, id);
- VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
+ memory_accesses.refs_list.safe_push (ref);
*slot = ref;
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -1654,15 +1643,14 @@ gather_mem_refs_in_loops (void)
the loop hierarchy. */
FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
{
- lrefs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
- alrefs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, loop->num);
+ lrefs = memory_accesses.refs_in_loop[loop->num];
+ alrefs = memory_accesses.all_refs_in_loop[loop->num];
bitmap_ior_into (alrefs, lrefs);
if (loop_outer (loop) == current_loops->tree_root)
continue;
- alrefso = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
- loop_outer (loop)->num);
+ alrefso = memory_accesses.all_refs_in_loop[loop_outer (loop)->num];
bitmap_ior_into (alrefso, alrefs);
}
}
@@ -1673,7 +1661,7 @@ gather_mem_refs_in_loops (void)
static void
create_vop_ref_mapping_loop (struct loop *loop)
{
- bitmap refs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
+ bitmap refs = memory_accesses.refs_in_loop[loop->num];
struct loop *sloop;
bitmap_iterator bi;
unsigned i;
@@ -1681,14 +1669,13 @@ create_vop_ref_mapping_loop (struct loop *loop)
EXECUTE_IF_SET_IN_BITMAP (refs, 0, i, bi)
{
- ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
+ ref = memory_accesses.refs_list[i];
for (sloop = loop; sloop != current_loops->tree_root;
sloop = loop_outer (sloop))
if (bitmap_bit_p (ref->stored, loop->num))
{
bitmap refs_stored
- = VEC_index (bitmap, memory_accesses.all_refs_stored_in_loop,
- sloop->num);
+ = memory_accesses.all_refs_stored_in_loop[sloop->num];
bitmap_set_bit (refs_stored, ref->id);
}
}
@@ -1718,22 +1705,19 @@ analyze_memory_references (void)
bitmap empty;
memory_accesses.refs = htab_create (100, memref_hash, memref_eq, NULL);
- memory_accesses.refs_list = NULL;
- memory_accesses.refs_in_loop = VEC_alloc (bitmap, heap,
- number_of_loops ());
- memory_accesses.all_refs_in_loop = VEC_alloc (bitmap, heap,
- number_of_loops ());
- memory_accesses.all_refs_stored_in_loop = VEC_alloc (bitmap, heap,
- number_of_loops ());
+ memory_accesses.refs_list.create (0);
+ memory_accesses.refs_in_loop.create (number_of_loops ());
+ memory_accesses.all_refs_in_loop.create (number_of_loops ());
+ memory_accesses.all_refs_stored_in_loop.create (number_of_loops ());
for (i = 0; i < number_of_loops (); i++)
{
empty = BITMAP_ALLOC (&lim_bitmap_obstack);
- VEC_quick_push (bitmap, memory_accesses.refs_in_loop, empty);
+ memory_accesses.refs_in_loop.quick_push (empty);
empty = BITMAP_ALLOC (&lim_bitmap_obstack);
- VEC_quick_push (bitmap, memory_accesses.all_refs_in_loop, empty);
+ memory_accesses.all_refs_in_loop.quick_push (empty);
empty = BITMAP_ALLOC (&lim_bitmap_obstack);
- VEC_quick_push (bitmap, memory_accesses.all_refs_stored_in_loop, empty);
+ memory_accesses.all_refs_stored_in_loop.quick_push (empty);
}
memory_accesses.ttae_cache = NULL;
@@ -1789,26 +1773,25 @@ rewrite_mem_ref_loc (mem_ref_loc_p loc, tree tmp_var)
static void
get_all_locs_in_loop (struct loop *loop, mem_ref_p ref,
- VEC (mem_ref_loc_p, heap) **locs)
+ vec<mem_ref_loc_p> *locs)
{
mem_ref_locs_p accs;
unsigned i;
mem_ref_loc_p loc;
- bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
- loop->num);
+ bitmap refs = memory_accesses.all_refs_in_loop[loop->num];
struct loop *subloop;
if (!bitmap_bit_p (refs, ref->id))
return;
- if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
+ if (ref->accesses_in_loop.length ()
> (unsigned) loop->num)
{
- accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
+ accs = ref->accesses_in_loop[loop->num];
if (accs)
{
- FOR_EACH_VEC_ELT (mem_ref_loc_p, accs->locs, i, loc)
- VEC_safe_push (mem_ref_loc_p, heap, *locs, loc);
+ FOR_EACH_VEC_ELT (accs->locs, i, loc)
+ locs->safe_push (loc);
}
}
@@ -1823,12 +1806,12 @@ rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var)
{
unsigned i;
mem_ref_loc_p loc;
- VEC (mem_ref_loc_p, heap) *locs = NULL;
+ vec<mem_ref_loc_p> locs = vec<mem_ref_loc_p>();
get_all_locs_in_loop (loop, ref, &locs);
- FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc)
+ FOR_EACH_VEC_ELT (locs, i, loc)
rewrite_mem_ref_loc (loc, tmp_var);
- VEC_free (mem_ref_loc_p, heap, locs);
+ locs.release ();
}
/* The name and the length of the currently generated variable
@@ -2095,13 +2078,13 @@ execute_sm_if_changed_flag_set (struct loop *loop, mem_ref_p ref)
unsigned i;
mem_ref_loc_p loc;
tree flag;
- VEC (mem_ref_loc_p, heap) *locs = NULL;
+ vec<mem_ref_loc_p> locs = vec<mem_ref_loc_p>();
char *str = get_lsm_tmp_name (ref->mem, ~0);
lsm_tmp_name_add ("_flag");
flag = create_tmp_reg (boolean_type_node, str);
get_all_locs_in_loop (loop, ref, &locs);
- FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc)
+ FOR_EACH_VEC_ELT (locs, i, loc)
{
gimple_stmt_iterator gsi;
gimple stmt;
@@ -2115,7 +2098,7 @@ execute_sm_if_changed_flag_set (struct loop *loop, mem_ref_p ref)
gsi_insert_after (&gsi, stmt, GSI_CONTINUE_LINKING);
}
}
- VEC_free (mem_ref_loc_p, heap, locs);
+ locs.release ();
return flag;
}
@@ -2125,7 +2108,7 @@ execute_sm_if_changed_flag_set (struct loop *loop, mem_ref_p ref)
to the reference from the temporary variable are emitted to exits. */
static void
-execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref)
+execute_sm (struct loop *loop, vec<edge> exits, mem_ref_p ref)
{
tree tmp_var, store_flag;
unsigned i;
@@ -2181,7 +2164,7 @@ execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref)
}
/* Sink the store to every exit from the loop. */
- FOR_EACH_VEC_ELT (edge, exits, i, ex)
+ FOR_EACH_VEC_ELT (exits, i, ex)
if (!multi_threaded_model_p)
{
gimple store;
@@ -2197,7 +2180,7 @@ execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref)
static void
hoist_memory_references (struct loop *loop, bitmap mem_refs,
- VEC (edge, heap) *exits)
+ vec<edge> exits)
{
mem_ref_p ref;
unsigned i;
@@ -2205,7 +2188,7 @@ hoist_memory_references (struct loop *loop, bitmap mem_refs,
EXECUTE_IF_SET_IN_BITMAP (mem_refs, 0, i, bi)
{
- ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
+ ref = memory_accesses.refs_list[i];
execute_sm (loop, exits, ref);
}
}
@@ -2216,7 +2199,7 @@ hoist_memory_references (struct loop *loop, bitmap mem_refs,
static bool
ref_always_accessed_p (struct loop *loop, mem_ref_p ref, bool stored_p)
{
- VEC (mem_ref_loc_p, heap) *locs = NULL;
+ vec<mem_ref_loc_p> locs = vec<mem_ref_loc_p>();
unsigned i;
mem_ref_loc_p loc;
bool ret = false;
@@ -2229,7 +2212,7 @@ ref_always_accessed_p (struct loop *loop, mem_ref_p ref, bool stored_p)
base = TREE_OPERAND (base, 0);
get_all_locs_in_loop (loop, ref, &locs);
- FOR_EACH_VEC_ELT (mem_ref_loc_p, locs, i, loc)
+ FOR_EACH_VEC_ELT (locs, i, loc)
{
if (!get_lim_data (loc->stmt))
continue;
@@ -2262,7 +2245,7 @@ ref_always_accessed_p (struct loop *loop, mem_ref_p ref, bool stored_p)
break;
}
}
- VEC_free (mem_ref_loc_p, heap, locs);
+ locs.release ();
return ret;
}
@@ -2329,16 +2312,13 @@ ref_indep_loop_p_1 (struct loop *loop, mem_ref_p ref)
mem_ref_p aref;
if (stored)
- refs_to_check = VEC_index (bitmap,
- memory_accesses.all_refs_in_loop, loop->num);
+ refs_to_check = memory_accesses.all_refs_in_loop[loop->num];
else
- refs_to_check = VEC_index (bitmap,
- memory_accesses.all_refs_stored_in_loop,
- loop->num);
+ refs_to_check = memory_accesses.all_refs_stored_in_loop[loop->num];
EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi)
{
- aref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
+ aref = memory_accesses.refs_list[i];
if (!MEM_ANALYZABLE (aref)
|| !refs_independent_p (ref, aref))
{
@@ -2425,15 +2405,14 @@ can_sm_ref_p (struct loop *loop, mem_ref_p ref)
static void
find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm)
{
- bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
- loop->num);
+ bitmap refs = memory_accesses.all_refs_in_loop[loop->num];
unsigned i;
bitmap_iterator bi;
mem_ref_p ref;
EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi)
{
- ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
+ ref = memory_accesses.refs_list[i];
if (can_sm_ref_p (loop, ref))
bitmap_set_bit (refs_to_sm, i);
}
@@ -2445,12 +2424,12 @@ find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm)
static bool
loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
- VEC (edge, heap) *exits)
+ vec<edge> exits)
{
unsigned i;
edge ex;
- FOR_EACH_VEC_ELT (edge, exits, i, ex)
+ FOR_EACH_VEC_ELT (exits, i, ex)
if (ex->flags & (EDGE_ABNORMAL | EDGE_EH))
return false;
@@ -2464,7 +2443,7 @@ loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
static void
store_motion_loop (struct loop *loop, bitmap sm_executed)
{
- VEC (edge, heap) *exits = get_loop_exit_edges (loop);
+ vec<edge> exits = get_loop_exit_edges (loop);
struct loop *subloop;
bitmap sm_in_loop = BITMAP_ALLOC (NULL);
@@ -2473,7 +2452,7 @@ store_motion_loop (struct loop *loop, bitmap sm_executed)
find_refs_for_sm (loop, sm_executed, sm_in_loop);
hoist_memory_references (loop, sm_in_loop, exits);
}
- VEC_free (edge, heap, exits);
+ exits.release ();
bitmap_ior_into (sm_executed, sm_in_loop);
for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
@@ -2623,13 +2602,13 @@ tree_ssa_lim_finalize (void)
htab_delete (memory_accesses.refs);
- FOR_EACH_VEC_ELT (mem_ref_p, memory_accesses.refs_list, i, ref)
+ FOR_EACH_VEC_ELT (memory_accesses.refs_list, i, ref)
memref_free (ref);
- VEC_free (mem_ref_p, heap, memory_accesses.refs_list);
+ memory_accesses.refs_list.release ();
- VEC_free (bitmap, heap, memory_accesses.refs_in_loop);
- VEC_free (bitmap, heap, memory_accesses.all_refs_in_loop);
- VEC_free (bitmap, heap, memory_accesses.all_refs_stored_in_loop);
+ memory_accesses.refs_in_loop.release ();
+ memory_accesses.all_refs_in_loop.release ();
+ memory_accesses.all_refs_stored_in_loop.release ();
if (memory_accesses.ttae_cache)
free_affine_expand_cache (&memory_accesses.ttae_cache);
diff --git a/gcc/tree-ssa-loop-ivcanon.c b/gcc/tree-ssa-loop-ivcanon.c
index 601223b3dda..108c338d3c7 100644
--- a/gcc/tree-ssa-loop-ivcanon.c
+++ b/gcc/tree-ssa-loop-ivcanon.c
@@ -216,7 +216,7 @@ tree_estimate_loop_size (struct loop *loop, edge exit, edge edge_to_cancel, stru
gimple_stmt_iterator gsi;
unsigned int i;
bool after_exit;
- VEC (basic_block, heap) *path = get_loop_hot_path (loop);
+ vec<basic_block> path = get_loop_hot_path (loop);
size->overall = 0;
size->eliminated_by_peeling = 0;
@@ -318,9 +318,9 @@ tree_estimate_loop_size (struct loop *loop, edge exit, edge edge_to_cancel, stru
}
}
}
- while (VEC_length (basic_block, path))
+ while (path.length ())
{
- basic_block bb = VEC_pop (basic_block, path);
+ basic_block bb = path.pop ();
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
@@ -350,7 +350,7 @@ tree_estimate_loop_size (struct loop *loop, edge exit, edge edge_to_cancel, stru
size->num_branches_on_hot_path++;
}
}
- VEC_free (basic_block, heap, path);
+ path.release ();
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "size: %i-%i, last_iteration: %i-%i\n", size->overall,
size->eliminated_by_peeling, size->last_iteration,
@@ -414,7 +414,7 @@ estimated_unrolled_size (struct loop_size *size,
edge
loop_edge_to_cancel (struct loop *loop)
{
- VEC (edge, heap) *exits;
+ vec<edge> exits;
unsigned i;
edge edge_to_cancel;
gimple_stmt_iterator gsi;
@@ -425,7 +425,7 @@ loop_edge_to_cancel (struct loop *loop)
exits = get_loop_exit_edges (loop);
- FOR_EACH_VEC_ELT (edge, exits, i, edge_to_cancel)
+ FOR_EACH_VEC_ELT (exits, i, edge_to_cancel)
{
/* Find the other edge than the loop exit
leaving the conditoinal. */
@@ -447,7 +447,7 @@ loop_edge_to_cancel (struct loop *loop)
if (edge_to_cancel->dest != loop->latch)
continue;
- VEC_free (edge, heap, exits);
+ exits.release ();
/* Verify that the code in loop latch does nothing that may end program
execution without really reaching the exit. This may include
@@ -457,7 +457,7 @@ loop_edge_to_cancel (struct loop *loop)
return NULL;
return edge_to_cancel;
}
- VEC_free (edge, heap, exits);
+ exits.release ();
return NULL;
}
@@ -573,8 +573,8 @@ remove_redundant_iv_tests (struct loop *loop)
}
/* Stores loops that will be unlooped after we process whole loop tree. */
-static VEC(loop_p, heap) *loops_to_unloop;
-static VEC(int, heap) *loops_to_unloop_nunroll;
+static vec<loop_p> loops_to_unloop;
+static vec<int> loops_to_unloop_nunroll;
/* Cancel all fully unrolled loops by putting __builtin_unreachable
on the latch edge.
@@ -592,10 +592,10 @@ void
unloop_loops (bitmap loop_closed_ssa_invalidated,
bool *irred_invalidated)
{
- while (VEC_length (loop_p, loops_to_unloop))
+ while (loops_to_unloop.length ())
{
- struct loop *loop = VEC_pop (loop_p, loops_to_unloop);
- int n_unroll = VEC_pop (int, loops_to_unloop_nunroll);
+ struct loop *loop = loops_to_unloop.pop ();
+ int n_unroll = loops_to_unloop_nunroll.pop ();
basic_block latch = loop->latch;
edge latch_edge = loop_latch_edge (loop);
int flags = latch_edge->flags;
@@ -625,10 +625,8 @@ unloop_loops (bitmap loop_closed_ssa_invalidated,
gsi = gsi_start_bb (latch_edge->dest);
gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
}
- VEC_free (loop_p, heap, loops_to_unloop);
- loops_to_unloop = NULL;
- VEC_free (int, heap, loops_to_unloop_nunroll);
- loops_to_unloop_nunroll = NULL;
+ loops_to_unloop.release ();
+ loops_to_unloop_nunroll.release ();
}
/* Tries to unroll LOOP completely, i.e. NITER times.
@@ -701,7 +699,7 @@ try_unroll_loop_completely (struct loop *loop,
sbitmap wont_exit;
edge e;
unsigned i;
- VEC (edge, heap) *to_remove = NULL;
+ vec<edge> to_remove = vec<edge>();
if (ul == UL_SINGLE_ITER)
return false;
@@ -817,13 +815,13 @@ try_unroll_loop_completely (struct loop *loop,
return false;
}
- FOR_EACH_VEC_ELT (edge, to_remove, i, e)
+ FOR_EACH_VEC_ELT (to_remove, i, e)
{
bool ok = remove_path (e);
gcc_assert (ok);
}
- VEC_free (edge, heap, to_remove);
+ to_remove.release ();
free (wont_exit);
free_original_copy_tables ();
}
@@ -843,8 +841,8 @@ try_unroll_loop_completely (struct loop *loop,
}
/* Store the loop for later unlooping and exit removal. */
- VEC_safe_push (loop_p, heap, loops_to_unloop, loop);
- VEC_safe_push (int, heap, loops_to_unloop_nunroll, n_unroll);
+ loops_to_unloop.safe_push (loop);
+ loops_to_unloop_nunroll.safe_push (n_unroll);
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -1070,7 +1068,7 @@ propagate_constants_for_unrolling (basic_block bb)
unsigned int
tree_unroll_loops_completely (bool may_increase_size, bool unroll_outer)
{
- VEC(loop_p,stack) *father_stack = VEC_alloc (loop_p, stack, 16);
+ vec<loop_p, va_stack> father_stack;
loop_iterator li;
struct loop *loop;
bool changed;
@@ -1078,6 +1076,7 @@ tree_unroll_loops_completely (bool may_increase_size, bool unroll_outer)
int iteration = 0;
bool irred_invalidated = false;
+ vec_stack_alloc (loop_p, father_stack, 16);
do
{
changed = false;
@@ -1111,7 +1110,7 @@ tree_unroll_loops_completely (bool may_increase_size, bool unroll_outer)
iteration is complete and the IR eventually cleaned up. */
if (loop_outer (loop_father) && !loop_father->aux)
{
- VEC_safe_push (loop_p, stack, father_stack, loop_father);
+ father_stack.safe_push (loop_father);
loop_father->aux = loop_father;
}
}
@@ -1124,9 +1123,9 @@ tree_unroll_loops_completely (bool may_increase_size, bool unroll_outer)
/* Be sure to skip unlooped loops while procesing father_stack
array. */
- FOR_EACH_VEC_ELT (loop_p, loops_to_unloop, i, iter)
+ FOR_EACH_VEC_ELT (loops_to_unloop, i, iter)
(*iter)->aux = NULL;
- FOR_EACH_VEC_ELT (loop_p, father_stack, i, iter)
+ FOR_EACH_VEC_ELT (father_stack, i, iter)
if (!(*iter)->aux)
*iter = NULL;
unloop_loops (loop_closed_ssa_invalidated, &irred_invalidated);
@@ -1140,7 +1139,7 @@ tree_unroll_loops_completely (bool may_increase_size, bool unroll_outer)
update_ssa (TODO_update_ssa);
/* Propagate the constants within the new basic blocks. */
- FOR_EACH_VEC_ELT (loop_p, father_stack, i, iter)
+ FOR_EACH_VEC_ELT (father_stack, i, iter)
if (*iter)
{
unsigned j;
@@ -1150,7 +1149,7 @@ tree_unroll_loops_completely (bool may_increase_size, bool unroll_outer)
free (body);
(*iter)->aux = NULL;
}
- VEC_truncate (loop_p, father_stack, 0);
+ father_stack.truncate (0);
/* This will take care of removing completely unrolled loops
from the loop structures so we can continue unrolling now
@@ -1172,7 +1171,7 @@ tree_unroll_loops_completely (bool may_increase_size, bool unroll_outer)
while (changed
&& ++iteration <= PARAM_VALUE (PARAM_MAX_UNROLL_ITERATIONS));
- VEC_free (loop_p, stack, father_stack);
+ father_stack.release ();
if (irred_invalidated
&& loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
diff --git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c
index 040885f5ade..ed1317080ea 100644
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -234,12 +234,8 @@ struct iv_inv_expr_ent
/* The data used by the induction variable optimizations. */
typedef struct iv_use *iv_use_p;
-DEF_VEC_P(iv_use_p);
-DEF_VEC_ALLOC_P(iv_use_p,heap);
typedef struct iv_cand *iv_cand_p;
-DEF_VEC_P(iv_cand_p);
-DEF_VEC_ALLOC_P(iv_cand_p,heap);
struct ivopts_data
{
@@ -269,10 +265,10 @@ struct ivopts_data
bitmap relevant;
/* The uses of induction variables. */
- VEC(iv_use_p,heap) *iv_uses;
+ vec<iv_use_p> iv_uses;
/* The candidates. */
- VEC(iv_cand_p,heap) *iv_candidates;
+ vec<iv_cand_p> iv_candidates;
/* A bitmap of important candidates. */
bitmap important_candidates;
@@ -376,7 +372,7 @@ struct iv_ca_delta
/* The list of trees for that the decl_rtl field must be reset is stored
here. */
-static VEC(tree,heap) *decl_rtl_to_reset;
+static vec<tree> decl_rtl_to_reset;
static comp_cost force_expr_to_var_cost (tree, bool);
@@ -385,7 +381,7 @@ static comp_cost force_expr_to_var_cost (tree, bool);
static inline unsigned
n_iv_uses (struct ivopts_data *data)
{
- return VEC_length (iv_use_p, data->iv_uses);
+ return data->iv_uses.length ();
}
/* Ith use recorded in DATA. */
@@ -393,7 +389,7 @@ n_iv_uses (struct ivopts_data *data)
static inline struct iv_use *
iv_use (struct ivopts_data *data, unsigned i)
{
- return VEC_index (iv_use_p, data->iv_uses, i);
+ return data->iv_uses[i];
}
/* Number of candidates recorded in DATA. */
@@ -401,7 +397,7 @@ iv_use (struct ivopts_data *data, unsigned i)
static inline unsigned
n_iv_cands (struct ivopts_data *data)
{
- return VEC_length (iv_cand_p, data->iv_candidates);
+ return data->iv_candidates.length ();
}
/* Ith candidate recorded in DATA. */
@@ -409,7 +405,7 @@ n_iv_cands (struct ivopts_data *data)
static inline struct iv_cand *
iv_cand (struct ivopts_data *data, unsigned i)
{
- return VEC_index (iv_cand_p, data->iv_candidates, i);
+ return data->iv_candidates[i];
}
/* The single loop exit if it dominates the latch, NULL otherwise. */
@@ -855,12 +851,12 @@ tree_ssa_iv_optimize_init (struct ivopts_data *data)
data->important_candidates = BITMAP_ALLOC (NULL);
data->max_inv_id = 0;
data->niters = NULL;
- data->iv_uses = VEC_alloc (iv_use_p, heap, 20);
- data->iv_candidates = VEC_alloc (iv_cand_p, heap, 20);
+ data->iv_uses.create (20);
+ data->iv_candidates.create (20);
data->inv_expr_tab = htab_create (10, htab_inv_expr_hash,
htab_inv_expr_eq, free);
data->inv_expr_id = 0;
- decl_rtl_to_reset = VEC_alloc (tree, heap, 20);
+ decl_rtl_to_reset.create (20);
}
/* Returns a memory object to that EXPR points. In case we are able to
@@ -1209,7 +1205,7 @@ record_use (struct ivopts_data *data, tree *use_p, struct iv *iv,
if (dump_file && (dump_flags & TDF_DETAILS))
dump_use (dump_file, use);
- VEC_safe_push (iv_use_p, heap, data->iv_uses, use);
+ data->iv_uses.safe_push (use);
return use;
}
@@ -2270,7 +2266,7 @@ add_candidate_1 (struct ivopts_data *data,
}
cand->important = important;
cand->incremented_at = incremented_at;
- VEC_safe_push (iv_cand_p, heap, data->iv_candidates, cand);
+ data->iv_candidates.safe_push (cand);
if (step
&& TREE_CODE (step) != INTEGER_CST)
@@ -2846,7 +2842,7 @@ prepare_decl_rtl (tree *expr_p, int *ws, void *data)
if (x)
{
- VEC_safe_push (tree, heap, decl_rtl_to_reset, obj);
+ decl_rtl_to_reset.safe_push (obj);
SET_DECL_RTL (obj, x);
}
@@ -3074,8 +3070,6 @@ adjust_setup_cost (struct ivopts_data *data, unsigned cost)
validity for a memory reference accessing memory of mode MODE in
address space AS. */
-DEF_VEC_P (sbitmap);
-DEF_VEC_ALLOC_P (sbitmap, heap);
bool
multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, enum machine_mode mode,
@@ -3083,13 +3077,13 @@ multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, enum machine_mode mode,
{
#define MAX_RATIO 128
unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
- static VEC (sbitmap, heap) *valid_mult_list;
+ static vec<sbitmap> valid_mult_list;
sbitmap valid_mult;
- if (data_index >= VEC_length (sbitmap, valid_mult_list))
- VEC_safe_grow_cleared (sbitmap, heap, valid_mult_list, data_index + 1);
+ if (data_index >= valid_mult_list.length ())
+ valid_mult_list.safe_grow_cleared (data_index + 1);
- valid_mult = VEC_index (sbitmap, valid_mult_list, data_index);
+ valid_mult = valid_mult_list[data_index];
if (!valid_mult)
{
enum machine_mode address_mode = targetm.addr_space.address_mode (as);
@@ -3117,7 +3111,7 @@ multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, enum machine_mode mode,
fprintf (dump_file, "\n");
}
- VEC_replace (sbitmap, valid_mult_list, data_index, valid_mult);
+ valid_mult_list[data_index] = valid_mult;
}
if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
@@ -3146,8 +3140,6 @@ typedef struct address_cost_data_s
unsigned costs[2][2][2][2];
} *address_cost_data;
-DEF_VEC_P (address_cost_data);
-DEF_VEC_ALLOC_P (address_cost_data, heap);
static comp_cost
get_address_cost (bool symbol_present, bool var_present,
@@ -3157,7 +3149,7 @@ get_address_cost (bool symbol_present, bool var_present,
bool stmt_after_inc, bool *may_autoinc)
{
enum machine_mode address_mode = targetm.addr_space.address_mode (as);
- static VEC(address_cost_data, heap) *address_cost_data_list;
+ static vec<address_cost_data> address_cost_data_list;
unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mem_mode;
address_cost_data data;
static bool has_preinc[MAX_MACHINE_MODE], has_postinc[MAX_MACHINE_MODE];
@@ -3168,11 +3160,10 @@ get_address_cost (bool symbol_present, bool var_present,
unsigned HOST_WIDE_INT mask;
unsigned bits;
- if (data_index >= VEC_length (address_cost_data, address_cost_data_list))
- VEC_safe_grow_cleared (address_cost_data, heap, address_cost_data_list,
- data_index + 1);
+ if (data_index >= address_cost_data_list.length ())
+ address_cost_data_list.safe_grow_cleared (data_index + 1);
- data = VEC_index (address_cost_data, address_cost_data_list, data_index);
+ data = address_cost_data_list[data_index];
if (!data)
{
HOST_WIDE_INT i;
@@ -3376,8 +3367,7 @@ get_address_cost (bool symbol_present, bool var_present,
fprintf (dump_file, "\n");
}
- VEC_replace (address_cost_data, address_cost_data_list,
- data_index, data);
+ address_cost_data_list[data_index] = data;
}
bits = GET_MODE_BITSIZE (address_mode);
@@ -6614,7 +6604,7 @@ free_loop_data (struct ivopts_data *data)
free (use->cost_map);
free (use);
}
- VEC_truncate (iv_use_p, data->iv_uses, 0);
+ data->iv_uses.truncate (0);
for (i = 0; i < n_iv_cands (data); i++)
{
@@ -6625,7 +6615,7 @@ free_loop_data (struct ivopts_data *data)
BITMAP_FREE (cand->depends_on);
free (cand);
}
- VEC_truncate (iv_cand_p, data->iv_candidates, 0);
+ data->iv_candidates.truncate (0);
if (data->version_info_size < num_ssa_names)
{
@@ -6636,10 +6626,10 @@ free_loop_data (struct ivopts_data *data)
data->max_inv_id = 0;
- FOR_EACH_VEC_ELT (tree, decl_rtl_to_reset, i, obj)
+ FOR_EACH_VEC_ELT (decl_rtl_to_reset, i, obj)
SET_DECL_RTL (obj, NULL_RTX);
- VEC_truncate (tree, decl_rtl_to_reset, 0);
+ decl_rtl_to_reset.truncate (0);
htab_empty (data->inv_expr_tab);
data->inv_expr_id = 0;
@@ -6656,9 +6646,9 @@ tree_ssa_iv_optimize_finalize (struct ivopts_data *data)
BITMAP_FREE (data->relevant);
BITMAP_FREE (data->important_candidates);
- VEC_free (tree, heap, decl_rtl_to_reset);
- VEC_free (iv_use_p, heap, data->iv_uses);
- VEC_free (iv_cand_p, heap, data->iv_candidates);
+ decl_rtl_to_reset.release ();
+ data->iv_uses.release ();
+ data->iv_candidates.release ();
htab_delete (data->inv_expr_tab);
}
diff --git a/gcc/tree-ssa-loop-manip.c b/gcc/tree-ssa-loop-manip.c
index d80b91cc31d..e3d2f9c1614 100644
--- a/gcc/tree-ssa-loop-manip.c
+++ b/gcc/tree-ssa-loop-manip.c
@@ -173,7 +173,7 @@ compute_live_loop_exits (bitmap live_exits, bitmap use_blocks,
{
unsigned i;
bitmap_iterator bi;
- VEC (basic_block, heap) *worklist;
+ vec<basic_block> worklist;
struct loop *def_loop = def_bb->loop_father;
unsigned def_loop_depth = loop_depth (def_loop);
bitmap def_loop_exits;
@@ -181,7 +181,7 @@ compute_live_loop_exits (bitmap live_exits, bitmap use_blocks,
/* Normally the work list size is bounded by the number of basic
blocks in the largest loop. We don't know this number, but we
can be fairly sure that it will be relatively small. */
- worklist = VEC_alloc (basic_block, heap, MAX (8, n_basic_blocks / 128));
+ worklist.create (MAX (8, n_basic_blocks / 128));
EXECUTE_IF_SET_IN_BITMAP (use_blocks, 0, i, bi)
{
@@ -192,21 +192,21 @@ compute_live_loop_exits (bitmap live_exits, bitmap use_blocks,
if (! flow_loop_nested_p (use_loop, def_loop))
use_bb = find_sibling_superloop (use_loop, def_loop)->header;
if (bitmap_set_bit (live_exits, use_bb->index))
- VEC_safe_push (basic_block, heap, worklist, use_bb);
+ worklist.safe_push (use_bb);
}
/* Iterate until the worklist is empty. */
- while (! VEC_empty (basic_block, worklist))
+ while (! worklist.is_empty ())
{
edge e;
edge_iterator ei;
/* Pull a block off the worklist. */
- basic_block bb = VEC_pop (basic_block, worklist);
+ basic_block bb = worklist.pop ();
/* Make sure we have at least enough room in the work list
for all predecessors of this block. */
- VEC_reserve (basic_block, heap, worklist, EDGE_COUNT (bb->preds));
+ worklist.reserve (EDGE_COUNT (bb->preds));
/* For each predecessor block. */
FOR_EACH_EDGE (e, ei, bb->preds)
@@ -241,10 +241,10 @@ compute_live_loop_exits (bitmap live_exits, bitmap use_blocks,
if (pred_visited || dominated_by_p (CDI_DOMINATORS, pred, bb))
continue;
- VEC_quick_push (basic_block, worklist, pred);
+ worklist.quick_push (pred);
}
}
- VEC_free (basic_block, heap, worklist);
+ worklist.release ();
def_loop_exits = BITMAP_ALLOC (&loop_renamer_obstack);
for (struct loop *loop = def_loop;
@@ -343,11 +343,11 @@ get_loops_exits (bitmap *loop_exits)
FOR_EACH_LOOP (li, loop, 0)
{
- VEC(edge, heap) *exit_edges = get_loop_exit_edges (loop);
+ vec<edge> exit_edges = get_loop_exit_edges (loop);
loop_exits[loop->num] = BITMAP_ALLOC (&loop_renamer_obstack);
- FOR_EACH_VEC_ELT (edge, exit_edges, j, e)
+ FOR_EACH_VEC_ELT (exit_edges, j, e)
bitmap_set_bit (loop_exits[loop->num], e->dest->index);
- VEC_free (edge, heap, exit_edges);
+ exit_edges.release ();
}
}
@@ -741,7 +741,7 @@ copy_phi_node_args (unsigned first_new_block)
bool
gimple_duplicate_loop_to_header_edge (struct loop *loop, edge e,
unsigned int ndupl, sbitmap wont_exit,
- edge orig, VEC (edge, heap) **to_remove,
+ edge orig, vec<edge> *to_remove,
int flags)
{
unsigned first_new_block;
@@ -1038,7 +1038,7 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
unsigned new_est_niter, i, prob;
unsigned irr = loop_preheader_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP;
sbitmap wont_exit;
- VEC (edge, heap) *to_remove = NULL;
+ vec<edge> to_remove = vec<edge>();
est_niter = expected_loop_iterations (loop);
determine_exit_conditions (loop, desc, factor,
@@ -1181,12 +1181,12 @@ tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
free (wont_exit);
gcc_assert (ok);
- FOR_EACH_VEC_ELT (edge, to_remove, i, e)
+ FOR_EACH_VEC_ELT (to_remove, i, e)
{
ok = remove_path (e);
gcc_assert (ok);
}
- VEC_free (edge, heap, to_remove);
+ to_remove.release ();
update_ssa (TODO_update_ssa);
/* Ensure that the frequencies in the loop match the new estimated
diff --git a/gcc/tree-ssa-loop-niter.c b/gcc/tree-ssa-loop-niter.c
index 3936e60ac63..69b94c79972 100644
--- a/gcc/tree-ssa-loop-niter.c
+++ b/gcc/tree-ssa-loop-niter.c
@@ -1933,13 +1933,13 @@ tree
find_loop_niter (struct loop *loop, edge *exit)
{
unsigned i;
- VEC (edge, heap) *exits = get_loop_exit_edges (loop);
+ vec<edge> exits = get_loop_exit_edges (loop);
edge ex;
tree niter = NULL_TREE, aniter;
struct tree_niter_desc desc;
*exit = NULL;
- FOR_EACH_VEC_ELT (edge, exits, i, ex)
+ FOR_EACH_VEC_ELT (exits, i, ex)
{
if (!number_of_iterations_exit (loop, ex, &desc, false))
continue;
@@ -1984,7 +1984,7 @@ find_loop_niter (struct loop *loop, edge *exit)
continue;
}
}
- VEC_free (edge, heap, exits);
+ exits.release ();
return niter ? niter : chrec_dont_know;
}
@@ -2267,7 +2267,7 @@ tree
find_loop_niter_by_eval (struct loop *loop, edge *exit)
{
unsigned i;
- VEC (edge, heap) *exits = get_loop_exit_edges (loop);
+ vec<edge> exits = get_loop_exit_edges (loop);
edge ex;
tree niter = NULL_TREE, aniter;
@@ -2275,13 +2275,13 @@ find_loop_niter_by_eval (struct loop *loop, edge *exit)
/* Loops with multiple exits are expensive to handle and less important. */
if (!flag_expensive_optimizations
- && VEC_length (edge, exits) > 1)
+ && exits.length () > 1)
{
- VEC_free (edge, heap, exits);
+ exits.release ();
return chrec_dont_know;
}
- FOR_EACH_VEC_ELT (edge, exits, i, ex)
+ FOR_EACH_VEC_ELT (exits, i, ex)
{
if (!just_once_each_iteration_p (loop, ex->src))
continue;
@@ -2297,7 +2297,7 @@ find_loop_niter_by_eval (struct loop *loop, edge *exit)
niter = aniter;
*exit = ex;
}
- VEC_free (edge, heap, exits);
+ exits.release ();
return niter ? niter : chrec_dont_know;
}
@@ -2961,16 +2961,16 @@ double_int_cmp (const void *p1, const void *p2)
Lookup by binary search. */
int
-bound_index (VEC (double_int, heap) *bounds, double_int bound)
+bound_index (vec<double_int> bounds, double_int bound)
{
- unsigned int end = VEC_length (double_int, bounds);
+ unsigned int end = bounds.length ();
unsigned int begin = 0;
/* Find a matching index by means of a binary search. */
while (begin != end)
{
unsigned int middle = (begin + end) / 2;
- double_int index = VEC_index (double_int, bounds, middle);
+ double_int index = bounds[middle];
if (index == bound)
return middle;
@@ -2983,9 +2983,7 @@ bound_index (VEC (double_int, heap) *bounds, double_int bound)
}
/* Used to hold vector of queues of basic blocks bellow. */
-typedef VEC (basic_block, heap) *bb_queue;
-DEF_VEC_P(bb_queue);
-DEF_VEC_ALLOC_P(bb_queue,heap);
+typedef vec<basic_block> bb_queue;
/* We recorded loop bounds only for statements dominating loop latch (and thus
executed each loop iteration). If there are any bounds on statements not
@@ -2998,9 +2996,9 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
{
pointer_map_t *bb_bounds;
struct nb_iter_bound *elt;
- VEC (double_int, heap) *bounds = NULL;
- VEC (bb_queue, heap) *queues = NULL;
- bb_queue queue = NULL;
+ vec<double_int> bounds = vec<double_int>();
+ vec<bb_queue> queues = vec<bb_queue>();
+ bb_queue queue = bb_queue();
ptrdiff_t queue_index;
ptrdiff_t latch_index = 0;
pointer_map_t *block_priority;
@@ -3017,18 +3015,18 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
if (!loop->any_upper_bound
|| bound.ult (loop->nb_iterations_upper_bound))
- VEC_safe_push (double_int, heap, bounds, bound);
+ bounds.safe_push (bound);
}
/* Exit early if there is nothing to do. */
- if (!bounds)
+ if (!bounds.exists ())
return;
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, " Trying to walk loop body to reduce the bound.\n");
/* Sort the bounds in decreasing order. */
- qsort (VEC_address (double_int, bounds), VEC_length (double_int, bounds),
+ qsort (bounds.address (), bounds.length (),
sizeof (double_int), double_int_cmp);
/* For every basic block record the lowest bound that is guaranteed to
@@ -3066,7 +3064,7 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
To avoid the need for fibonaci heap on double ints we simply compress
double ints into indexes to BOUNDS array and then represent the queue
as arrays of queues for every index.
- Index of VEC_length (BOUNDS) means that the execution of given BB has
+ Index of BOUNDS.length() means that the execution of given BB has
no bounds determined.
VISITED is a pointer map translating basic block into smallest index
@@ -3074,18 +3072,17 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
latch_index = -1;
/* Start walk in loop header with index set to infinite bound. */
- queue_index = VEC_length (double_int, bounds);
- VEC_safe_grow_cleared (bb_queue, heap, queues, queue_index + 1);
- VEC_safe_push (basic_block, heap, queue, loop->header);
- VEC_replace (bb_queue, queues, queue_index, queue);
+ queue_index = bounds.length ();
+ queues.safe_grow_cleared (queue_index + 1);
+ queue.safe_push (loop->header);
+ queues[queue_index] = queue;
*pointer_map_insert (block_priority, loop->header) = (void *)queue_index;
for (; queue_index >= 0; queue_index--)
{
if (latch_index < queue_index)
{
- while (VEC_length (basic_block,
- VEC_index (bb_queue, queues, queue_index)))
+ while (queues[queue_index].length ())
{
basic_block bb;
ptrdiff_t bound_index = queue_index;
@@ -3093,8 +3090,8 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
edge e;
edge_iterator ei;
- queue = VEC_index (bb_queue, queues, queue_index);
- bb = VEC_pop (basic_block, queue);
+ queue = queues[queue_index];
+ bb = queue.pop ();
/* OK, we later inserted the BB with lower priority, skip it. */
if ((ptrdiff_t)*pointer_map_contains (block_priority, bb) > queue_index)
@@ -3131,32 +3128,30 @@ discover_iteration_bound_by_body_walk (struct loop *loop)
if (insert)
{
- bb_queue queue2 = VEC_index (bb_queue, queues, bound_index);
- VEC_safe_push (basic_block, heap, queue2, e->dest);
- VEC_replace (bb_queue, queues, bound_index, queue2);
+ bb_queue queue2 = queues[bound_index];
+ queue2.safe_push (e->dest);
+ queues[bound_index] = queue2;
}
}
}
}
else
- VEC_free (basic_block, heap, VEC_index (bb_queue, queues, queue_index));
+ queues[queue_index].release ();
}
gcc_assert (latch_index >= 0);
- if ((unsigned)latch_index < VEC_length (double_int, bounds))
+ if ((unsigned)latch_index < bounds.length ())
{
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Found better loop bound ");
- dump_double_int (dump_file,
- VEC_index (double_int, bounds, latch_index), true);
+ dump_double_int (dump_file, bounds[latch_index], true);
fprintf (dump_file, "\n");
}
- record_niter_bound (loop, VEC_index (double_int, bounds, latch_index),
- false, true);
+ record_niter_bound (loop, bounds[latch_index], false, true);
}
- VEC_free (bb_queue, heap, queues);
+ queues.release ();
pointer_map_destroy (bb_bounds);
pointer_map_destroy (block_priority);
}
@@ -3171,7 +3166,7 @@ maybe_lower_iteration_bound (struct loop *loop)
pointer_set_t *not_executed_last_iteration = NULL;
struct nb_iter_bound *elt;
bool found_exit = false;
- VEC (basic_block, heap) *queue = NULL;
+ vec<basic_block> queue = vec<basic_block>();
bitmap visited;
/* Collect all statements with interesting (i.e. lower than
@@ -3199,14 +3194,14 @@ maybe_lower_iteration_bound (struct loop *loop)
effects that may terminate the loop otherwise) without visiting
any of the statements known to have undefined effect on the last
iteration. */
- VEC_safe_push (basic_block, heap, queue, loop->header);
+ queue.safe_push (loop->header);
visited = BITMAP_ALLOC (NULL);
bitmap_set_bit (visited, loop->header->index);
found_exit = false;
do
{
- basic_block bb = VEC_pop (basic_block, queue);
+ basic_block bb = queue.pop ();
gimple_stmt_iterator gsi;
bool stmt_found = false;
@@ -3243,11 +3238,11 @@ maybe_lower_iteration_bound (struct loop *loop)
break;
}
if (bitmap_set_bit (visited, e->dest->index))
- VEC_safe_push (basic_block, heap, queue, e->dest);
+ queue.safe_push (e->dest);
}
}
}
- while (VEC_length (basic_block, queue) && !found_exit);
+ while (queue.length () && !found_exit);
/* If every path through the loop reach bounding statement before exit,
then we know the last iteration of the loop will have undefined effect
@@ -3262,7 +3257,7 @@ maybe_lower_iteration_bound (struct loop *loop)
false, true);
}
BITMAP_FREE (visited);
- VEC_free (basic_block, heap, queue);
+ queue.release ();
}
/* Records estimates on numbers of iterations of LOOP. If USE_UNDEFINED_P
@@ -3271,7 +3266,7 @@ maybe_lower_iteration_bound (struct loop *loop)
void
estimate_numbers_of_iterations_loop (struct loop *loop)
{
- VEC (edge, heap) *exits;
+ vec<edge> exits;
tree niter, type;
unsigned i;
struct tree_niter_desc niter_desc;
@@ -3289,7 +3284,7 @@ estimate_numbers_of_iterations_loop (struct loop *loop)
exits = get_loop_exit_edges (loop);
likely_exit = single_likely_exit (loop);
- FOR_EACH_VEC_ELT (edge, exits, i, ex)
+ FOR_EACH_VEC_ELT (exits, i, ex)
{
if (!number_of_iterations_exit (loop, ex, &niter_desc, false, false))
continue;
@@ -3304,7 +3299,7 @@ estimate_numbers_of_iterations_loop (struct loop *loop)
last_stmt (ex->src),
true, ex == likely_exit, true);
}
- VEC_free (edge, heap, exits);
+ exits.release ();
infer_loop_bounds_from_undefined (loop);
diff --git a/gcc/tree-ssa-loop-prefetch.c b/gcc/tree-ssa-loop-prefetch.c
index dcc65e19abb..975c0b610b2 100644
--- a/gcc/tree-ssa-loop-prefetch.c
+++ b/gcc/tree-ssa-loop-prefetch.c
@@ -1229,13 +1229,13 @@ mark_nontemporal_store (struct mem_ref *ref)
static void
emit_mfence_after_loop (struct loop *loop)
{
- VEC (edge, heap) *exits = get_loop_exit_edges (loop);
+ vec<edge> exits = get_loop_exit_edges (loop);
edge exit;
gimple call;
gimple_stmt_iterator bsi;
unsigned i;
- FOR_EACH_VEC_ELT (edge, exits, i, exit)
+ FOR_EACH_VEC_ELT (exits, i, exit)
{
call = gimple_build_call (FENCE_FOLLOWING_MOVNT, 0);
@@ -1249,7 +1249,7 @@ emit_mfence_after_loop (struct loop *loop)
gsi_insert_before (&bsi, call, GSI_NEW_STMT);
}
- VEC_free (edge, heap, exits);
+ exits.release ();
update_ssa (TODO_update_ssa_only_virtuals);
}
@@ -1267,16 +1267,16 @@ may_use_storent_in_loop_p (struct loop *loop)
is a suitable place for it at each of the loop exits. */
if (FENCE_FOLLOWING_MOVNT != NULL_TREE)
{
- VEC (edge, heap) *exits = get_loop_exit_edges (loop);
+ vec<edge> exits = get_loop_exit_edges (loop);
unsigned i;
edge exit;
- FOR_EACH_VEC_ELT (edge, exits, i, exit)
+ FOR_EACH_VEC_ELT (exits, i, exit)
if ((exit->flags & EDGE_ABNORMAL)
&& exit->dest == EXIT_BLOCK_PTR)
ret = false;
- VEC_free (edge, heap, exits);
+ exits.release ();
}
return ret;
@@ -1471,7 +1471,7 @@ self_reuse_distance (data_reference_p dr, unsigned *loop_sizes, unsigned n,
{
tree stride, access_fn;
HOST_WIDE_INT *strides, astride;
- VEC (tree, heap) *access_fns;
+ vec<tree> access_fns;
tree ref = DR_REF (dr);
unsigned i, ret = ~0u;
@@ -1490,7 +1490,7 @@ self_reuse_distance (data_reference_p dr, unsigned *loop_sizes, unsigned n,
strides = XCNEWVEC (HOST_WIDE_INT, n);
access_fns = DR_ACCESS_FNS (dr);
- FOR_EACH_VEC_ELT (tree, access_fns, i, access_fn)
+ FOR_EACH_VEC_ELT (access_fns, i, access_fn)
{
/* Keep track of the reference corresponding to the subscript, so that we
know its stride. */
@@ -1541,11 +1541,11 @@ determine_loop_nest_reuse (struct loop *loop, struct mem_ref_group *refs,
bool no_other_refs)
{
struct loop *nest, *aloop;
- VEC (data_reference_p, heap) *datarefs = NULL;
- VEC (ddr_p, heap) *dependences = NULL;
+ vec<data_reference_p> datarefs = vec<data_reference_p>();
+ vec<ddr_p> dependences = vec<ddr_p>();
struct mem_ref_group *gr;
struct mem_ref *ref, *refb;
- VEC (loop_p, heap) *vloops = NULL;
+ vec<loop_p> vloops = vec<loop_p>();
unsigned *loop_data_size;
unsigned i, j, n;
unsigned volume, dist, adist;
@@ -1574,7 +1574,7 @@ determine_loop_nest_reuse (struct loop *loop, struct mem_ref_group *refs,
We use this to estimate whether the reference is evicted from the
cache before its reuse. */
find_loop_nest (nest, &vloops);
- n = VEC_length (loop_p, vloops);
+ n = vloops.length ();
loop_data_size = XNEWVEC (unsigned, n);
volume = volume_of_references (refs);
i = n;
@@ -1586,7 +1586,7 @@ determine_loop_nest_reuse (struct loop *loop, struct mem_ref_group *refs,
if (volume > L2_CACHE_SIZE_BYTES)
continue;
- aloop = VEC_index (loop_p, vloops, i);
+ aloop = vloops[i];
vol = estimated_stmt_executions_int (aloop);
if (vol == -1)
vol = expected_loop_iterations (aloop);
@@ -1607,13 +1607,13 @@ determine_loop_nest_reuse (struct loop *loop, struct mem_ref_group *refs,
{
ref->reuse_distance = volume;
dr->aux = ref;
- VEC_safe_push (data_reference_p, heap, datarefs, dr);
+ datarefs.safe_push (dr);
}
else
no_other_refs = false;
}
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
{
dist = self_reuse_distance (dr, loop_data_size, n, loop);
ref = (struct mem_ref *) dr->aux;
@@ -1627,7 +1627,7 @@ determine_loop_nest_reuse (struct loop *loop, struct mem_ref_group *refs,
if (!compute_all_dependences (datarefs, &dependences, vloops, true))
return false;
- FOR_EACH_VEC_ELT (ddr_p, dependences, i, dep)
+ FOR_EACH_VEC_ELT (dependences, i, dep)
{
if (DDR_ARE_DEPENDENT (dep) == chrec_known)
continue;
diff --git a/gcc/tree-ssa-math-opts.c b/gcc/tree-ssa-math-opts.c
index 01b1d176a06..cfaa420ac5c 100644
--- a/gcc/tree-ssa-math-opts.c
+++ b/gcc/tree-ssa-math-opts.c
@@ -665,18 +665,18 @@ struct gimple_opt_pass pass_cse_reciprocals =
statements in the vector. */
static bool
-maybe_record_sincos (VEC(gimple, heap) **stmts,
+maybe_record_sincos (vec<gimple> *stmts,
basic_block *top_bb, gimple use_stmt)
{
basic_block use_bb = gimple_bb (use_stmt);
if (*top_bb
&& (*top_bb == use_bb
|| dominated_by_p (CDI_DOMINATORS, use_bb, *top_bb)))
- VEC_safe_push (gimple, heap, *stmts, use_stmt);
+ stmts->safe_push (use_stmt);
else if (!*top_bb
|| dominated_by_p (CDI_DOMINATORS, *top_bb, use_bb))
{
- VEC_safe_push (gimple, heap, *stmts, use_stmt);
+ stmts->safe_push (use_stmt);
*top_bb = use_bb;
}
else
@@ -701,7 +701,7 @@ execute_cse_sincos_1 (tree name)
tree fndecl, res, type;
gimple def_stmt, use_stmt, stmt;
int seen_cos = 0, seen_sin = 0, seen_cexpi = 0;
- VEC(gimple, heap) *stmts = NULL;
+ vec<gimple> stmts = vec<gimple>();
basic_block top_bb = NULL;
int i;
bool cfg_changed = false;
@@ -735,7 +735,7 @@ execute_cse_sincos_1 (tree name)
if (seen_cos + seen_sin + seen_cexpi <= 1)
{
- VEC_free(gimple, heap, stmts);
+ stmts.release ();
return false;
}
@@ -764,7 +764,7 @@ execute_cse_sincos_1 (tree name)
sincos_stats.inserted++;
/* And adjust the recorded old call sites. */
- for (i = 0; VEC_iterate(gimple, stmts, i, use_stmt); ++i)
+ for (i = 0; stmts.iterate (i, &use_stmt); ++i)
{
tree rhs = NULL;
fndecl = gimple_call_fndecl (use_stmt);
@@ -796,7 +796,7 @@ execute_cse_sincos_1 (tree name)
cfg_changed = true;
}
- VEC_free(gimple, heap, stmts);
+ stmts.release ();
return cfg_changed;
}
diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index ef4dba56b1b..c98bfda1c14 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -107,7 +107,7 @@ along with GCC; see the file COPYING3. If not see
#define opf_not_non_addressable (1 << 4)
/* Array for building all the use operands. */
-static VEC(tree,heap) *build_uses;
+static vec<tree> build_uses;
/* The built VDEF operand. */
static tree build_vdef;
@@ -182,7 +182,7 @@ init_ssa_operands (struct function *fn)
{
if (!n_initialized++)
{
- build_uses = VEC_alloc (tree, heap, 10);
+ build_uses.create (10);
build_vuse = NULL_TREE;
build_vdef = NULL_TREE;
bitmap_obstack_initialize (&operands_bitmap_obstack);
@@ -206,7 +206,7 @@ fini_ssa_operands (void)
if (!--n_initialized)
{
- VEC_free (tree, heap, build_uses);
+ build_uses.release ();
build_vdef = NULL_TREE;
build_vuse = NULL_TREE;
}
@@ -312,7 +312,7 @@ add_use_op (gimple stmt, tree *op, use_optype_p last)
/* Takes elements from build_defs and turns them into def operands of STMT.
- TODO -- Make build_defs VEC of tree *. */
+ TODO -- Make build_defs vec of tree *. */
static inline void
finalize_ssa_defs (gimple stmt)
@@ -351,7 +351,7 @@ finalize_ssa_defs (gimple stmt)
/* Takes elements from build_uses and turns them into use operands of STMT.
- TODO -- Make build_uses VEC of tree *. */
+ TODO -- Make build_uses vec of tree *. */
static inline void
finalize_ssa_uses (gimple stmt)
@@ -370,7 +370,7 @@ finalize_ssa_uses (gimple stmt)
if (oldvuse != (build_vuse != NULL_TREE
? build_vuse : build_vdef))
gimple_set_vuse (stmt, NULL_TREE);
- VEC_safe_insert (tree, heap, build_uses, 0, (tree)gimple_vuse_ptr (stmt));
+ build_uses.safe_insert (0, (tree)gimple_vuse_ptr (stmt));
}
new_list.next = NULL;
@@ -403,9 +403,9 @@ finalize_ssa_uses (gimple stmt)
}
/* Now create nodes for all the new nodes. */
- for (new_i = 0; new_i < VEC_length (tree, build_uses); new_i++)
+ for (new_i = 0; new_i < build_uses.length (); new_i++)
{
- tree *op = (tree *) VEC_index (tree, build_uses, new_i);
+ tree *op = (tree *) build_uses[new_i];
last = add_use_op (stmt, op, last);
}
@@ -422,7 +422,7 @@ cleanup_build_arrays (void)
{
build_vdef = NULL_TREE;
build_vuse = NULL_TREE;
- VEC_truncate (tree, build_uses, 0);
+ build_uses.truncate (0);
}
@@ -442,7 +442,7 @@ finalize_ssa_stmt_operands (gimple stmt)
static inline void
start_ssa_stmt_operands (void)
{
- gcc_assert (VEC_length (tree, build_uses) == 0);
+ gcc_assert (build_uses.length () == 0);
gcc_assert (build_vuse == NULL_TREE);
gcc_assert (build_vdef == NULL_TREE);
}
@@ -453,7 +453,7 @@ start_ssa_stmt_operands (void)
static inline void
append_use (tree *use_p)
{
- VEC_safe_push (tree, heap, build_uses, (tree) use_p);
+ build_uses.safe_push ((tree) use_p);
}
@@ -820,7 +820,7 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
gimple_set_has_volatile_ops (stmt, true);
for (idx = 0;
- VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
+ vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
idx++)
get_expr_operands (stmt, &ce->value, uflags);
@@ -1004,22 +1004,22 @@ verify_ssa_operands (gimple stmt)
FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
- FOR_EACH_VEC_ELT (tree, build_uses, i, use)
+ FOR_EACH_VEC_ELT (build_uses, i, use)
{
if (use_p->use == (tree *)use)
{
- VEC_replace (tree, build_uses, i, NULL_TREE);
+ build_uses[i] = NULL_TREE;
break;
}
}
- if (i == VEC_length (tree, build_uses))
+ if (i == build_uses.length ())
{
error ("excess use operand for stmt");
debug_generic_expr (USE_FROM_PTR (use_p));
return true;
}
}
- FOR_EACH_VEC_ELT (tree, build_uses, i, use)
+ FOR_EACH_VEC_ELT (build_uses, i, use)
if (use != NULL_TREE)
{
error ("use operand missing for stmt");
@@ -1080,7 +1080,7 @@ update_stmt_operands (gimple stmt)
split_bbs_on_noreturn_calls during cfg cleanup. */
if (is_gimple_call (stmt)
&& gimple_call_noreturn_p (stmt))
- VEC_safe_push (gimple, gc, MODIFIED_NORETURN_CALLS (cfun), stmt);
+ vec_safe_push (MODIFIED_NORETURN_CALLS (cfun), stmt);
gcc_assert (gimple_modified_p (stmt));
build_ssa_operands (stmt);
diff --git a/gcc/tree-ssa-phiopt.c b/gcc/tree-ssa-phiopt.c
index 3856b92ac0d..c81f65c61ae 100644
--- a/gcc/tree-ssa-phiopt.c
+++ b/gcc/tree-ssa-phiopt.c
@@ -1574,15 +1574,15 @@ cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
{
gimple then_assign = last_and_only_stmt (then_bb);
gimple else_assign = last_and_only_stmt (else_bb);
- VEC (data_reference_p, heap) *then_datarefs, *else_datarefs;
- VEC (ddr_p, heap) *then_ddrs, *else_ddrs;
+ vec<data_reference_p> then_datarefs, else_datarefs;
+ vec<ddr_p> then_ddrs, else_ddrs;
gimple then_store, else_store;
bool found, ok = false, res;
struct data_dependence_relation *ddr;
data_reference_p then_dr, else_dr;
int i, j;
tree then_lhs, else_lhs;
- VEC (gimple, heap) *then_stores, *else_stores;
+ vec<gimple> then_stores, else_stores;
basic_block blocks[3];
if (MAX_STORES_TO_SINK == 0)
@@ -1594,14 +1594,14 @@ cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
then_assign, else_assign);
/* Find data references. */
- then_datarefs = VEC_alloc (data_reference_p, heap, 1);
- else_datarefs = VEC_alloc (data_reference_p, heap, 1);
+ then_datarefs.create (1);
+ else_datarefs.create (1);
if ((find_data_references_in_bb (NULL, then_bb, &then_datarefs)
== chrec_dont_know)
- || !VEC_length (data_reference_p, then_datarefs)
+ || !then_datarefs.length ()
|| (find_data_references_in_bb (NULL, else_bb, &else_datarefs)
== chrec_dont_know)
- || !VEC_length (data_reference_p, else_datarefs))
+ || !else_datarefs.length ())
{
free_data_refs (then_datarefs);
free_data_refs (else_datarefs);
@@ -1609,9 +1609,9 @@ cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
}
/* Find pairs of stores with equal LHS. */
- then_stores = VEC_alloc (gimple, heap, 1);
- else_stores = VEC_alloc (gimple, heap, 1);
- FOR_EACH_VEC_ELT (data_reference_p, then_datarefs, i, then_dr)
+ then_stores.create (1);
+ else_stores.create (1);
+ FOR_EACH_VEC_ELT (then_datarefs, i, then_dr)
{
if (DR_IS_READ (then_dr))
continue;
@@ -1620,7 +1620,7 @@ cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
then_lhs = gimple_get_lhs (then_store);
found = false;
- FOR_EACH_VEC_ELT (data_reference_p, else_datarefs, j, else_dr)
+ FOR_EACH_VEC_ELT (else_datarefs, j, else_dr)
{
if (DR_IS_READ (else_dr))
continue;
@@ -1638,33 +1638,35 @@ cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
if (!found)
continue;
- VEC_safe_push (gimple, heap, then_stores, then_store);
- VEC_safe_push (gimple, heap, else_stores, else_store);
+ then_stores.safe_push (then_store);
+ else_stores.safe_push (else_store);
}
/* No pairs of stores found. */
- if (!VEC_length (gimple, then_stores)
- || VEC_length (gimple, then_stores) > (unsigned) MAX_STORES_TO_SINK)
+ if (!then_stores.length ()
+ || then_stores.length () > (unsigned) MAX_STORES_TO_SINK)
{
free_data_refs (then_datarefs);
free_data_refs (else_datarefs);
- VEC_free (gimple, heap, then_stores);
- VEC_free (gimple, heap, else_stores);
+ then_stores.release ();
+ else_stores.release ();
return false;
}
/* Compute and check data dependencies in both basic blocks. */
- then_ddrs = VEC_alloc (ddr_p, heap, 1);
- else_ddrs = VEC_alloc (ddr_p, heap, 1);
- if (!compute_all_dependences (then_datarefs, &then_ddrs, NULL, false)
- || !compute_all_dependences (else_datarefs, &else_ddrs, NULL, false))
+ then_ddrs.create (1);
+ else_ddrs.create (1);
+ if (!compute_all_dependences (then_datarefs, &then_ddrs,
+ vec<loop_p>(), false)
+ || !compute_all_dependences (else_datarefs, &else_ddrs,
+ vec<loop_p>(), false))
{
free_dependence_relations (then_ddrs);
free_dependence_relations (else_ddrs);
free_data_refs (then_datarefs);
free_data_refs (else_datarefs);
- VEC_free (gimple, heap, then_stores);
- VEC_free (gimple, heap, else_stores);
+ then_stores.release ();
+ else_stores.release ();
return false;
}
blocks[0] = then_bb;
@@ -1674,7 +1676,7 @@ cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
/* Check that there are no read-after-write or write-after-write dependencies
in THEN_BB. */
- FOR_EACH_VEC_ELT (ddr_p, then_ddrs, i, ddr)
+ FOR_EACH_VEC_ELT (then_ddrs, i, ddr)
{
struct data_reference *dra = DDR_A (ddr);
struct data_reference *drb = DDR_B (ddr);
@@ -1690,15 +1692,15 @@ cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
free_dependence_relations (else_ddrs);
free_data_refs (then_datarefs);
free_data_refs (else_datarefs);
- VEC_free (gimple, heap, then_stores);
- VEC_free (gimple, heap, else_stores);
+ then_stores.release ();
+ else_stores.release ();
return false;
}
}
/* Check that there are no read-after-write or write-after-write dependencies
in ELSE_BB. */
- FOR_EACH_VEC_ELT (ddr_p, else_ddrs, i, ddr)
+ FOR_EACH_VEC_ELT (else_ddrs, i, ddr)
{
struct data_reference *dra = DDR_A (ddr);
struct data_reference *drb = DDR_B (ddr);
@@ -1714,16 +1716,16 @@ cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
free_dependence_relations (else_ddrs);
free_data_refs (then_datarefs);
free_data_refs (else_datarefs);
- VEC_free (gimple, heap, then_stores);
- VEC_free (gimple, heap, else_stores);
+ then_stores.release ();
+ else_stores.release ();
return false;
}
}
/* Sink stores with same LHS. */
- FOR_EACH_VEC_ELT (gimple, then_stores, i, then_store)
+ FOR_EACH_VEC_ELT (then_stores, i, then_store)
{
- else_store = VEC_index (gimple, else_stores, i);
+ else_store = else_stores[i];
res = cond_if_else_store_replacement_1 (then_bb, else_bb, join_bb,
then_store, else_store);
ok = ok || res;
@@ -1733,8 +1735,8 @@ cond_if_else_store_replacement (basic_block then_bb, basic_block else_bb,
free_dependence_relations (else_ddrs);
free_data_refs (then_datarefs);
free_data_refs (else_datarefs);
- VEC_free (gimple, heap, then_stores);
- VEC_free (gimple, heap, else_stores);
+ then_stores.release ();
+ else_stores.release ();
return ok;
}
diff --git a/gcc/tree-ssa-phiprop.c b/gcc/tree-ssa-phiprop.c
index 38fcf5de39f..fdd19f0e714 100644
--- a/gcc/tree-ssa-phiprop.c
+++ b/gcc/tree-ssa-phiprop.c
@@ -369,7 +369,7 @@ next:;
static unsigned int
tree_ssa_phiprop (void)
{
- VEC(basic_block, heap) *bbs;
+ vec<basic_block> bbs;
struct phiprop_d *phivn;
bool did_something = false;
basic_block bb;
@@ -385,14 +385,14 @@ tree_ssa_phiprop (void)
/* Walk the dominator tree in preorder. */
bbs = get_all_dominated_blocks (CDI_DOMINATORS,
single_succ (ENTRY_BLOCK_PTR));
- FOR_EACH_VEC_ELT (basic_block, bbs, i, bb)
+ FOR_EACH_VEC_ELT (bbs, i, bb)
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
did_something |= propagate_with_phi (bb, gsi_stmt (gsi), phivn, n);
if (did_something)
gsi_commit_edge_inserts ();
- VEC_free (basic_block, heap, bbs);
+ bbs.release ();
free (phivn);
return 0;
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index 6876dd4c5b7..00b88bfe03c 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -233,11 +233,9 @@ pre_expr_d::hash (const value_type *e)
static unsigned int next_expression_id;
/* Mapping from expression to id number we can use in bitmap sets. */
-DEF_VEC_P (pre_expr);
-DEF_VEC_ALLOC_P (pre_expr, heap);
-static VEC(pre_expr, heap) *expressions;
+static vec<pre_expr> expressions;
static hash_table <pre_expr_d> expression_to_id;
-static VEC(unsigned, heap) *name_to_id;
+static vec<unsigned> name_to_id;
/* Allocate an expression id for EXPR. */
@@ -248,18 +246,18 @@ alloc_expression_id (pre_expr expr)
/* Make sure we won't overflow. */
gcc_assert (next_expression_id + 1 > next_expression_id);
expr->id = next_expression_id++;
- VEC_safe_push (pre_expr, heap, expressions, expr);
+ expressions.safe_push (expr);
if (expr->kind == NAME)
{
unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
- /* VEC_safe_grow_cleared allocates no headroom. Avoid frequent
- re-allocations by using VEC_reserve upfront. There is no
- VEC_quick_grow_cleared unfortunately. */
- unsigned old_len = VEC_length (unsigned, name_to_id);
- VEC_reserve (unsigned, heap, name_to_id, num_ssa_names - old_len);
- VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
- gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
- VEC_replace (unsigned, name_to_id, version, expr->id);
+ /* vec::safe_grow_cleared allocates no headroom. Avoid frequent
+ re-allocations by using vec::reserve upfront. There is no
+ vec::quick_grow_cleared unfortunately. */
+ unsigned old_len = name_to_id.length ();
+ name_to_id.reserve (num_ssa_names - old_len);
+ name_to_id.safe_grow_cleared (num_ssa_names);
+ gcc_assert (name_to_id[version] == 0);
+ name_to_id[version] = expr->id;
}
else
{
@@ -286,9 +284,9 @@ lookup_expression_id (const pre_expr expr)
if (expr->kind == NAME)
{
unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
- if (VEC_length (unsigned, name_to_id) <= version)
+ if (name_to_id.length () <= version)
return 0;
- return VEC_index (unsigned, name_to_id, version);
+ return name_to_id[version];
}
else
{
@@ -316,7 +314,7 @@ get_or_alloc_expression_id (pre_expr expr)
static inline pre_expr
expression_for_id (unsigned int id)
{
- return VEC_index (pre_expr, expressions, id);
+ return expressions[id];
}
/* Free the expression id field in all of our expressions,
@@ -325,7 +323,7 @@ expression_for_id (unsigned int id)
static void
clear_expression_ids (void)
{
- VEC_free (pre_expr, heap, expressions);
+ expressions.release ();
}
static alloc_pool pre_expr_pool;
@@ -368,7 +366,7 @@ typedef struct bitmap_set
EXECUTE_IF_SET_IN_BITMAP(&(set)->values, 0, (id), (bi))
/* Mapping from value id to expressions with that value_id. */
-static VEC(bitmap, heap) *value_expressions;
+static vec<bitmap> value_expressions;
/* Sets that we need to keep track of. */
typedef struct bb_bitmap_sets
@@ -583,16 +581,16 @@ add_to_value (unsigned int v, pre_expr e)
gcc_checking_assert (get_expr_value_id (e) == v);
- if (v >= VEC_length (bitmap, value_expressions))
+ if (v >= value_expressions.length ())
{
- VEC_safe_grow_cleared (bitmap, heap, value_expressions, v + 1);
+ value_expressions.safe_grow_cleared (v + 1);
}
- set = VEC_index (bitmap, value_expressions, v);
+ set = value_expressions[v];
if (!set)
{
set = BITMAP_ALLOC (&grand_bitmap_obstack);
- VEC_replace (bitmap, value_expressions, v, set);
+ value_expressions[v] = set;
}
bitmap_set_bit (set, get_or_alloc_expression_id (e));
@@ -645,7 +643,7 @@ sccvn_valnum_from_value_id (unsigned int val)
{
bitmap_iterator bi;
unsigned int i;
- bitmap exprset = VEC_index (bitmap, value_expressions, val);
+ bitmap exprset = value_expressions[val];
EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
{
pre_expr vexpr = expression_for_id (i);
@@ -712,15 +710,15 @@ bitmap_set_free (bitmap_set_t set)
/* Generate an topological-ordered array of bitmap set SET. */
-static VEC(pre_expr, heap) *
+static vec<pre_expr>
sorted_array_from_bitmap_set (bitmap_set_t set)
{
unsigned int i, j;
bitmap_iterator bi, bj;
- VEC(pre_expr, heap) *result;
+ vec<pre_expr> result;
/* Pre-allocate roughly enough space for the array. */
- result = VEC_alloc (pre_expr, heap, bitmap_count_bits (&set->values));
+ result.create (bitmap_count_bits (&set->values));
FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
{
@@ -734,11 +732,11 @@ sorted_array_from_bitmap_set (bitmap_set_t set)
If this is somehow a significant lose for some cases, we can
choose which set to walk based on the set size. */
- bitmap exprset = VEC_index (bitmap, value_expressions, i);
+ bitmap exprset = value_expressions[i];
EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bj)
{
if (bitmap_bit_p (&set->expressions, j))
- VEC_safe_push (pre_expr, heap, result, expression_for_id (j));
+ result.safe_push (expression_for_id (j));
}
}
@@ -860,7 +858,7 @@ bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
5-10x faster than walking the bitmap. If this is somehow a
significant lose for some cases, we can choose which set to walk
based on the set size. */
- exprset = VEC_index (bitmap, value_expressions, lookfor);
+ exprset = value_expressions[lookfor];
EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
{
if (bitmap_clear_bit (&set->expressions, i))
@@ -947,7 +945,7 @@ print_pre_expr (FILE *outfile, const pre_expr expr)
vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
fprintf (outfile, "{");
for (i = 0;
- VEC_iterate (vn_reference_op_s, ref->operands, i, vro);
+ ref->operands.iterate (i, &vro);
i++)
{
bool closebrace = false;
@@ -977,7 +975,7 @@ print_pre_expr (FILE *outfile, const pre_expr expr)
}
if (closebrace)
fprintf (outfile, ">");
- if (i != VEC_length (vn_reference_op_s, ref->operands) - 1)
+ if (i != ref->operands.length () - 1)
fprintf (outfile, ",");
}
fprintf (outfile, "}");
@@ -1056,7 +1054,7 @@ debug_bitmap_sets_for (basic_block bb)
static void
print_value_expressions (FILE *outfile, unsigned int val)
{
- bitmap set = VEC_index (bitmap, value_expressions, val);
+ bitmap set = value_expressions[val];
if (set)
{
bitmap_set x;
@@ -1111,7 +1109,7 @@ get_constant_for_value_id (unsigned int v)
{
unsigned int i;
bitmap_iterator bi;
- bitmap exprset = VEC_index (bitmap, value_expressions, v);
+ bitmap exprset = value_expressions[v];
EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
{
@@ -1262,7 +1260,7 @@ fully_constant_expression (pre_expr e)
in case the new vuse doesn't change the value id of the OPERANDS. */
static tree
-translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands,
+translate_vuse_through_block (vec<vn_reference_op_s> operands,
alias_set_type set, tree type, tree vuse,
basic_block phiblock,
basic_block block, bool *same_valid)
@@ -1393,7 +1391,7 @@ get_representative_for (const pre_expr e)
and pick out an SSA_NAME. */
unsigned int i;
bitmap_iterator bi;
- bitmap exprs = VEC_index (bitmap, value_expressions, value_id);
+ bitmap exprs = value_expressions[value_id];
EXECUTE_IF_SET_IN_BITMAP (exprs, 0, i, bi)
{
pre_expr rep = expression_for_id (i);
@@ -1512,9 +1510,7 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
else
{
new_val_id = get_next_value_id ();
- VEC_safe_grow_cleared (bitmap, heap,
- value_expressions,
- get_max_value_id() + 1);
+ value_expressions.safe_grow_cleared (get_max_value_id() + 1);
nary = vn_nary_op_insert_pieces (newnary->length,
newnary->opcode,
newnary->type,
@@ -1535,17 +1531,18 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
case REFERENCE:
{
vn_reference_t ref = PRE_EXPR_REFERENCE (expr);
- VEC (vn_reference_op_s, heap) *operands = ref->operands;
+ vec<vn_reference_op_s> operands = ref->operands;
tree vuse = ref->vuse;
tree newvuse = vuse;
- VEC (vn_reference_op_s, heap) *newoperands = NULL;
+ vec<vn_reference_op_s> newoperands
+ = vec<vn_reference_op_s>();
bool changed = false, same_valid = true;
unsigned int i, j, n;
vn_reference_op_t operand;
vn_reference_t newref;
for (i = 0, j = 0;
- VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++)
+ operands.iterate (i, &operand); i++, j++)
{
pre_expr opresult;
pre_expr leader;
@@ -1593,12 +1590,11 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
}
if (n != 3)
{
- if (newoperands)
- VEC_free (vn_reference_op_s, heap, newoperands);
+ newoperands.release ();
return NULL;
}
- if (!newoperands)
- newoperands = VEC_copy (vn_reference_op_s, heap, operands);
+ if (!newoperands.exists ())
+ newoperands = operands.copy ();
/* We may have changed from an SSA_NAME to a constant */
if (newop.opcode == SSA_NAME && TREE_CODE (op[0]) != SSA_NAME)
newop.opcode = TREE_CODE (op[0]);
@@ -1620,18 +1616,16 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
if (off.fits_shwi ())
newop.off = off.low;
}
- VEC_replace (vn_reference_op_s, newoperands, j, newop);
+ newoperands[j] = newop;
/* If it transforms from an SSA_NAME to an address, fold with
a preceding indirect reference. */
if (j > 0 && op[0] && TREE_CODE (op[0]) == ADDR_EXPR
- && VEC_index (vn_reference_op_s,
- newoperands, j - 1).opcode == MEM_REF)
+ && newoperands[j - 1].opcode == MEM_REF)
vn_reference_fold_indirect (&newoperands, &j);
}
- if (i != VEC_length (vn_reference_op_s, operands))
+ if (i != operands.length ())
{
- if (newoperands)
- VEC_free (vn_reference_op_s, heap, newoperands);
+ newoperands.release ();
return NULL;
}
@@ -1643,7 +1637,7 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
&same_valid);
if (newvuse == NULL_TREE)
{
- VEC_free (vn_reference_op_s, heap, newoperands);
+ newoperands.release ();
return NULL;
}
}
@@ -1658,7 +1652,7 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
newoperands,
&newref, VN_WALK);
if (result)
- VEC_free (vn_reference_op_s, heap, newoperands);
+ newoperands.release ();
/* We can always insert constants, so if we have a partial
redundant constant load of another type try to translate it
@@ -1686,7 +1680,7 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
else if (!result && newref
&& !useless_type_conversion_p (ref->type, newref->type))
{
- VEC_free (vn_reference_op_s, heap, newoperands);
+ newoperands.release ();
return NULL;
}
@@ -1709,9 +1703,7 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
if (changed || !same_valid)
{
new_val_id = get_next_value_id ();
- VEC_safe_grow_cleared (bitmap, heap,
- value_expressions,
- get_max_value_id() + 1);
+ value_expressions.safe_grow_cleared(get_max_value_id() + 1);
}
else
new_val_id = ref->value_id;
@@ -1719,7 +1711,7 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
ref->type,
newoperands,
result, new_val_id);
- newoperands = NULL;
+ newoperands.create (0);
PRE_EXPR_REFERENCE (expr) = newref;
constant = fully_constant_expression (expr);
if (constant != expr)
@@ -1728,7 +1720,7 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
}
add_to_value (new_val_id, expr);
}
- VEC_free (vn_reference_op_s, heap, newoperands);
+ newoperands.release ();
return expr;
}
break;
@@ -1807,7 +1799,7 @@ static void
phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
basic_block phiblock)
{
- VEC (pre_expr, heap) *exprs;
+ vec<pre_expr> exprs;
pre_expr expr;
int i;
@@ -1818,7 +1810,7 @@ phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
}
exprs = sorted_array_from_bitmap_set (set);
- FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
+ FOR_EACH_VEC_ELT (exprs, i, expr)
{
pre_expr translated;
translated = phi_translate (expr, set, NULL, pred, phiblock);
@@ -1834,7 +1826,7 @@ phi_translate_set (bitmap_set_t dest, bitmap_set_t set, basic_block pred,
else
bitmap_value_insert_into_set (dest, translated);
}
- VEC_free (pre_expr, heap, exprs);
+ exprs.release ();
}
/* Find the leader for a value (i.e., the name representing that
@@ -1849,7 +1841,7 @@ bitmap_find_leader (bitmap_set_t set, unsigned int val)
{
unsigned int i;
bitmap_iterator bi;
- bitmap exprset = VEC_index (bitmap, value_expressions, val);
+ bitmap exprset = value_expressions[val];
EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
{
@@ -1873,7 +1865,7 @@ bitmap_find_leader (bitmap_set_t set, unsigned int val)
choose which set to walk based on which set is smaller. */
unsigned int i;
bitmap_iterator bi;
- bitmap exprset = VEC_index (bitmap, value_expressions, val);
+ bitmap exprset = value_expressions[val];
EXECUTE_IF_AND_IN_BITMAP (exprset, &set->expressions, 0, i, bi)
return expression_for_id (i);
@@ -2007,7 +1999,7 @@ valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
vn_reference_op_t vro;
unsigned int i;
- FOR_EACH_VEC_ELT (vn_reference_op_s, ref->operands, i, vro)
+ FOR_EACH_VEC_ELT (ref->operands, i, vro)
{
if (!op_valid_in_sets (set1, set2, vro->op0)
|| !op_valid_in_sets (set1, set2, vro->op1)
@@ -2030,16 +2022,16 @@ valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr,
static void
dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
{
- VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set1);
+ vec<pre_expr> exprs = sorted_array_from_bitmap_set (set1);
pre_expr expr;
int i;
- FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
+ FOR_EACH_VEC_ELT (exprs, i, expr)
{
if (!valid_in_sets (set1, set2, expr, block))
bitmap_remove_from_set (set1, expr);
}
- VEC_free (pre_expr, heap, exprs);
+ exprs.release ();
}
/* Clean the set of expressions that are no longer valid in SET. This
@@ -2049,16 +2041,16 @@ dependent_clean (bitmap_set_t set1, bitmap_set_t set2, basic_block block)
static void
clean (bitmap_set_t set, basic_block block)
{
- VEC (pre_expr, heap) *exprs = sorted_array_from_bitmap_set (set);
+ vec<pre_expr> exprs = sorted_array_from_bitmap_set (set);
pre_expr expr;
int i;
- FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
+ FOR_EACH_VEC_ELT (exprs, i, expr)
{
if (!valid_in_sets (set, NULL, expr, block))
bitmap_remove_from_set (set, expr);
}
- VEC_free (pre_expr, heap, exprs);
+ exprs.release ();
}
/* Clean the set of expressions that are no longer valid in SET because
@@ -2198,18 +2190,18 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
phis to translate through. */
else
{
- VEC(basic_block, heap) * worklist;
+ vec<basic_block> worklist;
size_t i;
basic_block bprime, first = NULL;
- worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
+ worklist.create (EDGE_COUNT (block->succs));
FOR_EACH_EDGE (e, ei, block->succs)
{
if (!first
&& BB_VISITED (e->dest))
first = e->dest;
else if (BB_VISITED (e->dest))
- VEC_quick_push (basic_block, worklist, e->dest);
+ worklist.quick_push (e->dest);
}
/* Of multiple successors we have to have visited one already. */
@@ -2219,7 +2211,7 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
BB_VISITED (block) = 0;
BB_DEFERRED (block) = 1;
changed = true;
- VEC_free (basic_block, heap, worklist);
+ worklist.release ();
goto maybe_dump_sets;
}
@@ -2228,7 +2220,7 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
else
bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
- FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
+ FOR_EACH_VEC_ELT (worklist, i, bprime)
{
if (!gimple_seq_empty_p (phi_nodes (bprime)))
{
@@ -2240,7 +2232,7 @@ compute_antic_aux (basic_block block, bool block_has_abnormal_pred_edge)
else
bitmap_set_and (ANTIC_OUT, ANTIC_IN (bprime));
}
- VEC_free (basic_block, heap, worklist);
+ worklist.release ();
}
/* Prune expressions that are clobbered in block and thus become
@@ -2362,20 +2354,20 @@ compute_partial_antic_aux (basic_block block,
them. */
else
{
- VEC(basic_block, heap) * worklist;
+ vec<basic_block> worklist;
size_t i;
basic_block bprime;
- worklist = VEC_alloc (basic_block, heap, EDGE_COUNT (block->succs));
+ worklist.create (EDGE_COUNT (block->succs));
FOR_EACH_EDGE (e, ei, block->succs)
{
if (e->flags & EDGE_DFS_BACK)
continue;
- VEC_quick_push (basic_block, worklist, e->dest);
+ worklist.quick_push (e->dest);
}
- if (VEC_length (basic_block, worklist) > 0)
+ if (worklist.length () > 0)
{
- FOR_EACH_VEC_ELT (basic_block, worklist, i, bprime)
+ FOR_EACH_VEC_ELT (worklist, i, bprime)
{
unsigned int i;
bitmap_iterator bi;
@@ -2398,7 +2390,7 @@ compute_partial_antic_aux (basic_block block,
expression_for_id (i));
}
}
- VEC_free (basic_block, heap, worklist);
+ worklist.release ();
}
/* Prune expressions that are clobbered in block and thus become
@@ -2559,8 +2551,7 @@ static tree
create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
unsigned int *operand, gimple_seq *stmts)
{
- vn_reference_op_t currop = &VEC_index (vn_reference_op_s, ref->operands,
- *operand);
+ vn_reference_op_t currop = &ref->operands[*operand];
tree genop;
++*operand;
switch (currop->opcode)
@@ -2576,9 +2567,8 @@ create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
fn = find_or_generate_expression (block, currop->op0, stmts);
if (currop->op1)
sc = find_or_generate_expression (block, currop->op1, stmts);
- args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
- ref->operands) - 1);
- while (*operand < VEC_length (vn_reference_op_s, ref->operands))
+ args = XNEWVEC (tree, ref->operands.length () - 1);
+ while (*operand < ref->operands.length ())
{
args[nargs] = create_component_ref_by_pieces_1 (block, ref,
operand, stmts);
@@ -2618,8 +2608,7 @@ create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
case TARGET_MEM_REF:
{
tree genop0 = NULL_TREE, genop1 = NULL_TREE;
- vn_reference_op_t nextop = &VEC_index (vn_reference_op_s, ref->operands,
- ++*operand);
+ vn_reference_op_t nextop = &ref->operands[++*operand];
tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
stmts);
if (currop->op0)
@@ -2789,7 +2778,7 @@ find_or_generate_expression (basic_block block, tree op, gimple_seq *stmts)
}
/* It must be a complex expression, so generate it recursively. */
- bitmap exprset = VEC_index (bitmap, value_expressions, lookfor);
+ bitmap exprset = value_expressions[lookfor];
bitmap_iterator bi;
unsigned int i;
EXECUTE_IF_SET_IN_BITMAP (exprset, 0, i, bi)
@@ -2874,7 +2863,7 @@ create_expression_by_pieces (basic_block block, pre_expr expr,
}
if (nary->opcode == CONSTRUCTOR)
{
- VEC(constructor_elt,gc) *elts = NULL;
+ vec<constructor_elt, va_gc> *elts = NULL;
for (i = 0; i < nary->length; ++i)
CONSTRUCTOR_APPEND_ELT (elts, NULL_TREE, genop[i]);
folded = build_constructor (nary->type, elts);
@@ -2990,7 +2979,7 @@ static bool
inhibit_phi_insertion (basic_block bb, pre_expr expr)
{
vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
- VEC (vn_reference_op_s, heap) *ops = vr->operands;
+ vec<vn_reference_op_s> ops = vr->operands;
vn_reference_op_t op;
unsigned i;
@@ -3002,7 +2991,7 @@ inhibit_phi_insertion (basic_block bb, pre_expr expr)
memory reference is a simple induction variable. In other
cases the vectorizer won't do anything anyway (either it's
loop invariant or a complicated expression). */
- FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
+ FOR_EACH_VEC_ELT (ops, i, op)
{
switch (op->opcode)
{
@@ -3048,7 +3037,7 @@ inhibit_phi_insertion (basic_block bb, pre_expr expr)
static bool
insert_into_preds_of_block (basic_block block, unsigned int exprnum,
- VEC(pre_expr, heap) *avail)
+ vec<pre_expr> avail)
{
pre_expr expr = expression_for_id (exprnum);
pre_expr newphi;
@@ -3089,7 +3078,7 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
gimple_seq stmts = NULL;
tree builtexpr;
bprime = pred->src;
- eprime = VEC_index (pre_expr, avail, pred->dest_idx);
+ eprime = avail[pred->dest_idx];
if (eprime->kind != NAME && eprime->kind != CONSTANT)
{
@@ -3097,8 +3086,7 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
&stmts, type);
gcc_assert (!(pred->flags & EDGE_ABNORMAL));
gsi_insert_seq_on_edge (pred, stmts);
- VEC_replace (pre_expr, avail, pred->dest_idx,
- get_or_alloc_expr_for_name (builtexpr));
+ avail[pred->dest_idx] = get_or_alloc_expr_for_name (builtexpr);
insertions = true;
}
else if (eprime->kind == CONSTANT)
@@ -3136,13 +3124,13 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
}
gsi_insert_seq_on_edge (pred, stmts);
}
- VEC_replace (pre_expr, avail, pred->dest_idx,
- get_or_alloc_expr_for_name (forcedexpr));
+ avail[pred->dest_idx]
+ = get_or_alloc_expr_for_name (forcedexpr);
}
}
else
- VEC_replace (pre_expr, avail, pred->dest_idx,
- get_or_alloc_expr_for_constant (builtexpr));
+ avail[pred->dest_idx]
+ = get_or_alloc_expr_for_constant (builtexpr);
}
}
else if (eprime->kind == NAME)
@@ -3181,8 +3169,7 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
}
gsi_insert_seq_on_edge (pred, stmts);
}
- VEC_replace (pre_expr, avail, pred->dest_idx,
- get_or_alloc_expr_for_name (forcedexpr));
+ avail[pred->dest_idx] = get_or_alloc_expr_for_name (forcedexpr);
}
}
}
@@ -3207,7 +3194,7 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (temp));
FOR_EACH_EDGE (pred, ei, block->preds)
{
- pre_expr ae = VEC_index (pre_expr, avail, pred->dest_idx);
+ pre_expr ae = avail[pred->dest_idx];
gcc_assert (get_expr_type (ae) == type
|| useless_type_conversion_p (type, get_expr_type (ae)));
if (ae->kind == CONSTANT)
@@ -3273,15 +3260,15 @@ static bool
do_regular_insertion (basic_block block, basic_block dom)
{
bool new_stuff = false;
- VEC (pre_expr, heap) *exprs;
+ vec<pre_expr> exprs;
pre_expr expr;
- VEC (pre_expr, heap) *avail = NULL;
+ vec<pre_expr> avail = vec<pre_expr>();
int i;
exprs = sorted_array_from_bitmap_set (ANTIC_IN (block));
- VEC_safe_grow (pre_expr, heap, avail, EDGE_COUNT (block->preds));
+ avail.safe_grow (EDGE_COUNT (block->preds));
- FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
+ FOR_EACH_VEC_ELT (exprs, i, expr)
{
if (expr->kind != NAME)
{
@@ -3329,7 +3316,7 @@ do_regular_insertion (basic_block block, basic_block dom)
rest of the results are. */
if (eprime == NULL)
{
- VEC_replace (pre_expr, avail, pred->dest_idx, NULL);
+ avail[pred->dest_idx] = NULL;
cant_insert = true;
break;
}
@@ -3340,12 +3327,12 @@ do_regular_insertion (basic_block block, basic_block dom)
vprime);
if (edoubleprime == NULL)
{
- VEC_replace (pre_expr, avail, pred->dest_idx, eprime);
+ avail[pred->dest_idx] = eprime;
all_same = false;
}
else
{
- VEC_replace (pre_expr, avail, pred->dest_idx, edoubleprime);
+ avail[pred->dest_idx] = edoubleprime;
by_some = true;
/* We want to perform insertions to remove a redundancy on
a path in the CFG we want to optimize for speed. */
@@ -3400,7 +3387,7 @@ do_regular_insertion (basic_block block, basic_block dom)
{
unsigned int j;
bitmap_iterator bi;
- bitmap exprset = VEC_index (bitmap, value_expressions, val);
+ bitmap exprset = value_expressions[val];
unsigned int new_val = get_expr_value_id (edoubleprime);
EXECUTE_IF_SET_IN_BITMAP (exprset, 0, j, bi)
@@ -3426,8 +3413,8 @@ do_regular_insertion (basic_block block, basic_block dom)
}
}
- VEC_free (pre_expr, heap, exprs);
- VEC_free (pre_expr, heap, avail);
+ exprs.release ();
+ avail.release ();
return new_stuff;
}
@@ -3443,15 +3430,15 @@ static bool
do_partial_partial_insertion (basic_block block, basic_block dom)
{
bool new_stuff = false;
- VEC (pre_expr, heap) *exprs;
+ vec<pre_expr> exprs;
pre_expr expr;
- VEC (pre_expr, heap) *avail = NULL;
+ vec<pre_expr> avail = vec<pre_expr>();
int i;
exprs = sorted_array_from_bitmap_set (PA_IN (block));
- VEC_safe_grow (pre_expr, heap, avail, EDGE_COUNT (block->preds));
+ avail.safe_grow (EDGE_COUNT (block->preds));
- FOR_EACH_VEC_ELT (pre_expr, exprs, i, expr)
+ FOR_EACH_VEC_ELT (exprs, i, expr)
{
if (expr->kind != NAME)
{
@@ -3493,7 +3480,7 @@ do_partial_partial_insertion (basic_block block, basic_block dom)
rest of the results are. */
if (eprime == NULL)
{
- VEC_replace (pre_expr, avail, pred->dest_idx, NULL);
+ avail[pred->dest_idx] = NULL;
cant_insert = true;
break;
}
@@ -3501,7 +3488,7 @@ do_partial_partial_insertion (basic_block block, basic_block dom)
eprime = fully_constant_expression (eprime);
vprime = get_expr_value_id (eprime);
edoubleprime = bitmap_find_leader (AVAIL_OUT (bprime), vprime);
- VEC_replace (pre_expr, avail, pred->dest_idx, edoubleprime);
+ avail[pred->dest_idx] = edoubleprime;
if (edoubleprime == NULL)
{
by_all = false;
@@ -3564,8 +3551,8 @@ do_partial_partial_insertion (basic_block block, basic_block dom)
}
}
- VEC_free (pre_expr, heap, exprs);
- VEC_free (pre_expr, heap, avail);
+ exprs.release ();
+ avail.release ();
return new_stuff;
}
@@ -3800,7 +3787,8 @@ compute_avail (void)
{
vn_reference_t ref;
pre_expr result = NULL;
- VEC(vn_reference_op_s, heap) *ops = NULL;
+ vec<vn_reference_op_s> ops
+ = vec<vn_reference_op_s>();
/* We can value number only calls to real functions. */
if (gimple_call_internal_p (stmt))
@@ -3810,7 +3798,7 @@ compute_avail (void)
vn_reference_lookup_pieces (gimple_vuse (stmt), 0,
gimple_expr_type (stmt),
ops, &ref, VN_NOWALK);
- VEC_free (vn_reference_op_s, heap, ops);
+ ops.release ();
if (!ref)
continue;
@@ -3938,11 +3926,11 @@ compute_avail (void)
/* Local state for the eliminate domwalk. */
-static VEC (gimple, heap) *el_to_remove;
-static VEC (gimple, heap) *el_to_update;
+static vec<gimple> el_to_remove;
+static vec<gimple> el_to_update;
static unsigned int el_todo;
-static VEC (tree, heap) *el_avail;
-static VEC (tree, heap) *el_avail_stack;
+static vec<tree> el_avail;
+static vec<tree> el_avail_stack;
/* Return a leader for OP that is available at the current point of the
eliminate domwalk. */
@@ -3955,8 +3943,8 @@ eliminate_avail (tree op)
{
if (SSA_NAME_IS_DEFAULT_DEF (valnum))
return valnum;
- if (VEC_length (tree, el_avail) > SSA_NAME_VERSION (valnum))
- return VEC_index (tree, el_avail, SSA_NAME_VERSION (valnum));
+ if (el_avail.length () > SSA_NAME_VERSION (valnum))
+ return el_avail[SSA_NAME_VERSION (valnum)];
}
else if (is_gimple_min_invariant (valnum))
return valnum;
@@ -3971,11 +3959,10 @@ eliminate_push_avail (tree op)
tree valnum = VN_INFO (op)->valnum;
if (TREE_CODE (valnum) == SSA_NAME)
{
- if (VEC_length (tree, el_avail) <= SSA_NAME_VERSION (valnum))
- VEC_safe_grow_cleared (tree, heap,
- el_avail, SSA_NAME_VERSION (valnum) + 1);
- VEC_replace (tree, el_avail, SSA_NAME_VERSION (valnum), op);
- VEC_safe_push (tree, heap, el_avail_stack, op);
+ if (el_avail.length () <= SSA_NAME_VERSION (valnum))
+ el_avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
+ el_avail[SSA_NAME_VERSION (valnum)] = op;
+ el_avail_stack.safe_push (op);
}
}
@@ -4024,7 +4011,7 @@ eliminate_bb (dom_walk_data *, basic_block b)
gimple stmt;
/* Mark new bb. */
- VEC_safe_push (tree, heap, el_avail_stack, NULL_TREE);
+ el_avail_stack.safe_push (NULL_TREE);
for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
{
@@ -4082,7 +4069,7 @@ eliminate_bb (dom_walk_data *, basic_block b)
gsi2 = gsi_after_labels (b);
gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
/* Queue the copy for eventual removal. */
- VEC_safe_push (gimple, heap, el_to_remove, stmt);
+ el_to_remove.safe_push (stmt);
/* If we inserted this PHI node ourself, it's not an elimination. */
if (inserted_exprs
&& bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
@@ -4259,7 +4246,7 @@ eliminate_bb (dom_walk_data *, basic_block b)
}
/* Queue stmt for removal. */
- VEC_safe_push (gimple, heap, el_to_remove, stmt);
+ el_to_remove.safe_push (stmt);
}
}
/* Visit COND_EXPRs and fold the comparison with the
@@ -4318,7 +4305,7 @@ eliminate_bb (dom_walk_data *, basic_block b)
}
gimple_call_set_fn (stmt, fn);
- VEC_safe_push (gimple, heap, el_to_update, stmt);
+ el_to_update.safe_push (stmt);
/* When changing a call into a noreturn call, cfg cleanup
is needed to fix up the noreturn call. */
@@ -4360,9 +4347,8 @@ static void
eliminate_leave_block (dom_walk_data *, basic_block)
{
tree entry;
- while ((entry = VEC_pop (tree, el_avail_stack)) != NULL_TREE)
- VEC_replace (tree, el_avail,
- SSA_NAME_VERSION (VN_INFO (entry)->valnum), NULL_TREE);
+ while ((entry = el_avail_stack.pop ()) != NULL_TREE)
+ el_avail[SSA_NAME_VERSION (VN_INFO (entry)->valnum)] = NULL_TREE;
}
/* Eliminate fully redundant computations. */
@@ -4378,11 +4364,11 @@ eliminate (void)
need_eh_cleanup = BITMAP_ALLOC (NULL);
need_ab_cleanup = BITMAP_ALLOC (NULL);
- el_to_remove = NULL;
- el_to_update = NULL;
+ el_to_remove.create (0);
+ el_to_update.create (0);
el_todo = 0;
- el_avail = NULL;
- el_avail_stack = NULL;
+ el_avail.create (0);
+ el_avail_stack.create (0);
walk_data.dom_direction = CDI_DOMINATORS;
walk_data.initialize_block_local_data = NULL;
@@ -4394,13 +4380,13 @@ eliminate (void)
walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
fini_walk_dominator_tree (&walk_data);
- VEC_free (tree, heap, el_avail);
- VEC_free (tree, heap, el_avail_stack);
+ el_avail.release ();
+ el_avail_stack.release ();
/* We cannot remove stmts during BB walk, especially not release SSA
names there as this confuses the VN machinery. The stmts ending
up in el_to_remove are either stores or simple copies. */
- FOR_EACH_VEC_ELT (gimple, el_to_remove, i, stmt)
+ FOR_EACH_VEC_ELT (el_to_remove, i, stmt)
{
tree lhs = gimple_assign_lhs (stmt);
tree rhs = gimple_assign_rhs1 (stmt);
@@ -4437,14 +4423,14 @@ eliminate (void)
release_defs (stmt);
}
}
- VEC_free (gimple, heap, el_to_remove);
+ el_to_remove.release ();
/* We cannot update call statements with virtual operands during
SSA walk. This might remove them which in turn makes our
VN lattice invalid. */
- FOR_EACH_VEC_ELT (gimple, el_to_update, i, stmt)
+ FOR_EACH_VEC_ELT (el_to_update, i, stmt)
update_stmt (stmt);
- VEC_free (gimple, heap, el_to_update);
+ el_to_update.release ();
return el_todo;
}
@@ -4601,12 +4587,11 @@ init_pre (void)
basic_block bb;
next_expression_id = 1;
- expressions = NULL;
- VEC_safe_push (pre_expr, heap, expressions, NULL);
- value_expressions = VEC_alloc (bitmap, heap, get_max_value_id () + 1);
- VEC_safe_grow_cleared (bitmap, heap, value_expressions,
- get_max_value_id() + 1);
- name_to_id = NULL;
+ expressions.create (0);
+ expressions.safe_push (NULL);
+ value_expressions.create (get_max_value_id () + 1);
+ value_expressions.safe_grow_cleared (get_max_value_id() + 1);
+ name_to_id.create (0);
inserted_exprs = BITMAP_ALLOC (NULL);
@@ -4644,14 +4629,14 @@ static void
fini_pre ()
{
free (postorder);
- VEC_free (bitmap, heap, value_expressions);
+ value_expressions.release ();
BITMAP_FREE (inserted_exprs);
bitmap_obstack_release (&grand_bitmap_obstack);
free_alloc_pool (bitmap_set_pool);
free_alloc_pool (pre_expr_pool);
phi_translate_table.dispose ();
expression_to_id.dispose ();
- VEC_free (unsigned, heap, name_to_id);
+ name_to_id.release ();
free_aux_for_blocks ();
diff --git a/gcc/tree-ssa-propagate.c b/gcc/tree-ssa-propagate.c
index ca6fc336618..73dfa04ff72 100644
--- a/gcc/tree-ssa-propagate.c
+++ b/gcc/tree-ssa-propagate.c
@@ -128,7 +128,7 @@ static ssa_prop_visit_phi_fn ssa_prop_visit_phi;
static sbitmap executable_blocks;
/* Array of control flow edges on the worklist. */
-static VEC(basic_block,heap) *cfg_blocks;
+static vec<basic_block> cfg_blocks;
static unsigned int cfg_blocks_num = 0;
static int cfg_blocks_tail;
@@ -140,7 +140,7 @@ static sbitmap bb_in_list;
definition has changed. SSA edges are def-use edges in the SSA
web. For each D-U edge, we store the target statement or PHI node
U. */
-static GTY(()) VEC(gimple,gc) *interesting_ssa_edges;
+static GTY(()) vec<gimple, va_gc> *interesting_ssa_edges;
/* Identical to INTERESTING_SSA_EDGES. For performance reasons, the
list of SSA edges is split into two. One contains all SSA edges
@@ -156,7 +156,7 @@ static GTY(()) VEC(gimple,gc) *interesting_ssa_edges;
don't use a separate worklist for VARYING edges, we end up with
situations where lattice values move from
UNDEFINED->INTERESTING->VARYING instead of UNDEFINED->VARYING. */
-static GTY(()) VEC(gimple,gc) *varying_ssa_edges;
+static GTY(()) vec<gimple, va_gc> *varying_ssa_edges;
/* Return true if the block worklist empty. */
@@ -187,37 +187,33 @@ cfg_blocks_add (basic_block bb)
else
{
cfg_blocks_num++;
- if (cfg_blocks_num > VEC_length (basic_block, cfg_blocks))
+ if (cfg_blocks_num > cfg_blocks.length ())
{
/* We have to grow the array now. Adjust to queue to occupy
the full space of the original array. We do not need to
initialize the newly allocated portion of the array
because we keep track of CFG_BLOCKS_HEAD and
CFG_BLOCKS_HEAD. */
- cfg_blocks_tail = VEC_length (basic_block, cfg_blocks);
+ cfg_blocks_tail = cfg_blocks.length ();
cfg_blocks_head = 0;
- VEC_safe_grow (basic_block, heap, cfg_blocks, 2 * cfg_blocks_tail);
+ cfg_blocks.safe_grow (2 * cfg_blocks_tail);
}
/* Minor optimization: we prefer to see blocks with more
predecessors later, because there is more of a chance that
the incoming edges will be executable. */
else if (EDGE_COUNT (bb->preds)
- >= EDGE_COUNT (VEC_index (basic_block, cfg_blocks,
- cfg_blocks_head)->preds))
- cfg_blocks_tail = ((cfg_blocks_tail + 1)
- % VEC_length (basic_block, cfg_blocks));
+ >= EDGE_COUNT (cfg_blocks[cfg_blocks_head]->preds))
+ cfg_blocks_tail = ((cfg_blocks_tail + 1) % cfg_blocks.length ());
else
{
if (cfg_blocks_head == 0)
- cfg_blocks_head = VEC_length (basic_block, cfg_blocks);
+ cfg_blocks_head = cfg_blocks.length ();
--cfg_blocks_head;
head = true;
}
}
- VEC_replace (basic_block, cfg_blocks,
- head ? cfg_blocks_head : cfg_blocks_tail,
- bb);
+ cfg_blocks[head ? cfg_blocks_head : cfg_blocks_tail] = bb;
bitmap_set_bit (bb_in_list, bb->index);
}
@@ -229,13 +225,12 @@ cfg_blocks_get (void)
{
basic_block bb;
- bb = VEC_index (basic_block, cfg_blocks, cfg_blocks_head);
+ bb = cfg_blocks[cfg_blocks_head];
gcc_assert (!cfg_blocks_empty_p ());
gcc_assert (bb);
- cfg_blocks_head = ((cfg_blocks_head + 1)
- % VEC_length (basic_block, cfg_blocks));
+ cfg_blocks_head = ((cfg_blocks_head + 1) % cfg_blocks.length ());
--cfg_blocks_num;
bitmap_clear_bit (bb_in_list, bb->index);
@@ -262,9 +257,9 @@ add_ssa_edge (tree var, bool is_varying)
{
gimple_set_plf (use_stmt, STMT_IN_SSA_EDGE_WORKLIST, true);
if (is_varying)
- VEC_safe_push (gimple, gc, varying_ssa_edges, use_stmt);
+ vec_safe_push (varying_ssa_edges, use_stmt);
else
- VEC_safe_push (gimple, gc, interesting_ssa_edges, use_stmt);
+ vec_safe_push (interesting_ssa_edges, use_stmt);
}
}
}
@@ -360,15 +355,15 @@ simulate_stmt (gimple stmt)
SSA edge is added to it in simulate_stmt. */
static void
-process_ssa_edge_worklist (VEC(gimple,gc) **worklist)
+process_ssa_edge_worklist (vec<gimple, va_gc> **worklist)
{
/* Drain the entire worklist. */
- while (VEC_length (gimple, *worklist) > 0)
+ while ((*worklist)->length () > 0)
{
basic_block bb;
/* Pull the statement to simulate off the worklist. */
- gimple stmt = VEC_pop (gimple, *worklist);
+ gimple stmt = (*worklist)->pop ();
/* If this statement was already visited by simulate_block, then
we don't need to visit it again here. */
@@ -483,8 +478,8 @@ ssa_prop_init (void)
basic_block bb;
/* Worklists of SSA edges. */
- interesting_ssa_edges = VEC_alloc (gimple, gc, 20);
- varying_ssa_edges = VEC_alloc (gimple, gc, 20);
+ vec_alloc (interesting_ssa_edges, 20);
+ vec_alloc (varying_ssa_edges, 20);
executable_blocks = sbitmap_alloc (last_basic_block);
bitmap_clear (executable_blocks);
@@ -495,8 +490,8 @@ ssa_prop_init (void)
if (dump_file && (dump_flags & TDF_DETAILS))
dump_immediate_uses (dump_file);
- cfg_blocks = VEC_alloc (basic_block, heap, 20);
- VEC_safe_grow (basic_block, heap, cfg_blocks, 20);
+ cfg_blocks.create (20);
+ cfg_blocks.safe_grow_cleared (20);
/* Initially assume that every edge in the CFG is not executable.
(including the edges coming out of ENTRY_BLOCK_PTR). */
@@ -526,10 +521,9 @@ ssa_prop_init (void)
static void
ssa_prop_fini (void)
{
- VEC_free (gimple, gc, interesting_ssa_edges);
- VEC_free (gimple, gc, varying_ssa_edges);
- VEC_free (basic_block, heap, cfg_blocks);
- cfg_blocks = NULL;
+ vec_free (interesting_ssa_edges);
+ vec_free (varying_ssa_edges);
+ cfg_blocks.release ();
sbitmap_free (bb_in_list);
sbitmap_free (executable_blocks);
}
@@ -738,21 +732,21 @@ update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
tree fn = CALL_EXPR_FN (expr);
unsigned i;
unsigned nargs = call_expr_nargs (expr);
- VEC(tree, heap) *args = NULL;
+ vec<tree> args = vec<tree>();
gimple new_stmt;
if (nargs > 0)
{
- args = VEC_alloc (tree, heap, nargs);
- VEC_safe_grow (tree, heap, args, nargs);
+ args.create (nargs);
+ args.safe_grow_cleared (nargs);
for (i = 0; i < nargs; i++)
- VEC_replace (tree, args, i, CALL_EXPR_ARG (expr, i));
+ args[i] = CALL_EXPR_ARG (expr, i);
}
new_stmt = gimple_build_call_vec (fn, args);
finish_update_gimple_call (si_p, new_stmt, stmt);
- VEC_free (tree, heap, args);
+ args.release ();
return true;
}
@@ -827,8 +821,8 @@ ssa_propagate (ssa_prop_visit_stmt_fn visit_stmt,
/* Iterate until the worklists are empty. */
while (!cfg_blocks_empty_p ()
- || VEC_length (gimple, interesting_ssa_edges) > 0
- || VEC_length (gimple, varying_ssa_edges) > 0)
+ || interesting_ssa_edges->length () > 0
+ || varying_ssa_edges->length () > 0)
{
if (!cfg_blocks_empty_p ())
{
diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index 3503c6411a6..471b8e6f46e 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -444,8 +444,6 @@ get_rank (tree e)
return 0;
}
-DEF_VEC_P(operand_entry_t);
-DEF_VEC_ALLOC_P(operand_entry_t, heap);
/* We want integer ones to end up last no matter what, since they are
the ones we can do the most with. */
@@ -508,7 +506,7 @@ sort_by_operand_rank (const void *pa, const void *pb)
/* Add an operand entry to *OPS for the tree operand OP. */
static void
-add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
+add_to_ops_vec (vec<operand_entry_t> *ops, tree op)
{
operand_entry_t oe = (operand_entry_t) pool_alloc (operand_entry_pool);
@@ -516,14 +514,14 @@ add_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op)
oe->rank = get_rank (op);
oe->id = next_operand_entry_id++;
oe->count = 1;
- VEC_safe_push (operand_entry_t, heap, *ops, oe);
+ ops->safe_push (oe);
}
/* Add an operand entry to *OPS for the tree operand OP with repeat
count REPEAT. */
static void
-add_repeat_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op,
+add_repeat_to_ops_vec (vec<operand_entry_t> *ops, tree op,
HOST_WIDE_INT repeat)
{
operand_entry_t oe = (operand_entry_t) pool_alloc (operand_entry_pool);
@@ -532,7 +530,7 @@ add_repeat_to_ops_vec (VEC(operand_entry_t, heap) **ops, tree op,
oe->rank = get_rank (op);
oe->id = next_operand_entry_id++;
oe->count = repeat;
- VEC_safe_push (operand_entry_t, heap, *ops, oe);
+ ops->safe_push (oe);
reassociate_stats.pows_encountered++;
}
@@ -582,7 +580,7 @@ get_unary_op (tree name, enum tree_code opcode)
static bool
eliminate_duplicate_pair (enum tree_code opcode,
- VEC (operand_entry_t, heap) **ops,
+ vec<operand_entry_t> *ops,
bool *all_done,
unsigned int i,
operand_entry_t curr,
@@ -612,7 +610,7 @@ eliminate_duplicate_pair (enum tree_code opcode,
print_generic_stmt (dump_file, last->op, 0);
}
- VEC_ordered_remove (operand_entry_t, *ops, i);
+ ops->ordered_remove (i);
reassociate_stats.ops_eliminated ++;
return true;
@@ -629,17 +627,16 @@ eliminate_duplicate_pair (enum tree_code opcode,
reassociate_stats.ops_eliminated += 2;
- if (VEC_length (operand_entry_t, *ops) == 2)
+ if (ops->length () == 2)
{
- VEC_free (operand_entry_t, heap, *ops);
- *ops = NULL;
+ ops->truncate (0);
add_to_ops_vec (ops, build_zero_cst (TREE_TYPE (last->op)));
*all_done = true;
}
else
{
- VEC_ordered_remove (operand_entry_t, *ops, i-1);
- VEC_ordered_remove (operand_entry_t, *ops, i-1);
+ ops->ordered_remove (i-1);
+ ops->ordered_remove (i-1);
}
return true;
@@ -651,7 +648,7 @@ eliminate_duplicate_pair (enum tree_code opcode,
return false;
}
-static VEC(tree, heap) *plus_negates;
+static vec<tree> plus_negates;
/* If OPCODE is PLUS_EXPR, CURR->OP is a negate expression or a bitwise not
expression, look in OPS for a corresponding positive operation to cancel
@@ -661,7 +658,7 @@ static VEC(tree, heap) *plus_negates;
static bool
eliminate_plus_minus_pair (enum tree_code opcode,
- VEC (operand_entry_t, heap) **ops,
+ vec<operand_entry_t> *ops,
unsigned int currindex,
operand_entry_t curr)
{
@@ -683,7 +680,7 @@ eliminate_plus_minus_pair (enum tree_code opcode,
one, we can stop. */
for (i = currindex + 1;
- VEC_iterate (operand_entry_t, *ops, i, oe)
+ ops->iterate (i, &oe)
&& oe->rank >= curr->rank - 1 ;
i++)
{
@@ -699,9 +696,9 @@ eliminate_plus_minus_pair (enum tree_code opcode,
fprintf (dump_file, " -> 0\n");
}
- VEC_ordered_remove (operand_entry_t, *ops, i);
+ ops->ordered_remove (i);
add_to_ops_vec (ops, build_zero_cst (TREE_TYPE (oe->op)));
- VEC_ordered_remove (operand_entry_t, *ops, currindex);
+ ops->ordered_remove (currindex);
reassociate_stats.ops_eliminated ++;
return true;
@@ -719,9 +716,9 @@ eliminate_plus_minus_pair (enum tree_code opcode,
fprintf (dump_file, " -> -1\n");
}
- VEC_ordered_remove (operand_entry_t, *ops, i);
+ ops->ordered_remove (i);
add_to_ops_vec (ops, build_int_cst_type (op_type, -1));
- VEC_ordered_remove (operand_entry_t, *ops, currindex);
+ ops->ordered_remove (currindex);
reassociate_stats.ops_eliminated ++;
return true;
@@ -731,7 +728,7 @@ eliminate_plus_minus_pair (enum tree_code opcode,
/* CURR->OP is a negate expr in a plus expr: save it for later
inspection in repropagate_negates(). */
if (negateop != NULL_TREE)
- VEC_safe_push (tree, heap, plus_negates, curr->op);
+ plus_negates.safe_push (curr->op);
return false;
}
@@ -744,7 +741,7 @@ eliminate_plus_minus_pair (enum tree_code opcode,
static bool
eliminate_not_pairs (enum tree_code opcode,
- VEC (operand_entry_t, heap) **ops,
+ vec<operand_entry_t> *ops,
unsigned int currindex,
operand_entry_t curr)
{
@@ -765,7 +762,7 @@ eliminate_not_pairs (enum tree_code opcode,
one, we can stop. */
for (i = currindex + 1;
- VEC_iterate (operand_entry_t, *ops, i, oe)
+ ops->iterate (i, &oe)
&& oe->rank >= curr->rank - 1;
i++)
{
@@ -792,11 +789,9 @@ eliminate_not_pairs (enum tree_code opcode,
oe->op = build_low_bits_mask (TREE_TYPE (oe->op),
TYPE_PRECISION (TREE_TYPE (oe->op)));
- reassociate_stats.ops_eliminated
- += VEC_length (operand_entry_t, *ops) - 1;
- VEC_free (operand_entry_t, heap, *ops);
- *ops = NULL;
- VEC_safe_push (operand_entry_t, heap, *ops, oe);
+ reassociate_stats.ops_eliminated += ops->length () - 1;
+ ops->truncate (0);
+ ops->quick_push (oe);
return true;
}
}
@@ -813,9 +808,9 @@ eliminate_not_pairs (enum tree_code opcode,
static void
eliminate_using_constants (enum tree_code opcode,
- VEC(operand_entry_t, heap) **ops)
+ vec<operand_entry_t> *ops)
{
- operand_entry_t oelast = VEC_last (operand_entry_t, *ops);
+ operand_entry_t oelast = ops->last ();
tree type = TREE_TYPE (oelast->op);
if (oelast->rank == 0
@@ -826,27 +821,25 @@ eliminate_using_constants (enum tree_code opcode,
case BIT_AND_EXPR:
if (integer_zerop (oelast->op))
{
- if (VEC_length (operand_entry_t, *ops) != 1)
+ if (ops->length () != 1)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Found & 0, removing all other ops\n");
- reassociate_stats.ops_eliminated
- += VEC_length (operand_entry_t, *ops) - 1;
+ reassociate_stats.ops_eliminated += ops->length () - 1;
- VEC_free (operand_entry_t, heap, *ops);
- *ops = NULL;
- VEC_safe_push (operand_entry_t, heap, *ops, oelast);
+ ops->truncate (0);
+ ops->quick_push (oelast);
return;
}
}
else if (integer_all_onesp (oelast->op))
{
- if (VEC_length (operand_entry_t, *ops) != 1)
+ if (ops->length () != 1)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Found & -1, removing\n");
- VEC_pop (operand_entry_t, *ops);
+ ops->pop ();
reassociate_stats.ops_eliminated++;
}
}
@@ -854,27 +847,25 @@ eliminate_using_constants (enum tree_code opcode,
case BIT_IOR_EXPR:
if (integer_all_onesp (oelast->op))
{
- if (VEC_length (operand_entry_t, *ops) != 1)
+ if (ops->length () != 1)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Found | -1, removing all other ops\n");
- reassociate_stats.ops_eliminated
- += VEC_length (operand_entry_t, *ops) - 1;
+ reassociate_stats.ops_eliminated += ops->length () - 1;
- VEC_free (operand_entry_t, heap, *ops);
- *ops = NULL;
- VEC_safe_push (operand_entry_t, heap, *ops, oelast);
+ ops->truncate (0);
+ ops->quick_push (oelast);
return;
}
}
else if (integer_zerop (oelast->op))
{
- if (VEC_length (operand_entry_t, *ops) != 1)
+ if (ops->length () != 1)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Found | 0, removing\n");
- VEC_pop (operand_entry_t, *ops);
+ ops->pop ();
reassociate_stats.ops_eliminated++;
}
}
@@ -886,16 +877,14 @@ eliminate_using_constants (enum tree_code opcode,
&& !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
&& real_zerop (oelast->op)))
{
- if (VEC_length (operand_entry_t, *ops) != 1)
+ if (ops->length () != 1)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Found * 0, removing all other ops\n");
- reassociate_stats.ops_eliminated
- += VEC_length (operand_entry_t, *ops) - 1;
- VEC_free (operand_entry_t, heap, *ops);
- *ops = NULL;
- VEC_safe_push (operand_entry_t, heap, *ops, oelast);
+ reassociate_stats.ops_eliminated += ops->length () - 1;
+ ops->truncate (0);
+ ops->quick_push (oelast);
return;
}
}
@@ -904,11 +893,11 @@ eliminate_using_constants (enum tree_code opcode,
&& !HONOR_SNANS (TYPE_MODE (type))
&& real_onep (oelast->op)))
{
- if (VEC_length (operand_entry_t, *ops) != 1)
+ if (ops->length () != 1)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Found * 1, removing\n");
- VEC_pop (operand_entry_t, *ops);
+ ops->pop ();
reassociate_stats.ops_eliminated++;
return;
}
@@ -923,11 +912,11 @@ eliminate_using_constants (enum tree_code opcode,
&& fold_real_zero_addition_p (type, oelast->op,
opcode == MINUS_EXPR)))
{
- if (VEC_length (operand_entry_t, *ops) != 1)
+ if (ops->length () != 1)
{
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Found [|^+] 0, removing\n");
- VEC_pop (operand_entry_t, *ops);
+ ops->pop ();
reassociate_stats.ops_eliminated++;
return;
}
@@ -940,7 +929,7 @@ eliminate_using_constants (enum tree_code opcode,
}
-static void linearize_expr_tree (VEC(operand_entry_t, heap) **, gimple,
+static void linearize_expr_tree (vec<operand_entry_t> *, gimple,
bool, bool);
/* Structure for tracking and counting operands. */
@@ -951,18 +940,16 @@ typedef struct oecount_s {
tree op;
} oecount;
-DEF_VEC_O(oecount);
-DEF_VEC_ALLOC_O(oecount,heap);
/* The heap for the oecount hashtable and the sorted list of operands. */
-static VEC (oecount, heap) *cvec;
+static vec<oecount> cvec;
/* Hash function for oecount. */
static hashval_t
oecount_hash (const void *p)
{
- const oecount *c = &VEC_index (oecount, cvec, (size_t)p - 42);
+ const oecount *c = &cvec[(size_t)p - 42];
return htab_hash_pointer (c->op) ^ (hashval_t)c->oecode;
}
@@ -971,8 +958,8 @@ oecount_hash (const void *p)
static int
oecount_eq (const void *p1, const void *p2)
{
- const oecount *c1 = &VEC_index (oecount, cvec, (size_t)p1 - 42);
- const oecount *c2 = &VEC_index (oecount, cvec, (size_t)p2 - 42);
+ const oecount *c1 = &cvec[(size_t)p1 - 42];
+ const oecount *c2 = &cvec[(size_t)p2 - 42];
return (c1->oecode == c2->oecode
&& c1->op == c2->op);
}
@@ -1263,15 +1250,15 @@ build_and_add_sum (tree type, tree op1, tree op2, enum tree_code opcode)
static bool
undistribute_ops_list (enum tree_code opcode,
- VEC (operand_entry_t, heap) **ops, struct loop *loop)
+ vec<operand_entry_t> *ops, struct loop *loop)
{
- unsigned int length = VEC_length (operand_entry_t, *ops);
+ unsigned int length = ops->length ();
operand_entry_t oe1;
unsigned i, j;
sbitmap candidates, candidates2;
unsigned nr_candidates, nr_candidates2;
sbitmap_iterator sbi0;
- VEC (operand_entry_t, heap) **subops;
+ vec<operand_entry_t> *subops;
htab_t ctable;
bool changed = false;
int next_oecount_id = 0;
@@ -1284,7 +1271,7 @@ undistribute_ops_list (enum tree_code opcode,
candidates = sbitmap_alloc (length);
bitmap_clear (candidates);
nr_candidates = 0;
- FOR_EACH_VEC_ELT (operand_entry_t, *ops, i, oe1)
+ FOR_EACH_VEC_ELT (*ops, i, oe1)
{
enum tree_code dcode;
gimple oe1def;
@@ -1314,28 +1301,29 @@ undistribute_ops_list (enum tree_code opcode,
{
fprintf (dump_file, "searching for un-distribute opportunities ");
print_generic_expr (dump_file,
- VEC_index (operand_entry_t, *ops,
- bitmap_first_set_bit (candidates))->op, 0);
+ (*ops)[bitmap_first_set_bit (candidates)]->op, 0);
fprintf (dump_file, " %d\n", nr_candidates);
}
/* Build linearized sub-operand lists and the counting table. */
- cvec = NULL;
+ cvec.create (0);
ctable = htab_create (15, oecount_hash, oecount_eq, NULL);
- subops = XCNEWVEC (VEC (operand_entry_t, heap) *,
- VEC_length (operand_entry_t, *ops));
+ /* ??? Macro arguments cannot have multi-argument template types in
+ them. This typedef is needed to workaround that limitation. */
+ typedef vec<operand_entry_t> vec_operand_entry_t_heap;
+ subops = XCNEWVEC (vec_operand_entry_t_heap, ops->length ());
EXECUTE_IF_SET_IN_BITMAP (candidates, 0, i, sbi0)
{
gimple oedef;
enum tree_code oecode;
unsigned j;
- oedef = SSA_NAME_DEF_STMT (VEC_index (operand_entry_t, *ops, i)->op);
+ oedef = SSA_NAME_DEF_STMT ((*ops)[i]->op);
oecode = gimple_assign_rhs_code (oedef);
linearize_expr_tree (&subops[i], oedef,
associative_tree_code (oecode), false);
- FOR_EACH_VEC_ELT (operand_entry_t, subops[i], j, oe1)
+ FOR_EACH_VEC_ELT (subops[i], j, oe1)
{
oecount c;
void **slot;
@@ -1344,8 +1332,8 @@ undistribute_ops_list (enum tree_code opcode,
c.cnt = 1;
c.id = next_oecount_id++;
c.op = oe1->op;
- VEC_safe_push (oecount, heap, cvec, c);
- idx = VEC_length (oecount, cvec) + 41;
+ cvec.safe_push (c);
+ idx = cvec.length () + 41;
slot = htab_find_slot (ctable, (void *)idx, INSERT);
if (!*slot)
{
@@ -1353,21 +1341,21 @@ undistribute_ops_list (enum tree_code opcode,
}
else
{
- VEC_pop (oecount, cvec);
- VEC_index (oecount, cvec, (size_t)*slot - 42).cnt++;
+ cvec.pop ();
+ cvec[(size_t)*slot - 42].cnt++;
}
}
}
htab_delete (ctable);
/* Sort the counting table. */
- VEC_qsort (oecount, cvec, oecount_cmp);
+ cvec.qsort (oecount_cmp);
if (dump_file && (dump_flags & TDF_DETAILS))
{
oecount *c;
fprintf (dump_file, "Candidates:\n");
- FOR_EACH_VEC_ELT (oecount, cvec, j, c)
+ FOR_EACH_VEC_ELT (cvec, j, c)
{
fprintf (dump_file, " %u %s: ", c->cnt,
c->oecode == MULT_EXPR
@@ -1379,9 +1367,9 @@ undistribute_ops_list (enum tree_code opcode,
/* Process the (operand, code) pairs in order of most occurence. */
candidates2 = sbitmap_alloc (length);
- while (!VEC_empty (oecount, cvec))
+ while (!cvec.is_empty ())
{
- oecount *c = &VEC_last (oecount, cvec);
+ oecount *c = &cvec.last ();
if (c->cnt < 2)
break;
@@ -1394,7 +1382,7 @@ undistribute_ops_list (enum tree_code opcode,
gimple oedef;
enum tree_code oecode;
unsigned j;
- tree op = VEC_index (operand_entry_t, *ops, i)->op;
+ tree op = (*ops)[i]->op;
/* If we undistributed in this chain already this may be
a constant. */
@@ -1406,7 +1394,7 @@ undistribute_ops_list (enum tree_code opcode,
if (oecode != c->oecode)
continue;
- FOR_EACH_VEC_ELT (operand_entry_t, subops[i], j, oe1)
+ FOR_EACH_VEC_ELT (subops[i], j, oe1)
{
if (oe1->op == c->op)
{
@@ -1424,7 +1412,7 @@ undistribute_ops_list (enum tree_code opcode,
int first = bitmap_first_set_bit (candidates2);
/* Build the new addition chain. */
- oe1 = VEC_index (operand_entry_t, *ops, first);
+ oe1 = (*ops)[first];
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "Building (");
@@ -1434,7 +1422,7 @@ undistribute_ops_list (enum tree_code opcode,
EXECUTE_IF_SET_IN_BITMAP (candidates2, first+1, i, sbi0)
{
gimple sum;
- oe2 = VEC_index (operand_entry_t, *ops, i);
+ oe2 = (*ops)[i];
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, " + ");
@@ -1462,18 +1450,18 @@ undistribute_ops_list (enum tree_code opcode,
undistribution with this op. */
oe1->op = gimple_assign_lhs (prod);
oe1->rank = get_rank (oe1->op);
- VEC_free (operand_entry_t, heap, subops[first]);
+ subops[first].release ();
changed = true;
}
- VEC_pop (oecount, cvec);
+ cvec.pop ();
}
- for (i = 0; i < VEC_length (operand_entry_t, *ops); ++i)
- VEC_free (operand_entry_t, heap, subops[i]);
+ for (i = 0; i < ops->length (); ++i)
+ subops[i].release ();
free (subops);
- VEC_free (oecount, heap, cvec);
+ cvec.release ();
sbitmap_free (candidates);
sbitmap_free (candidates2);
@@ -1487,7 +1475,7 @@ undistribute_ops_list (enum tree_code opcode,
static bool
eliminate_redundant_comparison (enum tree_code opcode,
- VEC (operand_entry_t, heap) **ops,
+ vec<operand_entry_t> *ops,
unsigned int currindex,
operand_entry_t curr)
{
@@ -1513,9 +1501,7 @@ eliminate_redundant_comparison (enum tree_code opcode,
op2 = gimple_assign_rhs2 (def1);
/* Now look for a similar comparison in the remaining OPS. */
- for (i = currindex + 1;
- VEC_iterate (operand_entry_t, *ops, i, oe);
- i++)
+ for (i = currindex + 1; ops->iterate (i, &oe); i++)
{
tree t;
@@ -1575,7 +1561,7 @@ eliminate_redundant_comparison (enum tree_code opcode,
/* Now we can delete oe, as it has been subsumed by the new combined
expression t. */
- VEC_ordered_remove (operand_entry_t, *ops, i);
+ ops->ordered_remove (i);
reassociate_stats.ops_eliminated ++;
/* If t is the same as curr->op, we're done. Otherwise we must
@@ -1584,7 +1570,7 @@ eliminate_redundant_comparison (enum tree_code opcode,
the current entry. */
if (TREE_CODE (t) == INTEGER_CST)
{
- VEC_ordered_remove (operand_entry_t, *ops, currindex);
+ ops->ordered_remove (currindex);
add_to_ops_vec (ops, t);
}
else if (!operand_equal_p (t, curr->op, 0))
@@ -1614,9 +1600,9 @@ eliminate_redundant_comparison (enum tree_code opcode,
static void
optimize_ops_list (enum tree_code opcode,
- VEC (operand_entry_t, heap) **ops)
+ vec<operand_entry_t> *ops)
{
- unsigned int length = VEC_length (operand_entry_t, *ops);
+ unsigned int length = ops->length ();
unsigned int i;
operand_entry_t oe;
operand_entry_t oelast = NULL;
@@ -1625,13 +1611,13 @@ optimize_ops_list (enum tree_code opcode,
if (length == 1)
return;
- oelast = VEC_last (operand_entry_t, *ops);
+ oelast = ops->last ();
/* If the last two are constants, pop the constants off, merge them
and try the next two. */
if (oelast->rank == 0 && is_gimple_min_invariant (oelast->op))
{
- operand_entry_t oelm1 = VEC_index (operand_entry_t, *ops, length - 2);
+ operand_entry_t oelm1 = (*ops)[length - 2];
if (oelm1->rank == 0
&& is_gimple_min_invariant (oelm1->op)
@@ -1646,8 +1632,8 @@ optimize_ops_list (enum tree_code opcode,
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Merging constants\n");
- VEC_pop (operand_entry_t, *ops);
- VEC_pop (operand_entry_t, *ops);
+ ops->pop ();
+ ops->pop ();
add_to_ops_vec (ops, folded);
reassociate_stats.constants_eliminated++;
@@ -1661,7 +1647,7 @@ optimize_ops_list (enum tree_code opcode,
eliminate_using_constants (opcode, ops);
oelast = NULL;
- for (i = 0; VEC_iterate (operand_entry_t, *ops, i, oe);)
+ for (i = 0; ops->iterate (i, &oe);)
{
bool done = false;
@@ -1681,8 +1667,8 @@ optimize_ops_list (enum tree_code opcode,
i++;
}
- length = VEC_length (operand_entry_t, *ops);
- oelast = VEC_last (operand_entry_t, *ops);
+ length = ops->length ();
+ oelast = ops->last ();
if (iterate)
optimize_ops_list (opcode, ops);
@@ -1940,10 +1926,10 @@ range_entry_cmp (const void *a, const void *b)
static bool
update_range_test (struct range_entry *range, struct range_entry *otherrange,
unsigned int count, enum tree_code opcode,
- VEC (operand_entry_t, heap) **ops, tree exp, bool in_p,
+ vec<operand_entry_t> *ops, tree exp, bool in_p,
tree low, tree high, bool strict_overflow_p)
{
- operand_entry_t oe = VEC_index (operand_entry_t, *ops, range->idx);
+ operand_entry_t oe = (*ops)[range->idx];
tree op = oe->op;
gimple stmt = op ? SSA_NAME_DEF_STMT (op) : last_stmt (BASIC_BLOCK (oe->id));
location_t loc = gimple_location (stmt);
@@ -2030,7 +2016,7 @@ update_range_test (struct range_entry *range, struct range_entry *otherrange,
for (range = otherrange; range < otherrange + count; range++)
{
- oe = VEC_index (operand_entry_t, *ops, range->idx);
+ oe = (*ops)[range->idx];
/* Now change all the other range test immediate uses, so that
those tests will be optimized away. */
if (opcode == ERROR_MARK)
@@ -2124,9 +2110,9 @@ update_range_test (struct range_entry *range, struct range_entry *otherrange,
static void
optimize_range_tests (enum tree_code opcode,
- VEC (operand_entry_t, heap) **ops)
+ vec<operand_entry_t> *ops)
{
- unsigned int length = VEC_length (operand_entry_t, *ops), i, j, first;
+ unsigned int length = ops->length (), i, j, first;
operand_entry_t oe;
struct range_entry *ranges;
bool any_changes = false;
@@ -2137,7 +2123,7 @@ optimize_range_tests (enum tree_code opcode,
ranges = XNEWVEC (struct range_entry, length);
for (i = 0; i < length; i++)
{
- oe = VEC_index (operand_entry_t, *ops, i);
+ oe = (*ops)[i];
ranges[i].idx = i;
init_range_entry (ranges + i, oe->op,
oe->op ? NULL : last_stmt (BASIC_BLOCK (oe->id)));
@@ -2264,15 +2250,15 @@ optimize_range_tests (enum tree_code opcode,
if (any_changes && opcode != ERROR_MARK)
{
j = 0;
- FOR_EACH_VEC_ELT (operand_entry_t, *ops, i, oe)
+ FOR_EACH_VEC_ELT (*ops, i, oe)
{
if (oe->op == error_mark_node)
continue;
else if (i != j)
- VEC_replace (operand_entry_t, *ops, j, oe);
+ (*ops)[j] = oe;
j++;
}
- VEC_truncate (operand_entry_t, *ops, j);
+ ops->truncate (j);
}
XDELETEVEC (ranges);
@@ -2493,7 +2479,7 @@ no_side_effect_bb (basic_block bb)
return true and fill in *OPS recursively. */
static bool
-get_ops (tree var, enum tree_code code, VEC(operand_entry_t, heap) **ops,
+get_ops (tree var, enum tree_code code, vec<operand_entry_t> *ops,
struct loop *loop)
{
gimple stmt = SSA_NAME_DEF_STMT (var);
@@ -2517,7 +2503,7 @@ get_ops (tree var, enum tree_code code, VEC(operand_entry_t, heap) **ops,
oe->rank = code;
oe->id = 0;
oe->count = 1;
- VEC_safe_push (operand_entry_t, heap, *ops, oe);
+ ops->safe_push (oe);
}
return true;
}
@@ -2533,7 +2519,7 @@ maybe_optimize_range_tests (gimple stmt)
basic_block bb;
edge_iterator ei;
edge e;
- VEC(operand_entry_t, heap) *ops = NULL;
+ vec<operand_entry_t> ops = vec<operand_entry_t>();
/* Consider only basic blocks that end with GIMPLE_COND or
a cast statement satisfying final_range_test_p. All
@@ -2691,7 +2677,7 @@ maybe_optimize_range_tests (gimple stmt)
oe->rank = code;
oe->id = 0;
oe->count = 1;
- VEC_safe_push (operand_entry_t, heap, ops, oe);
+ ops.safe_push (oe);
}
continue;
}
@@ -2724,14 +2710,14 @@ maybe_optimize_range_tests (gimple stmt)
is. */
oe->id = bb->index;
oe->count = 1;
- VEC_safe_push (operand_entry_t, heap, ops, oe);
+ ops.safe_push (oe);
}
if (bb == first_bb)
break;
}
- if (VEC_length (operand_entry_t, ops) > 1)
+ if (ops.length () > 1)
optimize_range_tests (ERROR_MARK, &ops);
- VEC_free (operand_entry_t, heap, ops);
+ ops.release ();
}
/* Return true if OPERAND is defined by a PHI node which uses the LHS
@@ -2808,14 +2794,14 @@ remove_visited_stmt_chain (tree var)
cases, but it is unlikely to be worth it. */
static void
-swap_ops_for_binary_stmt (VEC(operand_entry_t, heap) * ops,
+swap_ops_for_binary_stmt (vec<operand_entry_t> ops,
unsigned int opindex, gimple stmt)
{
operand_entry_t oe1, oe2, oe3;
- oe1 = VEC_index (operand_entry_t, ops, opindex);
- oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
- oe3 = VEC_index (operand_entry_t, ops, opindex + 2);
+ oe1 = ops[opindex];
+ oe2 = ops[opindex + 1];
+ oe3 = ops[opindex + 2];
if ((oe1->rank == oe2->rank
&& oe2->rank != oe3->rank)
@@ -2849,7 +2835,7 @@ swap_ops_for_binary_stmt (VEC(operand_entry_t, heap) * ops,
static void
rewrite_expr_tree (gimple stmt, unsigned int opindex,
- VEC(operand_entry_t, heap) * ops, bool moved)
+ vec<operand_entry_t> ops, bool moved)
{
tree rhs1 = gimple_assign_rhs1 (stmt);
tree rhs2 = gimple_assign_rhs2 (stmt);
@@ -2857,7 +2843,7 @@ rewrite_expr_tree (gimple stmt, unsigned int opindex,
/* If we have three operands left, then we want to make sure the ones
that get the double binary op are chosen wisely. */
- if (opindex + 3 == VEC_length (operand_entry_t, ops))
+ if (opindex + 3 == ops.length ())
swap_ops_for_binary_stmt (ops, opindex, stmt);
/* The final recursion case for this function is that you have
@@ -2865,12 +2851,12 @@ rewrite_expr_tree (gimple stmt, unsigned int opindex,
If we had one exactly one op in the entire list to start with, we
would have never called this function, and the tail recursion
rewrites them one at a time. */
- if (opindex + 2 == VEC_length (operand_entry_t, ops))
+ if (opindex + 2 == ops.length ())
{
operand_entry_t oe1, oe2;
- oe1 = VEC_index (operand_entry_t, ops, opindex);
- oe2 = VEC_index (operand_entry_t, ops, opindex + 1);
+ oe1 = ops[opindex];
+ oe2 = ops[opindex + 1];
if (rhs1 != oe1->op || rhs2 != oe2->op)
{
@@ -2896,10 +2882,10 @@ rewrite_expr_tree (gimple stmt, unsigned int opindex,
}
/* If we hit here, we should have 3 or more ops left. */
- gcc_assert (opindex + 2 < VEC_length (operand_entry_t, ops));
+ gcc_assert (opindex + 2 < ops.length ());
/* Rewrite the next operator. */
- oe = VEC_index (operand_entry_t, ops, opindex);
+ oe = ops[opindex];
if (oe->op != rhs2)
{
@@ -2910,7 +2896,7 @@ rewrite_expr_tree (gimple stmt, unsigned int opindex,
unsigned int count;
gsinow = gsi_for_stmt (stmt);
- count = VEC_length (operand_entry_t, ops) - opindex - 2;
+ count = ops.length () - opindex - 2;
while (count-- != 0)
{
stmt2 = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt1));
@@ -3021,10 +3007,10 @@ get_reassociation_width (int ops_num, enum tree_code opc,
static void
rewrite_expr_tree_parallel (gimple stmt, int width,
- VEC(operand_entry_t, heap) * ops)
+ vec<operand_entry_t> ops)
{
enum tree_code opcode = gimple_assign_rhs_code (stmt);
- int op_num = VEC_length (operand_entry_t, ops);
+ int op_num = ops.length ();
int stmt_num = op_num - 1;
gimple *stmts = XALLOCAVEC (gimple, stmt_num);
int op_index = op_num - 1;
@@ -3059,7 +3045,7 @@ rewrite_expr_tree_parallel (gimple stmt, int width,
if (ready_stmts_end > stmt_index)
op2 = gimple_assign_lhs (stmts[stmt_index++]);
else if (op_index >= 0)
- op2 = VEC_index (operand_entry_t, ops, op_index--)->op;
+ op2 = ops[op_index--]->op;
else
{
gcc_assert (stmt_index < i);
@@ -3073,8 +3059,8 @@ rewrite_expr_tree_parallel (gimple stmt, int width,
{
if (op_index > 1)
swap_ops_for_binary_stmt (ops, op_index - 2, NULL);
- op2 = VEC_index (operand_entry_t, ops, op_index--)->op;
- op1 = VEC_index (operand_entry_t, ops, op_index--)->op;
+ op2 = ops[op_index--]->op;
+ op1 = ops[op_index--]->op;
}
/* If we emit the last statement then we should put
@@ -3346,7 +3332,7 @@ acceptable_pow_call (gimple stmt, tree *base, HOST_WIDE_INT *exponent)
Place the operands of the expression tree in the vector named OPS. */
static void
-linearize_expr_tree (VEC(operand_entry_t, heap) **ops, gimple stmt,
+linearize_expr_tree (vec<operand_entry_t> *ops, gimple stmt,
bool is_associative, bool set_visited)
{
tree binlhs = gimple_assign_rhs1 (stmt);
@@ -3474,7 +3460,7 @@ repropagate_negates (void)
unsigned int i = 0;
tree negate;
- FOR_EACH_VEC_ELT (tree, plus_negates, i, negate)
+ FOR_EACH_VEC_ELT (plus_negates, i, negate)
{
gimple user = get_single_immediate_use (negate);
@@ -3533,8 +3519,7 @@ repropagate_negates (void)
gimple_assign_set_rhs_with_ops (&gsi2, NEGATE_EXPR, negate, NULL);
update_stmt (gsi_stmt (gsi2));
gsi_move_before (&gsi, &gsi2);
- VEC_safe_push (tree, heap, plus_negates,
- gimple_assign_lhs (gsi_stmt (gsi2)));
+ plus_negates.safe_push (gimple_assign_lhs (gsi_stmt (gsi2)));
}
else
{
@@ -3614,7 +3599,7 @@ break_up_subtract_bb (basic_block bb)
}
else if (gimple_assign_rhs_code (stmt) == NEGATE_EXPR
&& can_reassociate_p (gimple_assign_rhs1 (stmt)))
- VEC_safe_push (tree, heap, plus_negates, gimple_assign_lhs (stmt));
+ plus_negates.safe_push (gimple_assign_lhs (stmt));
}
for (son = first_dom_son (CDI_DOMINATORS, bb);
son;
@@ -3642,10 +3627,8 @@ struct repeat_factor_d
typedef struct repeat_factor_d repeat_factor, *repeat_factor_t;
typedef const struct repeat_factor_d *const_repeat_factor_t;
-DEF_VEC_O (repeat_factor);
-DEF_VEC_ALLOC_O (repeat_factor, heap);
-static VEC (repeat_factor, heap) *repeat_factor_vec;
+static vec<repeat_factor> repeat_factor_vec;
/* Used for sorting the repeat factor vector. Sort primarily by
ascending occurrence count, secondarily by descending rank. */
@@ -3668,7 +3651,7 @@ compare_repeat_factors (const void *x1, const void *x2)
SSA name representing the value of the replacement sequence. */
static tree
-attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops)
+attempt_builtin_powi (gimple stmt, vec<operand_entry_t> *ops)
{
unsigned i, j, vec_len;
int ii;
@@ -3688,15 +3671,15 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops)
return NULL_TREE;
/* Allocate the repeated factor vector. */
- repeat_factor_vec = VEC_alloc (repeat_factor, heap, 10);
+ repeat_factor_vec.create (10);
/* Scan the OPS vector for all SSA names in the product and build
up a vector of occurrence counts for each factor. */
- FOR_EACH_VEC_ELT (operand_entry_t, *ops, i, oe)
+ FOR_EACH_VEC_ELT (*ops, i, oe)
{
if (TREE_CODE (oe->op) == SSA_NAME)
{
- FOR_EACH_VEC_ELT (repeat_factor, repeat_factor_vec, j, rf1)
+ FOR_EACH_VEC_ELT (repeat_factor_vec, j, rf1)
{
if (rf1->factor == oe->op)
{
@@ -3705,20 +3688,20 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops)
}
}
- if (j >= VEC_length (repeat_factor, repeat_factor_vec))
+ if (j >= repeat_factor_vec.length ())
{
rfnew.factor = oe->op;
rfnew.rank = oe->rank;
rfnew.count = oe->count;
rfnew.repr = NULL_TREE;
- VEC_safe_push (repeat_factor, heap, repeat_factor_vec, rfnew);
+ repeat_factor_vec.safe_push (rfnew);
}
}
}
/* Sort the repeated factor vector by (a) increasing occurrence count,
and (b) decreasing rank. */
- VEC_qsort (repeat_factor, repeat_factor_vec, compare_repeat_factors);
+ repeat_factor_vec.qsort (compare_repeat_factors);
/* It is generally best to combine as many base factors as possible
into a product before applying __builtin_powi to the result.
@@ -3750,7 +3733,7 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops)
t5 = t3 * t4
result = t5 * y */
- vec_len = VEC_length (repeat_factor, repeat_factor_vec);
+ vec_len = repeat_factor_vec.length ();
/* Repeatedly look for opportunities to create a builtin_powi call. */
while (true)
@@ -3762,7 +3745,7 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops)
it if the minimum occurrence count for its factors is at
least 2, or just use this cached product as our next
multiplicand if the minimum occurrence count is 1. */
- FOR_EACH_VEC_ELT (repeat_factor, repeat_factor_vec, j, rf1)
+ FOR_EACH_VEC_ELT (repeat_factor_vec, j, rf1)
{
if (rf1->repr && rf1->count > 0)
break;
@@ -3783,7 +3766,7 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops)
fputs ("Multiplying by cached product ", dump_file);
for (elt = j; elt < vec_len; elt++)
{
- rf = &VEC_index (repeat_factor, repeat_factor_vec, elt);
+ rf = &repeat_factor_vec[elt];
print_generic_expr (dump_file, rf->factor, 0);
if (elt < vec_len - 1)
fputs (" * ", dump_file);
@@ -3809,7 +3792,7 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops)
dump_file);
for (elt = j; elt < vec_len; elt++)
{
- rf = &VEC_index (repeat_factor, repeat_factor_vec, elt);
+ rf = &repeat_factor_vec[elt];
print_generic_expr (dump_file, rf->factor, 0);
if (elt < vec_len - 1)
fputs (" * ", dump_file);
@@ -3825,7 +3808,7 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops)
vector whose occurrence count is at least 2. If no such
factor exists, there are no builtin_powi opportunities
remaining. */
- FOR_EACH_VEC_ELT (repeat_factor, repeat_factor_vec, j, rf1)
+ FOR_EACH_VEC_ELT (repeat_factor_vec, j, rf1)
{
if (rf1->count >= 2)
break;
@@ -3843,7 +3826,7 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops)
fputs ("Building __builtin_pow call for (", dump_file);
for (elt = j; elt < vec_len; elt++)
{
- rf = &VEC_index (repeat_factor, repeat_factor_vec, elt);
+ rf = &repeat_factor_vec[elt];
print_generic_expr (dump_file, rf->factor, 0);
if (elt < vec_len - 1)
fputs (" * ", dump_file);
@@ -3868,8 +3851,8 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops)
{
tree op1, op2;
- rf1 = &VEC_index (repeat_factor, repeat_factor_vec, ii);
- rf2 = &VEC_index (repeat_factor, repeat_factor_vec, ii + 1);
+ rf1 = &repeat_factor_vec[ii];
+ rf2 = &repeat_factor_vec[ii + 1];
/* Init the last factor's representative to be itself. */
if (!rf2->repr)
@@ -3893,7 +3876,7 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops)
/* Form a call to __builtin_powi for the maximum product
just formed, raised to the power obtained earlier. */
- rf1 = &VEC_index (repeat_factor, repeat_factor_vec, j);
+ rf1 = &repeat_factor_vec[j];
iter_result = make_temp_ssa_name (type, NULL, "reassocpow");
pow_stmt = gimple_build_call (powi_fndecl, 2, rf1->repr,
build_int_cst (integer_type_node,
@@ -3926,16 +3909,16 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops)
unsigned k = power;
unsigned n;
- rf1 = &VEC_index (repeat_factor, repeat_factor_vec, i);
+ rf1 = &repeat_factor_vec[i];
rf1->count -= power;
- FOR_EACH_VEC_ELT_REVERSE (operand_entry_t, *ops, n, oe)
+ FOR_EACH_VEC_ELT_REVERSE (*ops, n, oe)
{
if (oe->op == rf1->factor)
{
if (oe->count <= k)
{
- VEC_ordered_remove (operand_entry_t, *ops, n);
+ ops->ordered_remove (n);
k -= oe->count;
if (k == 0)
@@ -3955,8 +3938,8 @@ attempt_builtin_powi (gimple stmt, VEC(operand_entry_t, heap) **ops)
remaining occurrence count of 0 or 1, and those with a count of 1
don't have cached representatives. Re-sort the ops vector and
clean up. */
- VEC_qsort (operand_entry_t, *ops, sort_by_operand_rank);
- VEC_free (repeat_factor, heap, repeat_factor_vec);
+ ops->qsort (sort_by_operand_rank);
+ repeat_factor_vec.release ();
/* Return the final product computed herein. Note that there may
still be some elements with single occurrence count left in OPS;
@@ -4084,7 +4067,7 @@ reassociate_bb (basic_block bb)
if (associative_tree_code (rhs_code))
{
- VEC(operand_entry_t, heap) *ops = NULL;
+ vec<operand_entry_t> ops = vec<operand_entry_t>();
tree powi_result = NULL_TREE;
/* There may be no immediate uses left by the time we
@@ -4094,12 +4077,12 @@ reassociate_bb (basic_block bb)
gimple_set_visited (stmt, true);
linearize_expr_tree (&ops, stmt, true, true);
- VEC_qsort (operand_entry_t, ops, sort_by_operand_rank);
+ ops.qsort (sort_by_operand_rank);
optimize_ops_list (rhs_code, &ops);
if (undistribute_ops_list (rhs_code, &ops,
loop_containing_stmt (stmt)))
{
- VEC_qsort (operand_entry_t, ops, sort_by_operand_rank);
+ ops.qsort (sort_by_operand_rank);
optimize_ops_list (rhs_code, &ops);
}
@@ -4113,11 +4096,11 @@ reassociate_bb (basic_block bb)
/* If the operand vector is now empty, all operands were
consumed by the __builtin_powi optimization. */
- if (VEC_length (operand_entry_t, ops) == 0)
+ if (ops.length () == 0)
transform_stmt_to_copy (&gsi, stmt, powi_result);
- else if (VEC_length (operand_entry_t, ops) == 1)
+ else if (ops.length () == 1)
{
- tree last_op = VEC_last (operand_entry_t, ops)->op;
+ tree last_op = ops.last ()->op;
if (powi_result)
transform_stmt_to_multiply (&gsi, stmt, last_op,
@@ -4128,7 +4111,7 @@ reassociate_bb (basic_block bb)
else
{
enum machine_mode mode = TYPE_MODE (TREE_TYPE (lhs));
- int ops_num = VEC_length (operand_entry_t, ops);
+ int ops_num = ops.length ();
int width = get_reassociation_width (ops_num, rhs_code, mode);
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -4136,7 +4119,7 @@ reassociate_bb (basic_block bb)
"Width = %d was chosen for reassociation\n", width);
if (width > 1
- && VEC_length (operand_entry_t, ops) > 3)
+ && ops.length () > 3)
rewrite_expr_tree_parallel (stmt, width, ops);
else
rewrite_expr_tree (stmt, 0, ops, false);
@@ -4160,7 +4143,7 @@ reassociate_bb (basic_block bb)
}
}
- VEC_free (operand_entry_t, heap, ops);
+ ops.release ();
}
}
}
@@ -4170,18 +4153,18 @@ reassociate_bb (basic_block bb)
reassociate_bb (son);
}
-void dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops);
-void debug_ops_vector (VEC (operand_entry_t, heap) *ops);
+void dump_ops_vector (FILE *file, vec<operand_entry_t> ops);
+void debug_ops_vector (vec<operand_entry_t> ops);
/* Dump the operand entry vector OPS to FILE. */
void
-dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops)
+dump_ops_vector (FILE *file, vec<operand_entry_t> ops)
{
operand_entry_t oe;
unsigned int i;
- FOR_EACH_VEC_ELT (operand_entry_t, ops, i, oe)
+ FOR_EACH_VEC_ELT (ops, i, oe)
{
fprintf (file, "Op %d -> rank: %d, tree: ", i, oe->rank);
print_generic_expr (file, oe->op, 0);
@@ -4191,7 +4174,7 @@ dump_ops_vector (FILE *file, VEC (operand_entry_t, heap) *ops)
/* Dump the operand entry vector OPS to STDERR. */
DEBUG_FUNCTION void
-debug_ops_vector (VEC (operand_entry_t, heap) *ops)
+debug_ops_vector (vec<operand_entry_t> ops)
{
dump_ops_vector (stderr, ops);
}
@@ -4245,7 +4228,7 @@ init_reassoc (void)
free (bbs);
calculate_dominance_info (CDI_POST_DOMINATORS);
- plus_negates = NULL;
+ plus_negates = vec<tree>();
}
/* Cleanup after the reassociation pass, and print stats if
@@ -4270,7 +4253,7 @@ fini_reassoc (void)
pointer_map_destroy (operand_rank);
free_alloc_pool (operand_entry_pool);
free (bb_rank);
- VEC_free (tree, heap, plus_negates);
+ plus_negates.release ();
free_dominance_info (CDI_POST_DOMINATORS);
loop_optimizer_finalize ();
}
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 2391632364d..ed89a5ab0d9 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -149,17 +149,15 @@ static unsigned int next_value_id;
detection. */
static unsigned int next_dfs_num;
-static VEC (tree, heap) *sccstack;
+static vec<tree> sccstack;
-DEF_VEC_P(vn_ssa_aux_t);
-DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
/* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
are allocated on an obstack for locality reasons, and to free them
- without looping over the VEC. */
+ without looping over the vec. */
-static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
+static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name. */
@@ -167,8 +165,7 @@ static struct obstack vn_ssa_aux_obstack;
vn_ssa_aux_t
VN_INFO (tree name)
{
- vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
- SSA_NAME_VERSION (name));
+ vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
gcc_checking_assert (res);
return res;
}
@@ -179,8 +176,7 @@ VN_INFO (tree name)
static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
- VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
- SSA_NAME_VERSION (name), value);
+ vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}
/* Initialize the value numbering info for a given SSA name.
@@ -193,11 +189,9 @@ VN_INFO_GET (tree name)
newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
memset (newinfo, 0, sizeof (struct vn_ssa_aux));
- if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
- VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
- SSA_NAME_VERSION (name) + 1);
- VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
- SSA_NAME_VERSION (name), newinfo);
+ if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
+ vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
+ vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
return newinfo;
}
@@ -351,7 +345,7 @@ static void
free_phi (void *vp)
{
vn_phi_t phi = (vn_phi_t) vp;
- VEC_free (tree, heap, phi->phiargs);
+ phi->phiargs.release ();
}
/* Free a reference operation structure VP. */
@@ -360,7 +354,7 @@ static void
free_reference (void *vp)
{
vn_reference_t vr = (vn_reference_t) vp;
- VEC_free (vn_reference_op_s, heap, vr->operands);
+ vr->operands.release ();
}
/* Hash table equality function for vn_constant_t. */
@@ -493,7 +487,7 @@ vn_reference_compute_hash (const vn_reference_t vr1)
HOST_WIDE_INT off = -1;
bool deref = false;
- FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
+ FOR_EACH_VEC_ELT (vr1->operands, i, vro)
{
if (vro->opcode == MEM_REF)
deref = true;
@@ -582,7 +576,7 @@ vn_reference_eq (const void *p1, const void *p2)
vn_reference_op_t vro1, vro2;
vn_reference_op_s tem1, tem2;
bool deref1 = false, deref2 = false;
- for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
+ for (; vr1->operands.iterate (i, &vro1); i++)
{
if (vro1->opcode == MEM_REF)
deref1 = true;
@@ -590,7 +584,7 @@ vn_reference_eq (const void *p1, const void *p2)
break;
off1 += vro1->off;
}
- for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
+ for (; vr2->operands.iterate (j, &vro2); j++)
{
if (vro2->opcode == MEM_REF)
deref2 = true;
@@ -625,8 +619,8 @@ vn_reference_eq (const void *p1, const void *p2)
++j;
++i;
}
- while (VEC_length (vn_reference_op_s, vr1->operands) != i
- || VEC_length (vn_reference_op_s, vr2->operands) != j);
+ while (vr1->operands.length () != i
+ || vr2->operands.length () != j);
return true;
}
@@ -635,7 +629,7 @@ vn_reference_eq (const void *p1, const void *p2)
vn_reference_op_s's. */
void
-copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
+copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
if (TREE_CODE (ref) == TARGET_MEM_REF)
{
@@ -648,21 +642,21 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
temp.op1 = TMR_STEP (ref);
temp.op2 = TMR_OFFSET (ref);
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
memset (&temp, 0, sizeof (temp));
temp.type = NULL_TREE;
temp.opcode = ERROR_MARK;
temp.op0 = TMR_INDEX2 (ref);
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
memset (&temp, 0, sizeof (temp));
temp.type = NULL_TREE;
temp.opcode = TREE_CODE (TMR_BASE (ref));
temp.op0 = TMR_BASE (ref);
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
return;
}
@@ -757,7 +751,7 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
temp.opcode = MEM_REF;
temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
temp.off = 0;
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
temp.opcode = ADDR_EXPR;
temp.op0 = build_fold_addr_expr (ref);
temp.type = TREE_TYPE (temp.op0);
@@ -796,7 +790,7 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
default:
gcc_unreachable ();
}
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
if (REFERENCE_CLASS_P (ref)
|| TREE_CODE (ref) == MODIFY_EXPR
@@ -816,7 +810,7 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
bool
ao_ref_init_from_vn_reference (ao_ref *ref,
alias_set_type set, tree type,
- VEC (vn_reference_op_s, heap) *ops)
+ vec<vn_reference_op_s> ops)
{
vn_reference_op_t op;
unsigned i;
@@ -829,7 +823,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
alias_set_type base_alias_set = -1;
/* First get the final access size from just the outermost expression. */
- op = &VEC_index (vn_reference_op_s, ops, 0);
+ op = &ops[0];
if (op->opcode == COMPONENT_REF)
size_tree = DECL_SIZE (op->op0);
else if (op->opcode == BIT_FIELD_REF)
@@ -856,7 +850,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
/* Compute cumulative bit-offset for nested component-refs and array-refs,
and find the ultimate containing object. */
- FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
+ FOR_EACH_VEC_ELT (ops, i, op)
{
switch (op->opcode)
{
@@ -869,7 +863,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
&& op->op0
&& DECL_P (TREE_OPERAND (op->op0, 0)))
{
- vn_reference_op_t pop = &VEC_index (vn_reference_op_s, ops, i-1);
+ vn_reference_op_t pop = &ops[i-1];
base = TREE_OPERAND (op->op0, 0);
if (pop->off == -1)
{
@@ -990,7 +984,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
void
copy_reference_ops_from_call (gimple call,
- VEC(vn_reference_op_s, heap) **result)
+ vec<vn_reference_op_s> *result)
{
vn_reference_op_s temp;
unsigned i;
@@ -1006,7 +1000,7 @@ copy_reference_ops_from_call (gimple call,
temp.type = TREE_TYPE (lhs);
temp.op0 = lhs;
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
}
/* Copy the type, opcode, function being called and static chain. */
@@ -1016,7 +1010,7 @@ copy_reference_ops_from_call (gimple call,
temp.op0 = gimple_call_fn (call);
temp.op1 = gimple_call_chain (call);
temp.off = -1;
- VEC_safe_push (vn_reference_op_s, heap, *result, temp);
+ result->safe_push (temp);
/* Copy the call arguments. As they can be references as well,
just chain them together. */
@@ -1030,10 +1024,10 @@ copy_reference_ops_from_call (gimple call,
/* Create a vector of vn_reference_op_s structures from REF, a
REFERENCE_CLASS_P tree. The vector is not shared. */
-static VEC(vn_reference_op_s, heap) *
+static vec<vn_reference_op_s>
create_reference_ops_from_ref (tree ref)
{
- VEC (vn_reference_op_s, heap) *result = NULL;
+ vec<vn_reference_op_s> result = vec<vn_reference_op_s>();
copy_reference_ops_from_ref (ref, &result);
return result;
@@ -1042,10 +1036,10 @@ create_reference_ops_from_ref (tree ref)
/* Create a vector of vn_reference_op_s structures from CALL, a
call statement. The vector is not shared. */
-static VEC(vn_reference_op_s, heap) *
+static vec<vn_reference_op_s>
create_reference_ops_from_call (gimple call)
{
- VEC (vn_reference_op_s, heap) *result = NULL;
+ vec<vn_reference_op_s> result = vec<vn_reference_op_s>();
copy_reference_ops_from_call (call, &result);
return result;
@@ -1054,12 +1048,12 @@ create_reference_ops_from_call (gimple call)
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
*I_P to point to the last element of the replacement. */
void
-vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
+vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
unsigned int *i_p)
{
unsigned int i = *i_p;
- vn_reference_op_t op = &VEC_index (vn_reference_op_s, *ops, i);
- vn_reference_op_t mem_op = &VEC_index (vn_reference_op_s, *ops, i - 1);
+ vn_reference_op_t op = &(*ops)[i];
+ vn_reference_op_t mem_op = &(*ops)[i - 1];
tree addr_base;
HOST_WIDE_INT addr_offset = 0;
@@ -1086,12 +1080,12 @@ vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
*I_P to point to the last element of the replacement. */
static void
-vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
+vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
unsigned int *i_p)
{
unsigned int i = *i_p;
- vn_reference_op_t op = &VEC_index (vn_reference_op_s, *ops, i);
- vn_reference_op_t mem_op = &VEC_index (vn_reference_op_s, *ops, i - 1);
+ vn_reference_op_t op = &(*ops)[i];
+ vn_reference_op_t mem_op = &(*ops)[i - 1];
gimple def_stmt;
enum tree_code code;
double_int off;
@@ -1163,24 +1157,24 @@ vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
- VEC (vn_reference_op_s, heap) *operands = ref->operands;
+ vec<vn_reference_op_s> operands = ref->operands;
vn_reference_op_t op;
/* Try to simplify the translated expression if it is
a call to a builtin function with at most two arguments. */
- op = &VEC_index (vn_reference_op_s, operands, 0);
+ op = &operands[0];
if (op->opcode == CALL_EXPR
&& TREE_CODE (op->op0) == ADDR_EXPR
&& TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
&& DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
- && VEC_length (vn_reference_op_s, operands) >= 2
- && VEC_length (vn_reference_op_s, operands) <= 3)
+ && operands.length () >= 2
+ && operands.length () <= 3)
{
vn_reference_op_t arg0, arg1 = NULL;
bool anyconst = false;
- arg0 = &VEC_index (vn_reference_op_s, operands, 1);
- if (VEC_length (vn_reference_op_s, operands) > 2)
- arg1 = &VEC_index (vn_reference_op_s, operands, 2);
+ arg0 = &operands[1];
+ if (operands.length () > 2)
+ arg1 = &operands[2];
if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
|| (arg0->opcode == ADDR_EXPR
&& is_gimple_min_invariant (arg0->op0)))
@@ -1209,10 +1203,10 @@ fully_constant_vn_reference_p (vn_reference_t ref)
else if (op->opcode == ARRAY_REF
&& TREE_CODE (op->op0) == INTEGER_CST
&& integer_zerop (op->op1)
- && VEC_length (vn_reference_op_s, operands) == 2)
+ && operands.length () == 2)
{
vn_reference_op_t arg0;
- arg0 = &VEC_index (vn_reference_op_s, operands, 1);
+ arg0 = &operands[1];
if (arg0->opcode == STRING_CST
&& (TYPE_MODE (op->type)
== TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
@@ -1232,15 +1226,15 @@ fully_constant_vn_reference_p (vn_reference_t ref)
the vector passed in is returned. *VALUEIZED_ANYTHING will specify
whether any operands were valueized. */
-static VEC (vn_reference_op_s, heap) *
-valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
+static vec<vn_reference_op_s>
+valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
vn_reference_op_t vro;
unsigned int i;
*valueized_anything = false;
- FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
+ FOR_EACH_VEC_ELT (orig, i, vro)
{
if (vro->opcode == SSA_NAME
|| (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
@@ -1279,13 +1273,11 @@ valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
if (i > 0
&& vro->op0
&& TREE_CODE (vro->op0) == ADDR_EXPR
- && VEC_index (vn_reference_op_s,
- orig, i - 1).opcode == MEM_REF)
+ && orig[i - 1].opcode == MEM_REF)
vn_reference_fold_indirect (&orig, &i);
else if (i > 0
&& vro->opcode == SSA_NAME
- && VEC_index (vn_reference_op_s,
- orig, i - 1).opcode == MEM_REF)
+ && orig[i - 1].opcode == MEM_REF)
vn_reference_maybe_forwprop_address (&orig, &i);
/* If it transforms a non-constant ARRAY_REF into a constant
one, adjust the constant offset. */
@@ -1306,26 +1298,26 @@ valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
return orig;
}
-static VEC (vn_reference_op_s, heap) *
-valueize_refs (VEC (vn_reference_op_s, heap) *orig)
+static vec<vn_reference_op_s>
+valueize_refs (vec<vn_reference_op_s> orig)
{
bool tem;
return valueize_refs_1 (orig, &tem);
}
-static VEC(vn_reference_op_s, heap) *shared_lookup_references;
+static vec<vn_reference_op_s> shared_lookup_references;
/* Create a vector of vn_reference_op_s structures from REF, a
REFERENCE_CLASS_P tree. The vector is shared among all callers of
this function. *VALUEIZED_ANYTHING will specify whether any
operands were valueized. */
-static VEC(vn_reference_op_s, heap) *
+static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
if (!ref)
- return NULL;
- VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+ return vec<vn_reference_op_s>();
+ shared_lookup_references.truncate (0);
copy_reference_ops_from_ref (ref, &shared_lookup_references);
shared_lookup_references = valueize_refs_1 (shared_lookup_references,
valueized_anything);
@@ -1336,12 +1328,12 @@ valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
call statement. The vector is shared among all callers of
this function. */
-static VEC(vn_reference_op_s, heap) *
+static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gimple call)
{
if (!call)
- return NULL;
- VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
+ return vec<vn_reference_op_s>();
+ shared_lookup_references.truncate (0);
copy_reference_ops_from_call (call, &shared_lookup_references);
shared_lookup_references = valueize_refs (shared_lookup_references);
return shared_lookup_references;
@@ -1425,8 +1417,8 @@ static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
alias_set_type set,
tree type,
- VEC (vn_reference_op_s,
- heap) *operands,
+ vec<vn_reference_op_s,
+ va_heap> operands,
tree value)
{
struct vn_reference_s vr1;
@@ -1444,8 +1436,7 @@ vn_reference_lookup_or_insert_for_pieces (tree vuse,
else
value_id = get_or_alloc_constant_value_id (value);
return vn_reference_insert_pieces (vuse, set, type,
- VEC_copy (vn_reference_op_s, heap,
- operands), value, value_id);
+ operands.copy (), value, value_id);
}
/* Callback for walk_non_aliased_vuses. Tries to perform a lookup
@@ -1460,18 +1451,19 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
tree base;
HOST_WIDE_INT offset, maxsize;
- static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
+ static vec<vn_reference_op_s>
+ lhs_ops = vec<vn_reference_op_s>();
ao_ref lhs_ref;
bool lhs_ref_ok = false;
/* First try to disambiguate after value-replacing in the definitions LHS. */
if (is_gimple_assign (def_stmt))
{
- VEC (vn_reference_op_s, heap) *tem;
+ vec<vn_reference_op_s> tem;
tree lhs = gimple_assign_lhs (def_stmt);
bool valueized_anything = false;
/* Avoid re-allocation overhead. */
- VEC_truncate (vn_reference_op_s, lhs_ops, 0);
+ lhs_ops.truncate (0);
copy_reference_ops_from_ref (lhs, &lhs_ops);
tem = lhs_ops;
lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
@@ -1665,7 +1657,8 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
tree base2;
HOST_WIDE_INT offset2, size2, maxsize2;
int i, j;
- VEC (vn_reference_op_s, heap) *rhs = NULL;
+ vec<vn_reference_op_s>
+ rhs = vec<vn_reference_op_s>();
vn_reference_op_t vro;
ao_ref r;
@@ -1685,12 +1678,10 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
/* Find the common base of ref and the lhs. lhs_ops already
contains valueized operands for the lhs. */
- i = VEC_length (vn_reference_op_s, vr->operands) - 1;
- j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
+ i = vr->operands.length () - 1;
+ j = lhs_ops.length () - 1;
while (j >= 0 && i >= 0
- && vn_reference_op_eq (&VEC_index (vn_reference_op_s,
- vr->operands, i),
- &VEC_index (vn_reference_op_s, lhs_ops, j)))
+ && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
{
i--;
j--;
@@ -1703,10 +1694,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
don't care here - further lookups with the rewritten operands
will simply fail if we messed up types too badly. */
if (j == 0 && i >= 0
- && VEC_index (vn_reference_op_s, lhs_ops, 0).opcode == MEM_REF
- && VEC_index (vn_reference_op_s, lhs_ops, 0).off != -1
- && (VEC_index (vn_reference_op_s, lhs_ops, 0).off
- == VEC_index (vn_reference_op_s, vr->operands, i).off))
+ && lhs_ops[0].opcode == MEM_REF
+ && lhs_ops[0].off != -1
+ && (lhs_ops[0].off == vr->operands[i].off))
i--, j--;
/* i now points to the first additional op.
@@ -1719,22 +1709,19 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
/* Now re-write REF to be based on the rhs of the assignment. */
copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
/* We need to pre-pend vr->operands[0..i] to rhs. */
- if (i + 1 + VEC_length (vn_reference_op_s, rhs)
- > VEC_length (vn_reference_op_s, vr->operands))
+ if (i + 1 + rhs.length () > vr->operands.length ())
{
- VEC (vn_reference_op_s, heap) *old = vr->operands;
- VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
- i + 1 + VEC_length (vn_reference_op_s, rhs));
+ vec<vn_reference_op_s> old = vr->operands;
+ vr->operands.safe_grow (i + 1 + rhs.length ());
if (old == shared_lookup_references
&& vr->operands != old)
- shared_lookup_references = NULL;
+ shared_lookup_references = vec<vn_reference_op_s>();
}
else
- VEC_truncate (vn_reference_op_s, vr->operands,
- i + 1 + VEC_length (vn_reference_op_s, rhs));
- FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
- VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, *vro);
- VEC_free (vn_reference_op_s, heap, rhs);
+ vr->operands.truncate (i + 1 + rhs.length ());
+ FOR_EACH_VEC_ELT (rhs, j, vro)
+ vr->operands[i + 1 + j] = *vro;
+ rhs.release ();
vr->operands = valueize_refs (vr->operands);
vr->hashcode = vn_reference_compute_hash (vr);
@@ -1854,16 +1841,16 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
return (void *)-1;
/* Make room for 2 operands in the new reference. */
- if (VEC_length (vn_reference_op_s, vr->operands) < 2)
+ if (vr->operands.length () < 2)
{
- VEC (vn_reference_op_s, heap) *old = vr->operands;
- VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
+ vec<vn_reference_op_s> old = vr->operands;
+ vr->operands.safe_grow_cleared (2);
if (old == shared_lookup_references
&& vr->operands != old)
- shared_lookup_references = NULL;
+ shared_lookup_references.create (0);
}
else
- VEC_truncate (vn_reference_op_s, vr->operands, 2);
+ vr->operands.truncate (2);
/* The looked-through reference is a simple MEM_REF. */
memset (&op, 0, sizeof (op));
@@ -1871,12 +1858,12 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
op.opcode = MEM_REF;
op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
op.off = at - lhs_offset + rhs_offset;
- VEC_replace (vn_reference_op_s, vr->operands, 0, op);
+ vr->operands[0] = op;
op.type = TREE_TYPE (rhs);
op.opcode = TREE_CODE (rhs);
op.op0 = rhs;
op.off = -1;
- VEC_replace (vn_reference_op_s, vr->operands, 1, op);
+ vr->operands[1] = op;
vr->hashcode = vn_reference_compute_hash (vr);
/* Adjust *ref from the new operands. */
@@ -1905,7 +1892,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
- VEC (vn_reference_op_s, heap) *operands,
+ vec<vn_reference_op_s> operands,
vn_reference_t *vnresult, vn_lookup_kind kind)
{
struct vn_reference_s vr1;
@@ -1917,13 +1904,12 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
*vnresult = NULL;
vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
- VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
- VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
- VEC_length (vn_reference_op_s, operands));
- memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
- VEC_address (vn_reference_op_s, operands),
+ shared_lookup_references.truncate (0);
+ shared_lookup_references.safe_grow (operands.length ());
+ memcpy (shared_lookup_references.address (),
+ operands.address (),
sizeof (vn_reference_op_s)
- * VEC_length (vn_reference_op_s, operands));
+ * operands.length ());
vr1.operands = operands = shared_lookup_references
= valueize_refs (shared_lookup_references);
vr1.type = type;
@@ -1945,7 +1931,7 @@ vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
vn_reference_lookup_2,
vn_reference_lookup_3, &vr1);
if (vr1.operands != operands)
- VEC_free (vn_reference_op_s, heap, vr1.operands);
+ vr1.operands.release ();
}
if (*vnresult)
@@ -1964,7 +1950,7 @@ tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
vn_reference_t *vnresult)
{
- VEC (vn_reference_op_s, heap) *operands;
+ vec<vn_reference_op_s> operands;
struct vn_reference_s vr1;
tree cst;
bool valuezied_anything;
@@ -1998,7 +1984,7 @@ vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
vn_reference_lookup_2,
vn_reference_lookup_3, &vr1);
if (vr1.operands != operands)
- VEC_free (vn_reference_op_s, heap, vr1.operands);
+ vr1.operands.release ();
if (wvnresult)
{
if (vnresult)
@@ -2060,7 +2046,7 @@ vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
- VEC (vn_reference_op_s, heap) *operands,
+ vec<vn_reference_op_s> operands,
tree result, unsigned int value_id)
{
@@ -2416,12 +2402,12 @@ vn_phi_compute_hash (vn_phi_t vp1)
/* If all PHI arguments are constants we need to distinguish
the PHI node via its type. */
- type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
+ type = TREE_TYPE (vp1->phiargs[0]);
result += (INTEGRAL_TYPE_P (type)
+ (INTEGRAL_TYPE_P (type)
? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
- FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
+ FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
{
if (phi1op == VN_TOP)
continue;
@@ -2458,15 +2444,15 @@ vn_phi_eq (const void *p1, const void *p2)
/* If the PHI nodes do not have compatible types
they are not the same. */
- if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
- TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
+ if (!types_compatible_p (TREE_TYPE (vp1->phiargs[0]),
+ TREE_TYPE (vp2->phiargs[0])))
return false;
/* Any phi in the same block will have it's arguments in the
same edge order, because of how we store phi nodes. */
- FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
+ FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
{
- tree phi2op = VEC_index (tree, vp2->phiargs, i);
+ tree phi2op = vp2->phiargs[i];
if (phi1op == VN_TOP || phi2op == VN_TOP)
continue;
if (!expressions_equal_p (phi1op, phi2op))
@@ -2477,7 +2463,7 @@ vn_phi_eq (const void *p1, const void *p2)
return false;
}
-static VEC(tree, heap) *shared_lookup_phiargs;
+static vec<tree> shared_lookup_phiargs;
/* Lookup PHI in the current hash table, and return the resulting
value number if it exists in the hash table. Return NULL_TREE if
@@ -2490,14 +2476,14 @@ vn_phi_lookup (gimple phi)
struct vn_phi_s vp1;
unsigned i;
- VEC_truncate (tree, shared_lookup_phiargs, 0);
+ shared_lookup_phiargs.truncate (0);
/* Canonicalize the SSA_NAME's to their value number. */
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
tree def = PHI_ARG_DEF (phi, i);
def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
- VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
+ shared_lookup_phiargs.safe_push (def);
}
vp1.phiargs = shared_lookup_phiargs;
vp1.block = gimple_bb (phi);
@@ -2521,14 +2507,14 @@ vn_phi_insert (gimple phi, tree result)
void **slot;
vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
unsigned i;
- VEC (tree, heap) *args = NULL;
+ vec<tree> args = vec<tree>();
/* Canonicalize the SSA_NAME's to their value number. */
for (i = 0; i < gimple_phi_num_args (phi); i++)
{
tree def = PHI_ARG_DEF (phi, i);
def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
- VEC_safe_push (tree, heap, args, def);
+ args.safe_push (def);
}
vp1->value_id = VN_INFO (result)->value_id;
vp1->phiargs = args;
@@ -2549,13 +2535,13 @@ vn_phi_insert (gimple phi, tree result)
/* Print set of components in strongly connected component SCC to OUT. */
static void
-print_scc (FILE *out, VEC (tree, heap) *scc)
+print_scc (FILE *out, vec<tree> scc)
{
tree var;
unsigned int i;
fprintf (out, "SCC consists of:");
- FOR_EACH_VEC_ELT (tree, scc, i, var)
+ FOR_EACH_VEC_ELT (scc, i, var)
{
fprintf (out, " ");
print_generic_expr (out, var, 0);
@@ -3557,9 +3543,9 @@ compare_ops (const void *pa, const void *pb)
array will give you the members in RPO order. */
static void
-sort_scc (VEC (tree, heap) *scc)
+sort_scc (vec<tree> scc)
{
- VEC_qsort (tree, scc, compare_ops);
+ scc.qsort (compare_ops);
}
/* Insert the no longer used nary ONARY to the hash INFO. */
@@ -3582,7 +3568,7 @@ copy_phi (vn_phi_t ophi, vn_tables_t info)
vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
void **slot;
memcpy (phi, ophi, sizeof (*phi));
- ophi->phiargs = NULL;
+ ophi->phiargs.create (0);
slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
gcc_assert (!*slot);
*slot = phi;
@@ -3597,7 +3583,7 @@ copy_reference (vn_reference_t oref, vn_tables_t info)
void **slot;
ref = (vn_reference_t) pool_alloc (info->references_pool);
memcpy (ref, oref, sizeof (*ref));
- oref->operands = NULL;
+ oref->operands.create (0);
slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
INSERT);
if (*slot)
@@ -3608,7 +3594,7 @@ copy_reference (vn_reference_t oref, vn_tables_t info)
/* Process a strongly connected component in the SSA graph. */
static void
-process_scc (VEC (tree, heap) *scc)
+process_scc (vec<tree> scc)
{
tree var;
unsigned int i;
@@ -3620,9 +3606,9 @@ process_scc (VEC (tree, heap) *scc)
vn_reference_t ref;
/* If the SCC has a single member, just visit it. */
- if (VEC_length (tree, scc) == 1)
+ if (scc.length () == 1)
{
- tree use = VEC_index (tree, scc, 0);
+ tree use = scc[0];
if (VN_INFO (use)->use_processed)
return;
/* We need to make sure it doesn't form a cycle itself, which can
@@ -3658,9 +3644,9 @@ process_scc (VEC (tree, heap) *scc)
gcc_obstack_init (&optimistic_info->nary_obstack);
empty_alloc_pool (optimistic_info->phis_pool);
empty_alloc_pool (optimistic_info->references_pool);
- FOR_EACH_VEC_ELT (tree, scc, i, var)
+ FOR_EACH_VEC_ELT (scc, i, var)
VN_INFO (var)->expr = NULL_TREE;
- FOR_EACH_VEC_ELT (tree, scc, i, var)
+ FOR_EACH_VEC_ELT (scc, i, var)
changed |= visit_use (var);
}
@@ -3678,8 +3664,6 @@ process_scc (VEC (tree, heap) *scc)
current_info = valid_info;
}
-DEF_VEC_O(ssa_op_iter);
-DEF_VEC_ALLOC_O(ssa_op_iter,heap);
/* Pop the components of the found SCC for NAME off the SCC stack
and process them. Returns true if all went well, false if
@@ -3688,33 +3672,33 @@ DEF_VEC_ALLOC_O(ssa_op_iter,heap);
static bool
extract_and_process_scc_for_name (tree name)
{
- VEC (tree, heap) *scc = NULL;
+ vec<tree> scc = vec<tree>();
tree x;
/* Found an SCC, pop the components off the SCC stack and
process them. */
do
{
- x = VEC_pop (tree, sccstack);
+ x = sccstack.pop ();
VN_INFO (x)->on_sccstack = false;
- VEC_safe_push (tree, heap, scc, x);
+ scc.safe_push (x);
} while (x != name);
/* Bail out of SCCVN in case a SCC turns out to be incredibly large. */
- if (VEC_length (tree, scc)
+ if (scc.length ()
> (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
{
if (dump_file)
fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
- "SCC size %u exceeding %u\n", VEC_length (tree, scc),
+ "SCC size %u exceeding %u\n", scc.length (),
(unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
- VEC_free (tree, heap, scc);
+ scc.release ();
return false;
}
- if (VEC_length (tree, scc) > 1)
+ if (scc.length () > 1)
sort_scc (scc);
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -3722,7 +3706,7 @@ extract_and_process_scc_for_name (tree name)
process_scc (scc);
- VEC_free (tree, heap, scc);
+ scc.release ();
return true;
}
@@ -3737,8 +3721,8 @@ extract_and_process_scc_for_name (tree name)
static bool
DFS (tree name)
{
- VEC(ssa_op_iter, heap) *itervec = NULL;
- VEC(tree, heap) *namevec = NULL;
+ vec<ssa_op_iter> itervec = vec<ssa_op_iter>();
+ vec<tree> namevec = vec<tree>();
use_operand_p usep = NULL;
gimple defstmt;
tree use;
@@ -3750,7 +3734,7 @@ start_over:
VN_INFO (name)->visited = true;
VN_INFO (name)->low = VN_INFO (name)->dfsnum;
- VEC_safe_push (tree, heap, sccstack, name);
+ sccstack.safe_push (name);
VN_INFO (name)->on_sccstack = true;
defstmt = SSA_NAME_DEF_STMT (name);
@@ -3776,25 +3760,25 @@ start_over:
if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
if (!extract_and_process_scc_for_name (name))
{
- VEC_free (tree, heap, namevec);
- VEC_free (ssa_op_iter, heap, itervec);
+ namevec.release ();
+ itervec.release ();
return false;
}
/* Check if we are done. */
- if (VEC_empty (tree, namevec))
+ if (namevec.is_empty ())
{
- VEC_free (tree, heap, namevec);
- VEC_free (ssa_op_iter, heap, itervec);
+ namevec.release ();
+ itervec.release ();
return true;
}
/* Restore the last use walker and continue walking there. */
use = name;
- name = VEC_pop (tree, namevec);
- memcpy (&iter, &VEC_last (ssa_op_iter, itervec),
+ name = namevec.pop ();
+ memcpy (&iter, &itervec.last (),
sizeof (ssa_op_iter));
- VEC_pop (ssa_op_iter, itervec);
+ itervec.pop ();
goto continue_walking;
}
@@ -3808,8 +3792,8 @@ start_over:
{
/* Recurse by pushing the current use walking state on
the stack and starting over. */
- VEC_safe_push(ssa_op_iter, heap, itervec, iter);
- VEC_safe_push(tree, heap, namevec, name);
+ itervec.safe_push (iter);
+ namevec.safe_push (name);
name = use;
goto start_over;
@@ -3869,7 +3853,7 @@ init_scc_vn (void)
int *rpo_numbers_temp;
calculate_dominance_info (CDI_DOMINATORS);
- sccstack = NULL;
+ sccstack.create (0);
constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
free);
@@ -3878,15 +3862,14 @@ init_scc_vn (void)
next_dfs_num = 1;
next_value_id = 1;
- vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
+ vn_ssa_aux_table.create (num_ssa_names + 1);
/* VEC_alloc doesn't actually grow it to the right size, it just
preallocates the space to do so. */
- VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table,
- num_ssa_names + 1);
+ vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
gcc_obstack_init (&vn_ssa_aux_obstack);
- shared_lookup_phiargs = NULL;
- shared_lookup_references = NULL;
+ shared_lookup_phiargs.create (0);
+ shared_lookup_references.create (0);
rpo_numbers = XNEWVEC (int, last_basic_block);
rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
@@ -3930,8 +3913,8 @@ free_scc_vn (void)
htab_delete (constant_to_value_id);
BITMAP_FREE (constant_value_ids);
- VEC_free (tree, heap, shared_lookup_phiargs);
- VEC_free (vn_reference_op_s, heap, shared_lookup_references);
+ shared_lookup_phiargs.release ();
+ shared_lookup_references.release ();
XDELETEVEC (rpo_numbers);
for (i = 0; i < num_ssa_names; i++)
@@ -3942,9 +3925,9 @@ free_scc_vn (void)
release_ssa_name (name);
}
obstack_free (&vn_ssa_aux_obstack, NULL);
- VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
+ vn_ssa_aux_table.release ();
- VEC_free (tree, heap, sccstack);
+ sccstack.release ();
free_vn_table (valid_info);
XDELETE (valid_info);
free_vn_table (optimistic_info);
diff --git a/gcc/tree-ssa-sccvn.h b/gcc/tree-ssa-sccvn.h
index cd418da8704..ea229b0c962 100644
--- a/gcc/tree-ssa-sccvn.h
+++ b/gcc/tree-ssa-sccvn.h
@@ -65,7 +65,7 @@ typedef struct vn_phi_s
/* Unique identifier that all expressions with the same value have. */
unsigned int value_id;
hashval_t hashcode;
- VEC (tree, heap) *phiargs;
+ vec<tree> phiargs;
basic_block block;
tree result;
} *vn_phi_t;
@@ -90,8 +90,6 @@ typedef struct vn_reference_op_struct
typedef vn_reference_op_s *vn_reference_op_t;
typedef const vn_reference_op_s *const_vn_reference_op_t;
-DEF_VEC_O(vn_reference_op_s);
-DEF_VEC_ALLOC_O(vn_reference_op_s, heap);
/* A reference operation in the hashtable is representation as
the vuse, representing the memory state at the time of
@@ -108,7 +106,7 @@ typedef struct vn_reference_s
tree vuse;
alias_set_type set;
tree type;
- VEC (vn_reference_op_s, heap) *operands;
+ vec<vn_reference_op_s> operands;
tree result;
tree result_vdef;
} *vn_reference_t;
@@ -193,19 +191,19 @@ vn_nary_op_t vn_nary_op_insert (tree, tree);
vn_nary_op_t vn_nary_op_insert_stmt (gimple, tree);
vn_nary_op_t vn_nary_op_insert_pieces (unsigned int, enum tree_code,
tree, tree *, tree, unsigned int);
-void vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **,
+void vn_reference_fold_indirect (vec<vn_reference_op_s> *,
unsigned int *);
-void copy_reference_ops_from_ref (tree, VEC(vn_reference_op_s, heap) **);
-void copy_reference_ops_from_call (gimple, VEC(vn_reference_op_s, heap) **);
+void copy_reference_ops_from_ref (tree, vec<vn_reference_op_s> *);
+void copy_reference_ops_from_call (gimple, vec<vn_reference_op_s> *);
bool ao_ref_init_from_vn_reference (ao_ref *, alias_set_type, tree,
- VEC (vn_reference_op_s, heap) *);
+ vec<vn_reference_op_s> );
tree vn_reference_lookup_pieces (tree, alias_set_type, tree,
- VEC (vn_reference_op_s, heap) *,
+ vec<vn_reference_op_s> ,
vn_reference_t *, vn_lookup_kind);
tree vn_reference_lookup (tree, tree, vn_lookup_kind, vn_reference_t *);
vn_reference_t vn_reference_insert (tree, tree, tree, tree);
vn_reference_t vn_reference_insert_pieces (tree, alias_set_type, tree,
- VEC (vn_reference_op_s, heap) *,
+ vec<vn_reference_op_s> ,
tree, unsigned int);
hashval_t vn_nary_op_compute_hash (const vn_nary_op_t);
diff --git a/gcc/tree-ssa-strlen.c b/gcc/tree-ssa-strlen.c
index 9d288b3e311..aa8b3d69164 100644
--- a/gcc/tree-ssa-strlen.c
+++ b/gcc/tree-ssa-strlen.c
@@ -33,7 +33,7 @@ along with GCC; see the file COPYING3. If not see
/* A vector indexed by SSA_NAME_VERSION. 0 means unknown, positive value
is an index into strinfo vector, negative value stands for
string length of a string literal (~strlen). */
-static VEC (int, heap) *ssa_ver_to_stridx;
+static vec<int> ssa_ver_to_stridx;
/* Number of currently active string indexes plus one. */
static int max_stridx;
@@ -84,8 +84,6 @@ typedef struct strinfo_struct
be invalidated. Always cleared by maybe_invalidate. */
bool dont_invalidate;
} *strinfo;
-DEF_VEC_P(strinfo);
-DEF_VEC_ALLOC_P(strinfo,heap);
/* Pool for allocating strinfo_struct entries. */
static alloc_pool strinfo_pool;
@@ -96,7 +94,7 @@ static alloc_pool strinfo_pool;
a basic block pointer to the owner basic_block if shared.
If some other bb wants to modify the vector, the vector needs
to be unshared first, and only the owner bb is supposed to free it. */
-static VEC(strinfo, heap) *stridx_to_strinfo;
+static vec<strinfo, va_heap, vl_embed> *stridx_to_strinfo;
/* One OFFSET->IDX mapping. */
struct stridxlist
@@ -180,7 +178,7 @@ get_stridx (tree exp)
tree s, o;
if (TREE_CODE (exp) == SSA_NAME)
- return VEC_index (int, ssa_ver_to_stridx, SSA_NAME_VERSION (exp));
+ return ssa_ver_to_stridx[SSA_NAME_VERSION (exp)];
if (TREE_CODE (exp) == ADDR_EXPR)
{
@@ -209,8 +207,8 @@ get_stridx (tree exp)
static inline bool
strinfo_shared (void)
{
- return VEC_length (strinfo, stridx_to_strinfo)
- && VEC_index (strinfo, stridx_to_strinfo, 0) != NULL;
+ return vec_safe_length (stridx_to_strinfo)
+ && (*stridx_to_strinfo)[0] != NULL;
}
/* Unshare strinfo vector that is shared with the immediate dominator. */
@@ -222,11 +220,11 @@ unshare_strinfo_vec (void)
unsigned int i = 0;
gcc_assert (strinfo_shared ());
- stridx_to_strinfo = VEC_copy (strinfo, heap, stridx_to_strinfo);
- for (i = 1; VEC_iterate (strinfo, stridx_to_strinfo, i, si); ++i)
+ stridx_to_strinfo = vec_safe_copy (stridx_to_strinfo);
+ for (i = 1; vec_safe_iterate (stridx_to_strinfo, i, &si); ++i)
if (si != NULL)
si->refcount++;
- VEC_replace (strinfo, stridx_to_strinfo, 0, NULL);
+ (*stridx_to_strinfo)[0] = NULL;
}
/* Attempt to create a string index for exp, ADDR_EXPR's operand.
@@ -297,7 +295,7 @@ new_stridx (tree exp)
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (exp))
return 0;
idx = max_stridx++;
- VEC_replace (int, ssa_ver_to_stridx, SSA_NAME_VERSION (exp), idx);
+ ssa_ver_to_stridx[SSA_NAME_VERSION (exp)] = idx;
return idx;
}
if (TREE_CODE (exp) == ADDR_EXPR)
@@ -365,9 +363,9 @@ free_strinfo (strinfo si)
static inline strinfo
get_strinfo (int idx)
{
- if (VEC_length (strinfo, stridx_to_strinfo) <= (unsigned int) idx)
+ if (vec_safe_length (stridx_to_strinfo) <= (unsigned int) idx)
return NULL;
- return VEC_index (strinfo, stridx_to_strinfo, idx);
+ return (*stridx_to_strinfo)[idx];
}
/* Set strinfo in the vector entry IDX to SI. */
@@ -375,11 +373,11 @@ get_strinfo (int idx)
static inline void
set_strinfo (int idx, strinfo si)
{
- if (VEC_length (strinfo, stridx_to_strinfo) && VEC_index (strinfo, stridx_to_strinfo, 0))
+ if (vec_safe_length (stridx_to_strinfo) && (*stridx_to_strinfo)[0])
unshare_strinfo_vec ();
- if (VEC_length (strinfo, stridx_to_strinfo) <= (unsigned int) idx)
- VEC_safe_grow_cleared (strinfo, heap, stridx_to_strinfo, idx + 1);
- VEC_replace (strinfo, stridx_to_strinfo, idx, si);
+ if (vec_safe_length (stridx_to_strinfo) <= (unsigned int) idx)
+ vec_safe_grow_cleared (stridx_to_strinfo, idx + 1);
+ (*stridx_to_strinfo)[idx] = si;
}
/* Return string length, or NULL if it can't be computed. */
@@ -489,7 +487,7 @@ maybe_invalidate (gimple stmt)
unsigned int i;
bool nonempty = false;
- for (i = 1; VEC_iterate (strinfo, stridx_to_strinfo, i, si); ++i)
+ for (i = 1; vec_safe_iterate (stridx_to_strinfo, i, &si); ++i)
if (si != NULL)
{
if (!si->dont_invalidate)
@@ -605,8 +603,7 @@ zero_length_string (tree ptr, strinfo chainsi)
chainsi = unshare_strinfo (chainsi);
chainsi->next = 0;
}
- VEC_replace (int, ssa_ver_to_stridx, SSA_NAME_VERSION (ptr),
- chainsi->idx);
+ ssa_ver_to_stridx[SSA_NAME_VERSION (ptr)] = chainsi->idx;
return chainsi;
}
}
@@ -727,12 +724,12 @@ find_equal_ptrs (tree ptr, int idx)
/* We might find an endptr created in this pass. Grow the
vector in that case. */
- if (VEC_length (int, ssa_ver_to_stridx) <= SSA_NAME_VERSION (ptr))
- VEC_safe_grow_cleared (int, heap, ssa_ver_to_stridx, num_ssa_names);
+ if (ssa_ver_to_stridx.length () <= SSA_NAME_VERSION (ptr))
+ ssa_ver_to_stridx.safe_grow_cleared (num_ssa_names);
- if (VEC_index (int, ssa_ver_to_stridx, SSA_NAME_VERSION (ptr)) != 0)
+ if (ssa_ver_to_stridx[SSA_NAME_VERSION (ptr)] != 0)
return;
- VEC_replace (int, ssa_ver_to_stridx, SSA_NAME_VERSION (ptr), idx);
+ ssa_ver_to_stridx[SSA_NAME_VERSION (ptr)] = idx;
}
}
@@ -1189,12 +1186,12 @@ handle_builtin_strcpy (enum built_in_function bcode, gimple_stmt_iterator *gsi)
case BUILT_IN_STRCPY:
fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
if (lhs)
- VEC_replace (int, ssa_ver_to_stridx, SSA_NAME_VERSION (lhs), didx);
+ ssa_ver_to_stridx[SSA_NAME_VERSION (lhs)] = didx;
break;
case BUILT_IN_STRCPY_CHK:
fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
if (lhs)
- VEC_replace (int, ssa_ver_to_stridx, SSA_NAME_VERSION (lhs), didx);
+ ssa_ver_to_stridx[SSA_NAME_VERSION (lhs)] = didx;
break;
case BUILT_IN_STPCPY:
/* This would need adjustment of the lhs (subtract one),
@@ -1389,7 +1386,7 @@ handle_builtin_memcpy (enum built_in_function bcode, gimple_stmt_iterator *gsi)
laststmt.len = dsi->length;
laststmt.stridx = dsi->idx;
if (lhs)
- VEC_replace (int, ssa_ver_to_stridx, SSA_NAME_VERSION (lhs), didx);
+ ssa_ver_to_stridx[SSA_NAME_VERSION (lhs)] = didx;
break;
case BUILT_IN_MEMPCPY:
case BUILT_IN_MEMPCPY_CHK:
@@ -1609,8 +1606,8 @@ handle_pointer_plus (gimple_stmt_iterator *gsi)
if (host_integerp (off, 1)
&& (unsigned HOST_WIDE_INT) tree_low_cst (off, 1)
<= (unsigned HOST_WIDE_INT) ~idx)
- VEC_replace (int, ssa_ver_to_stridx, SSA_NAME_VERSION (lhs),
- ~(~idx - (int) tree_low_cst (off, 1)));
+ ssa_ver_to_stridx[SSA_NAME_VERSION (lhs)]
+ = ~(~idx - (int) tree_low_cst (off, 1));
return;
}
@@ -1793,8 +1790,7 @@ strlen_optimize_stmt (gimple_stmt_iterator *gsi)
&& POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt)))))
{
int idx = get_stridx (gimple_assign_rhs1 (stmt));
- VEC_replace (int, ssa_ver_to_stridx, SSA_NAME_VERSION (lhs),
- idx);
+ ssa_ver_to_stridx[SSA_NAME_VERSION (lhs)] = idx;
}
else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
handle_pointer_plus (gsi);
@@ -1884,7 +1880,7 @@ strlen_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
stridx_to_strinfo = NULL;
else
{
- stridx_to_strinfo = (VEC(strinfo, heap) *) dombb->aux;
+ stridx_to_strinfo = ((vec<strinfo, va_heap, vl_embed> *) dombb->aux);
if (stridx_to_strinfo)
{
for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
@@ -1918,8 +1914,7 @@ strlen_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
if (idx != get_stridx (gimple_phi_arg_def (phi, i)))
break;
if (i == n)
- VEC_replace (int, ssa_ver_to_stridx,
- SSA_NAME_VERSION (result), idx);
+ ssa_ver_to_stridx[SSA_NAME_VERSION (result)] = idx;
}
}
}
@@ -1930,8 +1925,8 @@ strlen_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
gsi_next (&gsi);
bb->aux = stridx_to_strinfo;
- if (VEC_length (strinfo, stridx_to_strinfo) && !strinfo_shared ())
- VEC_replace (strinfo, stridx_to_strinfo, 0, (strinfo) bb);
+ if (vec_safe_length (stridx_to_strinfo) && !strinfo_shared ())
+ (*stridx_to_strinfo)[0] = (strinfo) bb;
}
/* Callback for walk_dominator_tree. Free strinfo vector if it is
@@ -1943,16 +1938,16 @@ strlen_leave_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
{
if (bb->aux)
{
- stridx_to_strinfo = (VEC(strinfo, heap) *) bb->aux;
- if (VEC_length (strinfo, stridx_to_strinfo)
- && VEC_index (strinfo, stridx_to_strinfo, 0) == (strinfo) bb)
+ stridx_to_strinfo = ((vec<strinfo, va_heap, vl_embed> *) bb->aux);
+ if (vec_safe_length (stridx_to_strinfo)
+ && (*stridx_to_strinfo)[0] == (strinfo) bb)
{
unsigned int i;
strinfo si;
- for (i = 1; VEC_iterate (strinfo, stridx_to_strinfo, i, si); ++i)
+ for (i = 1; vec_safe_iterate (stridx_to_strinfo, i, &si); ++i)
free_strinfo (si);
- VEC_free (strinfo, heap, stridx_to_strinfo);
+ vec_free (stridx_to_strinfo);
}
bb->aux = NULL;
}
@@ -1965,7 +1960,7 @@ tree_ssa_strlen (void)
{
struct dom_walk_data walk_data;
- VEC_safe_grow_cleared (int, heap, ssa_ver_to_stridx, num_ssa_names);
+ ssa_ver_to_stridx.safe_grow_cleared (num_ssa_names);
max_stridx = 1;
strinfo_pool = create_alloc_pool ("strinfo_struct pool",
sizeof (struct strinfo_struct), 64);
@@ -1990,7 +1985,7 @@ tree_ssa_strlen (void)
/* Finalize the dominator walker. */
fini_walk_dominator_tree (&walk_data);
- VEC_free (int, heap, ssa_ver_to_stridx);
+ ssa_ver_to_stridx.release ();
free_alloc_pool (strinfo_pool);
if (decl_to_stridxlist_htab)
{
diff --git a/gcc/tree-ssa-structalias.c b/gcc/tree-ssa-structalias.c
index e3fdeb6972c..787115ff340 100644
--- a/gcc/tree-ssa-structalias.c
+++ b/gcc/tree-ssa-structalias.c
@@ -218,8 +218,6 @@ static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
struct constraint;
typedef struct constraint *constraint_t;
-DEF_VEC_P(constraint_t);
-DEF_VEC_ALLOC_P(constraint_t,heap);
#define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
if (a) \
@@ -306,20 +304,18 @@ static inline bool type_can_have_subvars (const_tree);
/* Pool of variable info structures. */
static alloc_pool variable_info_pool;
-DEF_VEC_P(varinfo_t);
-DEF_VEC_ALLOC_P(varinfo_t, heap);
/* Table of variable info structures for constraint variables.
Indexed directly by variable info id. */
-static VEC(varinfo_t,heap) *varmap;
+static vec<varinfo_t> varmap;
/* Return the varmap element N */
static inline varinfo_t
get_varinfo (unsigned int n)
{
- return VEC_index (varinfo_t, varmap, n);
+ return varmap[n];
}
/* Static IDs for the special variables. */
@@ -334,7 +330,7 @@ enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
static varinfo_t
new_var_info (tree t, const char *name)
{
- unsigned index = VEC_length (varinfo_t, varmap);
+ unsigned index = varmap.length ();
varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
ret->id = index;
@@ -362,7 +358,7 @@ new_var_info (tree t, const char *name)
stats.total_vars++;
- VEC_safe_push (varinfo_t, heap, varmap, ret);
+ varmap.safe_push (ret);
return ret;
}
@@ -471,12 +467,10 @@ struct constraint_expr
#define UNKNOWN_OFFSET ((HOST_WIDE_INT)-1 << (HOST_BITS_PER_WIDE_INT-1))
typedef struct constraint_expr ce_s;
-DEF_VEC_O(ce_s);
-DEF_VEC_ALLOC_O(ce_s, heap);
-static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool, bool);
-static void get_constraint_for (tree, VEC(ce_s, heap) **);
-static void get_constraint_for_rhs (tree, VEC(ce_s, heap) **);
-static void do_deref (VEC (ce_s, heap) **);
+static void get_constraint_for_1 (tree, vec<ce_s> *, bool, bool);
+static void get_constraint_for (tree, vec<ce_s> *);
+static void get_constraint_for_rhs (tree, vec<ce_s> *);
+static void do_deref (vec<ce_s> *);
/* Our set constraints are made up of two constraint expressions, one
LHS, and one RHS.
@@ -492,7 +486,7 @@ struct constraint
/* List of constraints that we use to build the constraint graph from. */
-static VEC(constraint_t,heap) *constraints;
+static vec<constraint_t> constraints;
static alloc_pool constraint_pool;
/* The constraint graph is represented as an array of bitmaps
@@ -566,7 +560,7 @@ struct constraint_graph
/* Vector of complex constraints for each graph node. Complex
constraints are those involving dereferences or offsets that are
not 0. */
- VEC(constraint_t,heap) **complex;
+ vec<constraint_t> *complex;
};
static constraint_graph_t graph;
@@ -575,7 +569,7 @@ static constraint_graph_t graph;
cycle finding, we create nodes to represent dereferences and
address taken constraints. These represent where these start and
end. */
-#define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
+#define FIRST_REF_NODE (varmap).length ()
#define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
/* Return the representative node for NODE, if NODE has been unioned
@@ -670,7 +664,7 @@ dump_constraints (FILE *file, int from)
{
int i;
constraint_t c;
- for (i = from; VEC_iterate (constraint_t, constraints, i, c); i++)
+ for (i = from; constraints.iterate (i, &c); i++)
if (c)
{
dump_constraint (file, c);
@@ -714,12 +708,12 @@ dump_constraint_graph (FILE *file)
fprintf (file, "\"%s\"", get_varinfo (i)->name);
else
fprintf (file, "\"*%s\"", get_varinfo (i - FIRST_REF_NODE)->name);
- if (graph->complex[i])
+ if (graph->complex[i].exists ())
{
unsigned j;
constraint_t c;
fprintf (file, " [label=\"\\N\\n");
- for (j = 0; VEC_iterate (constraint_t, graph->complex[i], j, c); ++j)
+ for (j = 0; graph->complex[i].iterate (j, &c); ++j)
{
dump_constraint (file, c);
fprintf (file, "\\l");
@@ -849,19 +843,19 @@ constraint_equal (struct constraint a, struct constraint b)
/* Find a constraint LOOKFOR in the sorted constraint vector VEC */
static constraint_t
-constraint_vec_find (VEC(constraint_t,heap) *vec,
+constraint_vec_find (vec<constraint_t> vec,
struct constraint lookfor)
{
unsigned int place;
constraint_t found;
- if (vec == NULL)
+ if (!vec.exists ())
return NULL;
- place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
- if (place >= VEC_length (constraint_t, vec))
+ place = vec.lower_bound (&lookfor, constraint_less);
+ if (place >= vec.length ())
return NULL;
- found = VEC_index (constraint_t, vec, place);
+ found = vec[place];
if (!constraint_equal (*found, lookfor))
return NULL;
return found;
@@ -870,19 +864,18 @@ constraint_vec_find (VEC(constraint_t,heap) *vec,
/* Union two constraint vectors, TO and FROM. Put the result in TO. */
static void
-constraint_set_union (VEC(constraint_t,heap) **to,
- VEC(constraint_t,heap) **from)
+constraint_set_union (vec<constraint_t> *to,
+ vec<constraint_t> *from)
{
int i;
constraint_t c;
- FOR_EACH_VEC_ELT (constraint_t, *from, i, c)
+ FOR_EACH_VEC_ELT (*from, i, c)
{
if (constraint_vec_find (*to, *c) == NULL)
{
- unsigned int place = VEC_lower_bound (constraint_t, *to, c,
- constraint_less);
- VEC_safe_insert (constraint_t, heap, *to, place, c);
+ unsigned int place = to->lower_bound (c, constraint_less);
+ to->safe_insert (place, c);
}
}
}
@@ -1009,14 +1002,13 @@ static void
insert_into_complex (constraint_graph_t graph,
unsigned int var, constraint_t c)
{
- VEC (constraint_t, heap) *complex = graph->complex[var];
- unsigned int place = VEC_lower_bound (constraint_t, complex, c,
- constraint_less);
+ vec<constraint_t> complex = graph->complex[var];
+ unsigned int place = complex.lower_bound (c, constraint_less);
/* Only insert constraints that do not already exist. */
- if (place >= VEC_length (constraint_t, complex)
- || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
- VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
+ if (place >= complex.length ()
+ || !constraint_equal (*c, *complex[place]))
+ graph->complex[var].safe_insert (place, c);
}
@@ -1033,7 +1025,7 @@ merge_node_constraints (constraint_graph_t graph, unsigned int to,
gcc_assert (find (from) == to);
/* Move all complex constraints from src node into to node */
- FOR_EACH_VEC_ELT (constraint_t, graph->complex[from], i, c)
+ FOR_EACH_VEC_ELT (graph->complex[from], i, c)
{
/* In complex constraints for node src, we may have either
a = *src, and *src = a, or an offseted constraint which are
@@ -1047,8 +1039,7 @@ merge_node_constraints (constraint_graph_t graph, unsigned int to,
c->rhs.var = to;
}
constraint_set_union (&graph->complex[to], &graph->complex[from]);
- VEC_free (constraint_t, heap, graph->complex[from]);
- graph->complex[from] = NULL;
+ graph->complex[from].release ();
}
@@ -1173,7 +1164,10 @@ init_graph (unsigned int size)
graph->succs = XCNEWVEC (bitmap, graph->size);
graph->indirect_cycles = XNEWVEC (int, graph->size);
graph->rep = XNEWVEC (unsigned int, graph->size);
- graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
+ /* ??? Macros do not support template types with multiple arguments,
+ so we use a typedef to work around it. */
+ typedef vec<constraint_t> vec_constraint_t_heap;
+ graph->complex = XCNEWVEC (vec_constraint_t_heap, size);
graph->pe = XCNEWVEC (unsigned int, graph->size);
graph->pe_rep = XNEWVEC (int, graph->size);
@@ -1214,10 +1208,10 @@ build_pred_graph (void)
for (j = 0; j < graph->size; j++)
graph->eq_rep[j] = -1;
- for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
+ for (j = 0; j < varmap.length (); j++)
graph->indirect_cycles[j] = -1;
- FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
+ FOR_EACH_VEC_ELT (constraints, i, c)
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
@@ -1296,7 +1290,7 @@ build_succ_graph (void)
unsigned i, t;
constraint_t c;
- FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
+ FOR_EACH_VEC_ELT (constraints, i, c)
{
struct constraint_expr lhs;
struct constraint_expr rhs;
@@ -1361,7 +1355,7 @@ struct scc_info
unsigned int *dfs;
unsigned int *node_mapping;
int current_index;
- VEC(unsigned,heap) *scc_stack;
+ vec<unsigned> scc_stack;
};
@@ -1414,8 +1408,8 @@ scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
/* See if any components have been identified. */
if (si->dfs[n] == my_dfs)
{
- if (VEC_length (unsigned, si->scc_stack) > 0
- && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
+ if (si->scc_stack.length () > 0
+ && si->dfs[si->scc_stack.last ()] >= my_dfs)
{
bitmap scc = BITMAP_ALLOC (NULL);
unsigned int lowest_node;
@@ -1423,10 +1417,10 @@ scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
bitmap_set_bit (scc, n);
- while (VEC_length (unsigned, si->scc_stack) != 0
- && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
+ while (si->scc_stack.length () != 0
+ && si->dfs[si->scc_stack.last ()] >= my_dfs)
{
- unsigned int w = VEC_pop (unsigned, si->scc_stack);
+ unsigned int w = si->scc_stack.pop ();
bitmap_set_bit (scc, w);
}
@@ -1453,7 +1447,7 @@ scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
bitmap_set_bit (si->deleted, n);
}
else
- VEC_safe_push (unsigned, heap, si->scc_stack, n);
+ si->scc_stack.safe_push (n);
}
/* Unify node FROM into node TO, updating the changed count if
@@ -1521,7 +1515,7 @@ struct topo_info
sbitmap visited;
/* Array that stores the topological order of the graph, *in
reverse*. */
- VEC(unsigned,heap) *topo_order;
+ vec<unsigned> topo_order;
};
@@ -1534,7 +1528,7 @@ init_topo_info (void)
struct topo_info *ti = XNEW (struct topo_info);
ti->visited = sbitmap_alloc (size);
bitmap_clear (ti->visited);
- ti->topo_order = VEC_alloc (unsigned, heap, 1);
+ ti->topo_order.create (1);
return ti;
}
@@ -1545,7 +1539,7 @@ static void
free_topo_info (struct topo_info *ti)
{
sbitmap_free (ti->visited);
- VEC_free (unsigned, heap, ti->topo_order);
+ ti->topo_order.release ();
free (ti);
}
@@ -1568,7 +1562,7 @@ topo_visit (constraint_graph_t graph, struct topo_info *ti,
topo_visit (graph, ti, j);
}
- VEC_safe_push (unsigned, heap, ti->topo_order, n);
+ ti->topo_order.safe_push (n);
}
/* Process a constraint C that represents x = *(y + off), using DELTA as the
@@ -1820,7 +1814,7 @@ init_scc_info (size_t size)
for (i = 0; i < size; i++)
si->node_mapping[i] = i;
- si->scc_stack = VEC_alloc (unsigned, heap, 1);
+ si->scc_stack.create (1);
return si;
}
@@ -1833,7 +1827,7 @@ free_scc_info (struct scc_info *si)
sbitmap_free (si->deleted);
free (si->node_mapping);
free (si->dfs);
- VEC_free (unsigned, heap, si->scc_stack);
+ si->scc_stack.release ();
free (si);
}
@@ -2061,10 +2055,10 @@ condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
/* See if any components have been identified. */
if (si->dfs[n] == my_dfs)
{
- while (VEC_length (unsigned, si->scc_stack) != 0
- && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
+ while (si->scc_stack.length () != 0
+ && si->dfs[si->scc_stack.last ()] >= my_dfs)
{
- unsigned int w = VEC_pop (unsigned, si->scc_stack);
+ unsigned int w = si->scc_stack.pop ();
si->node_mapping[w] = n;
if (!bitmap_bit_p (graph->direct_nodes, w))
@@ -2095,7 +2089,7 @@ condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
bitmap_set_bit (si->deleted, n);
}
else
- VEC_safe_push (unsigned, heap, si->scc_stack, n);
+ si->scc_stack.safe_push (n);
}
/* Label pointer equivalences. */
@@ -2342,7 +2336,7 @@ move_complex_constraints (constraint_graph_t graph)
int i;
constraint_t c;
- FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
+ FOR_EACH_VEC_ELT (constraints, i, c)
{
if (c)
{
@@ -2383,7 +2377,7 @@ rewrite_constraints (constraint_graph_t graph,
for (j = 0; j < graph->size; j++)
gcc_assert (find (j) == j);
- FOR_EACH_VEC_ELT (constraint_t, constraints, i, c)
+ FOR_EACH_VEC_ELT (constraints, i, c)
{
struct constraint_expr lhs = c->lhs;
struct constraint_expr rhs = c->rhs;
@@ -2410,7 +2404,7 @@ rewrite_constraints (constraint_graph_t graph,
dump_constraint (dump_file, c);
fprintf (dump_file, "\n");
}
- VEC_replace (constraint_t, constraints, i, NULL);
+ constraints[i] = NULL;
continue;
}
@@ -2425,7 +2419,7 @@ rewrite_constraints (constraint_graph_t graph,
dump_constraint (dump_file, c);
fprintf (dump_file, "\n");
}
- VEC_replace (constraint_t, constraints, i, NULL);
+ constraints[i] = NULL;
continue;
}
@@ -2447,7 +2441,7 @@ eliminate_indirect_cycles (unsigned int node)
&& !bitmap_empty_p (get_varinfo (node)->solution))
{
unsigned int i;
- VEC(unsigned,heap) *queue = NULL;
+ vec<unsigned> queue = vec<unsigned>();
int queuepos;
unsigned int to = find (graph->indirect_cycles[node]);
bitmap_iterator bi;
@@ -2461,17 +2455,17 @@ eliminate_indirect_cycles (unsigned int node)
if (find (i) == i && i != to)
{
if (unite (to, i))
- VEC_safe_push (unsigned, heap, queue, i);
+ queue.safe_push (i);
}
}
for (queuepos = 0;
- VEC_iterate (unsigned, queue, queuepos, i);
+ queue.iterate (queuepos, &i);
queuepos++)
{
unify_nodes (graph, to, i, true);
}
- VEC_free (unsigned, heap, queue);
+ queue.release ();
return true;
}
return false;
@@ -2499,7 +2493,7 @@ solve_graph (constraint_graph_t graph)
varinfo_t ivi = get_varinfo (i);
if (find (i) == i && !bitmap_empty_p (ivi->solution)
&& ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
- || VEC_length (constraint_t, graph->complex[i]) > 0))
+ || graph->complex[i].length () > 0))
bitmap_set_bit (changed, i);
}
@@ -2516,10 +2510,10 @@ solve_graph (constraint_graph_t graph)
compute_topo_order (graph, ti);
- while (VEC_length (unsigned, ti->topo_order) != 0)
+ while (ti->topo_order.length () != 0)
{
- i = VEC_pop (unsigned, ti->topo_order);
+ i = ti->topo_order.pop ();
/* If this variable is not a representative, skip it. */
if (find (i) != i)
@@ -2537,7 +2531,7 @@ solve_graph (constraint_graph_t graph)
unsigned int j;
constraint_t c;
bitmap solution;
- VEC(constraint_t,heap) *complex = graph->complex[i];
+ vec<constraint_t> complex = graph->complex[i];
varinfo_t vi = get_varinfo (i);
bool solution_empty;
@@ -2562,7 +2556,7 @@ solve_graph (constraint_graph_t graph)
solution_empty = bitmap_empty_p (solution);
/* Process the complex constraints */
- FOR_EACH_VEC_ELT (constraint_t, complex, j, c)
+ FOR_EACH_VEC_ELT (complex, j, c)
{
/* XXX: This is going to unsort the constraints in
some cases, which will occasionally add duplicate
@@ -2741,7 +2735,7 @@ new_scalar_tmp_constraint_exp (const char *name)
If address_p is true, the result will be taken its address of. */
static void
-get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
+get_constraint_for_ssa_var (tree t, vec<ce_s> *results, bool address_p)
{
struct constraint_expr cexpr;
varinfo_t vi;
@@ -2793,12 +2787,12 @@ get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
for (; vi; vi = vi->next)
{
cexpr.var = vi->id;
- VEC_safe_push (ce_s, heap, *results, cexpr);
+ results->safe_push (cexpr);
}
return;
}
- VEC_safe_push (ce_s, heap, *results, cexpr);
+ results->safe_push (cexpr);
}
/* Process constraint T, performing various simplifications and then
@@ -2810,8 +2804,8 @@ process_constraint (constraint_t t)
struct constraint_expr rhs = t->rhs;
struct constraint_expr lhs = t->lhs;
- gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
- gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));
+ gcc_assert (rhs.var < varmap.length ());
+ gcc_assert (lhs.var < varmap.length ());
/* If we didn't get any useful constraint from the lhs we get
&ANYTHING as fallback from get_constraint_for. Deal with
@@ -2853,7 +2847,7 @@ process_constraint (constraint_t t)
else
{
gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
- VEC_safe_push (constraint_t, heap, constraints, t);
+ constraints.safe_push (t);
}
}
@@ -2878,7 +2872,7 @@ bitpos_of_field (const tree fdecl)
static void
get_constraint_for_ptr_offset (tree ptr, tree offset,
- VEC (ce_s, heap) **results)
+ vec<ce_s> *results)
{
struct constraint_expr c;
unsigned int j, n;
@@ -2921,12 +2915,12 @@ get_constraint_for_ptr_offset (tree ptr, tree offset,
return;
/* As we are eventually appending to the solution do not use
- VEC_iterate here. */
- n = VEC_length (ce_s, *results);
+ vec::iterate here. */
+ n = results->length ();
for (j = 0; j < n; j++)
{
varinfo_t curr;
- c = VEC_index (ce_s, *results, j);
+ c = (*results)[j];
curr = get_varinfo (c.var);
if (c.type == ADDRESSOF
@@ -2945,7 +2939,7 @@ get_constraint_for_ptr_offset (tree ptr, tree offset,
c2.type = ADDRESSOF;
c2.offset = 0;
if (c2.var != c.var)
- VEC_safe_push (ce_s, heap, *results, c2);
+ results->safe_push (c2);
temp = temp->next;
}
while (temp);
@@ -2980,7 +2974,7 @@ get_constraint_for_ptr_offset (tree ptr, tree offset,
c2.var = temp->next->id;
c2.type = ADDRESSOF;
c2.offset = 0;
- VEC_safe_push (ce_s, heap, *results, c2);
+ results->safe_push (c2);
}
c.var = temp->id;
c.offset = 0;
@@ -2988,7 +2982,7 @@ get_constraint_for_ptr_offset (tree ptr, tree offset,
else
c.offset = rhsoffset;
- VEC_replace (ce_s, *results, j, c);
+ (*results)[j] = c;
}
}
@@ -2999,7 +2993,7 @@ get_constraint_for_ptr_offset (tree ptr, tree offset,
as the lhs. */
static void
-get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
+get_constraint_for_component_ref (tree t, vec<ce_s> *results,
bool address_p, bool lhs_p)
{
tree orig_t = t;
@@ -3007,7 +3001,6 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
HOST_WIDE_INT bitmaxsize = -1;
HOST_WIDE_INT bitpos;
tree forzero;
- struct constraint_expr *result;
/* Some people like to do cute things like take the address of
&0->a.b */
@@ -3024,7 +3017,7 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
temp.offset = 0;
temp.var = integer_id;
temp.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, temp);
+ results->safe_push (temp);
return;
}
@@ -3046,7 +3039,7 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
temp.offset = 0;
temp.var = anything_id;
temp.type = ADDRESSOF;
- VEC_safe_push (ce_s, heap, *results, temp);
+ results->safe_push (temp);
return;
}
}
@@ -3056,30 +3049,30 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
/* Pretend to take the address of the base, we'll take care of
adding the required subset of sub-fields below. */
get_constraint_for_1 (t, results, true, lhs_p);
- gcc_assert (VEC_length (ce_s, *results) == 1);
- result = &VEC_last (ce_s, *results);
+ gcc_assert (results->length () == 1);
+ struct constraint_expr &result = results->last ();
- if (result->type == SCALAR
- && get_varinfo (result->var)->is_full_var)
+ if (result.type == SCALAR
+ && get_varinfo (result.var)->is_full_var)
/* For single-field vars do not bother about the offset. */
- result->offset = 0;
- else if (result->type == SCALAR)
+ result.offset = 0;
+ else if (result.type == SCALAR)
{
/* In languages like C, you can access one past the end of an
array. You aren't allowed to dereference it, so we can
ignore this constraint. When we handle pointer subtraction,
we may have to do something cute here. */
- if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
+ if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result.var)->fullsize
&& bitmaxsize != 0)
{
/* It's also not true that the constraint will actually start at the
right offset, it may start in some padding. We only care about
setting the constraint to the first actual field it touches, so
walk to find it. */
- struct constraint_expr cexpr = *result;
+ struct constraint_expr cexpr = result;
varinfo_t curr;
- VEC_pop (ce_s, *results);
+ results->pop ();
cexpr.offset = 0;
for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
{
@@ -3087,7 +3080,7 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
bitpos, bitmaxsize))
{
cexpr.var = curr->id;
- VEC_safe_push (ce_s, heap, *results, cexpr);
+ results->safe_push (cexpr);
if (address_p)
break;
}
@@ -3095,16 +3088,15 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
/* If we are going to take the address of this field then
to be able to compute reachability correctly add at least
the last field of the variable. */
- if (address_p
- && VEC_length (ce_s, *results) == 0)
+ if (address_p && results->length () == 0)
{
curr = get_varinfo (cexpr.var);
while (curr->next != NULL)
curr = curr->next;
cexpr.var = curr->id;
- VEC_safe_push (ce_s, heap, *results, cexpr);
+ results->safe_push (cexpr);
}
- else if (VEC_length (ce_s, *results) == 0)
+ else if (results->length () == 0)
/* Assert that we found *some* field there. The user couldn't be
accessing *only* padding. */
/* Still the user could access one past the end of an array
@@ -3115,7 +3107,7 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
cexpr.type = SCALAR;
cexpr.var = anything_id;
cexpr.offset = 0;
- VEC_safe_push (ce_s, heap, *results, cexpr);
+ results->safe_push (cexpr);
}
}
else if (bitmaxsize == 0)
@@ -3128,7 +3120,7 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
if (dump_file && (dump_flags & TDF_DETAILS))
fprintf (dump_file, "Access to past the end of variable, ignoring\n");
}
- else if (result->type == DEREF)
+ else if (result.type == DEREF)
{
/* If we do not know exactly where the access goes say so. Note
that only for non-structure accesses we know that we access
@@ -3136,18 +3128,18 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
if (bitpos == -1
|| bitsize != bitmaxsize
|| AGGREGATE_TYPE_P (TREE_TYPE (orig_t))
- || result->offset == UNKNOWN_OFFSET)
- result->offset = UNKNOWN_OFFSET;
+ || result.offset == UNKNOWN_OFFSET)
+ result.offset = UNKNOWN_OFFSET;
else
- result->offset += bitpos;
+ result.offset += bitpos;
}
- else if (result->type == ADDRESSOF)
+ else if (result.type == ADDRESSOF)
{
/* We can end up here for component references on a
VIEW_CONVERT_EXPR <>(&foobar). */
- result->type = SCALAR;
- result->var = anything_id;
- result->offset = 0;
+ result.type = SCALAR;
+ result.var = anything_id;
+ result.offset = 0;
}
else
gcc_unreachable ();
@@ -3161,12 +3153,12 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
This is needed so that we can handle dereferencing DEREF constraints. */
static void
-do_deref (VEC (ce_s, heap) **constraints)
+do_deref (vec<ce_s> *constraints)
{
struct constraint_expr *c;
unsigned int i = 0;
- FOR_EACH_VEC_ELT (ce_s, *constraints, i, c)
+ FOR_EACH_VEC_ELT (*constraints, i, c)
{
if (c->type == SCALAR)
c->type = DEREF;
@@ -3188,14 +3180,14 @@ do_deref (VEC (ce_s, heap) **constraints)
address of it. */
static void
-get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results)
+get_constraint_for_address_of (tree t, vec<ce_s> *results)
{
struct constraint_expr *c;
unsigned int i;
get_constraint_for_1 (t, results, true, true);
- FOR_EACH_VEC_ELT (ce_s, *results, i, c)
+ FOR_EACH_VEC_ELT (*results, i, c)
{
if (c->type == DEREF)
c->type = SCALAR;
@@ -3207,7 +3199,7 @@ get_constraint_for_address_of (tree t, VEC (ce_s, heap) **results)
/* Given a tree T, return the constraint expression for it. */
static void
-get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
+get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
bool lhs_p)
{
struct constraint_expr temp;
@@ -3239,7 +3231,7 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
temp.var = nonlocal_id;
temp.type = ADDRESSOF;
temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, temp);
+ results->safe_push (temp);
return;
}
@@ -3249,7 +3241,7 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
temp.var = readonly_id;
temp.type = SCALAR;
temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, temp);
+ results->safe_push (temp);
return;
}
@@ -3283,13 +3275,13 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
if (address_p)
return;
- cs = VEC_last (ce_s, *results);
+ cs = results->last ();
if (cs.type == DEREF
&& type_can_have_subvars (TREE_TYPE (t)))
{
/* For dereferences this means we have to defer it
to solving time. */
- VEC_last (ce_s, *results).offset = UNKNOWN_OFFSET;
+ results->last ().offset = UNKNOWN_OFFSET;
return;
}
if (cs.type != SCALAR)
@@ -3310,7 +3302,7 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
if (curr->offset - vi->offset < size)
{
cs.var = curr->id;
- VEC_safe_push (ce_s, heap, *results, cs);
+ results->safe_push (cs);
}
else
break;
@@ -3345,17 +3337,17 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
{
unsigned int i;
tree val;
- VEC (ce_s, heap) *tmp = NULL;
+ vec<ce_s> tmp = vec<ce_s>();
FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
{
struct constraint_expr *rhsp;
unsigned j;
get_constraint_for_1 (val, &tmp, address_p, lhs_p);
- FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
- VEC_safe_push (ce_s, heap, *results, *rhsp);
- VEC_truncate (ce_s, tmp, 0);
+ FOR_EACH_VEC_ELT (tmp, j, rhsp)
+ results->safe_push (*rhsp);
+ tmp.truncate (0);
}
- VEC_free (ce_s, heap, tmp);
+ tmp.release ();
/* We do not know whether the constructor was complete,
so technically we have to add &NOTHING or &ANYTHING
like we do for an empty constructor as well. */
@@ -3376,7 +3368,7 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
temp.type = ADDRESSOF;
temp.var = nonlocal_id;
temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, temp);
+ results->safe_push (temp);
return;
}
default:;
@@ -3386,15 +3378,15 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p,
temp.type = ADDRESSOF;
temp.var = anything_id;
temp.offset = 0;
- VEC_safe_push (ce_s, heap, *results, temp);
+ results->safe_push (temp);
}
/* Given a gimple tree T, return the constraint expression vector for it. */
static void
-get_constraint_for (tree t, VEC (ce_s, heap) **results)
+get_constraint_for (tree t, vec<ce_s> *results)
{
- gcc_assert (VEC_length (ce_s, *results) == 0);
+ gcc_assert (results->length () == 0);
get_constraint_for_1 (t, results, false, true);
}
@@ -3403,9 +3395,9 @@ get_constraint_for (tree t, VEC (ce_s, heap) **results)
to be used as the rhs of a constraint. */
static void
-get_constraint_for_rhs (tree t, VEC (ce_s, heap) **results)
+get_constraint_for_rhs (tree t, vec<ce_s> *results)
{
- gcc_assert (VEC_length (ce_s, *results) == 0);
+ gcc_assert (results->length () == 0);
get_constraint_for_1 (t, results, false, false);
}
@@ -3415,25 +3407,25 @@ get_constraint_for_rhs (tree t, VEC (ce_s, heap) **results)
entries in *LHSC. */
static void
-process_all_all_constraints (VEC (ce_s, heap) *lhsc, VEC (ce_s, heap) *rhsc)
+process_all_all_constraints (vec<ce_s> lhsc,
+ vec<ce_s> rhsc)
{
struct constraint_expr *lhsp, *rhsp;
unsigned i, j;
- if (VEC_length (ce_s, lhsc) <= 1
- || VEC_length (ce_s, rhsc) <= 1)
+ if (lhsc.length () <= 1 || rhsc.length () <= 1)
{
- FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
- FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (rhsc, j, rhsp)
process_constraint (new_constraint (*lhsp, *rhsp));
}
else
{
struct constraint_expr tmp;
tmp = new_scalar_tmp_constraint_exp ("allalltmp");
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (tmp, *rhsp));
- FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, tmp));
}
}
@@ -3445,25 +3437,26 @@ static void
do_structure_copy (tree lhsop, tree rhsop)
{
struct constraint_expr *lhsp, *rhsp;
- VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
+ vec<ce_s> lhsc = vec<ce_s>();
+ vec<ce_s> rhsc = vec<ce_s>();
unsigned j;
get_constraint_for (lhsop, &lhsc);
get_constraint_for_rhs (rhsop, &rhsc);
- lhsp = &VEC_index (ce_s, lhsc, 0);
- rhsp = &VEC_index (ce_s, rhsc, 0);
+ lhsp = &lhsc[0];
+ rhsp = &rhsc[0];
if (lhsp->type == DEREF
|| (lhsp->type == ADDRESSOF && lhsp->var == anything_id)
|| rhsp->type == DEREF)
{
if (lhsp->type == DEREF)
{
- gcc_assert (VEC_length (ce_s, lhsc) == 1);
+ gcc_assert (lhsc.length () == 1);
lhsp->offset = UNKNOWN_OFFSET;
}
if (rhsp->type == DEREF)
{
- gcc_assert (VEC_length (ce_s, rhsc) == 1);
+ gcc_assert (rhsc.length () == 1);
rhsp->offset = UNKNOWN_OFFSET;
}
process_all_all_constraints (lhsc, rhsc);
@@ -3477,10 +3470,10 @@ do_structure_copy (tree lhsop, tree rhsop)
unsigned k = 0;
get_ref_base_and_extent (lhsop, &lhsoffset, &lhssize, &lhsmaxsize);
get_ref_base_and_extent (rhsop, &rhsoffset, &rhssize, &rhsmaxsize);
- for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp);)
+ for (j = 0; lhsc.iterate (j, &lhsp);)
{
varinfo_t lhsv, rhsv;
- rhsp = &VEC_index (ce_s, rhsc, k);
+ rhsp = &rhsc[k];
lhsv = get_varinfo (lhsp->var);
rhsv = get_varinfo (rhsp->var);
if (lhsv->may_have_pointers
@@ -3495,7 +3488,7 @@ do_structure_copy (tree lhsop, tree rhsop)
> rhsv->offset + lhsoffset + rhsv->size)))
{
++k;
- if (k >= VEC_length (ce_s, rhsc))
+ if (k >= rhsc.length ())
break;
}
else
@@ -3505,14 +3498,14 @@ do_structure_copy (tree lhsop, tree rhsop)
else
gcc_unreachable ();
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
}
/* Create constraints ID = { rhsc }. */
static void
-make_constraints_to (unsigned id, VEC(ce_s, heap) *rhsc)
+make_constraints_to (unsigned id, vec<ce_s> rhsc)
{
struct constraint_expr *c;
struct constraint_expr includes;
@@ -3522,7 +3515,7 @@ make_constraints_to (unsigned id, VEC(ce_s, heap) *rhsc)
includes.offset = 0;
includes.type = SCALAR;
- FOR_EACH_VEC_ELT (ce_s, rhsc, j, c)
+ FOR_EACH_VEC_ELT (rhsc, j, c)
process_constraint (new_constraint (includes, *c));
}
@@ -3531,10 +3524,10 @@ make_constraints_to (unsigned id, VEC(ce_s, heap) *rhsc)
static void
make_constraint_to (unsigned id, tree op)
{
- VEC(ce_s, heap) *rhsc = NULL;
+ vec<ce_s> rhsc = vec<ce_s>();
get_constraint_for_rhs (op, &rhsc);
make_constraints_to (id, rhsc);
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
/* Create a constraint ID = &FROM. */
@@ -3725,7 +3718,7 @@ get_function_part_constraint (varinfo_t fi, unsigned part)
RHS. */
static void
-handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
+handle_rhs_call (gimple stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned i;
@@ -3793,7 +3786,7 @@ handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
rhsc.var = get_call_use_vi (stmt)->id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, rhsc);
+ results->safe_push (rhsc);
}
/* The static chain escapes as well. */
@@ -3805,22 +3798,22 @@ handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
&& gimple_call_lhs (stmt) != NULL_TREE
&& TREE_ADDRESSABLE (TREE_TYPE (gimple_call_lhs (stmt))))
{
- VEC(ce_s, heap) *tmpc = NULL;
+ vec<ce_s> tmpc = vec<ce_s>();
struct constraint_expr lhsc, *c;
get_constraint_for_address_of (gimple_call_lhs (stmt), &tmpc);
lhsc.var = escaped_id;
lhsc.offset = 0;
lhsc.type = SCALAR;
- FOR_EACH_VEC_ELT (ce_s, tmpc, i, c)
+ FOR_EACH_VEC_ELT (tmpc, i, c)
process_constraint (new_constraint (lhsc, *c));
- VEC_free(ce_s, heap, tmpc);
+ tmpc.release ();
}
/* Regular functions return nonlocal memory. */
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, rhsc);
+ results->safe_push (rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call
@@ -3828,10 +3821,10 @@ handle_rhs_call (gimple stmt, VEC(ce_s, heap) **results)
the LHS point to global and escaped variables. */
static void
-handle_lhs_call (gimple stmt, tree lhs, int flags, VEC(ce_s, heap) *rhsc,
+handle_lhs_call (gimple stmt, tree lhs, int flags, vec<ce_s> rhsc,
tree fndecl)
{
- VEC(ce_s, heap) *lhsc = NULL;
+ vec<ce_s> lhsc = vec<ce_s>();
get_constraint_for (lhs, &lhsc);
/* If the store is to a global decl make sure to
@@ -3845,7 +3838,7 @@ handle_lhs_call (gimple stmt, tree lhs, int flags, VEC(ce_s, heap) *rhsc,
tmpc.var = escaped_id;
tmpc.offset = 0;
tmpc.type = SCALAR;
- VEC_safe_push (ce_s, heap, lhsc, tmpc);
+ lhsc.safe_push (tmpc);
}
/* If the call returns an argument unmodified override the rhs
@@ -3855,17 +3848,17 @@ handle_lhs_call (gimple stmt, tree lhs, int flags, VEC(ce_s, heap) *rhsc,
&& (flags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (stmt))
{
tree arg;
- rhsc = NULL;
+ rhsc.create (0);
arg = gimple_call_arg (stmt, flags & ERF_RETURN_ARG_MASK);
get_constraint_for (arg, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
else if (flags & ERF_NOALIAS)
{
varinfo_t vi;
struct constraint_expr tmpc;
- rhsc = NULL;
+ rhsc.create (0);
vi = make_heapvar ("HEAP");
/* We delay marking allocated storage global until we know if
it escapes. */
@@ -3880,21 +3873,21 @@ handle_lhs_call (gimple stmt, tree lhs, int flags, VEC(ce_s, heap) *rhsc,
tmpc.var = vi->id;
tmpc.offset = 0;
tmpc.type = ADDRESSOF;
- VEC_safe_push (ce_s, heap, rhsc, tmpc);
+ rhsc.safe_push (tmpc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
else
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
}
/* For non-IPA mode, generate constraints necessary for a call of a
const function that returns a pointer in the statement STMT. */
static void
-handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
+handle_const_call (gimple stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned int k;
@@ -3909,34 +3902,34 @@ handle_const_call (gimple stmt, VEC(ce_s, heap) **results)
rhsc.var = uses->id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, rhsc);
+ results->safe_push (rhsc);
}
/* May return arguments. */
for (k = 0; k < gimple_call_num_args (stmt); ++k)
{
tree arg = gimple_call_arg (stmt, k);
- VEC(ce_s, heap) *argc = NULL;
+ vec<ce_s> argc = vec<ce_s>();
unsigned i;
struct constraint_expr *argp;
get_constraint_for_rhs (arg, &argc);
- FOR_EACH_VEC_ELT (ce_s, argc, i, argp)
- VEC_safe_push (ce_s, heap, *results, *argp);
- VEC_free(ce_s, heap, argc);
+ FOR_EACH_VEC_ELT (argc, i, argp)
+ results->safe_push (*argp);
+ argc.release ();
}
/* May return addresses of globals. */
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = ADDRESSOF;
- VEC_safe_push (ce_s, heap, *results, rhsc);
+ results->safe_push (rhsc);
}
/* For non-IPA mode, generate constraints necessary for a call to a
pure function in statement STMT. */
static void
-handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
+handle_pure_call (gimple stmt, vec<ce_s> *results)
{
struct constraint_expr rhsc;
unsigned i;
@@ -3971,12 +3964,12 @@ handle_pure_call (gimple stmt, VEC(ce_s, heap) **results)
rhsc.var = uses->id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, rhsc);
+ results->safe_push (rhsc);
}
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- VEC_safe_push (ce_s, heap, *results, rhsc);
+ results->safe_push (rhsc);
}
@@ -4017,8 +4010,8 @@ static bool
find_func_aliases_for_builtin_call (gimple t)
{
tree fndecl = gimple_call_fndecl (t);
- VEC(ce_s, heap) *lhsc = NULL;
- VEC(ce_s, heap) *rhsc = NULL;
+ vec<ce_s> lhsc = vec<ce_s>();
+ vec<ce_s> rhsc = vec<ce_s>();
varinfo_t fi;
if (fndecl != NULL_TREE
@@ -4072,16 +4065,16 @@ find_func_aliases_for_builtin_call (gimple t)
else
get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
}
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
do_deref (&lhsc);
do_deref (&rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
return true;
}
case BUILT_IN_MEMSET:
@@ -4098,8 +4091,8 @@ find_func_aliases_for_builtin_call (gimple t)
get_constraint_for (res, &lhsc);
get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
}
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
do_deref (&lhsc);
@@ -4115,9 +4108,9 @@ find_func_aliases_for_builtin_call (gimple t)
ac.var = integer_id;
}
ac.offset = 0;
- FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, ac));
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
return true;
}
case BUILT_IN_ASSUME_ALIGNED:
@@ -4129,8 +4122,8 @@ find_func_aliases_for_builtin_call (gimple t)
get_constraint_for (res, &lhsc);
get_constraint_for (dest, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
}
return true;
}
@@ -4162,7 +4155,7 @@ find_func_aliases_for_builtin_call (gimple t)
if (gimple_call_lhs (t))
{
handle_lhs_call (t, gimple_call_lhs (t), gimple_call_flags (t),
- NULL, fndecl);
+ vec<ce_s>(), fndecl);
get_constraint_for_ptr_offset (gimple_call_lhs (t),
NULL_TREE, &lhsc);
get_constraint_for_ptr_offset (gimple_call_arg (t, 0),
@@ -4170,8 +4163,8 @@ find_func_aliases_for_builtin_call (gimple t)
do_deref (&lhsc);
do_deref (&rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
return true;
}
break;
@@ -4193,9 +4186,9 @@ find_func_aliases_for_builtin_call (gimple t)
{
lhs = get_function_part_constraint (nfi, fi_static_chain);
get_constraint_for (frame, &rhsc);
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
/* Make the frame point to the function for
the trampoline adjustment call. */
@@ -4203,8 +4196,8 @@ find_func_aliases_for_builtin_call (gimple t)
do_deref (&lhsc);
get_constraint_for (nfunc, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, rhsc);
- VEC_free (ce_s, heap, lhsc);
+ rhsc.release ();
+ lhsc.release ();
return true;
}
@@ -4223,8 +4216,8 @@ find_func_aliases_for_builtin_call (gimple t)
get_constraint_for (tramp, &rhsc);
do_deref (&rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, rhsc);
- VEC_free (ce_s, heap, lhsc);
+ rhsc.release ();
+ lhsc.release ();
}
return true;
}
@@ -4246,8 +4239,8 @@ find_func_aliases_for_builtin_call (gimple t)
do_deref (&lhsc);
get_constraint_for (src, &rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
return true;
}
CASE_BUILT_IN_TM_LOAD (1):
@@ -4268,8 +4261,8 @@ find_func_aliases_for_builtin_call (gimple t)
get_constraint_for (addr, &rhsc);
do_deref (&rhsc);
process_all_all_constraints (lhsc, rhsc);
- VEC_free (ce_s, heap, lhsc);
- VEC_free (ce_s, heap, rhsc);
+ lhsc.release ();
+ rhsc.release ();
return true;
}
/* Variadic argument handling needs to be handled in IPA
@@ -4296,9 +4289,9 @@ find_func_aliases_for_builtin_call (gimple t)
rhs.type = ADDRESSOF;
rhs.offset = 0;
}
- FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (*lhsp, rhs));
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
/* va_list is clobbered. */
make_constraint_to (get_call_clobber_vi (t)->id, valist);
return true;
@@ -4341,8 +4334,8 @@ static void
find_func_aliases_for_call (gimple t)
{
tree fndecl = gimple_call_fndecl (t);
- VEC(ce_s, heap) *lhsc = NULL;
- VEC(ce_s, heap) *rhsc = NULL;
+ vec<ce_s> lhsc = vec<ce_s>();
+ vec<ce_s> rhsc = vec<ce_s>();
varinfo_t fi;
if (fndecl != NULL_TREE
@@ -4354,7 +4347,7 @@ find_func_aliases_for_call (gimple t)
if (!in_ipa_mode
|| (fndecl && !fi->is_fn_info))
{
- VEC(ce_s, heap) *rhsc = NULL;
+ vec<ce_s> rhsc = vec<ce_s>();
int flags = gimple_call_flags (t);
/* Const functions can return their arguments and addresses
@@ -4373,7 +4366,7 @@ find_func_aliases_for_call (gimple t)
handle_rhs_call (t, &rhsc);
if (gimple_call_lhs (t))
handle_lhs_call (t, gimple_call_lhs (t), flags, rhsc, fndecl);
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
else
{
@@ -4390,11 +4383,11 @@ find_func_aliases_for_call (gimple t)
get_constraint_for_rhs (arg, &rhsc);
lhs = get_function_part_constraint (fi, fi_parm_base + j);
- while (VEC_length (ce_s, rhsc) != 0)
+ while (rhsc.length () != 0)
{
- rhsp = &VEC_last (ce_s, rhsc);
+ rhsp = &rhsc.last ();
process_constraint (new_constraint (lhs, *rhsp));
- VEC_pop (ce_s, rhsc);
+ rhsc.pop ();
}
}
@@ -4411,13 +4404,13 @@ find_func_aliases_for_call (gimple t)
&& DECL_RESULT (fndecl)
&& DECL_BY_REFERENCE (DECL_RESULT (fndecl)))
{
- VEC(ce_s, heap) *tem = NULL;
- VEC_safe_push (ce_s, heap, tem, rhs);
+ vec<ce_s> tem = vec<ce_s>();
+ tem.safe_push (rhs);
do_deref (&tem);
- rhs = VEC_index (ce_s, tem, 0);
- VEC_free(ce_s, heap, tem);
+ rhs = tem[0];
+ tem.release ();
}
- FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, j, lhsp)
process_constraint (new_constraint (*lhsp, rhs));
}
@@ -4432,9 +4425,9 @@ find_func_aliases_for_call (gimple t)
get_constraint_for_address_of (lhsop, &rhsc);
lhs = get_function_part_constraint (fi, fi_result);
- FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, j, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
/* If we use a static chain, pass it along. */
@@ -4445,7 +4438,7 @@ find_func_aliases_for_call (gimple t)
get_constraint_for (gimple_call_chain (t), &rhsc);
lhs = get_function_part_constraint (fi, fi_static_chain);
- FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, j, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
}
}
@@ -4460,8 +4453,8 @@ static void
find_func_aliases (gimple origt)
{
gimple t = origt;
- VEC(ce_s, heap) *lhsc = NULL;
- VEC(ce_s, heap) *rhsc = NULL;
+ vec<ce_s> lhsc = vec<ce_s>();
+ vec<ce_s> rhsc = vec<ce_s>();
struct constraint_expr *c;
varinfo_t fi;
@@ -4481,14 +4474,14 @@ find_func_aliases (gimple origt)
STRIP_NOPS (strippedrhs);
get_constraint_for_rhs (gimple_phi_arg_def (t, i), &rhsc);
- FOR_EACH_VEC_ELT (ce_s, lhsc, j, c)
+ FOR_EACH_VEC_ELT (lhsc, j, c)
{
struct constraint_expr *c2;
- while (VEC_length (ce_s, rhsc) > 0)
+ while (rhsc.length () > 0)
{
- c2 = &VEC_last (ce_s, rhsc);
+ c2 = &rhsc.last ();
process_constraint (new_constraint (*c, *c2));
- VEC_pop (ce_s, rhsc);
+ rhsc.pop ();
}
}
}
@@ -4543,14 +4536,14 @@ find_func_aliases (gimple origt)
else if (code == COND_EXPR)
{
/* The result is a merge of both COND_EXPR arms. */
- VEC (ce_s, heap) *tmp = NULL;
+ vec<ce_s> tmp = vec<ce_s>();
struct constraint_expr *rhsp;
unsigned i;
get_constraint_for_rhs (gimple_assign_rhs2 (t), &rhsc);
get_constraint_for_rhs (gimple_assign_rhs3 (t), &tmp);
- FOR_EACH_VEC_ELT (ce_s, tmp, i, rhsp)
- VEC_safe_push (ce_s, heap, rhsc, *rhsp);
- VEC_free (ce_s, heap, tmp);
+ FOR_EACH_VEC_ELT (tmp, i, rhsp)
+ rhsc.safe_push (*rhsp);
+ tmp.release ();
}
else if (truth_value_p (code))
/* Truth value results are not pointer (parts). Or at least
@@ -4559,18 +4552,18 @@ find_func_aliases (gimple origt)
else
{
/* All other operations are merges. */
- VEC (ce_s, heap) *tmp = NULL;
+ vec<ce_s> tmp = vec<ce_s>();
struct constraint_expr *rhsp;
unsigned i, j;
get_constraint_for_rhs (gimple_assign_rhs1 (t), &rhsc);
for (i = 2; i < gimple_num_ops (t); ++i)
{
get_constraint_for_rhs (gimple_op (t, i), &tmp);
- FOR_EACH_VEC_ELT (ce_s, tmp, j, rhsp)
- VEC_safe_push (ce_s, heap, rhsc, *rhsp);
- VEC_truncate (ce_s, tmp, 0);
+ FOR_EACH_VEC_ELT (tmp, j, rhsp)
+ rhsc.safe_push (*rhsp);
+ tmp.truncate (0);
}
- VEC_free (ce_s, heap, tmp);
+ tmp.release ();
}
process_all_all_constraints (lhsc, rhsc);
}
@@ -4599,7 +4592,7 @@ find_func_aliases (gimple origt)
lhs = get_function_part_constraint (fi, fi_result);
get_constraint_for_rhs (gimple_return_retval (t), &rhsc);
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
}
}
@@ -4632,16 +4625,16 @@ find_func_aliases (gimple origt)
any global memory. */
if (op)
{
- VEC(ce_s, heap) *lhsc = NULL;
+ vec<ce_s> lhsc = vec<ce_s>();
struct constraint_expr rhsc, *lhsp;
unsigned j;
get_constraint_for (op, &lhsc);
rhsc.var = nonlocal_id;
rhsc.offset = 0;
rhsc.type = SCALAR;
- FOR_EACH_VEC_ELT (ce_s, lhsc, j, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, j, lhsp)
process_constraint (new_constraint (*lhsp, rhsc));
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
}
}
for (i = 0; i < gimple_asm_ninputs (t); ++i)
@@ -4665,8 +4658,8 @@ find_func_aliases (gimple origt)
}
}
- VEC_free (ce_s, heap, rhsc);
- VEC_free (ce_s, heap, lhsc);
+ rhsc.release ();
+ lhsc.release ();
}
@@ -4676,14 +4669,14 @@ find_func_aliases (gimple origt)
static void
process_ipa_clobber (varinfo_t fi, tree ptr)
{
- VEC(ce_s, heap) *ptrc = NULL;
+ vec<ce_s> ptrc = vec<ce_s>();
struct constraint_expr *c, lhs;
unsigned i;
get_constraint_for_rhs (ptr, &ptrc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- FOR_EACH_VEC_ELT (ce_s, ptrc, i, c)
+ FOR_EACH_VEC_ELT (ptrc, i, c)
process_constraint (new_constraint (lhs, *c));
- VEC_free (ce_s, heap, ptrc);
+ ptrc.release ();
}
/* Walk statement T setting up clobber and use constraints according to the
@@ -4694,8 +4687,8 @@ static void
find_func_clobbers (gimple origt)
{
gimple t = origt;
- VEC(ce_s, heap) *lhsc = NULL;
- VEC(ce_s, heap) *rhsc = NULL;
+ vec<ce_s> lhsc = vec<ce_s>();
+ vec<ce_s> rhsc = vec<ce_s>();
varinfo_t fi;
/* Add constraints for clobbered/used in IPA mode.
@@ -4732,9 +4725,9 @@ find_func_clobbers (gimple origt)
unsigned i;
lhsc = get_function_part_constraint (fi, fi_clobbers);
get_constraint_for_address_of (lhs, &rhsc);
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhsc, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
}
@@ -4760,9 +4753,9 @@ find_func_clobbers (gimple origt)
unsigned i;
lhs = get_function_part_constraint (fi, fi_uses);
get_constraint_for_address_of (rhs, &rhsc);
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
}
@@ -4809,14 +4802,14 @@ find_func_clobbers (gimple origt)
struct constraint_expr *rhsp, *lhsp;
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (lhs, *lhsp));
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
get_constraint_for_ptr_offset (src, NULL_TREE, &rhsc);
lhs = get_function_part_constraint (fi, fi_uses);
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
return;
}
/* The following function clobbers memory pointed to by
@@ -4829,9 +4822,9 @@ find_func_clobbers (gimple origt)
ce_s *lhsp;
get_constraint_for_ptr_offset (dest, NULL_TREE, &lhsc);
lhs = get_function_part_constraint (fi, fi_clobbers);
- FOR_EACH_VEC_ELT (ce_s, lhsc, i, lhsp)
+ FOR_EACH_VEC_ELT (lhsc, i, lhsp)
process_constraint (new_constraint (lhs, *lhsp));
- VEC_free (ce_s, heap, lhsc);
+ lhsc.release ();
return;
}
/* The following functions clobber their second and third
@@ -4899,9 +4892,9 @@ find_func_clobbers (gimple origt)
continue;
get_constraint_for_address_of (arg, &rhsc);
- FOR_EACH_VEC_ELT (ce_s, rhsc, j, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, j, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
/* Build constraints for propagating clobbers/uses along the
@@ -4962,7 +4955,7 @@ find_func_clobbers (gimple origt)
anything_id);
}
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
@@ -5048,8 +5041,6 @@ struct fieldoff
};
typedef struct fieldoff fieldoff_s;
-DEF_VEC_O(fieldoff_s);
-DEF_VEC_ALLOC_O(fieldoff_s,heap);
/* qsort comparison function for two fieldoff's PA and PB */
@@ -5076,9 +5067,9 @@ fieldoff_compare (const void *pa, const void *pb)
/* Sort a fieldstack according to the field offset and sizes. */
static void
-sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
+sort_fieldstack (vec<fieldoff_s> fieldstack)
{
- VEC_qsort (fieldoff_s, fieldstack, fieldoff_compare);
+ fieldstack.qsort (fieldoff_compare);
}
/* Return true if T is a type that can have subvars. */
@@ -5144,7 +5135,7 @@ field_must_have_pointers (tree t)
recursed for. */
static bool
-push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
+push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
HOST_WIDE_INT offset)
{
tree field;
@@ -5154,9 +5145,9 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
return false;
/* If the vector of fields is growing too big, bail out early.
- Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
+ Callers check for vec::length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
sure this fails. */
- if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
+ if (fieldstack->length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
return false;
for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
@@ -5184,15 +5175,15 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
bool has_unknown_size = false;
bool must_have_pointers_p;
- if (!VEC_empty (fieldoff_s, *fieldstack))
- pair = &VEC_last (fieldoff_s, *fieldstack);
+ if (!fieldstack->is_empty ())
+ pair = &fieldstack->last ();
/* If there isn't anything at offset zero, create sth. */
if (!pair
&& offset + foff != 0)
{
fieldoff_s e = {0, offset + foff, false, false, false, false};
- pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, e);
+ pair = fieldstack->safe_push (e);
}
if (!DECL_SIZE (field)
@@ -5225,7 +5216,7 @@ push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
= (!has_unknown_size
&& POINTER_TYPE_P (TREE_TYPE (field))
&& TYPE_RESTRICT (TREE_TYPE (field)));
- VEC_safe_push (fieldoff_s, heap, *fieldstack, e);
+ fieldstack->safe_push (e);
}
}
@@ -5442,13 +5433,13 @@ create_function_info_for (tree decl, const char *name)
FIELDSTACK is assumed to be sorted by offset. */
static bool
-check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
+check_for_overlaps (vec<fieldoff_s> fieldstack)
{
fieldoff_s *fo = NULL;
unsigned int i;
HOST_WIDE_INT lastoffset = -1;
- FOR_EACH_VEC_ELT (fieldoff_s, fieldstack, i, fo)
+ FOR_EACH_VEC_ELT (fieldstack, i, fo)
{
if (fo->offset == lastoffset)
return true;
@@ -5467,7 +5458,7 @@ create_variable_info_for_1 (tree decl, const char *name)
varinfo_t vi, newvi;
tree decl_type = TREE_TYPE (decl);
tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
- VEC (fieldoff_s,heap) *fieldstack = NULL;
+ vec<fieldoff_s> fieldstack = vec<fieldoff_s>();
fieldoff_s *fo;
unsigned int i;
@@ -5499,7 +5490,7 @@ create_variable_info_for_1 (tree decl, const char *name)
push_fields_onto_fieldstack (decl_type, &fieldstack, 0);
- for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
+ for (i = 0; !notokay && fieldstack.iterate (i, &fo); i++)
if (fo->has_unknown_size
|| fo->offset < 0)
{
@@ -5522,13 +5513,13 @@ create_variable_info_for_1 (tree decl, const char *name)
}
if (notokay)
- VEC_free (fieldoff_s, heap, fieldstack);
+ fieldstack.release ();
}
/* If we didn't end up collecting sub-variables create a full
variable for the decl. */
- if (VEC_length (fieldoff_s, fieldstack) <= 1
- || VEC_length (fieldoff_s, fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
+ if (fieldstack.length () <= 1
+ || fieldstack.length () > MAX_FIELDS_FOR_FIELD_SENSITIVE)
{
vi = new_var_info (decl, name);
vi->offset = 0;
@@ -5536,14 +5527,14 @@ create_variable_info_for_1 (tree decl, const char *name)
vi->fullsize = TREE_INT_CST_LOW (declsize);
vi->size = vi->fullsize;
vi->is_full_var = true;
- VEC_free (fieldoff_s, heap, fieldstack);
+ fieldstack.release ();
return vi;
}
vi = new_var_info (decl, name);
vi->fullsize = TREE_INT_CST_LOW (declsize);
for (i = 0, newvi = vi;
- VEC_iterate (fieldoff_s, fieldstack, i, fo);
+ fieldstack.iterate (i, &fo);
++i, newvi = newvi->next)
{
const char *newname = "NULL";
@@ -5562,11 +5553,11 @@ create_variable_info_for_1 (tree decl, const char *name)
newvi->fullsize = vi->fullsize;
newvi->may_have_pointers = fo->may_have_pointers;
newvi->only_restrict_pointers = fo->only_restrict_pointers;
- if (i + 1 < VEC_length (fieldoff_s, fieldstack))
+ if (i + 1 < fieldstack.length ())
newvi->next = new_var_info (decl, name);
}
- VEC_free (fieldoff_s, heap, fieldstack);
+ fieldstack.release ();
return vi;
}
@@ -5618,14 +5609,14 @@ create_variable_info_for (tree decl, const char *name)
if (DECL_INITIAL (decl)
&& vnode->analyzed)
{
- VEC (ce_s, heap) *rhsc = NULL;
+ vec<ce_s> rhsc = vec<ce_s>();
struct constraint_expr lhs, *rhsp;
unsigned i;
get_constraint_for_rhs (DECL_INITIAL (decl), &rhsc);
lhs.var = vi->id;
lhs.offset = 0;
lhs.type = SCALAR;
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
/* If this is a variable that escapes from the unit
the initializer escapes as well. */
@@ -5634,10 +5625,10 @@ create_variable_info_for (tree decl, const char *name)
lhs.var = escaped_id;
lhs.offset = 0;
lhs.type = SCALAR;
- FOR_EACH_VEC_ELT (ce_s, rhsc, i, rhsp)
+ FOR_EACH_VEC_ELT (rhsc, i, rhsp)
process_constraint (new_constraint (lhs, *rhsp));
}
- VEC_free (ce_s, heap, rhsc);
+ rhsc.release ();
}
}
}
@@ -6259,7 +6250,7 @@ dump_sa_points_to_info (FILE *outfile)
stats.num_implicit_edges);
}
- for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
+ for (i = 0; i < varmap.length (); i++)
{
varinfo_t vi = get_varinfo (i);
if (!vi->may_have_pointers)
@@ -6329,7 +6320,7 @@ init_base_vars (void)
/* This specifically does not use process_constraint because
process_constraint ignores all anything = anything constraints, since all
but this one are redundant. */
- VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));
+ constraints.safe_push (new_constraint (lhs, rhs));
/* Create the READONLY variable, used to represent that a variable
points to readonly memory. */
@@ -6465,8 +6456,8 @@ init_alias_vars (void)
sizeof (struct constraint), 30);
variable_info_pool = create_alloc_pool ("Variable info pool",
sizeof (struct variable_info), 30);
- constraints = VEC_alloc (constraint_t, heap, 8);
- varmap = VEC_alloc (varinfo_t, heap, 8);
+ constraints.create (8);
+ varmap.create (8);
vi_for_tree = pointer_map_create ();
call_stmt_vars = pointer_map_create ();
@@ -6504,7 +6495,7 @@ remove_preds_and_fake_succs (constraint_graph_t graph)
/* Now reallocate the size of the successor list as, and blow away
the predecessor bitmaps. */
- graph->size = VEC_length (varinfo_t, varmap);
+ graph->size = varmap.length ();
graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);
free (graph->implicit_preds);
@@ -6526,7 +6517,7 @@ solve_constraints (void)
"\nCollapsing static cycles and doing variable "
"substitution\n");
- init_graph (VEC_length (varinfo_t, varmap) * 2);
+ init_graph (varmap.length () * 2);
if (dump_file)
fprintf (dump_file, "Building predecessor graph\n");
@@ -6643,7 +6634,7 @@ compute_points_to_sets (void)
cfun->gimple_df->escaped.escaped = 0;
/* Mark escaped HEAP variables as global. */
- FOR_EACH_VEC_ELT (varinfo_t, varmap, i, vi)
+ FOR_EACH_VEC_ELT (varmap, i, vi)
if (vi->is_heap_var
&& !vi->is_global_var)
DECL_EXTERNAL (vi->decl) = vi->is_global_var
@@ -6733,10 +6724,10 @@ delete_points_to_sets (void)
pointer_map_destroy (vi_for_tree);
pointer_map_destroy (call_stmt_vars);
bitmap_obstack_release (&pta_obstack);
- VEC_free (constraint_t, heap, constraints);
+ constraints.release ();
for (i = 0; i < graph->size; i++)
- VEC_free (constraint_t, heap, graph->complex[i]);
+ graph->complex[i].release ();
free (graph->complex);
free (graph->rep);
@@ -6746,7 +6737,7 @@ delete_points_to_sets (void)
free (graph->indirect_cycles);
free (graph);
- VEC_free (varinfo_t, heap, varmap);
+ varmap.release ();
free_alloc_pool (variable_info_pool);
free_alloc_pool (constraint_pool);
@@ -6921,7 +6912,7 @@ ipa_pta_execute (void)
dump_constraints (dump_file, 0);
fprintf (dump_file, "\n");
}
- from = VEC_length (constraint_t, constraints);
+ from = constraints.length ();
FOR_EACH_DEFINED_FUNCTION (node)
{
@@ -7009,7 +7000,7 @@ ipa_pta_execute (void)
dump_constraints (dump_file, from);
fprintf (dump_file, "\n");
}
- from = VEC_length (constraint_t, constraints);
+ from = constraints.length ();
}
/* From the constraints compute the points-to sets. */
@@ -7044,7 +7035,7 @@ ipa_pta_execute (void)
fn = DECL_STRUCT_FUNCTION (node->symbol.decl);
/* Compute the points-to sets for pointer SSA_NAMEs. */
- FOR_EACH_VEC_ELT (tree, fn->gimple_df->ssa_names, i, ptr)
+ FOR_EACH_VEC_ELT (*fn->gimple_df->ssa_names, i, ptr)
{
if (ptr
&& POINTER_TYPE_P (TREE_TYPE (ptr)))
diff --git a/gcc/tree-ssa-tail-merge.c b/gcc/tree-ssa-tail-merge.c
index 660b68c10b2..0db205209c3 100644
--- a/gcc/tree-ssa-tail-merge.c
+++ b/gcc/tree-ssa-tail-merge.c
@@ -219,7 +219,7 @@ struct same_succ_def
bb. */
bitmap inverse;
/* The edge flags for each of the successor bbs. */
- VEC (int, heap) *succ_flags;
+ vec<int> succ_flags;
/* Indicates whether the struct is currently in the worklist. */
bool in_worklist;
/* The hash value of the struct. */
@@ -376,8 +376,8 @@ same_succ_print (FILE *file, const same_succ e)
bitmap_print (file, e->succs, "succs:", "\n");
bitmap_print (file, e->inverse, "inverse:", "\n");
fprintf (file, "flags:");
- for (i = 0; i < VEC_length (int, e->succ_flags); ++i)
- fprintf (file, " %x", VEC_index (int, e->succ_flags, i));
+ for (i = 0; i < e->succ_flags.length (); ++i)
+ fprintf (file, " %x", e->succ_flags[i]);
fprintf (file, "\n");
}
@@ -476,9 +476,9 @@ same_succ_hash (const_same_succ e)
hashval = iterative_hash_hashval_t (size, hashval);
BB_SIZE (bb) = size;
- for (i = 0; i < VEC_length (int, e->succ_flags); ++i)
+ for (i = 0; i < e->succ_flags.length (); ++i)
{
- flags = VEC_index (int, e->succ_flags, i);
+ flags = e->succ_flags[i];
flags = flags & ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
hashval = iterative_hash_hashval_t (flags, hashval);
}
@@ -512,13 +512,13 @@ inverse_flags (const_same_succ e1, const_same_succ e2)
int f1a, f1b, f2a, f2b;
int mask = ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);
- if (VEC_length (int, e1->succ_flags) != 2)
+ if (e1->succ_flags.length () != 2)
return false;
- f1a = VEC_index (int, e1->succ_flags, 0);
- f1b = VEC_index (int, e1->succ_flags, 1);
- f2a = VEC_index (int, e2->succ_flags, 0);
- f2b = VEC_index (int, e2->succ_flags, 1);
+ f1a = e1->succ_flags[0];
+ f1b = e1->succ_flags[1];
+ f2a = e2->succ_flags[0];
+ f2b = e2->succ_flags[1];
if (f1a == f2a && f1b == f2b)
return false;
@@ -539,7 +539,7 @@ same_succ_def::equal (const value_type *e1, const compare_type *e2)
if (e1->hashval != e2->hashval)
return 0;
- if (VEC_length (int, e1->succ_flags) != VEC_length (int, e2->succ_flags))
+ if (e1->succ_flags.length () != e2->succ_flags.length ())
return 0;
if (!bitmap_equal_p (e1->succs, e2->succs))
@@ -547,9 +547,8 @@ same_succ_def::equal (const value_type *e1, const compare_type *e2)
if (!inverse_flags (e1, e2))
{
- for (i = 0; i < VEC_length (int, e1->succ_flags); ++i)
- if (VEC_index (int, e1->succ_flags, i)
- != VEC_index (int, e1->succ_flags, i))
+ for (i = 0; i < e1->succ_flags.length (); ++i)
+ if (e1->succ_flags[i] != e2->succ_flags[i])
return 0;
}
@@ -593,7 +592,7 @@ same_succ_alloc (void)
same->bbs = BITMAP_ALLOC (NULL);
same->succs = BITMAP_ALLOC (NULL);
same->inverse = BITMAP_ALLOC (NULL);
- same->succ_flags = VEC_alloc (int, heap, 10);
+ same->succ_flags.create (10);
same->in_worklist = false;
return same;
@@ -607,7 +606,7 @@ same_succ_def::remove (same_succ e)
BITMAP_FREE (e->bbs);
BITMAP_FREE (e->succs);
BITMAP_FREE (e->inverse);
- VEC_free (int, heap, e->succ_flags);
+ e->succ_flags.release ();
XDELETE (e);
}
@@ -620,7 +619,7 @@ same_succ_reset (same_succ same)
bitmap_clear (same->bbs);
bitmap_clear (same->succs);
bitmap_clear (same->inverse);
- VEC_truncate (int, same->succ_flags, 0);
+ same->succ_flags.truncate (0);
}
static hash_table <same_succ_def> same_succ_htab;
@@ -647,12 +646,10 @@ debug_same_succ ( void)
same_succ_htab.traverse <FILE *, ssa_same_succ_print_traverse> (stderr);
}
-DEF_VEC_P (same_succ);
-DEF_VEC_ALLOC_P (same_succ, heap);
/* Vector of bbs to process. */
-static VEC (same_succ, heap) *worklist;
+static vec<same_succ> worklist;
/* Prints worklist to FILE. */
@@ -660,8 +657,8 @@ static void
print_worklist (FILE *file)
{
unsigned int i;
- for (i = 0; i < VEC_length (same_succ, worklist); ++i)
- same_succ_print (file, VEC_index (same_succ, worklist, i));
+ for (i = 0; i < worklist.length (); ++i)
+ same_succ_print (file, worklist[i]);
}
/* Adds SAME to worklist. */
@@ -676,7 +673,7 @@ add_to_worklist (same_succ same)
return;
same->in_worklist = true;
- VEC_safe_push (same_succ, heap, worklist, same);
+ worklist.safe_push (same);
}
/* Add BB to same_succ_htab. */
@@ -701,7 +698,7 @@ find_same_succ_bb (basic_block bb, same_succ *same_p)
same_succ_edge_flags[index] = e->flags;
}
EXECUTE_IF_SET_IN_BITMAP (same->succs, 0, j, bj)
- VEC_safe_push (int, heap, same->succ_flags, same_succ_edge_flags[j]);
+ same->succ_flags.safe_push (same_succ_edge_flags[j]);
same->hashval = same_succ_hash (same);
@@ -752,7 +749,7 @@ init_worklist (void)
same_succ_edge_flags = XCNEWVEC (int, last_basic_block);
deleted_bbs = BITMAP_ALLOC (NULL);
deleted_bb_preds = BITMAP_ALLOC (NULL);
- worklist = VEC_alloc (same_succ, heap, n_basic_blocks);
+ worklist.create (n_basic_blocks);
find_same_succ ();
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -773,7 +770,7 @@ delete_worklist (void)
same_succ_edge_flags = NULL;
BITMAP_FREE (deleted_bbs);
BITMAP_FREE (deleted_bb_preds);
- VEC_free (same_succ, heap, worklist);
+ worklist.release ();
}
/* Mark BB as deleted, and mark its predecessors. */
@@ -970,19 +967,17 @@ delete_cluster (bb_cluster c)
XDELETE (c);
}
-DEF_VEC_P (bb_cluster);
-DEF_VEC_ALLOC_P (bb_cluster, heap);
/* Array that contains all clusters. */
-static VEC (bb_cluster, heap) *all_clusters;
+static vec<bb_cluster> all_clusters;
/* Allocate all cluster vectors. */
static void
alloc_cluster_vectors (void)
{
- all_clusters = VEC_alloc (bb_cluster, heap, n_basic_blocks);
+ all_clusters.create (n_basic_blocks);
}
/* Reset all cluster vectors. */
@@ -992,9 +987,9 @@ reset_cluster_vectors (void)
{
unsigned int i;
basic_block bb;
- for (i = 0; i < VEC_length (bb_cluster, all_clusters); ++i)
- delete_cluster (VEC_index (bb_cluster, all_clusters, i));
- VEC_truncate (bb_cluster, all_clusters, 0);
+ for (i = 0; i < all_clusters.length (); ++i)
+ delete_cluster (all_clusters[i]);
+ all_clusters.truncate (0);
FOR_EACH_BB (bb)
BB_CLUSTER (bb) = NULL;
}
@@ -1005,9 +1000,9 @@ static void
delete_cluster_vectors (void)
{
unsigned int i;
- for (i = 0; i < VEC_length (bb_cluster, all_clusters); ++i)
- delete_cluster (VEC_index (bb_cluster, all_clusters, i));
- VEC_free (bb_cluster, heap, all_clusters);
+ for (i = 0; i < all_clusters.length (); ++i)
+ delete_cluster (all_clusters[i]);
+ all_clusters.release ();
}
/* Merge cluster C2 into C1. */
@@ -1035,8 +1030,8 @@ set_cluster (basic_block bb1, basic_block bb2)
add_bb_to_cluster (c, bb2);
BB_CLUSTER (bb1) = c;
BB_CLUSTER (bb2) = c;
- c->index = VEC_length (bb_cluster, all_clusters);
- VEC_safe_push (bb_cluster, heap, all_clusters, c);
+ c->index = all_clusters.length ();
+ all_clusters.safe_push (c);
}
else if (BB_CLUSTER (bb1) == NULL || BB_CLUSTER (bb2) == NULL)
{
@@ -1056,7 +1051,7 @@ set_cluster (basic_block bb1, basic_block bb2)
merge_clusters (merge, old);
EXECUTE_IF_SET_IN_BITMAP (old->bbs, 0, i, bi)
BB_CLUSTER (BASIC_BLOCK (i)) = merge;
- VEC_replace (bb_cluster, all_clusters, old->index, NULL);
+ all_clusters[old->index] = NULL;
update_rep_bb (merge, old->rep_bb);
delete_cluster (old);
}
@@ -1423,9 +1418,9 @@ find_clusters (void)
{
same_succ same;
- while (!VEC_empty (same_succ, worklist))
+ while (!worklist.is_empty ())
{
- same = VEC_pop (same_succ, worklist);
+ same = worklist.pop ();
same->in_worklist = false;
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -1514,9 +1509,9 @@ apply_clusters (void)
bitmap_iterator bj;
int nr_bbs_removed = 0;
- for (i = 0; i < VEC_length (bb_cluster, all_clusters); ++i)
+ for (i = 0; i < all_clusters.length (); ++i)
{
- c = VEC_index (bb_cluster, all_clusters, i);
+ c = all_clusters[i];
if (c == NULL)
continue;
@@ -1621,7 +1616,7 @@ tail_merge_optimize (unsigned int todo)
}
init_worklist ();
- while (!VEC_empty (same_succ, worklist))
+ while (!worklist.is_empty ())
{
if (!loop_entered)
{
@@ -1637,8 +1632,8 @@ tail_merge_optimize (unsigned int todo)
fprintf (dump_file, "worklist iteration #%d\n", iteration_nr);
find_clusters ();
- gcc_assert (VEC_empty (same_succ, worklist));
- if (VEC_empty (bb_cluster, all_clusters))
+ gcc_assert (worklist.is_empty ());
+ if (all_clusters.is_empty ())
break;
nr_bbs_removed = apply_clusters ();
diff --git a/gcc/tree-ssa-threadedge.c b/gcc/tree-ssa-threadedge.c
index 64e42f3b701..40c9d44d924 100644
--- a/gcc/tree-ssa-threadedge.c
+++ b/gcc/tree-ssa-threadedge.c
@@ -43,32 +43,31 @@ along with GCC; see the file COPYING3. If not see
static int stmt_count;
/* Array to record value-handles per SSA_NAME. */
-VEC(tree,heap) *ssa_name_values;
+vec<tree> ssa_name_values;
/* Set the value for the SSA name NAME to VALUE. */
void
set_ssa_name_value (tree name, tree value)
{
- if (SSA_NAME_VERSION (name) >= VEC_length (tree, ssa_name_values))
- VEC_safe_grow_cleared (tree, heap, ssa_name_values,
- SSA_NAME_VERSION (name) + 1);
- VEC_replace (tree, ssa_name_values, SSA_NAME_VERSION (name), value);
+ if (SSA_NAME_VERSION (name) >= ssa_name_values.length ())
+ ssa_name_values.safe_grow_cleared (SSA_NAME_VERSION (name) + 1);
+ ssa_name_values[SSA_NAME_VERSION (name)] = value;
}
/* Initialize the per SSA_NAME value-handles array. */
void
threadedge_initialize_values (void)
{
- gcc_assert (ssa_name_values == NULL);
- ssa_name_values = VEC_alloc(tree, heap, num_ssa_names);
+ gcc_assert (!ssa_name_values.exists ());
+ ssa_name_values.create (num_ssa_names);
}
/* Free the per SSA_NAME value-handle array. */
void
threadedge_finalize_values (void)
{
- VEC_free(tree, heap, ssa_name_values);
+ ssa_name_values.release ();
}
/* Return TRUE if we may be able to thread an incoming edge into
@@ -134,20 +133,20 @@ lhs_of_dominating_assert (tree op, basic_block bb, gimple stmt)
structures. */
static void
-remove_temporary_equivalences (VEC(tree, heap) **stack)
+remove_temporary_equivalences (vec<tree> *stack)
{
- while (VEC_length (tree, *stack) > 0)
+ while (stack->length () > 0)
{
tree prev_value, dest;
- dest = VEC_pop (tree, *stack);
+ dest = stack->pop ();
/* A NULL value indicates we should stop unwinding, otherwise
pop off the next entry as they're recorded in pairs. */
if (dest == NULL)
break;
- prev_value = VEC_pop (tree, *stack);
+ prev_value = stack->pop ();
set_ssa_name_value (dest, prev_value);
}
}
@@ -157,7 +156,7 @@ remove_temporary_equivalences (VEC(tree, heap) **stack)
done processing the current edge. */
static void
-record_temporary_equivalence (tree x, tree y, VEC(tree, heap) **stack)
+record_temporary_equivalence (tree x, tree y, vec<tree> *stack)
{
tree prev_x = SSA_NAME_VALUE (x);
@@ -168,9 +167,9 @@ record_temporary_equivalence (tree x, tree y, VEC(tree, heap) **stack)
}
set_ssa_name_value (x, y);
- VEC_reserve (tree, heap, *stack, 2);
- VEC_quick_push (tree, *stack, prev_x);
- VEC_quick_push (tree, *stack, x);
+ stack->reserve (2);
+ stack->quick_push (prev_x);
+ stack->quick_push (x);
}
/* Record temporary equivalences created by PHIs at the target of the
@@ -180,7 +179,7 @@ record_temporary_equivalence (tree x, tree y, VEC(tree, heap) **stack)
indicating we should not thread this edge, else return TRUE. */
static bool
-record_temporary_equivalences_from_phis (edge e, VEC(tree, heap) **stack)
+record_temporary_equivalences_from_phis (edge e, vec<tree> *stack)
{
gimple_stmt_iterator gsi;
@@ -281,7 +280,7 @@ fold_assignment_stmt (gimple stmt)
static gimple
record_temporary_equivalences_from_stmts_at_dest (edge e,
- VEC(tree, heap) **stack,
+ vec<tree> *stack,
tree (*simplify) (gimple,
gimple))
{
@@ -610,10 +609,6 @@ cond_arg_set_in_bb (edge e, basic_block bb)
return false;
}
-DEF_VEC_O(tree);
-DEF_VEC_ALLOC_O_STACK(tree);
-#define VEC_tree_stack_alloc(alloc) VEC_stack_alloc (tree, alloc)
-
/* Copy debug stmts from DEST's chain of single predecessors up to
SRC, so that we don't lose the bindings as PHI nodes are introduced
when DEST gains new predecessors. */
@@ -643,7 +638,7 @@ propagate_threaded_block_debug_into (basic_block dest, basic_block src)
i++;
}
- VEC(tree, stack) *fewvars = NULL;
+ vec<tree, va_stack> fewvars = vec<tree, va_stack>();
pointer_set_t *vars = NULL;
/* If we're already starting with 3/4 of alloc_count, go for a
@@ -652,7 +647,7 @@ propagate_threaded_block_debug_into (basic_block dest, basic_block src)
if (i * 4 > alloc_count * 3)
vars = pointer_set_create ();
else if (alloc_count)
- fewvars = VEC_alloc (tree, stack, alloc_count);
+ vec_stack_alloc (tree, fewvars, alloc_count);
/* Now go through the initial debug stmts in DEST again, this time
actually inserting in VARS or FEWVARS. Don't bother checking for
@@ -675,7 +670,7 @@ propagate_threaded_block_debug_into (basic_block dest, basic_block src)
if (vars)
pointer_set_insert (vars, var);
else
- VEC_quick_push (tree, fewvars, var);
+ fewvars.quick_push (var);
}
basic_block bb = dest;
@@ -711,21 +706,21 @@ propagate_threaded_block_debug_into (basic_block dest, basic_block src)
continue;
else if (!vars)
{
- int i = VEC_length (tree, fewvars);
+ int i = fewvars.length ();
while (i--)
- if (VEC_index (tree, fewvars, i) == var)
+ if (fewvars[i] == var)
break;
if (i >= 0)
continue;
- if (VEC_length (tree, fewvars) < alloc_count)
- VEC_quick_push (tree, fewvars, var);
+ if (fewvars.length () < alloc_count)
+ fewvars.quick_push (var);
else
{
vars = pointer_set_create ();
for (i = 0; i < alloc_count; i++)
- pointer_set_insert (vars, VEC_index (tree, fewvars, i));
- VEC_free (tree, stack, fewvars);
+ pointer_set_insert (vars, fewvars[i]);
+ fewvars.release ();
pointer_set_insert (vars, var);
}
}
@@ -740,8 +735,8 @@ propagate_threaded_block_debug_into (basic_block dest, basic_block src)
if (vars)
pointer_set_destroy (vars);
- else if (fewvars)
- VEC_free (tree, stack, fewvars);
+ else if (fewvars.exists ())
+ fewvars.release ();
}
/* TAKEN_EDGE represents an edge taken as a result of jump threading.
@@ -866,7 +861,7 @@ void
thread_across_edge (gimple dummy_cond,
edge e,
bool handle_dominating_asserts,
- VEC(tree, heap) **stack,
+ vec<tree> *stack,
tree (*simplify) (gimple, gimple))
{
gimple stmt;
diff --git a/gcc/tree-ssa-threadupdate.c b/gcc/tree-ssa-threadupdate.c
index eca88a910c1..b97be8156df 100644
--- a/gcc/tree-ssa-threadupdate.c
+++ b/gcc/tree-ssa-threadupdate.c
@@ -168,7 +168,7 @@ struct ssa_local_info_t
opportunities as they are discovered. We keep the registered
jump threading opportunities in this vector as edge pairs
(original_edge, target_edge). */
-static VEC(edge,heap) *threaded_edges;
+static vec<edge> threaded_edges;
/* When we start updating the CFG for threading, data necessary for jump
threading is attached to the AUX field for the incoming edge. Use these
@@ -1147,14 +1147,14 @@ mark_threaded_blocks (bitmap threaded_blocks)
edge e;
edge_iterator ei;
- for (i = 0; i < VEC_length (edge, threaded_edges); i += 3)
+ for (i = 0; i < threaded_edges.length (); i += 3)
{
- edge e = VEC_index (edge, threaded_edges, i);
+ edge e = threaded_edges[i];
edge *x = XNEWVEC (edge, 2);
e->aux = x;
- THREAD_TARGET (e) = VEC_index (edge, threaded_edges, i + 1);
- THREAD_TARGET2 (e) = VEC_index (edge, threaded_edges, i + 2);
+ THREAD_TARGET (e) = threaded_edges[i + 1];
+ THREAD_TARGET2 (e) = threaded_edges[i + 2];
bitmap_set_bit (tmp, e->dest->index);
}
@@ -1209,7 +1209,7 @@ thread_through_all_blocks (bool may_peel_loop_headers)
/* We must know about loops in order to preserve them. */
gcc_assert (current_loops != NULL);
- if (threaded_edges == NULL)
+ if (!threaded_edges.exists ())
return false;
threaded_blocks = BITMAP_ALLOC (NULL);
@@ -1248,8 +1248,7 @@ thread_through_all_blocks (bool may_peel_loop_headers)
BITMAP_FREE (threaded_blocks);
threaded_blocks = NULL;
- VEC_free (edge, heap, threaded_edges);
- threaded_edges = NULL;
+ threaded_edges.release ();
if (retval)
loops_state_set (LOOPS_NEED_FIXUP);
@@ -1273,15 +1272,15 @@ register_jump_thread (edge e, edge e2, edge e3)
if (e2 == NULL)
return;
- if (threaded_edges == NULL)
- threaded_edges = VEC_alloc (edge, heap, 15);
+ if (!threaded_edges.exists ())
+ threaded_edges.create (15);
if (dump_file && (dump_flags & TDF_DETAILS)
&& e->dest != e2->src)
fprintf (dump_file,
" Registering jump thread around one or more intermediate blocks\n");
- VEC_safe_push (edge, heap, threaded_edges, e);
- VEC_safe_push (edge, heap, threaded_edges, e2);
- VEC_safe_push (edge, heap, threaded_edges, e3);
+ threaded_edges.safe_push (e);
+ threaded_edges.safe_push (e2);
+ threaded_edges.safe_push (e3);
}
diff --git a/gcc/tree-ssa-uncprop.c b/gcc/tree-ssa-uncprop.c
index aeeaa04fee6..1df7d07308c 100644
--- a/gcc/tree-ssa-uncprop.c
+++ b/gcc/tree-ssa-uncprop.c
@@ -264,7 +264,7 @@ associate_equivalences_with_edges (void)
leading to this block. If no such edge equivalency exists, then we
record NULL. These equivalences are live until we leave the dominator
subtree rooted at the block where we record the equivalency. */
-static VEC(tree,heap) *equiv_stack;
+static vec<tree> equiv_stack;
/* Global hash table implementing a mapping from invariant values
to a list of SSA_NAMEs which have the same value. We might be
@@ -278,7 +278,7 @@ struct equiv_hash_elt
tree value;
/* List of SSA_NAMEs which have the same value/key. */
- VEC(tree,heap) *equivalences;
+ vec<tree> equivalences;
};
static void uncprop_enter_block (struct dom_walk_data *, basic_block);
@@ -309,7 +309,7 @@ static void
equiv_free (void *p)
{
struct equiv_hash_elt *elt = (struct equiv_hash_elt *) p;
- VEC_free (tree, heap, elt->equivalences);
+ elt->equivalences.release ();
free (elt);
}
@@ -322,12 +322,12 @@ remove_equivalence (tree value)
void **slot;
equiv_hash_elt.value = value;
- equiv_hash_elt.equivalences = NULL;
+ equiv_hash_elt.equivalences.create (0);
slot = htab_find_slot (equiv, &equiv_hash_elt, NO_INSERT);
equiv_hash_elt_p = (struct equiv_hash_elt *) *slot;
- VEC_pop (tree, equiv_hash_elt_p->equivalences);
+ equiv_hash_elt_p->equivalences.pop ();
}
/* Record EQUIVALENCE = VALUE into our hash table. */
@@ -340,7 +340,7 @@ record_equiv (tree value, tree equivalence)
equiv_hash_elt = XNEW (struct equiv_hash_elt);
equiv_hash_elt->value = value;
- equiv_hash_elt->equivalences = NULL;
+ equiv_hash_elt->equivalences.create (0);
slot = htab_find_slot (equiv, equiv_hash_elt, INSERT);
@@ -351,7 +351,7 @@ record_equiv (tree value, tree equivalence)
equiv_hash_elt = (struct equiv_hash_elt *) *slot;
- VEC_safe_push (tree, heap, equiv_hash_elt->equivalences, equivalence);
+ equiv_hash_elt->equivalences.safe_push (equivalence);
}
/* Main driver for un-cprop. */
@@ -366,7 +366,7 @@ tree_ssa_uncprop (void)
/* Create our global data structures. */
equiv = htab_create (1024, equiv_hash, equiv_eq, equiv_free);
- equiv_stack = VEC_alloc (tree, heap, 2);
+ equiv_stack.create (2);
/* We're going to do a dominator walk, so ensure that we have
dominance information. */
@@ -394,7 +394,7 @@ tree_ssa_uncprop (void)
need to empty elements out of the hash table, free EQUIV_STACK,
and cleanup the AUX field on the edges. */
htab_delete (equiv);
- VEC_free (tree, heap, equiv_stack);
+ equiv_stack.release ();
FOR_EACH_BB (bb)
{
edge e;
@@ -422,7 +422,7 @@ uncprop_leave_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
basic_block bb ATTRIBUTE_UNUSED)
{
/* Pop the topmost value off the equiv stack. */
- tree value = VEC_pop (tree, equiv_stack);
+ tree value = equiv_stack.pop ();
/* If that value was non-null, then pop the topmost equivalency off
its equivalency stack. */
@@ -477,7 +477,7 @@ uncprop_into_successor_phis (basic_block bb)
/* Lookup this argument's value in the hash table. */
equiv_hash_elt.value = arg;
- equiv_hash_elt.equivalences = NULL;
+ equiv_hash_elt.equivalences.create (0);
slot = htab_find_slot (equiv, &equiv_hash_elt, NO_INSERT);
if (slot)
@@ -490,9 +490,9 @@ uncprop_into_successor_phis (basic_block bb)
then replace the value in the argument with its equivalent
SSA_NAME. Use the most recent equivalence as hopefully
that results in shortest lifetimes. */
- for (j = VEC_length (tree, elt->equivalences) - 1; j >= 0; j--)
+ for (j = elt->equivalences.length () - 1; j >= 0; j--)
{
- tree equiv = VEC_index (tree, elt->equivalences, j);
+ tree equiv = elt->equivalences[j];
if (SSA_NAME_VAR (equiv) == SSA_NAME_VAR (res)
&& TREE_TYPE (equiv) == TREE_TYPE (res))
@@ -563,13 +563,13 @@ uncprop_enter_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
struct edge_equivalency *equiv = (struct edge_equivalency *) e->aux;
record_equiv (equiv->rhs, equiv->lhs);
- VEC_safe_push (tree, heap, equiv_stack, equiv->rhs);
+ equiv_stack.safe_push (equiv->rhs);
recorded = true;
}
}
if (!recorded)
- VEC_safe_push (tree, heap, equiv_stack, NULL_TREE);
+ equiv_stack.safe_push (NULL_TREE);
uncprop_into_successor_phis (bb);
}
diff --git a/gcc/tree-ssa-uninit.c b/gcc/tree-ssa-uninit.c
index 407520b5f6e..c3b8df801e5 100644
--- a/gcc/tree-ssa-uninit.c
+++ b/gcc/tree-ssa-uninit.c
@@ -241,9 +241,9 @@ find_control_equiv_block (basic_block bb)
static bool
compute_control_dep_chain (basic_block bb, basic_block dep_bb,
- VEC(edge, heap) **cd_chains,
+ vec<edge> *cd_chains,
size_t *num_chains,
- VEC(edge, heap) **cur_cd_chain)
+ vec<edge> *cur_cd_chain)
{
edge_iterator ei;
edge e;
@@ -255,13 +255,13 @@ compute_control_dep_chain (basic_block bb, basic_block dep_bb,
return false;
/* Could use a set instead. */
- cur_chain_len = VEC_length (edge, *cur_cd_chain);
+ cur_chain_len = cur_cd_chain->length ();
if (cur_chain_len > MAX_CHAIN_LEN)
return false;
for (i = 0; i < cur_chain_len; i++)
{
- edge e = VEC_index (edge, *cur_cd_chain, i);
+ edge e = (*cur_cd_chain)[i];
/* cycle detected. */
if (e->src == bb)
return false;
@@ -274,7 +274,7 @@ compute_control_dep_chain (basic_block bb, basic_block dep_bb,
continue;
cd_bb = e->dest;
- VEC_safe_push (edge, heap, *cur_cd_chain, e);
+ cur_cd_chain->safe_push (e);
while (!is_non_loop_exit_postdominating (cd_bb, bb))
{
if (cd_bb == dep_bb)
@@ -282,8 +282,7 @@ compute_control_dep_chain (basic_block bb, basic_block dep_bb,
/* Found a direct control dependence. */
if (*num_chains < MAX_NUM_CHAINS)
{
- cd_chains[*num_chains]
- = VEC_copy (edge, heap, *cur_cd_chain);
+ cd_chains[*num_chains] = cur_cd_chain->copy ();
(*num_chains)++;
}
found_cd_chain = true;
@@ -303,10 +302,10 @@ compute_control_dep_chain (basic_block bb, basic_block dep_bb,
if (cd_bb == EXIT_BLOCK_PTR)
break;
}
- VEC_pop (edge, *cur_cd_chain);
- gcc_assert (VEC_length (edge, *cur_cd_chain) == cur_chain_len);
+ cur_cd_chain->pop ();
+ gcc_assert (cur_cd_chain->length () == cur_chain_len);
}
- gcc_assert (VEC_length (edge, *cur_cd_chain) == cur_chain_len);
+ gcc_assert (cur_cd_chain->length () == cur_chain_len);
return found_cd_chain;
}
@@ -317,8 +316,6 @@ typedef struct use_pred_info
bool invert;
} *use_pred_info_t;
-DEF_VEC_P(use_pred_info_t);
-DEF_VEC_ALLOC_P(use_pred_info_t, heap);
/* Converts the chains of control dependence edges into a set of
@@ -333,9 +330,9 @@ DEF_VEC_ALLOC_P(use_pred_info_t, heap);
*NUM_PREDS is the number of composite predictes. */
static bool
-convert_control_dep_chain_into_preds (VEC(edge, heap) **dep_chains,
+convert_control_dep_chain_into_preds (vec<edge> *dep_chains,
size_t num_chains,
- VEC(use_pred_info_t, heap) ***preds,
+ vec<use_pred_info_t> **preds,
size_t *num_preds)
{
bool has_valid_pred = false;
@@ -345,16 +342,16 @@ convert_control_dep_chain_into_preds (VEC(edge, heap) **dep_chains,
/* Now convert the control dep chain into a set
of predicates. */
- *preds = XCNEWVEC (VEC(use_pred_info_t, heap) *,
- num_chains);
+ typedef vec<use_pred_info_t> vec_use_pred_info_t_heap;
+ *preds = XCNEWVEC (vec_use_pred_info_t_heap, num_chains);
*num_preds = num_chains;
for (i = 0; i < num_chains; i++)
{
- VEC(edge, heap) *one_cd_chain = dep_chains[i];
+ vec<edge> one_cd_chain = dep_chains[i];
has_valid_pred = false;
- for (j = 0; j < VEC_length (edge, one_cd_chain); j++)
+ for (j = 0; j < one_cd_chain.length (); j++)
{
gimple cond_stmt;
gimple_stmt_iterator gsi;
@@ -362,7 +359,7 @@ convert_control_dep_chain_into_preds (VEC(edge, heap) **dep_chains,
use_pred_info_t one_pred;
edge e;
- e = VEC_index (edge, one_cd_chain, j);
+ e = one_cd_chain[j];
guard_bb = e->src;
gsi = gsi_last_bb (guard_bb);
if (gsi_end_p (gsi))
@@ -404,7 +401,7 @@ convert_control_dep_chain_into_preds (VEC(edge, heap) **dep_chains,
one_pred = XNEW (struct use_pred_info);
one_pred->cond = cond_stmt;
one_pred->invert = !!(e->flags & EDGE_FALSE_VALUE);
- VEC_safe_push (use_pred_info_t, heap, (*preds)[i], one_pred);
+ (*preds)[i].safe_push (one_pred);
has_valid_pred = true;
}
@@ -420,18 +417,19 @@ convert_control_dep_chain_into_preds (VEC(edge, heap) **dep_chains,
the phi whose result is used in USE_BB. */
static bool
-find_predicates (VEC(use_pred_info_t, heap) ***preds,
+find_predicates (vec<use_pred_info_t> **preds,
size_t *num_preds,
basic_block phi_bb,
basic_block use_bb)
{
size_t num_chains = 0, i;
- VEC(edge, heap) **dep_chains = 0;
- VEC(edge, heap) *cur_chain = 0;
+ vec<edge> *dep_chains = 0;
+ vec<edge> cur_chain = vec<edge>();
bool has_valid_pred = false;
basic_block cd_root = 0;
- dep_chains = XCNEWVEC (VEC(edge, heap) *, MAX_NUM_CHAINS);
+ typedef vec<edge> vec_edge_heap;
+ dep_chains = XCNEWVEC (vec_edge_heap, MAX_NUM_CHAINS);
/* First find the closest bb that is control equivalent to PHI_BB
that also dominates USE_BB. */
@@ -455,9 +453,9 @@ find_predicates (VEC(use_pred_info_t, heap) ***preds,
preds,
num_preds);
/* Free individual chain */
- VEC_free (edge, heap, cur_chain);
+ cur_chain.release ();
for (i = 0; i < num_chains; i++)
- VEC_free (edge, heap, dep_chains[i]);
+ dep_chains[i].release ();
free (dep_chains);
return has_valid_pred;
}
@@ -470,7 +468,7 @@ find_predicates (VEC(use_pred_info_t, heap) ***preds,
static void
collect_phi_def_edges (gimple phi, basic_block cd_root,
- VEC(edge, heap) **edges,
+ vec<edge> *edges,
struct pointer_set_t *visited_phis)
{
size_t i, n;
@@ -493,7 +491,7 @@ collect_phi_def_edges (gimple phi, basic_block cd_root,
fprintf (dump_file, "\n[CHECK] Found def edge %d in ", (int)i);
print_gimple_stmt (dump_file, phi, 0, 0);
}
- VEC_safe_push (edge, heap, *edges, opnd_edge);
+ edges->safe_push (opnd_edge);
}
else
{
@@ -511,7 +509,7 @@ collect_phi_def_edges (gimple phi, basic_block cd_root,
fprintf (dump_file, "\n[CHECK] Found def edge %d in ", (int)i);
print_gimple_stmt (dump_file, phi, 0, 0);
}
- VEC_safe_push (edge, heap, *edges, opnd_edge);
+ edges->safe_push (opnd_edge);
}
}
}
@@ -522,18 +520,19 @@ collect_phi_def_edges (gimple phi, basic_block cd_root,
composite predicates pointed to by PREDS. */
static bool
-find_def_preds (VEC(use_pred_info_t, heap) ***preds,
+find_def_preds (vec<use_pred_info_t> **preds,
size_t *num_preds, gimple phi)
{
size_t num_chains = 0, i, n;
- VEC(edge, heap) **dep_chains = 0;
- VEC(edge, heap) *cur_chain = 0;
- VEC(edge, heap) *def_edges = 0;
+ vec<edge> *dep_chains = 0;
+ vec<edge> cur_chain = vec<edge>();
+ vec<edge> def_edges = vec<edge>();
bool has_valid_pred = false;
basic_block phi_bb, cd_root = 0;
struct pointer_set_t *visited_phis;
- dep_chains = XCNEWVEC (VEC(edge, heap) *, MAX_NUM_CHAINS);
+ typedef vec<edge> vec_edge_heap;
+ dep_chains = XCNEWVEC (vec_edge_heap, MAX_NUM_CHAINS);
phi_bb = gimple_bb (phi);
/* First find the closest dominating bb to be
@@ -546,7 +545,7 @@ find_def_preds (VEC(use_pred_info_t, heap) ***preds,
collect_phi_def_edges (phi, cd_root, &def_edges, visited_phis);
pointer_set_destroy (visited_phis);
- n = VEC_length (edge, def_edges);
+ n = def_edges.length ();
if (n == 0)
return false;
@@ -555,14 +554,13 @@ find_def_preds (VEC(use_pred_info_t, heap) ***preds,
size_t prev_nc, j;
edge opnd_edge;
- opnd_edge = VEC_index (edge, def_edges, i);
+ opnd_edge = def_edges[i];
prev_nc = num_chains;
compute_control_dep_chain (cd_root, opnd_edge->src,
dep_chains, &num_chains,
&cur_chain);
/* Free individual chain */
- VEC_free (edge, heap, cur_chain);
- cur_chain = 0;
+ cur_chain.release ();
/* Now update the newly added chains with
the phi operand edge: */
@@ -573,7 +571,7 @@ find_def_preds (VEC(use_pred_info_t, heap) ***preds,
num_chains++;
for (j = prev_nc; j < num_chains; j++)
{
- VEC_safe_push (edge, heap, dep_chains[j], opnd_edge);
+ dep_chains[j].safe_push (opnd_edge);
}
}
}
@@ -584,7 +582,7 @@ find_def_preds (VEC(use_pred_info_t, heap) ***preds,
preds,
num_preds);
for (i = 0; i < num_chains; i++)
- VEC_free (edge, heap, dep_chains[i]);
+ dep_chains[i].release ();
free (dep_chains);
return has_valid_pred;
}
@@ -593,11 +591,11 @@ find_def_preds (VEC(use_pred_info_t, heap) ***preds,
static void
dump_predicates (gimple usestmt, size_t num_preds,
- VEC(use_pred_info_t, heap) **preds,
+ vec<use_pred_info_t> *preds,
const char* msg)
{
size_t i, j;
- VEC(use_pred_info_t, heap) *one_pred_chain;
+ vec<use_pred_info_t> one_pred_chain;
fprintf (dump_file, msg);
print_gimple_stmt (dump_file, usestmt, 0, 0);
fprintf (dump_file, "is guarded by :\n");
@@ -607,12 +605,12 @@ dump_predicates (gimple usestmt, size_t num_preds,
size_t np;
one_pred_chain = preds[i];
- np = VEC_length (use_pred_info_t, one_pred_chain);
+ np = one_pred_chain.length ();
for (j = 0; j < np; j++)
{
use_pred_info_t one_pred
- = VEC_index (use_pred_info_t, one_pred_chain, j);
+ = one_pred_chain[j];
if (one_pred->invert)
fprintf (dump_file, " (.NOT.) ");
print_gimple_stmt (dump_file, one_pred->cond, 0, 0);
@@ -628,14 +626,14 @@ dump_predicates (gimple usestmt, size_t num_preds,
static void
destroy_predicate_vecs (size_t n,
- VEC(use_pred_info_t, heap) ** preds)
+ vec<use_pred_info_t> * preds)
{
size_t i, j;
for (i = 0; i < n; i++)
{
- for (j = 0; j < VEC_length (use_pred_info_t, preds[i]); j++)
- free (VEC_index (use_pred_info_t, preds[i], j));
- VEC_free (use_pred_info_t, heap, preds[i]);
+ for (j = 0; j < preds[i].length (); j++)
+ free (preds[i][j]);
+ preds[i].release ();
}
free (preds);
}
@@ -733,7 +731,7 @@ is_value_included_in (tree val, tree boundary, enum tree_code cmpc)
static bool
find_matching_predicate_in_rest_chains (use_pred_info_t pred,
- VEC(use_pred_info_t, heap) **preds,
+ vec<use_pred_info_t> *preds,
size_t num_pred_chains)
{
size_t i, j, n;
@@ -745,12 +743,12 @@ find_matching_predicate_in_rest_chains (use_pred_info_t pred,
for (i = 1; i < num_pred_chains; i++)
{
bool found = false;
- VEC(use_pred_info_t, heap) *one_chain = preds[i];
- n = VEC_length (use_pred_info_t, one_chain);
+ vec<use_pred_info_t> one_chain = preds[i];
+ n = one_chain.length ();
for (j = 0; j < n; j++)
{
use_pred_info_t pred2
- = VEC_index (use_pred_info_t, one_chain, j);
+ = one_chain[j];
/* can relax the condition comparison to not
use address comparison. However, the most common
case is that multiple control dependent paths share
@@ -988,7 +986,7 @@ prune_uninit_phi_opnds_in_unrealizable_paths (
static bool
use_pred_not_overlap_with_undef_path_pred (
size_t num_preds,
- VEC(use_pred_info_t, heap) **preds,
+ vec<use_pred_info_t> *preds,
gimple phi, unsigned uninit_opnds,
struct pointer_set_t *visited_phis)
{
@@ -998,7 +996,7 @@ use_pred_not_overlap_with_undef_path_pred (
enum tree_code cmp_code;
bool swap_cond = false;
bool invert = false;
- VEC(use_pred_info_t, heap) *the_pred_chain;
+ vec<use_pred_info_t> the_pred_chain;
bitmap visited_flag_phis = NULL;
bool all_pruned = false;
@@ -1007,14 +1005,14 @@ use_pred_not_overlap_with_undef_path_pred (
a predicate that is a comparison of a flag variable against
a constant. */
the_pred_chain = preds[0];
- n = VEC_length (use_pred_info_t, the_pred_chain);
+ n = the_pred_chain.length ();
for (i = 0; i < n; i++)
{
gimple cond;
tree cond_lhs, cond_rhs, flag = 0;
use_pred_info_t the_pred
- = VEC_index (use_pred_info_t, the_pred_chain, i);
+ = the_pred_chain[i];
cond = the_pred->cond;
invert = the_pred->invert;
@@ -1089,7 +1087,7 @@ is_and_or_or (enum tree_code tc, tree typ)
typedef struct norm_cond
{
- VEC(gimple, heap) *conds;
+ vec<gimple> conds;
enum tree_code cond_code;
bool invert;
} *norm_cond_t;
@@ -1112,7 +1110,7 @@ normalize_cond_1 (gimple cond,
gc = gimple_code (cond);
if (gc != GIMPLE_ASSIGN)
{
- VEC_safe_push (gimple, heap, norm_cond->conds, cond);
+ norm_cond->conds.safe_push (cond);
return;
}
@@ -1132,7 +1130,7 @@ normalize_cond_1 (gimple cond,
SSA_NAME_DEF_STMT (rhs2),
norm_cond, cond_code);
else
- VEC_safe_push (gimple, heap, norm_cond->conds, cond);
+ norm_cond->conds.safe_push (cond);
return;
}
@@ -1148,7 +1146,7 @@ normalize_cond_1 (gimple cond,
norm_cond->cond_code = cur_cond_code;
}
else
- VEC_safe_push (gimple, heap, norm_cond->conds, cond);
+ norm_cond->conds.safe_push (cond);
}
/* See normalize_cond_1 for details. INVERT is a flag to indicate
@@ -1161,7 +1159,7 @@ normalize_cond (gimple cond, norm_cond_t norm_cond, bool invert)
norm_cond->cond_code = ERROR_MARK;
norm_cond->invert = false;
- norm_cond->conds = NULL;
+ norm_cond->conds.create (0);
gcc_assert (gimple_code (cond) == GIMPLE_COND);
cond_code = gimple_cond_code (cond);
if (invert)
@@ -1181,17 +1179,17 @@ normalize_cond (gimple cond, norm_cond_t norm_cond, bool invert)
norm_cond, ERROR_MARK);
else
{
- VEC_safe_push (gimple, heap, norm_cond->conds, cond);
+ norm_cond->conds.safe_push (cond);
norm_cond->invert = invert;
}
}
else
{
- VEC_safe_push (gimple, heap, norm_cond->conds, cond);
+ norm_cond->conds.safe_push (cond);
norm_cond->invert = invert;
}
- gcc_assert (VEC_length (gimple, norm_cond->conds) == 1
+ gcc_assert (norm_cond->conds.length () == 1
|| is_and_or_or (norm_cond->cond_code, NULL));
}
@@ -1337,12 +1335,12 @@ is_subset_of_any (gimple cond, bool invert,
norm_cond_t norm_cond, bool reverse)
{
size_t i;
- size_t len = VEC_length (gimple, norm_cond->conds);
+ size_t len = norm_cond->conds.length ();
for (i = 0; i < len; i++)
{
if (is_gcond_subset_of (cond, invert,
- VEC_index (gimple, norm_cond->conds, i),
+ norm_cond->conds[i],
false, reverse))
return true;
}
@@ -1361,11 +1359,11 @@ is_or_set_subset_of (norm_cond_t norm_cond1,
norm_cond_t norm_cond2)
{
size_t i;
- size_t len = VEC_length (gimple, norm_cond1->conds);
+ size_t len = norm_cond1->conds.length ();
for (i = 0; i < len; i++)
{
- if (!is_subset_of_any (VEC_index (gimple, norm_cond1->conds, i),
+ if (!is_subset_of_any (norm_cond1->conds[i],
false, norm_cond2, false))
return false;
}
@@ -1382,11 +1380,11 @@ is_and_set_subset_of (norm_cond_t norm_cond1,
norm_cond_t norm_cond2)
{
size_t i;
- size_t len = VEC_length (gimple, norm_cond2->conds);
+ size_t len = norm_cond2->conds.length ();
for (i = 0; i < len; i++)
{
- if (!is_subset_of_any (VEC_index (gimple, norm_cond2->conds, i),
+ if (!is_subset_of_any (norm_cond2->conds[i],
false, norm_cond1, true))
return false;
}
@@ -1418,10 +1416,10 @@ is_norm_cond_subset_of (norm_cond_t norm_cond1,
else if (code2 == BIT_IOR_EXPR)
{
size_t len1;
- len1 = VEC_length (gimple, norm_cond1->conds);
+ len1 = norm_cond1->conds.length ();
for (i = 0; i < len1; i++)
{
- gimple cond1 = VEC_index (gimple, norm_cond1->conds, i);
+ gimple cond1 = norm_cond1->conds[i];
if (is_subset_of_any (cond1, false, norm_cond2, false))
return true;
}
@@ -1430,8 +1428,8 @@ is_norm_cond_subset_of (norm_cond_t norm_cond1,
else
{
gcc_assert (code2 == ERROR_MARK);
- gcc_assert (VEC_length (gimple, norm_cond2->conds) == 1);
- return is_subset_of_any (VEC_index (gimple, norm_cond2->conds, 0),
+ gcc_assert (norm_cond2->conds.length () == 1);
+ return is_subset_of_any (norm_cond2->conds[0],
norm_cond2->invert, norm_cond1, true);
}
}
@@ -1446,21 +1444,21 @@ is_norm_cond_subset_of (norm_cond_t norm_cond1,
else
{
gcc_assert (code1 == ERROR_MARK);
- gcc_assert (VEC_length (gimple, norm_cond1->conds) == 1);
+ gcc_assert (norm_cond1->conds.length () == 1);
/* Conservatively returns false if NORM_COND1 is non-decomposible
and NORM_COND2 is an AND expression. */
if (code2 == BIT_AND_EXPR)
return false;
if (code2 == BIT_IOR_EXPR)
- return is_subset_of_any (VEC_index (gimple, norm_cond1->conds, 0),
+ return is_subset_of_any (norm_cond1->conds[0],
norm_cond1->invert, norm_cond2, false);
gcc_assert (code2 == ERROR_MARK);
- gcc_assert (VEC_length (gimple, norm_cond2->conds) == 1);
- return is_gcond_subset_of (VEC_index (gimple, norm_cond1->conds, 0),
+ gcc_assert (norm_cond2->conds.length () == 1);
+ return is_gcond_subset_of (norm_cond1->conds[0],
norm_cond1->invert,
- VEC_index (gimple, norm_cond2->conds, 0),
+ norm_cond2->conds[0],
norm_cond2->invert, false);
}
}
@@ -1502,8 +1500,8 @@ is_pred_expr_subset_of (use_pred_info_t expr1,
is_subset = is_norm_cond_subset_of (&norm_cond1, &norm_cond2);
/* Free memory */
- VEC_free (gimple, heap, norm_cond1.conds);
- VEC_free (gimple, heap, norm_cond2.conds);
+ norm_cond1.conds.release ();
+ norm_cond2.conds.release ();
return is_subset ;
}
@@ -1511,23 +1509,23 @@ is_pred_expr_subset_of (use_pred_info_t expr1,
of that of PRED2. Returns false if it can not be proved so. */
static bool
-is_pred_chain_subset_of (VEC(use_pred_info_t, heap) *pred1,
- VEC(use_pred_info_t, heap) *pred2)
+is_pred_chain_subset_of (vec<use_pred_info_t> pred1,
+ vec<use_pred_info_t> pred2)
{
size_t np1, np2, i1, i2;
- np1 = VEC_length (use_pred_info_t, pred1);
- np2 = VEC_length (use_pred_info_t, pred2);
+ np1 = pred1.length ();
+ np2 = pred2.length ();
for (i2 = 0; i2 < np2; i2++)
{
bool found = false;
use_pred_info_t info2
- = VEC_index (use_pred_info_t, pred2, i2);
+ = pred2[i2];
for (i1 = 0; i1 < np1; i1++)
{
use_pred_info_t info1
- = VEC_index (use_pred_info_t, pred1, i1);
+ = pred1[i1];
if (is_pred_expr_subset_of (info1, info2))
{
found = true;
@@ -1550,8 +1548,8 @@ is_pred_chain_subset_of (VEC(use_pred_info_t, heap) *pred1,
In other words, the result is conservative. */
static bool
-is_included_in (VEC(use_pred_info_t, heap) *one_pred,
- VEC(use_pred_info_t, heap) **preds,
+is_included_in (vec<use_pred_info_t> one_pred,
+ vec<use_pred_info_t> *preds,
size_t n)
{
size_t i;
@@ -1579,13 +1577,13 @@ is_included_in (VEC(use_pred_info_t, heap) *one_pred,
emitted. */
static bool
-is_superset_of (VEC(use_pred_info_t, heap) **preds1,
+is_superset_of (vec<use_pred_info_t> *preds1,
size_t n1,
- VEC(use_pred_info_t, heap) **preds2,
+ vec<use_pred_info_t> *preds2,
size_t n2)
{
size_t i;
- VEC(use_pred_info_t, heap) *one_pred_chain;
+ vec<use_pred_info_t> one_pred_chain;
for (i = 0; i < n2; i++)
{
@@ -1605,18 +1603,16 @@ static int
pred_chain_length_cmp (const void *p1, const void *p2)
{
use_pred_info_t i1, i2;
- VEC(use_pred_info_t, heap) * const *chain1
- = (VEC(use_pred_info_t, heap) * const *)p1;
- VEC(use_pred_info_t, heap) * const *chain2
- = (VEC(use_pred_info_t, heap) * const *)p2;
+ vec<use_pred_info_t> const *chain1
+ = (vec<use_pred_info_t> const *)p1;
+ vec<use_pred_info_t> const *chain2
+ = (vec<use_pred_info_t> const *)p2;
- if (VEC_length (use_pred_info_t, *chain1)
- != VEC_length (use_pred_info_t, *chain2))
- return (VEC_length (use_pred_info_t, *chain1)
- - VEC_length (use_pred_info_t, *chain2));
+ if (chain1->length () != chain2->length ())
+ return (chain1->length () - chain2->length ());
- i1 = VEC_index (use_pred_info_t, *chain1, 0);
- i2 = VEC_index (use_pred_info_t, *chain2, 0);
+ i1 = (*chain1)[0];
+ i2 = (*chain2)[0];
/* Allow predicates with similar prefix come together. */
if (!i1->invert && i2->invert)
@@ -1633,11 +1629,11 @@ pred_chain_length_cmp (const void *p1, const void *p2)
the number of chains. Returns true if normalization happens. */
static bool
-normalize_preds (VEC(use_pred_info_t, heap) **preds, size_t *n)
+normalize_preds (vec<use_pred_info_t> *preds, size_t *n)
{
size_t i, j, ll;
- VEC(use_pred_info_t, heap) *pred_chain;
- VEC(use_pred_info_t, heap) *x = 0;
+ vec<use_pred_info_t> pred_chain;
+ vec<use_pred_info_t> x = vec<use_pred_info_t>();
use_pred_info_t xj = 0, nxj = 0;
if (*n < 2)
@@ -1646,21 +1642,21 @@ normalize_preds (VEC(use_pred_info_t, heap) **preds, size_t *n)
/* First sort the chains in ascending order of lengths. */
qsort (preds, *n, sizeof (void *), pred_chain_length_cmp);
pred_chain = preds[0];
- ll = VEC_length (use_pred_info_t, pred_chain);
+ ll = pred_chain.length ();
if (ll != 1)
{
if (ll == 2)
{
use_pred_info_t xx, yy, xx2, nyy;
- VEC(use_pred_info_t, heap) *pred_chain2 = preds[1];
- if (VEC_length (use_pred_info_t, pred_chain2) != 2)
+ vec<use_pred_info_t> pred_chain2 = preds[1];
+ if (pred_chain2.length () != 2)
return false;
/* See if simplification x AND y OR x AND !y is possible. */
- xx = VEC_index (use_pred_info_t, pred_chain, 0);
- yy = VEC_index (use_pred_info_t, pred_chain, 1);
- xx2 = VEC_index (use_pred_info_t, pred_chain2, 0);
- nyy = VEC_index (use_pred_info_t, pred_chain2, 1);
+ xx = pred_chain[0];
+ yy = pred_chain[1];
+ xx2 = pred_chain2[0];
+ nyy = pred_chain2[1];
if (gimple_cond_lhs (xx->cond) != gimple_cond_lhs (xx2->cond)
|| gimple_cond_rhs (xx->cond) != gimple_cond_rhs (xx2->cond)
|| gimple_cond_code (xx->cond) != gimple_cond_code (xx2->cond)
@@ -1676,36 +1672,34 @@ normalize_preds (VEC(use_pred_info_t, heap) **preds, size_t *n)
free (yy);
free (nyy);
free (xx2);
- VEC_free (use_pred_info_t, heap, pred_chain);
- VEC_free (use_pred_info_t, heap, pred_chain2);
- pred_chain = 0;
- VEC_safe_push (use_pred_info_t, heap, pred_chain, xx);
+ pred_chain.release ();
+ pred_chain2.release ();
+ pred_chain.safe_push (xx);
preds[0] = pred_chain;
for (i = 1; i < *n - 1; i++)
preds[i] = preds[i + 1];
- preds[*n - 1] = 0;
+ preds[*n - 1].create (0);
*n = *n - 1;
}
else
return false;
}
- VEC_safe_push (use_pred_info_t, heap, x,
- VEC_index (use_pred_info_t, pred_chain, 0));
+ x.safe_push (pred_chain[0]);
/* The loop extracts x1, x2, x3, etc from chains
x1 OR (!x1 AND x2) OR (!x1 AND !x2 AND x3) OR ... */
for (i = 1; i < *n; i++)
{
pred_chain = preds[i];
- if (VEC_length (use_pred_info_t, pred_chain) != i + 1)
+ if (pred_chain.length () != i + 1)
return false;
for (j = 0; j < i; j++)
{
- xj = VEC_index (use_pred_info_t, x, j);
- nxj = VEC_index (use_pred_info_t, pred_chain, j);
+ xj = x[j];
+ nxj = pred_chain[j];
/* Check if nxj is !xj */
if (gimple_cond_lhs (xj->cond) != gimple_cond_lhs (nxj->cond)
@@ -1715,32 +1709,29 @@ normalize_preds (VEC(use_pred_info_t, heap) **preds, size_t *n)
return false;
}
- VEC_safe_push (use_pred_info_t, heap, x,
- VEC_index (use_pred_info_t, pred_chain, i));
+ x.safe_push (pred_chain[i]);
}
/* Now normalize the pred chains using the extraced x1, x2, x3 etc. */
for (j = 0; j < *n; j++)
{
use_pred_info_t t;
- xj = VEC_index (use_pred_info_t, x, j);
+ xj = x[j];
t = XNEW (struct use_pred_info);
*t = *xj;
- VEC_replace (use_pred_info_t, x, j, t);
+ x[j] = t;
}
for (i = 0; i < *n; i++)
{
pred_chain = preds[i];
- for (j = 0; j < VEC_length (use_pred_info_t, pred_chain); j++)
- free (VEC_index (use_pred_info_t, pred_chain, j));
- VEC_free (use_pred_info_t, heap, pred_chain);
- pred_chain = 0;
+ for (j = 0; j < pred_chain.length (); j++)
+ free (pred_chain[j]);
+ pred_chain.release ();
/* A new chain. */
- VEC_safe_push (use_pred_info_t, heap, pred_chain,
- VEC_index (use_pred_info_t, x, i));
+ pred_chain.safe_push (x[i]);
preds[i] = pred_chain;
}
return true;
@@ -1769,8 +1760,8 @@ is_use_properly_guarded (gimple use_stmt,
struct pointer_set_t *visited_phis)
{
basic_block phi_bb;
- VEC(use_pred_info_t, heap) **preds = 0;
- VEC(use_pred_info_t, heap) **def_preds = 0;
+ vec<use_pred_info_t> *preds = 0;
+ vec<use_pred_info_t> *def_preds = 0;
size_t num_preds = 0, num_def_preds = 0;
bool has_valid_preds = false;
bool is_properly_guarded = false;
@@ -1840,7 +1831,7 @@ is_use_properly_guarded (gimple use_stmt,
static gimple
find_uninit_use (gimple phi, unsigned uninit_opnds,
- VEC(gimple, heap) **worklist,
+ vec<gimple> *worklist,
struct pointer_set_t *added_to_worklist)
{
tree phi_result;
@@ -1898,9 +1889,8 @@ find_uninit_use (gimple phi, unsigned uninit_opnds,
print_gimple_stmt (dump_file, use_stmt, 0, 0);
}
- VEC_safe_push (gimple, heap, *worklist, use_stmt);
- pointer_set_insert (possibly_undefined_names,
- phi_result);
+ worklist->safe_push (use_stmt);
+ pointer_set_insert (possibly_undefined_names, phi_result);
}
}
@@ -1916,7 +1906,7 @@ find_uninit_use (gimple phi, unsigned uninit_opnds,
a pointer set tracking if the new phi is added to the worklist or not. */
static void
-warn_uninitialized_phi (gimple phi, VEC(gimple, heap) **worklist,
+warn_uninitialized_phi (gimple phi, vec<gimple> *worklist,
struct pointer_set_t *added_to_worklist)
{
unsigned uninit_opnds;
@@ -1964,7 +1954,7 @@ execute_late_warn_uninitialized (void)
{
basic_block bb;
gimple_stmt_iterator gsi;
- VEC(gimple, heap) *worklist = 0;
+ vec<gimple> worklist = vec<gimple>();
struct pointer_set_t *added_to_worklist;
calculate_dominance_info (CDI_DOMINATORS);
@@ -1998,7 +1988,7 @@ execute_late_warn_uninitialized (void)
if (TREE_CODE (op) == SSA_NAME
&& ssa_undefined_value_p (op))
{
- VEC_safe_push (gimple, heap, worklist, phi);
+ worklist.safe_push (phi);
pointer_set_insert (added_to_worklist, phi);
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -2010,14 +2000,14 @@ execute_late_warn_uninitialized (void)
}
}
- while (VEC_length (gimple, worklist) != 0)
+ while (worklist.length () != 0)
{
gimple cur_phi = 0;
- cur_phi = VEC_pop (gimple, worklist);
+ cur_phi = worklist.pop ();
warn_uninitialized_phi (cur_phi, &worklist, added_to_worklist);
}
- VEC_free (gimple, heap, worklist);
+ worklist.release ();
pointer_set_destroy (added_to_worklist);
pointer_set_destroy (possibly_undefined_names);
possibly_undefined_names = NULL;
diff --git a/gcc/tree-ssa.c b/gcc/tree-ssa.c
index fab0a3ba5b9..c0313c8c275 100644
--- a/gcc/tree-ssa.c
+++ b/gcc/tree-ssa.c
@@ -51,29 +51,25 @@ void
redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
{
void **slot;
- edge_var_map_vector old_head, head;
+ edge_var_map_vector *head;
edge_var_map new_node;
if (edge_var_maps == NULL)
edge_var_maps = pointer_map_create ();
slot = pointer_map_insert (edge_var_maps, e);
- old_head = head = (edge_var_map_vector) *slot;
+ head = (edge_var_map_vector *) *slot;
if (!head)
{
- head = VEC_alloc (edge_var_map, heap, 5);
+ head = new edge_var_map_vector;
+ head->create (5);
*slot = head;
}
new_node.def = def;
new_node.result = result;
new_node.locus = locus;
- VEC_safe_push (edge_var_map, heap, head, new_node);
- if (old_head != head)
- {
- /* The push did some reallocation. Update the pointer map. */
- *slot = head;
- }
+ head->safe_push (new_node);
}
@@ -83,7 +79,7 @@ void
redirect_edge_var_map_clear (edge e)
{
void **slot;
- edge_var_map_vector head;
+ edge_var_map_vector *head;
if (!edge_var_maps)
return;
@@ -92,8 +88,8 @@ redirect_edge_var_map_clear (edge e)
if (slot)
{
- head = (edge_var_map_vector) *slot;
- VEC_free (edge_var_map, heap, head);
+ head = (edge_var_map_vector *) *slot;
+ delete head;
*slot = NULL;
}
}
@@ -109,7 +105,7 @@ void
redirect_edge_var_map_dup (edge newe, edge olde)
{
void **new_slot, **old_slot;
- edge_var_map_vector head;
+ edge_var_map_vector *head;
if (!edge_var_maps)
return;
@@ -118,19 +114,21 @@ redirect_edge_var_map_dup (edge newe, edge olde)
old_slot = pointer_map_contains (edge_var_maps, olde);
if (!old_slot)
return;
- head = (edge_var_map_vector) *old_slot;
+ head = (edge_var_map_vector *) *old_slot;
+ edge_var_map_vector *new_head = new edge_var_map_vector;
if (head)
- *new_slot = VEC_copy (edge_var_map, heap, head);
+ *new_head = head->copy ();
else
- *new_slot = VEC_alloc (edge_var_map, heap, 5);
+ new_head->create (5);
+ *new_slot = new_head;
}
/* Return the variable mappings for a given edge. If there is none, return
NULL. */
-edge_var_map_vector
+edge_var_map_vector *
redirect_edge_var_map_vector (edge e)
{
void **slot;
@@ -143,7 +141,7 @@ redirect_edge_var_map_vector (edge e)
if (!slot)
return NULL;
- return (edge_var_map_vector) *slot;
+ return (edge_var_map_vector *) *slot;
}
/* Used by redirect_edge_var_map_destroy to free all memory. */
@@ -153,8 +151,8 @@ free_var_map_entry (const void *key ATTRIBUTE_UNUSED,
void **value,
void *data ATTRIBUTE_UNUSED)
{
- edge_var_map_vector head = (edge_var_map_vector) *value;
- VEC_free (edge_var_map, heap, head);
+ edge_var_map_vector *head = (edge_var_map_vector *) *value;
+ delete head;
return true;
}
@@ -214,7 +212,7 @@ void
flush_pending_stmts (edge e)
{
gimple phi;
- edge_var_map_vector v;
+ edge_var_map_vector *v;
edge_var_map *vm;
int i;
gimple_stmt_iterator gsi;
@@ -224,7 +222,7 @@ flush_pending_stmts (edge e)
return;
for (gsi = gsi_start_phis (e->dest), i = 0;
- !gsi_end_p (gsi) && VEC_iterate (edge_var_map, v, i, vm);
+ !gsi_end_p (gsi) && v->iterate (i, &vm);
gsi_next (&gsi), i++)
{
tree def;
@@ -2043,7 +2041,7 @@ execute_update_addresses_taken (void)
maybe_optimize_var (var, addresses_taken, not_reg_needs,
suitable_for_renaming);
- FOR_EACH_VEC_ELT (tree, cfun->local_decls, i, var)
+ FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
maybe_optimize_var (var, addresses_taken, not_reg_needs,
suitable_for_renaming);
diff --git a/gcc/tree-ssanames.c b/gcc/tree-ssanames.c
index 6eb645130ae..8738de31b0b 100644
--- a/gcc/tree-ssanames.c
+++ b/gcc/tree-ssanames.c
@@ -73,16 +73,16 @@ init_ssanames (struct function *fn, int size)
if (size < 50)
size = 50;
- SSANAMES (fn) = VEC_alloc (tree, gc, size);
+ vec_alloc (SSANAMES (fn), size);
/* Version 0 is special, so reserve the first slot in the table. Though
currently unused, we may use version 0 in alias analysis as part of
the heuristics used to group aliases when the alias sets are too
large.
- We use VEC_quick_push here because we know that SSA_NAMES has at
+ We use vec::quick_push here because we know that SSA_NAMES has at
least 50 elements reserved in it. */
- VEC_quick_push (tree, SSANAMES (fn), NULL_TREE);
+ SSANAMES (fn)->quick_push (NULL_TREE);
FREE_SSANAMES (fn) = NULL;
fn->gimple_df->ssa_renaming_needed = 0;
@@ -94,8 +94,8 @@ init_ssanames (struct function *fn, int size)
void
fini_ssanames (void)
{
- VEC_free (tree, gc, SSANAMES (cfun));
- VEC_free (tree, gc, FREE_SSANAMES (cfun));
+ vec_free (SSANAMES (cfun));
+ vec_free (FREE_SSANAMES (cfun));
}
/* Dump some simple statistics regarding the re-use of SSA_NAME nodes. */
@@ -124,22 +124,22 @@ make_ssa_name_fn (struct function *fn, tree var, gimple stmt)
|| (TYPE_P (var) && is_gimple_reg_type (var)));
/* If our free list has an element, then use it. */
- if (!VEC_empty (tree, FREE_SSANAMES (fn)))
+ if (!vec_safe_is_empty (FREE_SSANAMES (fn)))
{
- t = VEC_pop (tree, FREE_SSANAMES (fn));
+ t = FREE_SSANAMES (fn)->pop ();
if (GATHER_STATISTICS)
ssa_name_nodes_reused++;
/* The node was cleared out when we put it on the free list, so
there is no need to do so again here. */
gcc_assert (ssa_name (SSA_NAME_VERSION (t)) == NULL);
- VEC_replace (tree, SSANAMES (fn), SSA_NAME_VERSION (t), t);
+ (*SSANAMES (fn))[SSA_NAME_VERSION (t)] = t;
}
else
{
t = make_node (SSA_NAME);
- SSA_NAME_VERSION (t) = VEC_length (tree, SSANAMES (fn));
- VEC_safe_push (tree, gc, SSANAMES (fn), t);
+ SSA_NAME_VERSION (t) = SSANAMES (fn)->length ();
+ vec_safe_push (SSANAMES (fn), t);
if (GATHER_STATISTICS)
ssa_name_nodes_created++;
}
@@ -217,8 +217,7 @@ release_ssa_name (tree var)
while (imm->next != imm)
delink_imm_use (imm->next);
- VEC_replace (tree, SSANAMES (cfun),
- SSA_NAME_VERSION (var), NULL_TREE);
+ (*SSANAMES (cfun))[SSA_NAME_VERSION (var)] = NULL_TREE;
memset (var, 0, tree_size (var));
imm->prev = imm;
@@ -240,7 +239,7 @@ release_ssa_name (tree var)
SSA_NAME_IN_FREE_LIST (var) = 1;
/* And finally put it on the free list. */
- VEC_safe_push (tree, gc, FREE_SSANAMES (cfun), var);
+ vec_safe_push (FREE_SSANAMES (cfun), var);
}
}
@@ -415,15 +414,14 @@ static unsigned int
release_dead_ssa_names (void)
{
unsigned i, j;
- int n = VEC_length (tree, FREE_SSANAMES (cfun));
+ int n = vec_safe_length (FREE_SSANAMES (cfun));
/* Now release the freelist. */
- VEC_free (tree, gc, FREE_SSANAMES (cfun));
- FREE_SSANAMES (cfun) = NULL;
+ vec_free (FREE_SSANAMES (cfun));
/* And compact the SSA number space. We make sure to not change the
relative order of SSA versions. */
- for (i = 1, j = 1; i < VEC_length (tree, cfun->gimple_df->ssa_names); ++i)
+ for (i = 1, j = 1; i < cfun->gimple_df->ssa_names->length (); ++i)
{
tree name = ssa_name (i);
if (name)
@@ -431,12 +429,12 @@ release_dead_ssa_names (void)
if (i != j)
{
SSA_NAME_VERSION (name) = j;
- VEC_replace (tree, cfun->gimple_df->ssa_names, j, name);
+ (*cfun->gimple_df->ssa_names)[j] = name;
}
j++;
}
}
- VEC_truncate (tree, cfun->gimple_df->ssa_names, j);
+ cfun->gimple_df->ssa_names->truncate (j);
statistics_counter_event (cfun, "SSA names released", n);
statistics_counter_event (cfun, "SSA name holes removed", i - j);
diff --git a/gcc/tree-stdarg.c b/gcc/tree-stdarg.c
index 87818026cb6..8753f4f93da 100644
--- a/gcc/tree-stdarg.c
+++ b/gcc/tree-stdarg.c
@@ -47,7 +47,7 @@ along with GCC; see the file COPYING3. If not see
static bool
reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
- VEC (edge, heap) *stack = NULL;
+ vec<edge> stack = vec<edge>();
edge e;
edge_iterator ei;
sbitmap visited;
@@ -64,13 +64,13 @@ reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
ret = true;
FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
- VEC_safe_push (edge, heap, stack, e);
+ stack.safe_push (e);
- while (! VEC_empty (edge, stack))
+ while (! stack.is_empty ())
{
basic_block src;
- e = VEC_pop (edge, stack);
+ e = stack.pop ();
src = e->src;
if (e->flags & EDGE_COMPLEX)
@@ -95,11 +95,11 @@ reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
{
bitmap_set_bit (visited, src->index);
FOR_EACH_EDGE (e, ei, src->preds)
- VEC_safe_push (edge, heap, stack, e);
+ stack.safe_push (e);
}
}
- VEC_free (edge, heap, stack);
+ stack.release ();
sbitmap_free (visited);
return ret;
}
diff --git a/gcc/tree-streamer-in.c b/gcc/tree-streamer-in.c
index 57c86265767..62649d18eec 100644
--- a/gcc/tree-streamer-in.c
+++ b/gcc/tree-streamer-in.c
@@ -380,7 +380,7 @@ unpack_ts_translation_unit_decl_value_fields (struct data_in *data_in,
struct bitpack_d *bp, tree expr)
{
TRANSLATION_UNIT_LANGUAGE (expr) = xstrdup (bp_unpack_string (data_in, bp));
- VEC_safe_push (tree, gc, all_translation_units, expr);
+ vec_safe_push (all_translation_units, expr);
}
/* Unpack a TS_TARGET_OPTION tree from BP into EXPR. */
@@ -473,14 +473,14 @@ unpack_value_fields (struct data_in *data_in, struct bitpack_d *bp, tree expr)
{
unsigned HOST_WIDE_INT length = bp_unpack_var_len_unsigned (bp);
if (length > 0)
- VEC_safe_grow (tree, gc, BINFO_BASE_ACCESSES (expr), length);
+ vec_safe_grow (BINFO_BASE_ACCESSES (expr), length);
}
if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
{
unsigned HOST_WIDE_INT length = bp_unpack_var_len_unsigned (bp);
if (length > 0)
- VEC_safe_grow (constructor_elt, gc, CONSTRUCTOR_ELTS (expr), length);
+ vec_safe_grow (CONSTRUCTOR_ELTS (expr), length);
}
}
@@ -907,14 +907,14 @@ lto_input_ts_binfo_tree_pointers (struct lto_input_block *ib,
/* Note that the number of slots in EXPR was read in
streamer_alloc_tree when instantiating EXPR. However, the
- vector is empty so we cannot rely on VEC_length to know how many
+ vector is empty so we cannot rely on vec::length to know how many
elements to read. So, this list is emitted as a 0-terminated
list on the writer side. */
do
{
t = stream_read_tree (ib, data_in);
if (t)
- VEC_quick_push (tree, BINFO_BASE_BINFOS (expr), t);
+ BINFO_BASE_BINFOS (expr)->quick_push (t);
}
while (t);
@@ -924,10 +924,10 @@ lto_input_ts_binfo_tree_pointers (struct lto_input_block *ib,
/* The vector of BINFO_BASE_ACCESSES is pre-allocated during
unpacking the bitfield section. */
- for (i = 0; i < VEC_length (tree, BINFO_BASE_ACCESSES (expr)); i++)
+ for (i = 0; i < vec_safe_length (BINFO_BASE_ACCESSES (expr)); i++)
{
tree a = stream_read_tree (ib, data_in);
- VEC_replace (tree, BINFO_BASE_ACCESSES (expr), i, a);
+ (*BINFO_BASE_ACCESSES (expr))[i] = a;
}
BINFO_INHERITANCE_CHAIN (expr) = stream_read_tree (ib, data_in);
@@ -951,7 +951,7 @@ lto_input_ts_constructor_tree_pointers (struct lto_input_block *ib,
constructor_elt e;
e.index = stream_read_tree (ib, data_in);
e.value = stream_read_tree (ib, data_in);
- VEC_replace (constructor_elt, CONSTRUCTOR_ELTS (expr), i, e);
+ (*CONSTRUCTOR_ELTS (expr))[i] = e;
}
}
diff --git a/gcc/tree-streamer-out.c b/gcc/tree-streamer-out.c
index 1f0eb55ec44..f1470c1736f 100644
--- a/gcc/tree-streamer-out.c
+++ b/gcc/tree-streamer-out.c
@@ -427,7 +427,7 @@ streamer_pack_tree_bitfields (struct output_block *ob,
pack_ts_optimization (bp, expr);
if (CODE_CONTAINS_STRUCT (code, TS_BINFO))
- bp_pack_var_len_unsigned (bp, VEC_length (tree, BINFO_BASE_ACCESSES (expr)));
+ bp_pack_var_len_unsigned (bp, vec_safe_length (BINFO_BASE_ACCESSES (expr)));
if (CODE_CONTAINS_STRUCT (code, TS_CONSTRUCTOR))
bp_pack_var_len_unsigned (bp, CONSTRUCTOR_NELTS (expr));
@@ -795,7 +795,7 @@ write_ts_binfo_tree_pointers (struct output_block *ob, tree expr, bool ref_p)
/* Note that the number of BINFO slots has already been emitted in
EXPR's header (see streamer_write_tree_header) because this length
is needed to build the empty BINFO node on the reader side. */
- FOR_EACH_VEC_ELT (tree, BINFO_BASE_BINFOS (expr), i, t)
+ FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (expr), i, t)
stream_write_tree (ob, t, ref_p);
stream_write_tree (ob, NULL_TREE, false);
@@ -805,7 +805,7 @@ write_ts_binfo_tree_pointers (struct output_block *ob, tree expr, bool ref_p)
/* The number of BINFO_BASE_ACCESSES has already been emitted in
EXPR's bitfield section. */
- FOR_EACH_VEC_ELT (tree, BINFO_BASE_ACCESSES (expr), i, t)
+ FOR_EACH_VEC_SAFE_ELT (BINFO_BASE_ACCESSES (expr), i, t)
stream_write_tree (ob, t, ref_p);
stream_write_tree (ob, BINFO_INHERITANCE_CHAIN (expr), ref_p);
diff --git a/gcc/tree-streamer.c b/gcc/tree-streamer.c
index 1f4fe96fa03..a6b73161085 100644
--- a/gcc/tree-streamer.c
+++ b/gcc/tree-streamer.c
@@ -96,12 +96,12 @@ streamer_tree_cache_add_to_node_array (struct streamer_tree_cache_d *cache,
{
/* Make sure we're either replacing an old element or
appending consecutively. */
- gcc_assert (ix <= VEC_length (tree, cache->nodes));
+ gcc_assert (ix <= cache->nodes.length ());
- if (ix == VEC_length (tree, cache->nodes))
- VEC_safe_push (tree, heap, cache->nodes, t);
+ if (ix == cache->nodes.length ())
+ cache->nodes.safe_push (t);
else
- VEC_replace (tree, cache->nodes, ix, t);
+ cache->nodes[ix] = t;
}
@@ -131,7 +131,7 @@ streamer_tree_cache_insert_1 (struct streamer_tree_cache_d *cache,
{
/* Determine the next slot to use in the cache. */
if (insert_at_next_slot_p)
- ix = VEC_length (tree, cache->nodes);
+ ix = cache->nodes.length ();
else
ix = *ix_p;
*slot = (void *)(size_t) (ix + 1);
@@ -195,7 +195,7 @@ streamer_tree_cache_insert_at (struct streamer_tree_cache_d *cache,
void
streamer_tree_cache_append (struct streamer_tree_cache_d *cache, tree t)
{
- unsigned ix = VEC_length (tree, cache->nodes);
+ unsigned ix = cache->nodes.length ();
streamer_tree_cache_insert_1 (cache, t, &ix, false);
}
@@ -320,6 +320,6 @@ streamer_tree_cache_delete (struct streamer_tree_cache_d *c)
return;
pointer_map_destroy (c->node_map);
- VEC_free (tree, heap, c->nodes);
+ c->nodes.release ();
free (c);
}
diff --git a/gcc/tree-streamer.h b/gcc/tree-streamer.h
index 778712b3e20..c687f032821 100644
--- a/gcc/tree-streamer.h
+++ b/gcc/tree-streamer.h
@@ -49,7 +49,7 @@ struct streamer_tree_cache_d
struct pointer_map_t *node_map;
/* The nodes pickled so far. */
- VEC(tree,heap) *nodes;
+ vec<tree> nodes;
};
/* Return true if tree node EXPR should be streamed as a builtin. For
@@ -103,7 +103,7 @@ void streamer_tree_cache_delete (struct streamer_tree_cache_d *);
static inline tree
streamer_tree_cache_get (struct streamer_tree_cache_d *cache, unsigned ix)
{
- return VEC_index (tree, cache->nodes, ix);
+ return cache->nodes[ix];
}
diff --git a/gcc/tree-switch-conversion.c b/gcc/tree-switch-conversion.c
index a35df7c7196..611b6b93d31 100644
--- a/gcc/tree-switch-conversion.c
+++ b/gcc/tree-switch-conversion.c
@@ -301,7 +301,7 @@ emit_case_bit_tests (gimple swtch, tree index_expr,
edge default_edge;
bool update_dom = dom_info_available_p (CDI_DOMINATORS);
- VEC (basic_block, heap) *bbs_to_fix_dom = NULL;
+ vec<basic_block> bbs_to_fix_dom = vec<basic_block>();
tree index_type = TREE_TYPE (index_expr);
tree unsigned_index_type = unsigned_type_for (index_type);
@@ -374,10 +374,10 @@ emit_case_bit_tests (gimple swtch, tree index_expr,
if (update_dom)
{
- bbs_to_fix_dom = VEC_alloc (basic_block, heap, 10);
- VEC_quick_push (basic_block, bbs_to_fix_dom, switch_bb);
- VEC_quick_push (basic_block, bbs_to_fix_dom, default_bb);
- VEC_quick_push (basic_block, bbs_to_fix_dom, new_default_bb);
+ bbs_to_fix_dom.create (10);
+ bbs_to_fix_dom.quick_push (switch_bb);
+ bbs_to_fix_dom.quick_push (default_bb);
+ bbs_to_fix_dom.quick_push (new_default_bb);
}
/* Now build the test-and-branch code. */
@@ -400,7 +400,7 @@ emit_case_bit_tests (gimple swtch, tree index_expr,
tmp = fold_build2 (GT_EXPR, boolean_type_node, idx, range);
new_bb = hoist_edge_and_branch_if_true (&gsi, tmp, default_edge, update_dom);
if (update_dom)
- VEC_quick_push (basic_block, bbs_to_fix_dom, new_bb);
+ bbs_to_fix_dom.quick_push (new_bb);
gcc_assert (gimple_bb (swtch) == new_bb);
gsi = gsi_last_bb (new_bb);
@@ -408,19 +408,19 @@ emit_case_bit_tests (gimple swtch, tree index_expr,
of NEW_BB, are still immediately dominated by SWITCH_BB. Make it so. */
if (update_dom)
{
- VEC (basic_block, heap) *dom_bbs;
+ vec<basic_block> dom_bbs;
basic_block dom_son;
dom_bbs = get_dominated_by (CDI_DOMINATORS, new_bb);
- FOR_EACH_VEC_ELT (basic_block, dom_bbs, i, dom_son)
+ FOR_EACH_VEC_ELT (dom_bbs, i, dom_son)
{
edge e = find_edge (new_bb, dom_son);
if (e && single_pred_p (e->dest))
continue;
set_immediate_dominator (CDI_DOMINATORS, dom_son, switch_bb);
- VEC_safe_push (basic_block, heap, bbs_to_fix_dom, dom_son);
+ bbs_to_fix_dom.safe_push (dom_son);
}
- VEC_free (basic_block, heap, dom_bbs);
+ dom_bbs.release ();
}
/* csui = (1 << (word_mode) idx) */
@@ -447,7 +447,7 @@ emit_case_bit_tests (gimple swtch, tree index_expr,
new_bb = hoist_edge_and_branch_if_true (&gsi, tmp, test[k].target_edge,
update_dom);
if (update_dom)
- VEC_safe_push (basic_block, heap, bbs_to_fix_dom, new_bb);
+ bbs_to_fix_dom.safe_push (new_bb);
gcc_assert (gimple_bb (swtch) == new_bb);
gsi = gsi_last_bb (new_bb);
}
@@ -465,7 +465,7 @@ emit_case_bit_tests (gimple swtch, tree index_expr,
{
/* Fix up the dominator tree. */
iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
- VEC_free (basic_block, heap, bbs_to_fix_dom);
+ bbs_to_fix_dom.release ();
}
}
@@ -571,7 +571,7 @@ struct switch_conv_info
tree *default_values;
/* Constructors of new static arrays. */
- VEC (constructor_elt, gc) **constructors;
+ vec<constructor_elt, va_gc> **constructors;
/* Array of ssa names that are initialized with a value from a new static
array. */
@@ -792,12 +792,14 @@ create_temp_arrays (struct switch_conv_info *info)
int i;
info->default_values = XCNEWVEC (tree, info->phi_count * 3);
- info->constructors = XCNEWVEC (VEC (constructor_elt, gc) *, info->phi_count);
+ /* ??? Macros do not support multi argument templates in their
+ argument list. We create a typedef to work around that problem. */
+ typedef vec<constructor_elt, va_gc> *vec_constructor_elt_gc;
+ info->constructors = XCNEWVEC (vec_constructor_elt_gc, info->phi_count);
info->target_inbound_names = info->default_values + info->phi_count;
info->target_outbound_names = info->target_inbound_names + info->phi_count;
for (i = 0; i < info->phi_count; i++)
- info->constructors[i]
- = VEC_alloc (constructor_elt, gc, tree_low_cst (info->range_size, 1) + 1);
+ vec_alloc (info->constructors[i], tree_low_cst (info->range_size, 1) + 1);
}
/* Free the arrays created by create_temp_arrays(). The vectors that are
@@ -872,7 +874,7 @@ build_constructors (gimple swtch, struct switch_conv_info *info)
elt.index = int_const_binop (MINUS_EXPR, pos, info->range_min);
elt.value = info->default_values[k];
- VEC_quick_push (constructor_elt, info->constructors[k], elt);
+ info->constructors[k]->quick_push (elt);
}
pos = int_const_binop (PLUS_EXPR, pos, integer_one_node);
@@ -898,7 +900,7 @@ build_constructors (gimple swtch, struct switch_conv_info *info)
elt.index = int_const_binop (MINUS_EXPR, pos, info->range_min);
elt.value = val;
- VEC_quick_push (constructor_elt, info->constructors[j], elt);
+ info->constructors[j]->quick_push (elt);
pos = int_const_binop (PLUS_EXPR, pos, integer_one_node);
} while (!tree_int_cst_lt (high, pos)
@@ -913,13 +915,13 @@ build_constructors (gimple swtch, struct switch_conv_info *info)
vectors. */
static tree
-constructor_contains_same_values_p (VEC (constructor_elt, gc) *vec)
+constructor_contains_same_values_p (vec<constructor_elt, va_gc> *vec)
{
unsigned int i;
tree prev = NULL_TREE;
constructor_elt *elt;
- FOR_EACH_VEC_ELT (constructor_elt, vec, i, elt)
+ FOR_EACH_VEC_SAFE_ELT (vec, i, elt)
{
if (!prev)
prev = elt->value;
@@ -937,7 +939,7 @@ static tree
array_value_type (gimple swtch, tree type, int num,
struct switch_conv_info *info)
{
- unsigned int i, len = VEC_length (constructor_elt, info->constructors[num]);
+ unsigned int i, len = vec_safe_length (info->constructors[num]);
constructor_elt *elt;
enum machine_mode mode;
int sign = 0;
@@ -953,7 +955,7 @@ array_value_type (gimple swtch, tree type, int num,
if (len < (optimize_bb_for_size_p (gimple_bb (swtch)) ? 2 : 32))
return type;
- FOR_EACH_VEC_ELT (constructor_elt, info->constructors[num], i, elt)
+ FOR_EACH_VEC_SAFE_ELT (info->constructors[num], i, elt)
{
double_int cst;
@@ -1039,7 +1041,7 @@ build_one_array (gimple swtch, int num, tree arr_index_type, gimple phi,
unsigned int i;
constructor_elt *elt;
- FOR_EACH_VEC_ELT (constructor_elt, info->constructors[num], i, elt)
+ FOR_EACH_VEC_SAFE_ELT (info->constructors[num], i, elt)
elt->value = fold_convert (value_type, elt->value);
}
ctor = build_constructor (array_type, info->constructors[num]);
@@ -1292,7 +1294,7 @@ gen_inbound_check (gimple swtch, struct switch_conv_info *info)
/* Fix the dominator tree, if it is available. */
if (dom_info_available_p (CDI_DOMINATORS))
{
- VEC (basic_block, heap) *bbs_to_fix_dom;
+ vec<basic_block> bbs_to_fix_dom;
set_immediate_dominator (CDI_DOMINATORS, bb1, bb0);
set_immediate_dominator (CDI_DOMINATORS, bb2, bb0);
@@ -1300,14 +1302,14 @@ gen_inbound_check (gimple swtch, struct switch_conv_info *info)
/* If bbD was the immediate dominator ... */
set_immediate_dominator (CDI_DOMINATORS, bbf, bb0);
- bbs_to_fix_dom = VEC_alloc (basic_block, heap, 4);
- VEC_quick_push (basic_block, bbs_to_fix_dom, bb0);
- VEC_quick_push (basic_block, bbs_to_fix_dom, bb1);
- VEC_quick_push (basic_block, bbs_to_fix_dom, bb2);
- VEC_quick_push (basic_block, bbs_to_fix_dom, bbf);
+ bbs_to_fix_dom.create (4);
+ bbs_to_fix_dom.quick_push (bb0);
+ bbs_to_fix_dom.quick_push (bb1);
+ bbs_to_fix_dom.quick_push (bb2);
+ bbs_to_fix_dom.quick_push (bbf);
iterate_fix_dominators (CDI_DOMINATORS, bbs_to_fix_dom, true);
- VEC_free (basic_block, heap, bbs_to_fix_dom);
+ bbs_to_fix_dom.release ();
}
}
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
index 445f2cbe712..49dd3c0587f 100644
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -562,7 +562,7 @@ vect_mark_for_runtime_alias_test (ddr_p ddr, loop_vec_info loop_vinfo)
return false;
}
- VEC_safe_push (ddr_p, heap, LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo), ddr);
+ LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo).safe_push (ddr);
return true;
}
@@ -700,7 +700,7 @@ vect_analyze_data_ref_dependence (struct data_dependence_relation *ddr,
}
loop_depth = index_in_loop_nest (loop->num, DDR_LOOP_NEST (ddr));
- FOR_EACH_VEC_ELT (lambda_vector, DDR_DIST_VECTS (ddr), i, dist_v)
+ FOR_EACH_VEC_ELT (DDR_DIST_VECTS (ddr), i, dist_v)
{
int dist = dist_v[loop_depth];
@@ -792,7 +792,7 @@ vect_analyze_data_ref_dependences (loop_vec_info loop_vinfo,
bb_vec_info bb_vinfo, int *max_vf)
{
unsigned int i;
- VEC (ddr_p, heap) *ddrs = NULL;
+ vec<ddr_p> ddrs = vec<ddr_p>();
struct data_dependence_relation *ddr;
if (dump_enabled_p ())
@@ -803,7 +803,7 @@ vect_analyze_data_ref_dependences (loop_vec_info loop_vinfo,
else
ddrs = BB_VINFO_DDRS (bb_vinfo);
- FOR_EACH_VEC_ELT (ddr_p, ddrs, i, ddr)
+ FOR_EACH_VEC_ELT (ddrs, i, ddr)
if (vect_analyze_data_ref_dependence (ddr, loop_vinfo, max_vf))
return false;
@@ -1015,7 +1015,7 @@ static bool
vect_compute_data_refs_alignment (loop_vec_info loop_vinfo,
bb_vec_info bb_vinfo)
{
- VEC (data_reference_p, heap) *datarefs;
+ vec<data_reference_p> datarefs;
struct data_reference *dr;
unsigned int i;
@@ -1024,7 +1024,7 @@ vect_compute_data_refs_alignment (loop_vec_info loop_vinfo,
else
datarefs = BB_VINFO_DATAREFS (bb_vinfo);
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
if (STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (DR_STMT (dr)))
&& !vect_compute_data_ref_alignment (dr))
{
@@ -1055,7 +1055,7 @@ vect_update_misalignment_for_peel (struct data_reference *dr,
struct data_reference *dr_peel, int npeel)
{
unsigned int i;
- VEC(dr_p,heap) *same_align_drs;
+ vec<dr_p> same_align_drs;
struct data_reference *current_dr;
int dr_size = GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (DR_REF (dr))));
int dr_peel_size = GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (DR_REF (dr_peel))));
@@ -1073,7 +1073,7 @@ vect_update_misalignment_for_peel (struct data_reference *dr,
are aligned in the vector loop. */
same_align_drs
= STMT_VINFO_SAME_ALIGN_REFS (vinfo_for_stmt (DR_STMT (dr_peel)));
- FOR_EACH_VEC_ELT (dr_p, same_align_drs, i, current_dr)
+ FOR_EACH_VEC_ELT (same_align_drs, i, current_dr)
{
if (current_dr != dr)
continue;
@@ -1109,7 +1109,7 @@ vect_update_misalignment_for_peel (struct data_reference *dr,
bool
vect_verify_datarefs_alignment (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
{
- VEC (data_reference_p, heap) *datarefs;
+ vec<data_reference_p> datarefs;
struct data_reference *dr;
enum dr_alignment_support supportable_dr_alignment;
unsigned int i;
@@ -1119,7 +1119,7 @@ vect_verify_datarefs_alignment (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
else
datarefs = BB_VINFO_DATAREFS (bb_vinfo);
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
{
gimple stmt = DR_STMT (dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
@@ -1362,16 +1362,16 @@ vect_peeling_hash_get_lowest_cost (void **slot, void *data)
gimple stmt = DR_STMT (elem->dr);
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
- VEC (data_reference_p, heap) *datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
+ vec<data_reference_p> datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
struct data_reference *dr;
stmt_vector_for_cost prologue_cost_vec, body_cost_vec, epilogue_cost_vec;
int single_iter_cost;
- prologue_cost_vec = VEC_alloc (stmt_info_for_cost, heap, 2);
- body_cost_vec = VEC_alloc (stmt_info_for_cost, heap, 2);
- epilogue_cost_vec = VEC_alloc (stmt_info_for_cost, heap, 2);
+ prologue_cost_vec.create (2);
+ body_cost_vec.create (2);
+ epilogue_cost_vec.create (2);
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
{
stmt = DR_STMT (dr);
stmt_info = vinfo_for_stmt (stmt);
@@ -1398,21 +1398,21 @@ vect_peeling_hash_get_lowest_cost (void **slot, void *data)
These costs depend only on the scalar iteration cost, the
number of peeling iterations finally chosen, and the number of
misaligned statements. So discard the information found here. */
- VEC_free (stmt_info_for_cost, heap, prologue_cost_vec);
- VEC_free (stmt_info_for_cost, heap, epilogue_cost_vec);
+ prologue_cost_vec.release ();
+ epilogue_cost_vec.release ();
if (inside_cost < min->inside_cost
|| (inside_cost == min->inside_cost && outside_cost < min->outside_cost))
{
min->inside_cost = inside_cost;
min->outside_cost = outside_cost;
- VEC_free (stmt_info_for_cost, heap, min->body_cost_vec);
+ min->body_cost_vec.release ();
min->body_cost_vec = body_cost_vec;
min->peel_info.dr = elem->dr;
min->peel_info.npeel = elem->npeel;
}
else
- VEC_free (stmt_info_for_cost, heap, body_cost_vec);
+ body_cost_vec.release ();
return 1;
}
@@ -1430,7 +1430,7 @@ vect_peeling_hash_choose_best_peeling (loop_vec_info loop_vinfo,
struct _vect_peel_extended_info res;
res.peel_info.dr = NULL;
- res.body_cost_vec = NULL;
+ res.body_cost_vec = stmt_vector_for_cost();
if (flag_vect_cost_model)
{
@@ -1546,7 +1546,7 @@ vect_peeling_hash_choose_best_peeling (loop_vec_info loop_vinfo,
bool
vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
{
- VEC (data_reference_p, heap) *datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
+ vec<data_reference_p> datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
enum dr_alignment_support supportable_dr_alignment;
struct data_reference *dr0 = NULL, *first_store = NULL;
@@ -1564,7 +1564,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
unsigned possible_npeel_number = 1;
tree vectype;
unsigned int nelements, mis, same_align_drs_max = 0;
- stmt_vector_for_cost body_cost_vec = NULL;
+ stmt_vector_for_cost body_cost_vec = stmt_vector_for_cost();
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
@@ -1602,7 +1602,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
- The cost of peeling (the extra runtime checks, the increase
in code size). */
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
{
stmt = DR_STMT (dr);
stmt_info = vinfo_for_stmt (stmt);
@@ -1707,12 +1707,12 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
stores over load. */
if (all_misalignments_unknown)
{
- if (same_align_drs_max < VEC_length (dr_p,
- STMT_VINFO_SAME_ALIGN_REFS (stmt_info))
+ if (same_align_drs_max
+ < STMT_VINFO_SAME_ALIGN_REFS (stmt_info).length ()
|| !dr0)
{
- same_align_drs_max = VEC_length (dr_p,
- STMT_VINFO_SAME_ALIGN_REFS (stmt_info));
+ same_align_drs_max
+ = STMT_VINFO_SAME_ALIGN_REFS (stmt_info).length ();
dr0 = dr;
}
@@ -1770,22 +1770,23 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
unsigned int store_inside_cost = 0, store_outside_cost = 0;
unsigned int load_inside_penalty = 0, load_outside_penalty = 0;
unsigned int store_inside_penalty = 0, store_outside_penalty = 0;
- stmt_vector_for_cost dummy = VEC_alloc (stmt_info_for_cost, heap, 2);
+ stmt_vector_for_cost dummy;
+ dummy.create (2);
vect_get_data_access_cost (dr0, &load_inside_cost, &load_outside_cost,
&dummy);
vect_get_data_access_cost (first_store, &store_inside_cost,
&store_outside_cost, &dummy);
- VEC_free (stmt_info_for_cost, heap, dummy);
+ dummy.release ();
/* Calculate the penalty for leaving FIRST_STORE unaligned (by
aligning the load DR0). */
load_inside_penalty = store_inside_cost;
load_outside_penalty = store_outside_cost;
- for (i = 0; VEC_iterate (dr_p, STMT_VINFO_SAME_ALIGN_REFS
- (vinfo_for_stmt (DR_STMT (first_store))),
- i, dr);
+ for (i = 0;
+ STMT_VINFO_SAME_ALIGN_REFS (vinfo_for_stmt (
+ DR_STMT (first_store))).iterate (i, &dr);
i++)
if (DR_IS_READ (dr))
{
@@ -1802,9 +1803,9 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
aligning the FIRST_STORE). */
store_inside_penalty = load_inside_cost;
store_outside_penalty = load_outside_cost;
- for (i = 0; VEC_iterate (dr_p, STMT_VINFO_SAME_ALIGN_REFS
- (vinfo_for_stmt (DR_STMT (dr0))),
- i, dr);
+ for (i = 0;
+ STMT_VINFO_SAME_ALIGN_REFS (vinfo_for_stmt (
+ DR_STMT (dr0))).iterate (i, &dr);
i++)
if (DR_IS_READ (dr))
{
@@ -1825,8 +1826,9 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
/* In case there are only loads with different unknown misalignments, use
peeling only if it may help to align other accesses in the loop. */
- if (!first_store && !VEC_length (dr_p, STMT_VINFO_SAME_ALIGN_REFS
- (vinfo_for_stmt (DR_STMT (dr0))))
+ if (!first_store
+ && !STMT_VINFO_SAME_ALIGN_REFS (
+ vinfo_for_stmt (DR_STMT (dr0))).length ()
&& vect_supportable_dr_alignment (dr0, false)
!= dr_unaligned_supported)
do_peeling = false;
@@ -1884,7 +1886,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
}
/* Ensure that all data refs can be vectorized after the peel. */
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
{
int save_misalignment;
@@ -1923,7 +1925,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
do_peeling = false;
else
{
- VEC_free (stmt_info_for_cost, heap, body_cost_vec);
+ body_cost_vec.release ();
return stat;
}
}
@@ -1940,7 +1942,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
by the peeling factor times the element size of DR_i (MOD the
vectorization factor times the size). Otherwise, the
misalignment of DR_i must be set to unknown. */
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
if (dr != dr0)
vect_update_misalignment_for_peel (dr, dr0, npeel);
@@ -1960,16 +1962,16 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
/* We've delayed passing the inside-loop peeling costs to the
target cost model until we were sure peeling would happen.
Do so now. */
- if (body_cost_vec)
+ if (body_cost_vec.exists ())
{
- FOR_EACH_VEC_ELT (stmt_info_for_cost, body_cost_vec, i, si)
+ FOR_EACH_VEC_ELT (body_cost_vec, i, si)
{
struct _stmt_vec_info *stmt_info
= si->stmt ? vinfo_for_stmt (si->stmt) : NULL;
(void) add_stmt_cost (data, si->count, si->kind, stmt_info,
si->misalign, vect_body);
}
- VEC_free (stmt_info_for_cost, heap, body_cost_vec);
+ body_cost_vec.release ();
}
stat = vect_verify_datarefs_alignment (loop_vinfo, NULL);
@@ -1978,7 +1980,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
}
}
- VEC_free (stmt_info_for_cost, heap, body_cost_vec);
+ body_cost_vec.release ();
/* (2) Versioning to force alignment. */
@@ -1997,7 +1999,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
if (do_versioning)
{
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
{
stmt = DR_STMT (dr);
stmt_info = vinfo_for_stmt (stmt);
@@ -2023,8 +2025,7 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
tree vectype;
if (known_alignment_for_access_p (dr)
- || VEC_length (gimple,
- LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo))
+ || LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo).length ()
>= (unsigned) PARAM_VALUE (PARAM_VECT_MAX_VERSION_FOR_ALIGNMENT_CHECKS))
{
do_versioning = false;
@@ -2049,9 +2050,8 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
gcc_assert (!LOOP_VINFO_PTR_MASK (loop_vinfo)
|| LOOP_VINFO_PTR_MASK (loop_vinfo) == mask);
LOOP_VINFO_PTR_MASK (loop_vinfo) = mask;
- VEC_safe_push (gimple, heap,
- LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo),
- DR_STMT (dr));
+ LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo).safe_push (
+ DR_STMT (dr));
}
}
@@ -2059,19 +2059,19 @@ vect_enhance_data_refs_alignment (loop_vec_info loop_vinfo)
if (!LOOP_REQUIRES_VERSIONING_FOR_ALIGNMENT (loop_vinfo))
do_versioning = false;
else if (!do_versioning)
- VEC_truncate (gimple, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo), 0);
+ LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo).truncate (0);
}
if (do_versioning)
{
- VEC(gimple,heap) *may_misalign_stmts
+ vec<gimple> may_misalign_stmts
= LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo);
gimple stmt;
/* It can now be assumed that the data references in the statements
in LOOP_VINFO_MAY_MISALIGN_STMTS will be aligned in the version
of the loop being vectorized. */
- FOR_EACH_VEC_ELT (gimple, may_misalign_stmts, i, stmt)
+ FOR_EACH_VEC_ELT (may_misalign_stmts, i, stmt)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
dr = STMT_VINFO_DATA_REF (stmt_info);
@@ -2143,7 +2143,7 @@ vect_find_same_alignment_drs (struct data_dependence_relation *ddr,
return;
loop_depth = index_in_loop_nest (loop->num, DDR_LOOP_NEST (ddr));
- FOR_EACH_VEC_ELT (lambda_vector, DDR_DIST_VECTS (ddr), i, dist_v)
+ FOR_EACH_VEC_ELT (DDR_DIST_VECTS (ddr), i, dist_v)
{
int dist = dist_v[loop_depth];
@@ -2156,8 +2156,8 @@ vect_find_same_alignment_drs (struct data_dependence_relation *ddr,
|| (dist % vectorization_factor == 0 && dra_size == drb_size))
{
/* Two references with distance zero have the same alignment. */
- VEC_safe_push (dr_p, heap, STMT_VINFO_SAME_ALIGN_REFS (stmtinfo_a), drb);
- VEC_safe_push (dr_p, heap, STMT_VINFO_SAME_ALIGN_REFS (stmtinfo_b), dra);
+ STMT_VINFO_SAME_ALIGN_REFS (stmtinfo_a).safe_push (drb);
+ STMT_VINFO_SAME_ALIGN_REFS (stmtinfo_b).safe_push (dra);
if (dump_enabled_p ())
{
dump_printf_loc (MSG_NOTE, vect_location,
@@ -2190,11 +2190,11 @@ vect_analyze_data_refs_alignment (loop_vec_info loop_vinfo,
data dependence information. */
if (loop_vinfo)
{
- VEC (ddr_p, heap) *ddrs = LOOP_VINFO_DDRS (loop_vinfo);
+ vec<ddr_p> ddrs = LOOP_VINFO_DDRS (loop_vinfo);
struct data_dependence_relation *ddr;
unsigned int i;
- FOR_EACH_VEC_ELT (ddr_p, ddrs, i, ddr)
+ FOR_EACH_VEC_ELT (ddrs, i, ddr)
vect_find_same_alignment_drs (ddr, loop_vinfo);
}
@@ -2458,11 +2458,9 @@ vect_analyze_group_access (struct data_reference *dr)
if (DR_IS_WRITE (dr) && !slp_impossible)
{
if (loop_vinfo)
- VEC_safe_push (gimple, heap, LOOP_VINFO_GROUPED_STORES (loop_vinfo),
- stmt);
+ LOOP_VINFO_GROUPED_STORES (loop_vinfo).safe_push (stmt);
if (bb_vinfo)
- VEC_safe_push (gimple, heap, BB_VINFO_GROUPED_STORES (bb_vinfo),
- stmt);
+ BB_VINFO_GROUPED_STORES (bb_vinfo).safe_push (stmt);
}
/* There is a gap in the end of the group. */
@@ -2584,7 +2582,7 @@ bool
vect_analyze_data_ref_accesses (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
{
unsigned int i;
- VEC (data_reference_p, heap) *datarefs;
+ vec<data_reference_p> datarefs;
struct data_reference *dr;
if (dump_enabled_p ())
@@ -2596,7 +2594,7 @@ vect_analyze_data_ref_accesses (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
else
datarefs = BB_VINFO_DATAREFS (bb_vinfo);
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
if (STMT_VINFO_VECTORIZABLE (vinfo_for_stmt (DR_STMT (dr)))
&& !vect_analyze_data_ref_access (dr))
{
@@ -2626,7 +2624,7 @@ vect_analyze_data_ref_accesses (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
bool
vect_prune_runtime_alias_test_list (loop_vec_info loop_vinfo)
{
- VEC (ddr_p, heap) * ddrs =
+ vec<ddr_p> ddrs =
LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo);
unsigned i, j;
@@ -2634,17 +2632,17 @@ vect_prune_runtime_alias_test_list (loop_vec_info loop_vinfo)
dump_printf_loc (MSG_NOTE, vect_location,
"=== vect_prune_runtime_alias_test_list ===");
- for (i = 0; i < VEC_length (ddr_p, ddrs); )
+ for (i = 0; i < ddrs.length (); )
{
bool found;
ddr_p ddr_i;
- ddr_i = VEC_index (ddr_p, ddrs, i);
+ ddr_i = ddrs[i];
found = false;
for (j = 0; j < i; j++)
{
- ddr_p ddr_j = VEC_index (ddr_p, ddrs, j);
+ ddr_p ddr_j = ddrs[j];
if (vect_vfa_range_equal (ddr_i, ddr_j))
{
@@ -2667,13 +2665,13 @@ vect_prune_runtime_alias_test_list (loop_vec_info loop_vinfo)
if (found)
{
- VEC_ordered_remove (ddr_p, ddrs, i);
+ ddrs.ordered_remove (i);
continue;
}
i++;
}
- if (VEC_length (ddr_p, ddrs) >
+ if (ddrs.length () >
(unsigned) PARAM_VALUE (PARAM_VECT_MAX_VERSION_FOR_ALIAS_CHECKS))
{
if (dump_enabled_p ())
@@ -2683,7 +2681,7 @@ vect_prune_runtime_alias_test_list (loop_vec_info loop_vinfo)
"generated checks exceeded.");
}
- VEC_truncate (ddr_p, LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo), 0);
+ LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo).truncate (0);
return false;
}
@@ -2958,7 +2956,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo,
struct loop *loop = NULL;
basic_block bb = NULL;
unsigned int i;
- VEC (data_reference_p, heap) *datarefs;
+ vec<data_reference_p> datarefs;
struct data_reference *dr;
tree scalar_type;
bool res, stop_bb_analysis = false;
@@ -3008,7 +3006,8 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo,
}
}
if (!compute_all_dependences (BB_VINFO_DATAREFS (bb_vinfo),
- &BB_VINFO_DDRS (bb_vinfo), NULL, true))
+ &BB_VINFO_DDRS (bb_vinfo),
+ vec<loop_p>(), true))
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@@ -3024,7 +3023,7 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo,
/* Go through the data-refs, check that the analysis succeeded. Update
pointer from stmt_vec_info struct to DR and vectype. */
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
{
gimple stmt;
stmt_vec_info stmt_info;
@@ -3390,12 +3389,12 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo,
{
unsigned int j, k, n;
struct data_reference *olddr
- = VEC_index (data_reference_p, datarefs, i);
- VEC (ddr_p, heap) *ddrs = LOOP_VINFO_DDRS (loop_vinfo);
+ = datarefs[i];
+ vec<ddr_p> ddrs = LOOP_VINFO_DDRS (loop_vinfo);
struct data_dependence_relation *ddr, *newddr;
bool bad = false;
tree off;
- VEC (loop_p, heap) *nest = LOOP_VINFO_LOOP_NEST (loop_vinfo);
+ vec<loop_p> nest = LOOP_VINFO_LOOP_NEST (loop_vinfo);
gather = 0 != vect_check_gather (stmt, loop_vinfo, NULL, &off, NULL);
if (gather
@@ -3415,14 +3414,14 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo,
return false;
}
- n = VEC_length (data_reference_p, datarefs) - 1;
+ n = datarefs.length () - 1;
for (j = 0, k = i - 1; j < i; j++)
{
- ddr = VEC_index (ddr_p, ddrs, k);
+ ddr = ddrs[k];
gcc_assert (DDR_B (ddr) == olddr);
newddr = initialize_data_dependence_relation (DDR_A (ddr), dr,
nest);
- VEC_replace (ddr_p, ddrs, k, newddr);
+ ddrs[k] = newddr;
free_dependence_relation (ddr);
if (!bad
&& DR_IS_WRITE (DDR_A (newddr))
@@ -3432,14 +3431,14 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo,
}
k++;
- n = k + VEC_length (data_reference_p, datarefs) - i - 1;
+ n = k + datarefs.length () - i - 1;
for (; k < n; k++)
{
- ddr = VEC_index (ddr_p, ddrs, k);
+ ddr = ddrs[k];
gcc_assert (DDR_A (ddr) == olddr);
newddr = initialize_data_dependence_relation (dr, DDR_B (ddr),
nest);
- VEC_replace (ddr_p, ddrs, k, newddr);
+ ddrs[k] = newddr;
free_dependence_relation (ddr);
if (!bad
&& DR_IS_WRITE (DDR_B (newddr))
@@ -3447,14 +3446,14 @@ vect_analyze_data_refs (loop_vec_info loop_vinfo,
bad = true;
}
- k = VEC_length (ddr_p, ddrs)
- - VEC_length (data_reference_p, datarefs) + i;
- ddr = VEC_index (ddr_p, ddrs, k);
+ k = ddrs.length ()
+ - datarefs.length () + i;
+ ddr = ddrs[k];
gcc_assert (DDR_A (ddr) == olddr && DDR_B (ddr) == olddr);
newddr = initialize_data_dependence_relation (dr, dr, nest);
- VEC_replace (ddr_p, ddrs, k, newddr);
+ ddrs[k] = newddr;
free_dependence_relation (ddr);
- VEC_replace (data_reference_p, datarefs, i, dr);
+ datarefs[i] = dr;
if (bad)
{
@@ -4226,11 +4225,11 @@ vect_store_lanes_supported (tree vectype, unsigned HOST_WIDE_INT count)
I4: 6 14 22 30 7 15 23 31. */
void
-vect_permute_store_chain (VEC(tree,heap) *dr_chain,
+vect_permute_store_chain (vec<tree> dr_chain,
unsigned int length,
gimple stmt,
gimple_stmt_iterator *gsi,
- VEC(tree,heap) **result_chain)
+ vec<tree> *result_chain)
{
tree vect1, vect2, high, low;
gimple perm_stmt;
@@ -4240,7 +4239,7 @@ vect_permute_store_chain (VEC(tree,heap) *dr_chain,
unsigned int j, nelt = TYPE_VECTOR_SUBPARTS (vectype);
unsigned char *sel = XALLOCAVEC (unsigned char, nelt);
- *result_chain = VEC_copy (tree, heap, dr_chain);
+ *result_chain = dr_chain.copy ();
for (i = 0, n = nelt / 2; i < n; i++)
{
@@ -4259,8 +4258,8 @@ vect_permute_store_chain (VEC(tree,heap) *dr_chain,
{
for (j = 0; j < length/2; j++)
{
- vect1 = VEC_index (tree, dr_chain, j);
- vect2 = VEC_index (tree, dr_chain, j+length/2);
+ vect1 = dr_chain[j];
+ vect2 = dr_chain[j+length/2];
/* Create interleaving stmt:
high = VEC_PERM_EXPR <vect1, vect2, {0, nelt, 1, nelt+1, ...}> */
@@ -4269,7 +4268,7 @@ vect_permute_store_chain (VEC(tree,heap) *dr_chain,
= gimple_build_assign_with_ops (VEC_PERM_EXPR, high,
vect1, vect2, perm_mask_high);
vect_finish_stmt_generation (stmt, perm_stmt, gsi);
- VEC_replace (tree, *result_chain, 2*j, high);
+ (*result_chain)[2*j] = high;
/* Create interleaving stmt:
low = VEC_PERM_EXPR <vect1, vect2, {nelt/2, nelt*3/2, nelt/2+1,
@@ -4279,9 +4278,9 @@ vect_permute_store_chain (VEC(tree,heap) *dr_chain,
= gimple_build_assign_with_ops (VEC_PERM_EXPR, low,
vect1, vect2, perm_mask_low);
vect_finish_stmt_generation (stmt, perm_stmt, gsi);
- VEC_replace (tree, *result_chain, 2*j+1, low);
+ (*result_chain)[2*j+1] = low;
}
- dr_chain = VEC_copy (tree, heap, *result_chain);
+ dr_chain = result_chain->copy ();
}
}
@@ -4681,11 +4680,11 @@ vect_load_lanes_supported (tree vectype, unsigned HOST_WIDE_INT count)
4th vec (E4): 3 7 11 15 19 23 27 31. */
static void
-vect_permute_load_chain (VEC(tree,heap) *dr_chain,
+vect_permute_load_chain (vec<tree> dr_chain,
unsigned int length,
gimple stmt,
gimple_stmt_iterator *gsi,
- VEC(tree,heap) **result_chain)
+ vec<tree> *result_chain)
{
tree data_ref, first_vect, second_vect;
tree perm_mask_even, perm_mask_odd;
@@ -4695,7 +4694,7 @@ vect_permute_load_chain (VEC(tree,heap) *dr_chain,
unsigned nelt = TYPE_VECTOR_SUBPARTS (vectype);
unsigned char *sel = XALLOCAVEC (unsigned char, nelt);
- *result_chain = VEC_copy (tree, heap, dr_chain);
+ *result_chain = dr_chain.copy ();
for (i = 0; i < nelt; ++i)
sel[i] = i * 2;
@@ -4711,8 +4710,8 @@ vect_permute_load_chain (VEC(tree,heap) *dr_chain,
{
for (j = 0; j < length; j += 2)
{
- first_vect = VEC_index (tree, dr_chain, j);
- second_vect = VEC_index (tree, dr_chain, j+1);
+ first_vect = dr_chain[j];
+ second_vect = dr_chain[j+1];
/* data_ref = permute_even (first_data_ref, second_data_ref); */
data_ref = make_temp_ssa_name (vectype, NULL, "vect_perm_even");
@@ -4720,7 +4719,7 @@ vect_permute_load_chain (VEC(tree,heap) *dr_chain,
first_vect, second_vect,
perm_mask_even);
vect_finish_stmt_generation (stmt, perm_stmt, gsi);
- VEC_replace (tree, *result_chain, j/2, data_ref);
+ (*result_chain)[j/2] = data_ref;
/* data_ref = permute_odd (first_data_ref, second_data_ref); */
data_ref = make_temp_ssa_name (vectype, NULL, "vect_perm_odd");
@@ -4728,9 +4727,9 @@ vect_permute_load_chain (VEC(tree,heap) *dr_chain,
first_vect, second_vect,
perm_mask_odd);
vect_finish_stmt_generation (stmt, perm_stmt, gsi);
- VEC_replace (tree, *result_chain, j/2+length/2, data_ref);
+ (*result_chain)[j/2+length/2] = data_ref;
}
- dr_chain = VEC_copy (tree, heap, *result_chain);
+ dr_chain = result_chain->copy ();
}
}
@@ -4743,18 +4742,18 @@ vect_permute_load_chain (VEC(tree,heap) *dr_chain,
*/
void
-vect_transform_grouped_load (gimple stmt, VEC(tree,heap) *dr_chain, int size,
+vect_transform_grouped_load (gimple stmt, vec<tree> dr_chain, int size,
gimple_stmt_iterator *gsi)
{
- VEC(tree,heap) *result_chain = NULL;
+ vec<tree> result_chain = vec<tree>();
/* DR_CHAIN contains input data-refs that are a part of the interleaving.
RESULT_CHAIN is the output of vect_permute_load_chain, it contains permuted
vectors, that are ready for vector computation. */
- result_chain = VEC_alloc (tree, heap, size);
+ result_chain.create (size);
vect_permute_load_chain (dr_chain, size, stmt, gsi, &result_chain);
vect_record_grouped_load_vectors (stmt, result_chain);
- VEC_free (tree, heap, result_chain);
+ result_chain.release ();
}
/* RESULT_CHAIN contains the output of a group of grouped loads that were
@@ -4762,7 +4761,7 @@ vect_transform_grouped_load (gimple stmt, VEC(tree,heap) *dr_chain, int size,
for each vector to the associated scalar statement. */
void
-vect_record_grouped_load_vectors (gimple stmt, VEC(tree,heap) *result_chain)
+vect_record_grouped_load_vectors (gimple stmt, vec<tree> result_chain)
{
gimple first_stmt = GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt));
gimple next_stmt, new_stmt;
@@ -4774,7 +4773,7 @@ vect_record_grouped_load_vectors (gimple stmt, VEC(tree,heap) *result_chain)
corresponds the order of data-refs in RESULT_CHAIN. */
next_stmt = first_stmt;
gap_count = 1;
- FOR_EACH_VEC_ELT (tree, result_chain, i, tmp_data_ref)
+ FOR_EACH_VEC_ELT (result_chain, i, tmp_data_ref)
{
if (!next_stmt)
break;
diff --git a/gcc/tree-vect-generic.c b/gcc/tree-vect-generic.c
index 611666f2c86..fcbf5d3f005 100644
--- a/gcc/tree-vect-generic.c
+++ b/gcc/tree-vect-generic.c
@@ -231,7 +231,7 @@ expand_vector_piecewise (gimple_stmt_iterator *gsi, elem_op_func f,
tree type, tree inner_type,
tree a, tree b, enum tree_code code)
{
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *v;
tree part_width = TYPE_SIZE (inner_type);
tree index = bitsize_int (0);
int nunits = TYPE_VECTOR_SUBPARTS (type);
@@ -247,13 +247,13 @@ expand_vector_piecewise (gimple_stmt_iterator *gsi, elem_op_func f,
warning_at (loc, OPT_Wvector_operation_performance,
"vector operation will be expanded in parallel");
- v = VEC_alloc(constructor_elt, gc, (nunits + delta - 1) / delta);
+ vec_alloc (v, (nunits + delta - 1) / delta);
for (i = 0; i < nunits;
i += delta, index = int_const_binop (PLUS_EXPR, index, part_width))
{
tree result = f (gsi, inner_type, a, b, index, part_width, code);
constructor_elt ce = {NULL_TREE, result};
- VEC_quick_push (constructor_elt, v, ce);
+ v->quick_push (ce);
}
return build_constructor (type, v);
@@ -881,7 +881,7 @@ expand_vector_condition (gimple_stmt_iterator *gsi)
bool a_is_comparison = false;
tree b = gimple_assign_rhs2 (stmt);
tree c = gimple_assign_rhs3 (stmt);
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *v;
tree constr;
tree inner_type = TREE_TYPE (type);
tree cond_type = TREE_TYPE (TREE_TYPE (a));
@@ -909,7 +909,7 @@ expand_vector_condition (gimple_stmt_iterator *gsi)
warning_at (loc, OPT_Wvector_operation_performance,
"vector condition will be expanded piecewise");
- v = VEC_alloc(constructor_elt, gc, nunits);
+ vec_alloc (v, nunits);
for (i = 0; i < nunits;
i++, index = int_const_binop (PLUS_EXPR, index, width))
{
@@ -926,7 +926,7 @@ expand_vector_condition (gimple_stmt_iterator *gsi)
aa = tree_vec_extract (gsi, cond_type, a, width, index);
result = gimplify_build3 (gsi, COND_EXPR, inner_type, aa, bb, cc);
constructor_elt ce = {NULL_TREE, result};
- VEC_quick_push (constructor_elt, v, ce);
+ v->quick_push (ce);
}
constr = build_constructor (type, v);
@@ -1182,7 +1182,7 @@ lower_vec_perm (gimple_stmt_iterator *gsi)
tree vect_elt_type = TREE_TYPE (vect_type);
tree mask_elt_type = TREE_TYPE (mask_type);
unsigned int elements = TYPE_VECTOR_SUBPARTS (vect_type);
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *v;
tree constr, t, si, i_val;
tree vec0tmp = NULL_TREE, vec1tmp = NULL_TREE, masktmp = NULL_TREE;
bool two_operand_p = !operand_equal_p (vec0, vec1, 0);
@@ -1218,7 +1218,7 @@ lower_vec_perm (gimple_stmt_iterator *gsi)
warning_at (loc, OPT_Wvector_operation_performance,
"vector shuffling operation will be expanded piecewise");
- v = VEC_alloc (constructor_elt, gc, elements);
+ vec_alloc (v, elements);
for (i = 0; i < elements; i++)
{
si = size_int (i);
diff --git a/gcc/tree-vect-loop-manip.c b/gcc/tree-vect-loop-manip.c
index 58ded23399e..34bde34ef25 100644
--- a/gcc/tree-vect-loop-manip.c
+++ b/gcc/tree-vect-loop-manip.c
@@ -118,17 +118,13 @@ typedef struct
basic_block bb;
} adjust_info;
-DEF_VEC_O(adjust_info);
-DEF_VEC_ALLOC_O_STACK(adjust_info);
-#define VEC_adjust_info_stack_alloc(alloc) VEC_stack_alloc (adjust_info, alloc)
-
/* A stack of values to be adjusted in debug stmts. We have to
process them LIFO, so that the closest substitution applies. If we
processed them FIFO, without the stack, we might substitute uses
with a PHI DEF that would soon become non-dominant, and when we got
to the suitable one, it wouldn't have anything to substitute any
more. */
-static VEC(adjust_info, stack) *adjust_vec;
+static vec<adjust_info, va_stack> adjust_vec;
/* Adjust any debug stmts that referenced AI->from values to use the
loop-closed AI->to, if the references are dominated by AI->bb and
@@ -185,15 +181,15 @@ adjust_vec_debug_stmts (void)
if (!MAY_HAVE_DEBUG_STMTS)
return;
- gcc_assert (adjust_vec);
+ gcc_assert (adjust_vec.exists ());
- while (!VEC_empty (adjust_info, adjust_vec))
+ while (!adjust_vec.is_empty ())
{
- adjust_debug_stmts_now (&VEC_last (adjust_info, adjust_vec));
- VEC_pop (adjust_info, adjust_vec);
+ adjust_debug_stmts_now (&adjust_vec.last ());
+ adjust_vec.pop ();
}
- VEC_free (adjust_info, stack, adjust_vec);
+ adjust_vec.release ();
}
/* Adjust any debug stmts that referenced FROM values to use the
@@ -214,8 +210,8 @@ adjust_debug_stmts (tree from, tree to, basic_block bb)
ai.to = to;
ai.bb = bb;
- if (adjust_vec)
- VEC_safe_push (adjust_info, stack, adjust_vec, ai);
+ if (adjust_vec.exists ())
+ adjust_vec.safe_push (ai);
else
adjust_debug_stmts_now (&ai);
}
@@ -1253,8 +1249,8 @@ slpeel_tree_peel_loop_to_edge (struct loop *loop,
if (MAY_HAVE_DEBUG_STMTS)
{
- gcc_assert (!adjust_vec);
- adjust_vec = VEC_alloc (adjust_info, stack, 32);
+ gcc_assert (!adjust_vec.exists ());
+ vec_stack_alloc (adjust_info, adjust_vec, 32);
}
if (e == exit_e)
@@ -2138,14 +2134,14 @@ static void
vect_update_inits_of_drs (loop_vec_info loop_vinfo, tree niters)
{
unsigned int i;
- VEC (data_reference_p, heap) *datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
+ vec<data_reference_p> datarefs = LOOP_VINFO_DATAREFS (loop_vinfo);
struct data_reference *dr;
if (dump_enabled_p ())
dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, vect_location,
"=== vect_update_inits_of_dr ===");
- FOR_EACH_VEC_ELT (data_reference_p, datarefs, i, dr)
+ FOR_EACH_VEC_ELT (datarefs, i, dr)
vect_update_init_of_dr (dr, niters);
}
@@ -2267,7 +2263,7 @@ vect_create_cond_for_align_checks (loop_vec_info loop_vinfo,
gimple_seq *cond_expr_stmt_list)
{
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
- VEC(gimple,heap) *may_misalign_stmts
+ vec<gimple> may_misalign_stmts
= LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo);
gimple ref_stmt;
int mask = LOOP_VINFO_PTR_MASK (loop_vinfo);
@@ -2290,7 +2286,7 @@ vect_create_cond_for_align_checks (loop_vec_info loop_vinfo,
/* Create expression (mask & (dr_1 || ... || dr_n)) where dr_i is the address
of the first vector of the i'th data reference. */
- FOR_EACH_VEC_ELT (gimple, may_misalign_stmts, i, ref_stmt)
+ FOR_EACH_VEC_ELT (may_misalign_stmts, i, ref_stmt)
{
gimple_seq new_stmt_list = NULL;
tree addr_base;
@@ -2422,7 +2418,7 @@ vect_create_cond_for_alias_checks (loop_vec_info loop_vinfo,
gimple_seq * cond_expr_stmt_list)
{
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
- VEC (ddr_p, heap) * may_alias_ddrs =
+ vec<ddr_p> may_alias_ddrs =
LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo);
int vect_factor = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
tree scalar_loop_iters = LOOP_VINFO_NITERS (loop_vinfo);
@@ -2440,10 +2436,10 @@ vect_create_cond_for_alias_checks (loop_vec_info loop_vinfo,
((store_ptr_n + store_segment_length_n) <= load_ptr_n)
|| (load_ptr_n + load_segment_length_n) <= store_ptr_n)) */
- if (VEC_empty (ddr_p, may_alias_ddrs))
+ if (may_alias_ddrs.is_empty ())
return;
- FOR_EACH_VEC_ELT (ddr_p, may_alias_ddrs, i, ddr)
+ FOR_EACH_VEC_ELT (may_alias_ddrs, i, ddr)
{
struct data_reference *dr_a, *dr_b;
gimple dr_group_first_a, dr_group_first_b;
@@ -2518,7 +2514,7 @@ vect_create_cond_for_alias_checks (loop_vec_info loop_vinfo,
if (dump_enabled_p ())
dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, vect_location,
"created %u versioning for alias checks.\n",
- VEC_length (ddr_p, may_alias_ddrs));
+ may_alias_ddrs.length ());
}
diff --git a/gcc/tree-vect-loop.c b/gcc/tree-vect-loop.c
index 5e99857efd6..6a86c9155d1 100644
--- a/gcc/tree-vect-loop.c
+++ b/gcc/tree-vect-loop.c
@@ -551,7 +551,8 @@ vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop)
{
basic_block bb = loop->header;
tree dumy;
- VEC(gimple,heap) *worklist = VEC_alloc (gimple, heap, 64);
+ vec<gimple> worklist;
+ worklist.create (64);
gimple_stmt_iterator gsi;
bool double_reduc;
@@ -600,7 +601,7 @@ vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop)
if (!access_fn
|| !vect_is_simple_iv_evolution (loop->num, access_fn, &dumy, &dumy))
{
- VEC_safe_push (gimple, heap, worklist, phi);
+ worklist.safe_push (phi);
continue;
}
@@ -613,9 +614,9 @@ vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop)
/* Second - identify all reductions and nested cycles. */
- while (VEC_length (gimple, worklist) > 0)
+ while (worklist.length () > 0)
{
- gimple phi = VEC_pop (gimple, worklist);
+ gimple phi = worklist.pop ();
tree def = PHI_RESULT (phi);
stmt_vec_info stmt_vinfo = vinfo_for_stmt (phi);
gimple reduc_stmt;
@@ -668,9 +669,7 @@ vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop)
vect_reduction_def;
/* Store the reduction cycles for possible vectorization in
loop-aware SLP. */
- VEC_safe_push (gimple, heap,
- LOOP_VINFO_REDUCTIONS (loop_vinfo),
- reduc_stmt);
+ LOOP_VINFO_REDUCTIONS (loop_vinfo).safe_push (reduc_stmt);
}
}
}
@@ -680,7 +679,7 @@ vect_analyze_scalar_cycles_1 (loop_vec_info loop_vinfo, struct loop *loop)
"Unknown def-use cycle pattern.");
}
- VEC_free (gimple, heap, worklist);
+ worklist.release ();
}
@@ -862,20 +861,18 @@ new_loop_vec_info (struct loop *loop)
LOOP_VINFO_VECTORIZABLE_P (res) = 0;
LOOP_PEELING_FOR_ALIGNMENT (res) = 0;
LOOP_VINFO_VECT_FACTOR (res) = 0;
- LOOP_VINFO_LOOP_NEST (res) = VEC_alloc (loop_p, heap, 3);
- LOOP_VINFO_DATAREFS (res) = VEC_alloc (data_reference_p, heap, 10);
- LOOP_VINFO_DDRS (res) = VEC_alloc (ddr_p, heap, 10 * 10);
+ LOOP_VINFO_LOOP_NEST (res).create (3);
+ LOOP_VINFO_DATAREFS (res).create (10);
+ LOOP_VINFO_DDRS (res).create (10 * 10);
LOOP_VINFO_UNALIGNED_DR (res) = NULL;
- LOOP_VINFO_MAY_MISALIGN_STMTS (res) =
- VEC_alloc (gimple, heap,
- PARAM_VALUE (PARAM_VECT_MAX_VERSION_FOR_ALIGNMENT_CHECKS));
- LOOP_VINFO_MAY_ALIAS_DDRS (res) =
- VEC_alloc (ddr_p, heap,
- PARAM_VALUE (PARAM_VECT_MAX_VERSION_FOR_ALIAS_CHECKS));
- LOOP_VINFO_GROUPED_STORES (res) = VEC_alloc (gimple, heap, 10);
- LOOP_VINFO_REDUCTIONS (res) = VEC_alloc (gimple, heap, 10);
- LOOP_VINFO_REDUCTION_CHAINS (res) = VEC_alloc (gimple, heap, 10);
- LOOP_VINFO_SLP_INSTANCES (res) = VEC_alloc (slp_instance, heap, 10);
+ LOOP_VINFO_MAY_MISALIGN_STMTS (res).create (
+ PARAM_VALUE (PARAM_VECT_MAX_VERSION_FOR_ALIGNMENT_CHECKS));
+ LOOP_VINFO_MAY_ALIAS_DDRS (res).create (
+ PARAM_VALUE (PARAM_VECT_MAX_VERSION_FOR_ALIAS_CHECKS));
+ LOOP_VINFO_GROUPED_STORES (res).create (10);
+ LOOP_VINFO_REDUCTIONS (res).create (10);
+ LOOP_VINFO_REDUCTION_CHAINS (res).create (10);
+ LOOP_VINFO_SLP_INSTANCES (res).create (10);
LOOP_VINFO_SLP_UNROLLING_FACTOR (res) = 1;
LOOP_VINFO_PEELING_HTAB (res) = NULL;
LOOP_VINFO_TARGET_COST_DATA (res) = init_cost (loop);
@@ -899,7 +896,7 @@ destroy_loop_vec_info (loop_vec_info loop_vinfo, bool clean_stmts)
int nbbs;
gimple_stmt_iterator si;
int j;
- VEC (slp_instance, heap) *slp_instances;
+ vec<slp_instance> slp_instances;
slp_instance instance;
bool swapped;
@@ -917,9 +914,9 @@ destroy_loop_vec_info (loop_vec_info loop_vinfo, bool clean_stmts)
free (LOOP_VINFO_BBS (loop_vinfo));
free_data_refs (LOOP_VINFO_DATAREFS (loop_vinfo));
free_dependence_relations (LOOP_VINFO_DDRS (loop_vinfo));
- VEC_free (loop_p, heap, LOOP_VINFO_LOOP_NEST (loop_vinfo));
- VEC_free (gimple, heap, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo));
- VEC_free (ddr_p, heap, LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo));
+ LOOP_VINFO_LOOP_NEST (loop_vinfo).release ();
+ LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo).release ();
+ LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo).release ();
free (loop_vinfo);
loop->aux = NULL;
@@ -960,17 +957,17 @@ destroy_loop_vec_info (loop_vec_info loop_vinfo, bool clean_stmts)
free (LOOP_VINFO_BBS (loop_vinfo));
free_data_refs (LOOP_VINFO_DATAREFS (loop_vinfo));
free_dependence_relations (LOOP_VINFO_DDRS (loop_vinfo));
- VEC_free (loop_p, heap, LOOP_VINFO_LOOP_NEST (loop_vinfo));
- VEC_free (gimple, heap, LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo));
- VEC_free (ddr_p, heap, LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo));
+ LOOP_VINFO_LOOP_NEST (loop_vinfo).release ();
+ LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo).release ();
+ LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo).release ();
slp_instances = LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
- FOR_EACH_VEC_ELT (slp_instance, slp_instances, j, instance)
+ FOR_EACH_VEC_ELT (slp_instances, j, instance)
vect_free_slp_instance (instance);
- VEC_free (slp_instance, heap, LOOP_VINFO_SLP_INSTANCES (loop_vinfo));
- VEC_free (gimple, heap, LOOP_VINFO_GROUPED_STORES (loop_vinfo));
- VEC_free (gimple, heap, LOOP_VINFO_REDUCTIONS (loop_vinfo));
- VEC_free (gimple, heap, LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo));
+ LOOP_VINFO_SLP_INSTANCES (loop_vinfo).release ();
+ LOOP_VINFO_GROUPED_STORES (loop_vinfo).release ();
+ LOOP_VINFO_REDUCTIONS (loop_vinfo).release ();
+ LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo).release ();
if (LOOP_VINFO_PEELING_HTAB (loop_vinfo))
htab_delete (LOOP_VINFO_PEELING_HTAB (loop_vinfo));
@@ -2046,7 +2043,7 @@ vect_is_slp_reduction (loop_vec_info loop_info, gimple phi, gimple first_stmt)
/* Save the chain for further analysis in SLP detection. */
first = GROUP_FIRST_ELEMENT (vinfo_for_stmt (current_stmt));
- VEC_safe_push (gimple, heap, LOOP_VINFO_REDUCTION_CHAINS (loop_info), first);
+ LOOP_VINFO_REDUCTION_CHAINS (loop_info).safe_push (first);
GROUP_SIZE (vinfo_for_stmt (first)) = size;
return true;
@@ -2659,8 +2656,7 @@ vect_estimate_min_profitable_iters (loop_vec_info loop_vinfo,
if (LOOP_REQUIRES_VERSIONING_FOR_ALIGNMENT (loop_vinfo))
{
/* FIXME: Make cost depend on complexity of individual check. */
- unsigned len = VEC_length (gimple,
- LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo));
+ unsigned len = LOOP_VINFO_MAY_MISALIGN_STMTS (loop_vinfo).length ();
(void) add_stmt_cost (target_cost_data, len, vector_stmt, NULL, 0,
vect_prologue);
dump_printf (MSG_NOTE,
@@ -2672,7 +2668,7 @@ vect_estimate_min_profitable_iters (loop_vec_info loop_vinfo,
if (LOOP_REQUIRES_VERSIONING_FOR_ALIAS (loop_vinfo))
{
/* FIXME: Make cost depend on complexity of individual check. */
- unsigned len = VEC_length (ddr_p, LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo));
+ unsigned len = LOOP_VINFO_MAY_ALIAS_DDRS (loop_vinfo).length ();
(void) add_stmt_cost (target_cost_data, len, vector_stmt, NULL, 0,
vect_prologue);
dump_printf (MSG_NOTE,
@@ -2741,8 +2737,8 @@ vect_estimate_min_profitable_iters (loop_vec_info loop_vinfo,
int j;
void *data = LOOP_VINFO_TARGET_COST_DATA (loop_vinfo);
- prologue_cost_vec = VEC_alloc (stmt_info_for_cost, heap, 2);
- epilogue_cost_vec = VEC_alloc (stmt_info_for_cost, heap, 2);
+ prologue_cost_vec.create (2);
+ epilogue_cost_vec.create (2);
peel_iters_prologue = npeel;
(void) vect_get_known_peeling_cost (loop_vinfo, peel_iters_prologue,
@@ -2751,7 +2747,7 @@ vect_estimate_min_profitable_iters (loop_vec_info loop_vinfo,
&prologue_cost_vec,
&epilogue_cost_vec);
- FOR_EACH_VEC_ELT (stmt_info_for_cost, prologue_cost_vec, j, si)
+ FOR_EACH_VEC_ELT (prologue_cost_vec, j, si)
{
struct _stmt_vec_info *stmt_info
= si->stmt ? vinfo_for_stmt (si->stmt) : NULL;
@@ -2759,7 +2755,7 @@ vect_estimate_min_profitable_iters (loop_vec_info loop_vinfo,
si->misalign, vect_prologue);
}
- FOR_EACH_VEC_ELT (stmt_info_for_cost, epilogue_cost_vec, j, si)
+ FOR_EACH_VEC_ELT (epilogue_cost_vec, j, si)
{
struct _stmt_vec_info *stmt_info
= si->stmt ? vinfo_for_stmt (si->stmt) : NULL;
@@ -2767,8 +2763,8 @@ vect_estimate_min_profitable_iters (loop_vec_info loop_vinfo,
si->misalign, vect_epilogue);
}
- VEC_free (stmt_info_for_cost, heap, prologue_cost_vec);
- VEC_free (stmt_info_for_cost, heap, epilogue_cost_vec);
+ prologue_cost_vec.release ();
+ epilogue_cost_vec.release ();
}
/* FORNOW: The scalar outside cost is incremented in one of the
@@ -3140,7 +3136,7 @@ get_initial_def_for_induction (gimple iv_phi)
edge pe = loop_preheader_edge (loop);
struct loop *iv_loop;
basic_block new_bb;
- tree vec, vec_init, vec_step, t;
+ tree new_vec, vec_init, vec_step, t;
tree access_fn;
tree new_var;
tree new_name;
@@ -3211,7 +3207,7 @@ get_initial_def_for_induction (gimple iv_phi)
}
else
{
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *v;
/* iv_loop is the loop to be vectorized. Create:
vec_init = [X, X+S, X+2*S, X+3*S] (S = step_expr, X = init_expr) */
@@ -3223,7 +3219,7 @@ get_initial_def_for_induction (gimple iv_phi)
gcc_assert (!new_bb);
}
- v = VEC_alloc (constructor_elt, gc, nunits);
+ vec_alloc (v, nunits);
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, new_name);
for (i = 1; i < nunits; i++)
{
@@ -3247,8 +3243,8 @@ get_initial_def_for_induction (gimple iv_phi)
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, new_name);
}
/* Create a vector from [new_name_0, new_name_1, ..., new_name_nunits-1] */
- vec = build_constructor (vectype, v);
- vec_init = vect_init_vector (iv_phi, vec, vectype, NULL);
+ new_vec = build_constructor (vectype, v);
+ vec_init = vect_init_vector (iv_phi, new_vec, vectype, NULL);
}
@@ -3270,8 +3266,8 @@ get_initial_def_for_induction (gimple iv_phi)
gcc_assert (CONSTANT_CLASS_P (new_name));
stepvectype = get_vectype_for_scalar_type (TREE_TYPE (new_name));
gcc_assert (stepvectype);
- vec = build_vector_from_val (stepvectype, t);
- vec_step = vect_init_vector (iv_phi, vec, stepvectype, NULL);
+ new_vec = build_vector_from_val (stepvectype, t);
+ vec_step = vect_init_vector (iv_phi, new_vec, stepvectype, NULL);
/* Create the following def-use cycle:
@@ -3325,8 +3321,8 @@ get_initial_def_for_induction (gimple iv_phi)
expr, step_expr);
t = unshare_expr (new_name);
gcc_assert (CONSTANT_CLASS_P (new_name));
- vec = build_vector_from_val (stepvectype, t);
- vec_step = vect_init_vector (iv_phi, vec, stepvectype, NULL);
+ new_vec = build_vector_from_val (stepvectype, t);
+ vec_step = vect_init_vector (iv_phi, new_vec, stepvectype, NULL);
vec_def = induc_def;
prev_stmt_vinfo = vinfo_for_stmt (induction_phi);
@@ -3577,8 +3573,8 @@ get_initial_def_for_reduction (gimple stmt, tree init_val,
init_def = build_vector (vectype, elts);
else
{
- VEC(constructor_elt,gc) *v;
- v = VEC_alloc (constructor_elt, gc, nunits);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, nunits);
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init_val);
for (i = 1; i < nunits; ++i)
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[i]);
@@ -3670,9 +3666,9 @@ get_initial_def_for_reduction (gimple stmt, tree init_val,
*/
static void
-vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
+vect_create_epilog_for_reduction (vec<tree> vect_defs, gimple stmt,
int ncopies, enum tree_code reduc_code,
- VEC (gimple, heap) *reduction_phis,
+ vec<gimple> reduction_phis,
int reduc_index, bool double_reduc,
slp_tree slp_node)
{
@@ -3702,20 +3698,20 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
bool extract_scalar_result = false;
gimple use_stmt, orig_stmt, reduction_phi = NULL;
bool nested_in_vect_loop = false;
- VEC (gimple, heap) *new_phis = NULL;
- VEC (gimple, heap) *inner_phis = NULL;
+ vec<gimple> new_phis = vec<gimple>();
+ vec<gimple> inner_phis = vec<gimple>();
enum vect_def_type dt = vect_unknown_def_type;
int j, i;
- VEC (tree, heap) *scalar_results = NULL;
+ vec<tree> scalar_results = vec<tree>();
unsigned int group_size = 1, k, ratio;
- VEC (tree, heap) *vec_initial_defs = NULL;
- VEC (gimple, heap) *phis;
+ vec<tree> vec_initial_defs = vec<tree>();
+ vec<gimple> phis;
bool slp_reduc = false;
tree new_phi_result;
gimple inner_phi = NULL;
if (slp_node)
- group_size = VEC_length (gimple, SLP_TREE_SCALAR_STMTS (slp_node));
+ group_size = SLP_TREE_SCALAR_STMTS (slp_node).length ();
if (nested_in_vect_loop_p (loop, stmt))
{
@@ -3773,20 +3769,20 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
NULL, slp_node, reduc_index);
else
{
- vec_initial_defs = VEC_alloc (tree, heap, 1);
+ vec_initial_defs.create (1);
/* For the case of reduction, vect_get_vec_def_for_operand returns
the scalar def before the loop, that defines the initial value
of the reduction variable. */
vec_initial_def = vect_get_vec_def_for_operand (reduction_op, stmt,
&adjustment_def);
- VEC_quick_push (tree, vec_initial_defs, vec_initial_def);
+ vec_initial_defs.quick_push (vec_initial_def);
}
/* Set phi nodes arguments. */
- FOR_EACH_VEC_ELT (gimple, reduction_phis, i, phi)
+ FOR_EACH_VEC_ELT (reduction_phis, i, phi)
{
- tree vec_init_def = VEC_index (tree, vec_initial_defs, i);
- tree def = VEC_index (tree, vect_defs, i);
+ tree vec_init_def = vec_initial_defs[i];
+ tree def = vect_defs[i];
for (j = 0; j < ncopies; j++)
{
/* Set the loop-entry arg of the reduction-phi. */
@@ -3812,7 +3808,7 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
}
}
- VEC_free (tree, heap, vec_initial_defs);
+ vec_initial_defs.release ();
/* 2. Create epilog code.
The reduction epilog code operates across the elements of the vector
@@ -3847,8 +3843,8 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
exit_bb = single_exit (loop)->dest;
prev_phi_info = NULL;
- new_phis = VEC_alloc (gimple, heap, VEC_length (tree, vect_defs));
- FOR_EACH_VEC_ELT (tree, vect_defs, i, def)
+ new_phis.create (vect_defs.length ());
+ FOR_EACH_VEC_ELT (vect_defs, i, def)
{
for (j = 0; j < ncopies; j++)
{
@@ -3856,7 +3852,7 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
phi = create_phi_node (new_def, exit_bb);
set_vinfo_for_stmt (phi, new_stmt_vec_info (phi, loop_vinfo, NULL));
if (j == 0)
- VEC_quick_push (gimple, new_phis, phi);
+ new_phis.quick_push (phi);
else
{
def = vect_get_vec_def_for_stmt_copy (dt, def);
@@ -3874,8 +3870,8 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
{
loop = outer_loop;
exit_bb = single_exit (loop)->dest;
- inner_phis = VEC_alloc (gimple, heap, VEC_length (tree, vect_defs));
- FOR_EACH_VEC_ELT (gimple, new_phis, i, phi)
+ inner_phis.create (vect_defs.length ());
+ FOR_EACH_VEC_ELT (new_phis, i, phi)
{
tree new_result = copy_ssa_name (PHI_RESULT (phi), NULL);
gimple outer_phi = create_phi_node (new_result, exit_bb);
@@ -3883,8 +3879,8 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
PHI_RESULT (phi));
set_vinfo_for_stmt (outer_phi, new_stmt_vec_info (outer_phi,
loop_vinfo, NULL));
- VEC_quick_push (gimple, inner_phis, phi);
- VEC_replace (gimple, new_phis, i, outer_phi);
+ inner_phis.quick_push (phi);
+ new_phis[i] = outer_phi;
prev_phi_info = vinfo_for_stmt (outer_phi);
while (STMT_VINFO_RELATED_STMT (vinfo_for_stmt (phi)))
{
@@ -3934,7 +3930,7 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
scalar_dest = gimple_assign_lhs (orig_stmt);
scalar_type = TREE_TYPE (scalar_dest);
- scalar_results = VEC_alloc (tree, heap, group_size);
+ scalar_results.create (group_size);
new_scalar_dest = vect_create_destination_var (scalar_dest, NULL);
bitsize = TYPE_SIZE (scalar_type);
@@ -3963,14 +3959,14 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
one vector. */
if (GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
{
- tree first_vect = PHI_RESULT (VEC_index (gimple, new_phis, 0));
+ tree first_vect = PHI_RESULT (new_phis[0]);
tree tmp;
gimple new_vec_stmt = NULL;
vec_dest = vect_create_destination_var (scalar_dest, vectype);
- for (k = 1; k < VEC_length (gimple, new_phis); k++)
+ for (k = 1; k < new_phis.length (); k++)
{
- gimple next_phi = VEC_index (gimple, new_phis, k);
+ gimple next_phi = new_phis[k];
tree second_vect = PHI_RESULT (next_phi);
tmp = build2 (code, vectype, first_vect, second_vect);
@@ -3983,12 +3979,12 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
new_phi_result = first_vect;
if (new_vec_stmt)
{
- VEC_truncate (gimple, new_phis, 0);
- VEC_safe_push (gimple, heap, new_phis, new_vec_stmt);
+ new_phis.truncate (0);
+ new_phis.safe_push (new_vec_stmt);
}
}
else
- new_phi_result = PHI_RESULT (VEC_index (gimple, new_phis, 0));
+ new_phi_result = PHI_RESULT (new_phis[0]);
/* 2.3 Create the reduction code, using one of the three schemes described
above. In SLP we simply need to extract all the elements from the
@@ -4097,7 +4093,7 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
"Reduce using scalar code. ");
vec_size_in_bits = tree_low_cst (TYPE_SIZE (vectype), 1);
- FOR_EACH_VEC_ELT (gimple, new_phis, i, new_phi)
+ FOR_EACH_VEC_ELT (new_phis, i, new_phi)
{
if (gimple_code (new_phi) == GIMPLE_PHI)
vec_temp = PHI_RESULT (new_phi);
@@ -4113,7 +4109,7 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
/* In SLP we don't need to apply reduction operation, so we just
collect s' values in SCALAR_RESULTS. */
if (slp_reduc)
- VEC_safe_push (tree, heap, scalar_results, new_temp);
+ scalar_results.safe_push (new_temp);
for (bit_offset = element_bitsize;
bit_offset < vec_size_in_bits;
@@ -4133,7 +4129,7 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
/* In SLP we don't need to apply reduction operation, so
we just collect s' values in SCALAR_RESULTS. */
new_temp = new_name;
- VEC_safe_push (tree, heap, scalar_results, new_name);
+ scalar_results.safe_push (new_name);
}
else
{
@@ -4156,21 +4152,21 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
gimple new_stmt;
/* Reduce multiple scalar results in case of SLP unrolling. */
- for (j = group_size; VEC_iterate (tree, scalar_results, j, res);
+ for (j = group_size; scalar_results.iterate (j, &res);
j++)
{
- first_res = VEC_index (tree, scalar_results, j % group_size);
+ first_res = scalar_results[j % group_size];
new_stmt = gimple_build_assign_with_ops (code,
new_scalar_dest, first_res, res);
new_res = make_ssa_name (new_scalar_dest, new_stmt);
gimple_assign_set_lhs (new_stmt, new_res);
gsi_insert_before (&exit_gsi, new_stmt, GSI_SAME_STMT);
- VEC_replace (tree, scalar_results, j % group_size, new_res);
+ scalar_results[j % group_size] = new_res;
}
}
else
/* Not SLP - we have one scalar to keep in SCALAR_RESULTS. */
- VEC_safe_push (tree, heap, scalar_results, new_temp);
+ scalar_results.safe_push (new_temp);
extract_scalar_result = false;
}
@@ -4199,7 +4195,7 @@ vect_create_epilog_for_reduction (VEC (tree, heap) *vect_defs, gimple stmt,
new_temp = make_ssa_name (new_scalar_dest, epilog_stmt);
gimple_assign_set_lhs (epilog_stmt, new_temp);
gsi_insert_before (&exit_gsi, epilog_stmt, GSI_SAME_STMT);
- VEC_safe_push (tree, heap, scalar_results, new_temp);
+ scalar_results.safe_push (new_temp);
}
vect_finalize_reduction:
@@ -4217,14 +4213,14 @@ vect_finalize_reduction:
gcc_assert (!slp_reduc);
if (nested_in_vect_loop)
{
- new_phi = VEC_index (gimple, new_phis, 0);
+ new_phi = new_phis[0];
gcc_assert (TREE_CODE (TREE_TYPE (adjustment_def)) == VECTOR_TYPE);
expr = build2 (code, vectype, PHI_RESULT (new_phi), adjustment_def);
new_dest = vect_create_destination_var (scalar_dest, vectype);
}
else
{
- new_temp = VEC_index (tree, scalar_results, 0);
+ new_temp = scalar_results[0];
gcc_assert (TREE_CODE (TREE_TYPE (adjustment_def)) != VECTOR_TYPE);
expr = build2 (code, scalar_type, new_temp, adjustment_def);
new_dest = vect_create_destination_var (scalar_dest, scalar_type);
@@ -4244,14 +4240,14 @@ vect_finalize_reduction:
STMT_VINFO_RELATED_STMT (vinfo_for_stmt (new_phi));
if (!double_reduc)
- VEC_quick_push (tree, scalar_results, new_temp);
+ scalar_results.quick_push (new_temp);
else
- VEC_replace (tree, scalar_results, 0, new_temp);
+ scalar_results[0] = new_temp;
}
else
- VEC_replace (tree, scalar_results, 0, new_temp);
+ scalar_results[0] = new_temp;
- VEC_replace (gimple, new_phis, 0, epilog_stmt);
+ new_phis[0] = epilog_stmt;
}
/* 2.6 Handle the loop-exit phis. Replace the uses of scalar loop-exit
@@ -4286,9 +4282,8 @@ vect_finalize_reduction:
exit phi node. */
if (GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
{
- scalar_dest = gimple_assign_lhs (VEC_index (gimple,
- SLP_TREE_SCALAR_STMTS (slp_node),
- group_size - 1));
+ scalar_dest = gimple_assign_lhs (
+ SLP_TREE_SCALAR_STMTS (slp_node)[group_size - 1]);
group_size = 1;
}
@@ -4298,10 +4293,10 @@ vect_finalize_reduction:
(GROUP_SIZE / number of new vector stmts) scalar results correspond to
the first vector stmt, etc.
(RATIO is equal to (GROUP_SIZE / number of new vector stmts)). */
- if (group_size > VEC_length (gimple, new_phis))
+ if (group_size > new_phis.length ())
{
- ratio = group_size / VEC_length (gimple, new_phis);
- gcc_assert (!(group_size % VEC_length (gimple, new_phis)));
+ ratio = group_size / new_phis.length ();
+ gcc_assert (!(group_size % new_phis.length ()));
}
else
ratio = 1;
@@ -4310,16 +4305,15 @@ vect_finalize_reduction:
{
if (k % ratio == 0)
{
- epilog_stmt = VEC_index (gimple, new_phis, k / ratio);
- reduction_phi = VEC_index (gimple, reduction_phis, k / ratio);
+ epilog_stmt = new_phis[k / ratio];
+ reduction_phi = reduction_phis[k / ratio];
if (double_reduc)
- inner_phi = VEC_index (gimple, inner_phis, k / ratio);
+ inner_phi = inner_phis[k / ratio];
}
if (slp_reduc)
{
- gimple current_stmt = VEC_index (gimple,
- SLP_TREE_SCALAR_STMTS (slp_node), k);
+ gimple current_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[k];
orig_stmt = STMT_VINFO_RELATED_STMT (vinfo_for_stmt (current_stmt));
/* SLP statements can't participate in patterns. */
@@ -4327,19 +4321,19 @@ vect_finalize_reduction:
scalar_dest = gimple_assign_lhs (current_stmt);
}
- phis = VEC_alloc (gimple, heap, 3);
+ phis.create (3);
/* Find the loop-closed-use at the loop exit of the original scalar
result. (The reduction result is expected to have two immediate uses -
one at the latch block, and one at the loop exit). */
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, scalar_dest)
if (!flow_bb_inside_loop_p (loop, gimple_bb (USE_STMT (use_p))))
- VEC_safe_push (gimple, heap, phis, USE_STMT (use_p));
+ phis.safe_push (USE_STMT (use_p));
/* We expect to have found an exit_phi because of loop-closed-ssa
form. */
- gcc_assert (!VEC_empty (gimple, phis));
+ gcc_assert (!phis.is_empty ());
- FOR_EACH_VEC_ELT (gimple, phis, i, exit_phi)
+ FOR_EACH_VEC_ELT (phis, i, exit_phi)
{
if (outer_loop)
{
@@ -4445,7 +4439,7 @@ vect_finalize_reduction:
}
}
- VEC_free (gimple, heap, phis);
+ phis.release ();
if (nested_in_vect_loop)
{
if (double_reduc)
@@ -4454,7 +4448,7 @@ vect_finalize_reduction:
continue;
}
- phis = VEC_alloc (gimple, heap, 3);
+ phis.create (3);
/* Find the loop-closed-use at the loop exit of the original scalar
result. (The reduction result is expected to have two immediate uses,
one at the latch block, and one at the loop exit). For double
@@ -4462,7 +4456,7 @@ vect_finalize_reduction:
FOR_EACH_IMM_USE_FAST (use_p, imm_iter, scalar_dest)
{
if (!flow_bb_inside_loop_p (loop, gimple_bb (USE_STMT (use_p))))
- VEC_safe_push (gimple, heap, phis, USE_STMT (use_p));
+ phis.safe_push (USE_STMT (use_p));
else
{
if (double_reduc && gimple_code (USE_STMT (use_p)) == GIMPLE_PHI)
@@ -4473,28 +4467,27 @@ vect_finalize_reduction:
{
if (!flow_bb_inside_loop_p (loop,
gimple_bb (USE_STMT (phi_use_p))))
- VEC_safe_push (gimple, heap, phis,
- USE_STMT (phi_use_p));
+ phis.safe_push (USE_STMT (phi_use_p));
}
}
}
}
- FOR_EACH_VEC_ELT (gimple, phis, i, exit_phi)
+ FOR_EACH_VEC_ELT (phis, i, exit_phi)
{
/* Replace the uses: */
orig_name = PHI_RESULT (exit_phi);
- scalar_result = VEC_index (tree, scalar_results, k);
+ scalar_result = scalar_results[k];
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, orig_name)
FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
SET_USE (use_p, scalar_result);
}
- VEC_free (gimple, heap, phis);
+ phis.release ();
}
- VEC_free (tree, heap, scalar_results);
- VEC_free (gimple, heap, new_phis);
+ scalar_results.release ();
+ new_phis.release ();
}
@@ -4582,8 +4575,10 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
struct loop * def_stmt_loop, *outer_loop = NULL;
tree def_arg;
gimple def_arg_stmt;
- VEC (tree, heap) *vec_oprnds0 = NULL, *vec_oprnds1 = NULL, *vect_defs = NULL;
- VEC (gimple, heap) *phis = NULL;
+ vec<tree> vec_oprnds0 = vec<tree>();
+ vec<tree> vec_oprnds1 = vec<tree>();
+ vec<tree> vect_defs = vec<tree>();
+ vec<gimple> phis = vec<gimple>();
int vec_num;
tree def0, def1, tem, op0, op1 = NULL_TREE;
@@ -5018,15 +5013,15 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
else
{
vec_num = 1;
- vec_oprnds0 = VEC_alloc (tree, heap, 1);
+ vec_oprnds0.create (1);
if (op_type == ternary_op)
- vec_oprnds1 = VEC_alloc (tree, heap, 1);
+ vec_oprnds1.create (1);
}
- phis = VEC_alloc (gimple, heap, vec_num);
- vect_defs = VEC_alloc (tree, heap, vec_num);
+ phis.create (vec_num);
+ vect_defs.create (vec_num);
if (!slp_node)
- VEC_quick_push (tree, vect_defs, NULL_TREE);
+ vect_defs.quick_push (NULL_TREE);
for (j = 0; j < ncopies; j++)
{
@@ -5041,7 +5036,7 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
new_stmt_vec_info (new_phi, loop_vinfo,
NULL));
if (j == 0 || slp_node)
- VEC_quick_push (gimple, phis, new_phi);
+ phis.quick_push (new_phi);
}
}
@@ -5049,7 +5044,7 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
{
gcc_assert (!slp_node);
vectorizable_condition (stmt, gsi, vec_stmt,
- PHI_RESULT (VEC_index (gimple, phis, 0)),
+ PHI_RESULT (phis[0]),
reduc_index, NULL);
/* Multiple types are not supported for condition. */
break;
@@ -5074,12 +5069,12 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
{
loop_vec_def0 = vect_get_vec_def_for_operand (ops[!reduc_index],
stmt, NULL);
- VEC_quick_push (tree, vec_oprnds0, loop_vec_def0);
+ vec_oprnds0.quick_push (loop_vec_def0);
if (op_type == ternary_op)
{
loop_vec_def1 = vect_get_vec_def_for_operand (op1, stmt,
NULL);
- VEC_quick_push (tree, vec_oprnds1, loop_vec_def1);
+ vec_oprnds1.quick_push (loop_vec_def1);
}
}
}
@@ -5095,14 +5090,14 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
&dummy_stmt, &dummy, &dt);
loop_vec_def0 = vect_get_vec_def_for_stmt_copy (dt,
loop_vec_def0);
- VEC_replace (tree, vec_oprnds0, 0, loop_vec_def0);
+ vec_oprnds0[0] = loop_vec_def0;
if (op_type == ternary_op)
{
vect_is_simple_use (op1, stmt, loop_vinfo, NULL, &dummy_stmt,
&dummy, &dt);
loop_vec_def1 = vect_get_vec_def_for_stmt_copy (dt,
loop_vec_def1);
- VEC_replace (tree, vec_oprnds1, 0, loop_vec_def1);
+ vec_oprnds1[0] = loop_vec_def1;
}
}
@@ -5112,10 +5107,10 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
STMT_VINFO_RELATED_STMT (prev_phi_info) = new_phi;
}
- FOR_EACH_VEC_ELT (tree, vec_oprnds0, i, def0)
+ FOR_EACH_VEC_ELT (vec_oprnds0, i, def0)
{
if (slp_node)
- reduc_def = PHI_RESULT (VEC_index (gimple, phis, i));
+ reduc_def = PHI_RESULT (phis[i]);
else
{
if (!single_defuse_cycle || j == 0)
@@ -5123,7 +5118,7 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
}
def1 = ((op_type == ternary_op)
- ? VEC_index (tree, vec_oprnds1, i) : NULL);
+ ? vec_oprnds1[i] : NULL);
if (op_type == binary_op)
{
if (reduc_index == 0)
@@ -5151,11 +5146,11 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
if (slp_node)
{
- VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node), new_stmt);
- VEC_quick_push (tree, vect_defs, new_temp);
+ SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
+ vect_defs.quick_push (new_temp);
}
else
- VEC_replace (tree, vect_defs, 0, new_temp);
+ vect_defs[0] = new_temp;
}
if (slp_node)
@@ -5175,17 +5170,16 @@ vectorizable_reduction (gimple stmt, gimple_stmt_iterator *gsi,
if ((!single_defuse_cycle || code == COND_EXPR) && !slp_node)
{
new_temp = gimple_assign_lhs (*vec_stmt);
- VEC_replace (tree, vect_defs, 0, new_temp);
+ vect_defs[0] = new_temp;
}
vect_create_epilog_for_reduction (vect_defs, stmt, epilog_copies,
epilog_reduc_code, phis, reduc_index,
double_reduc, slp_node);
- VEC_free (gimple, heap, phis);
- VEC_free (tree, heap, vec_oprnds0);
- if (vec_oprnds1)
- VEC_free (tree, heap, vec_oprnds1);
+ phis.release ();
+ vec_oprnds0.release ();
+ vec_oprnds1.release ();
return true;
}
diff --git a/gcc/tree-vect-patterns.c b/gcc/tree-vect-patterns.c
index dea3595eb34..4b30ab2395c 100644
--- a/gcc/tree-vect-patterns.c
+++ b/gcc/tree-vect-patterns.c
@@ -40,24 +40,24 @@ along with GCC; see the file COPYING3. If not see
#include "dumpfile.h"
/* Pattern recognition functions */
-static gimple vect_recog_widen_sum_pattern (VEC (gimple, heap) **, tree *,
+static gimple vect_recog_widen_sum_pattern (vec<gimple> *, tree *,
tree *);
-static gimple vect_recog_widen_mult_pattern (VEC (gimple, heap) **, tree *,
+static gimple vect_recog_widen_mult_pattern (vec<gimple> *, tree *,
tree *);
-static gimple vect_recog_dot_prod_pattern (VEC (gimple, heap) **, tree *,
+static gimple vect_recog_dot_prod_pattern (vec<gimple> *, tree *,
tree *);
-static gimple vect_recog_pow_pattern (VEC (gimple, heap) **, tree *, tree *);
-static gimple vect_recog_over_widening_pattern (VEC (gimple, heap) **, tree *,
+static gimple vect_recog_pow_pattern (vec<gimple> *, tree *, tree *);
+static gimple vect_recog_over_widening_pattern (vec<gimple> *, tree *,
tree *);
-static gimple vect_recog_widen_shift_pattern (VEC (gimple, heap) **,
+static gimple vect_recog_widen_shift_pattern (vec<gimple> *,
tree *, tree *);
-static gimple vect_recog_vector_vector_shift_pattern (VEC (gimple, heap) **,
+static gimple vect_recog_vector_vector_shift_pattern (vec<gimple> *,
tree *, tree *);
-static gimple vect_recog_divmod_pattern (VEC (gimple, heap) **,
+static gimple vect_recog_divmod_pattern (vec<gimple> *,
tree *, tree *);
-static gimple vect_recog_mixed_size_cond_pattern (VEC (gimple, heap) **,
+static gimple vect_recog_mixed_size_cond_pattern (vec<gimple> *,
tree *, tree *);
-static gimple vect_recog_bool_pattern (VEC (gimple, heap) **, tree *, tree *);
+static gimple vect_recog_bool_pattern (vec<gimple> *, tree *, tree *);
static vect_recog_func_ptr vect_vect_recog_func_ptrs[NUM_PATTERNS] = {
vect_recog_widen_mult_pattern,
vect_recog_widen_sum_pattern,
@@ -255,10 +255,10 @@ vect_recog_temp_ssa_var (tree type, gimple stmt)
inner-loop nested in an outer-loop that us being vectorized). */
static gimple
-vect_recog_dot_prod_pattern (VEC (gimple, heap) **stmts, tree *type_in,
+vect_recog_dot_prod_pattern (vec<gimple> *stmts, tree *type_in,
tree *type_out)
{
- gimple stmt, last_stmt = VEC_index (gimple, *stmts, 0);
+ gimple stmt, last_stmt = (*stmts)[0];
tree oprnd0, oprnd1;
tree oprnd00, oprnd01;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
@@ -446,7 +446,7 @@ vect_recog_dot_prod_pattern (VEC (gimple, heap) **stmts, tree *type_in,
static bool
vect_handle_widen_op_by_const (gimple stmt, enum tree_code code,
tree const_oprnd, tree *oprnd,
- VEC (gimple, heap) **stmts, tree type,
+ vec<gimple> *stmts, tree type,
tree *half_type, gimple def_stmt)
{
tree new_type, new_oprnd;
@@ -491,7 +491,7 @@ vect_handle_widen_op_by_const (gimple stmt, enum tree_code code,
|| TREE_TYPE (gimple_assign_lhs (new_stmt)) != new_type)
return false;
- VEC_safe_push (gimple, heap, *stmts, def_stmt);
+ stmts->safe_push (def_stmt);
*oprnd = gimple_assign_lhs (new_stmt);
}
else
@@ -502,7 +502,7 @@ vect_handle_widen_op_by_const (gimple stmt, enum tree_code code,
new_stmt = gimple_build_assign_with_ops (NOP_EXPR, new_oprnd, *oprnd,
NULL_TREE);
STMT_VINFO_RELATED_STMT (vinfo_for_stmt (def_stmt)) = new_stmt;
- VEC_safe_push (gimple, heap, *stmts, def_stmt);
+ stmts->safe_push (def_stmt);
*oprnd = new_oprnd;
}
@@ -585,10 +585,10 @@ vect_handle_widen_op_by_const (gimple stmt, enum tree_code code,
*/
static gimple
-vect_recog_widen_mult_pattern (VEC (gimple, heap) **stmts,
+vect_recog_widen_mult_pattern (vec<gimple> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = VEC_pop (gimple, *stmts);
+ gimple last_stmt = stmts->pop ();
gimple def_stmt0, def_stmt1;
tree oprnd0, oprnd1;
tree type, half_type0, half_type1;
@@ -597,7 +597,7 @@ vect_recog_widen_mult_pattern (VEC (gimple, heap) **stmts,
tree var;
enum tree_code dummy_code;
int dummy_int;
- VEC (tree, heap) *dummy_vec;
+ vec<tree> dummy_vec;
bool op1_ok;
bool promotion;
@@ -702,7 +702,7 @@ vect_recog_widen_mult_pattern (VEC (gimple, heap) **stmts,
if (dump_enabled_p ())
dump_gimple_stmt_loc (MSG_NOTE, vect_location, TDF_SLIM, pattern_stmt, 0);
- VEC_safe_push (gimple, heap, *stmts, last_stmt);
+ stmts->safe_push (last_stmt);
return pattern_stmt;
}
@@ -734,10 +734,10 @@ vect_recog_widen_mult_pattern (VEC (gimple, heap) **stmts,
*/
static gimple
-vect_recog_pow_pattern (VEC (gimple, heap) **stmts, tree *type_in,
+vect_recog_pow_pattern (vec<gimple> *stmts, tree *type_in,
tree *type_out)
{
- gimple last_stmt = VEC_index (gimple, *stmts, 0);
+ gimple last_stmt = (*stmts)[0];
tree fn, base, exp = NULL;
gimple stmt;
tree var;
@@ -847,10 +847,10 @@ vect_recog_pow_pattern (VEC (gimple, heap) **stmts, tree *type_in,
inner-loop nested in an outer-loop that us being vectorized). */
static gimple
-vect_recog_widen_sum_pattern (VEC (gimple, heap) **stmts, tree *type_in,
+vect_recog_widen_sum_pattern (vec<gimple> *stmts, tree *type_in,
tree *type_out)
{
- gimple stmt, last_stmt = VEC_index (gimple, *stmts, 0);
+ gimple stmt, last_stmt = (*stmts)[0];
tree oprnd0, oprnd1;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
tree type, half_type;
@@ -958,7 +958,7 @@ vect_recog_widen_sum_pattern (VEC (gimple, heap) **stmts, tree *type_in,
static bool
vect_operation_fits_smaller_type (gimple stmt, tree def, tree *new_type,
tree *op0, tree *op1, gimple *new_def_stmt,
- VEC (gimple, heap) **stmts)
+ vec<gimple> *stmts)
{
enum tree_code code;
tree const_oprnd, oprnd;
@@ -1096,7 +1096,7 @@ vect_operation_fits_smaller_type (gimple stmt, tree def, tree *new_type,
|| TREE_TYPE (gimple_assign_lhs (new_stmt)) != interm_type)
return false;
- VEC_safe_push (gimple, heap, *stmts, def_stmt);
+ stmts->safe_push (def_stmt);
oprnd = gimple_assign_lhs (new_stmt);
}
else
@@ -1107,7 +1107,7 @@ vect_operation_fits_smaller_type (gimple stmt, tree def, tree *new_type,
new_stmt = gimple_build_assign_with_ops (NOP_EXPR, new_oprnd,
oprnd, NULL_TREE);
STMT_VINFO_RELATED_STMT (vinfo_for_stmt (def_stmt)) = new_stmt;
- VEC_safe_push (gimple, heap, *stmts, def_stmt);
+ stmts->safe_push (def_stmt);
oprnd = new_oprnd;
}
}
@@ -1163,10 +1163,10 @@ vect_operation_fits_smaller_type (gimple stmt, tree def, tree *new_type,
demotion operation. We also check that S3 and S4 have only one use. */
static gimple
-vect_recog_over_widening_pattern (VEC (gimple, heap) **stmts,
+vect_recog_over_widening_pattern (vec<gimple> *stmts,
tree *type_in, tree *type_out)
{
- gimple stmt = VEC_pop (gimple, *stmts);
+ gimple stmt = stmts->pop ();
gimple pattern_stmt = NULL, new_def_stmt, prev_stmt = NULL, use_stmt = NULL;
tree op0, op1, vectype = NULL_TREE, use_lhs, use_type;
tree var = NULL_TREE, new_type = NULL_TREE, new_oprnd;
@@ -1208,7 +1208,7 @@ vect_recog_over_widening_pattern (VEC (gimple, heap) **stmts,
in the sequence. Therefore, we only add the original statement to
the list if we know that it is not the last. */
if (prev_stmt)
- VEC_safe_push (gimple, heap, *stmts, prev_stmt);
+ stmts->safe_push (prev_stmt);
var = vect_recog_temp_ssa_var (new_type, NULL);
pattern_stmt
@@ -1266,7 +1266,7 @@ vect_recog_over_widening_pattern (VEC (gimple, heap) **stmts,
statement created for PREV_STMT. Therefore, we add PREV_STMT
to the list in order to mark it later in vect_pattern_recog_1. */
if (prev_stmt)
- VEC_safe_push (gimple, heap, *stmts, prev_stmt);
+ stmts->safe_push (prev_stmt);
}
else
{
@@ -1278,7 +1278,7 @@ vect_recog_over_widening_pattern (VEC (gimple, heap) **stmts,
*type_out = NULL_TREE;
}
- VEC_safe_push (gimple, heap, *stmts, use_stmt);
+ stmts->safe_push (use_stmt);
}
else
/* TODO: support general case, create a conversion to the correct type. */
@@ -1353,10 +1353,10 @@ vect_recog_over_widening_pattern (VEC (gimple, heap) **stmts,
WIDEN_LSHIFT_EXPR <a_t, CONST>. */
static gimple
-vect_recog_widen_shift_pattern (VEC (gimple, heap) **stmts,
+vect_recog_widen_shift_pattern (vec<gimple> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = VEC_pop (gimple, *stmts);
+ gimple last_stmt = stmts->pop ();
gimple def_stmt0;
tree oprnd0, oprnd1;
tree type, half_type0;
@@ -1365,7 +1365,7 @@ vect_recog_widen_shift_pattern (VEC (gimple, heap) **stmts,
tree var;
enum tree_code dummy_code;
int dummy_int;
- VEC (tree, heap) * dummy_vec;
+ vec<tree> dummy_vec;
gimple use_stmt;
bool promotion;
@@ -1448,7 +1448,7 @@ vect_recog_widen_shift_pattern (VEC (gimple, heap) **stmts,
if (dump_enabled_p ())
dump_gimple_stmt_loc (MSG_NOTE, vect_location, TDF_SLIM, pattern_stmt, 0);
- VEC_safe_push (gimple, heap, *stmts, last_stmt);
+ stmts->safe_push (last_stmt);
return pattern_stmt;
}
@@ -1493,10 +1493,10 @@ vect_recog_widen_shift_pattern (VEC (gimple, heap) **stmts,
S3 stmt. */
static gimple
-vect_recog_vector_vector_shift_pattern (VEC (gimple, heap) **stmts,
+vect_recog_vector_vector_shift_pattern (vec<gimple> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = VEC_pop (gimple, *stmts);
+ gimple last_stmt = stmts->pop ();
tree oprnd0, oprnd1, lhs, var;
gimple pattern_stmt, def_stmt;
enum tree_code rhs_code;
@@ -1578,7 +1578,7 @@ vect_recog_vector_vector_shift_pattern (VEC (gimple, heap) **stmts,
if (dump_enabled_p ())
dump_gimple_stmt_loc (MSG_NOTE, vect_location, TDF_SLIM, pattern_stmt, 0);
- VEC_safe_push (gimple, heap, *stmts, last_stmt);
+ stmts->safe_push (last_stmt);
return pattern_stmt;
}
@@ -1623,10 +1623,10 @@ vect_recog_vector_vector_shift_pattern (VEC (gimple, heap) **stmts,
S1 or modulo S4 stmt. */
static gimple
-vect_recog_divmod_pattern (VEC (gimple, heap) **stmts,
+vect_recog_divmod_pattern (vec<gimple> *stmts,
tree *type_in, tree *type_out)
{
- gimple last_stmt = VEC_pop (gimple, *stmts);
+ gimple last_stmt = stmts->pop ();
tree oprnd0, oprnd1, vectype, itype, cond;
gimple pattern_stmt, def_stmt;
enum tree_code rhs_code;
@@ -1793,7 +1793,7 @@ vect_recog_divmod_pattern (VEC (gimple, heap) **stmts,
dump_gimple_stmt_loc (MSG_NOTE, vect_location, TDF_SLIM, pattern_stmt,
0);
- VEC_safe_push (gimple, heap, *stmts, last_stmt);
+ stmts->safe_push (last_stmt);
*type_in = vectype;
*type_out = vectype;
@@ -2038,7 +2038,7 @@ vect_recog_divmod_pattern (VEC (gimple, heap) **stmts,
dump_gimple_stmt (MSG_OPTIMIZED_LOCATIONS, TDF_SLIM, pattern_stmt, 0);
}
- VEC_safe_push (gimple, heap, *stmts, last_stmt);
+ stmts->safe_push (last_stmt);
*type_in = vectype;
*type_out = vectype;
@@ -2076,10 +2076,10 @@ vect_recog_divmod_pattern (VEC (gimple, heap) **stmts,
a_T = (TYPE) a_it; */
static gimple
-vect_recog_mixed_size_cond_pattern (VEC (gimple, heap) **stmts, tree *type_in,
+vect_recog_mixed_size_cond_pattern (vec<gimple> *stmts, tree *type_in,
tree *type_out)
{
- gimple last_stmt = VEC_index (gimple, *stmts, 0);
+ gimple last_stmt = (*stmts)[0];
tree cond_expr, then_clause, else_clause;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt), def_stmt_info;
tree type, vectype, comp_vectype, itype = NULL_TREE, vecitype;
@@ -2321,7 +2321,7 @@ adjust_bool_pattern_cast (tree type, tree var)
static tree
adjust_bool_pattern (tree var, tree out_type, tree trueval,
- VEC (gimple, heap) **stmts)
+ vec<gimple> *stmts)
{
gimple stmt = SSA_NAME_DEF_STMT (var);
enum tree_code rhs_code, def_rhs_code;
@@ -2400,9 +2400,9 @@ adjust_bool_pattern (tree var, tree out_type, tree trueval,
gimple tstmt;
stmt_vec_info stmt_def_vinfo = vinfo_for_stmt (def_stmt);
irhs2 = adjust_bool_pattern (rhs2, out_type, irhs1, stmts);
- tstmt = VEC_pop (gimple, *stmts);
+ tstmt = stmts->pop ();
gcc_assert (tstmt == def_stmt);
- VEC_quick_push (gimple, *stmts, stmt);
+ stmts->quick_push (stmt);
STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt))
= STMT_VINFO_RELATED_STMT (stmt_def_vinfo);
gcc_assert (!STMT_VINFO_PATTERN_DEF_SEQ (stmt_def_vinfo));
@@ -2425,9 +2425,9 @@ adjust_bool_pattern (tree var, tree out_type, tree trueval,
gimple tstmt;
stmt_vec_info stmt_def_vinfo = vinfo_for_stmt (def_stmt);
irhs1 = adjust_bool_pattern (rhs1, out_type, irhs2, stmts);
- tstmt = VEC_pop (gimple, *stmts);
+ tstmt = stmts->pop ();
gcc_assert (tstmt == def_stmt);
- VEC_quick_push (gimple, *stmts, stmt);
+ stmts->quick_push (stmt);
STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt))
= STMT_VINFO_RELATED_STMT (stmt_def_vinfo);
gcc_assert (!STMT_VINFO_PATTERN_DEF_SEQ (stmt_def_vinfo));
@@ -2494,7 +2494,7 @@ adjust_bool_pattern (tree var, tree out_type, tree trueval,
break;
}
- VEC_safe_push (gimple, heap, *stmts, stmt);
+ stmts->safe_push (stmt);
gimple_set_location (pattern_stmt, loc);
STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt)) = pattern_stmt;
return gimple_assign_lhs (pattern_stmt);
@@ -2546,10 +2546,10 @@ adjust_bool_pattern (tree var, tree out_type, tree trueval,
but the above is more efficient. */
static gimple
-vect_recog_bool_pattern (VEC (gimple, heap) **stmts, tree *type_in,
+vect_recog_bool_pattern (vec<gimple> *stmts, tree *type_in,
tree *type_out)
{
- gimple last_stmt = VEC_pop (gimple, *stmts);
+ gimple last_stmt = stmts->pop ();
enum tree_code rhs_code;
tree var, lhs, rhs, vectype;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (last_stmt);
@@ -2591,7 +2591,7 @@ vect_recog_bool_pattern (VEC (gimple, heap) **stmts, tree *type_in,
= gimple_build_assign_with_ops (NOP_EXPR, lhs, rhs, NULL_TREE);
*type_out = vectype;
*type_in = vectype;
- VEC_safe_push (gimple, heap, *stmts, last_stmt);
+ stmts->safe_push (last_stmt);
if (dump_enabled_p ())
dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, vect_location,
"vect_recog_bool_pattern: detected: ");
@@ -2637,7 +2637,7 @@ vect_recog_bool_pattern (VEC (gimple, heap) **stmts, tree *type_in,
DR_STMT (STMT_VINFO_DATA_REF (stmt_vinfo)) = pattern_stmt;
*type_out = vectype;
*type_in = vectype;
- VEC_safe_push (gimple, heap, *stmts, last_stmt);
+ stmts->safe_push (last_stmt);
if (dump_enabled_p ())
dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, vect_location,
"vect_recog_bool_pattern: detected: ");
@@ -2726,7 +2726,7 @@ vect_mark_pattern_stmts (gimple orig_stmt, gimple pattern_stmt,
static void
vect_pattern_recog_1 (vect_recog_func_ptr vect_recog_func,
gimple_stmt_iterator si,
- VEC (gimple, heap) **stmts_to_replace)
+ vec<gimple> *stmts_to_replace)
{
gimple stmt = gsi_stmt (si), pattern_stmt;
stmt_vec_info stmt_info;
@@ -2737,13 +2737,13 @@ vect_pattern_recog_1 (vect_recog_func_ptr vect_recog_func,
int i;
gimple next;
- VEC_truncate (gimple, *stmts_to_replace, 0);
- VEC_quick_push (gimple, *stmts_to_replace, stmt);
+ stmts_to_replace->truncate (0);
+ stmts_to_replace->quick_push (stmt);
pattern_stmt = (* vect_recog_func) (stmts_to_replace, &type_in, &type_out);
if (!pattern_stmt)
return;
- stmt = VEC_last (gimple, *stmts_to_replace);
+ stmt = stmts_to_replace->last ();
stmt_info = vinfo_for_stmt (stmt);
loop_vinfo = STMT_VINFO_LOOP_VINFO (stmt_info);
@@ -2801,15 +2801,15 @@ vect_pattern_recog_1 (vect_recog_func_ptr vect_recog_func,
/* Patterns cannot be vectorized using SLP, because they change the order of
computation. */
if (loop_vinfo)
- FOR_EACH_VEC_ELT (gimple, LOOP_VINFO_REDUCTIONS (loop_vinfo), i, next)
+ FOR_EACH_VEC_ELT (LOOP_VINFO_REDUCTIONS (loop_vinfo), i, next)
if (next == stmt)
- VEC_ordered_remove (gimple, LOOP_VINFO_REDUCTIONS (loop_vinfo), i);
+ LOOP_VINFO_REDUCTIONS (loop_vinfo).ordered_remove (i);
/* It is possible that additional pattern stmts are created and inserted in
STMTS_TO_REPLACE. We create a stmt_info for each of them, and mark the
relevant statements. */
- for (i = 0; VEC_iterate (gimple, *stmts_to_replace, i, stmt)
- && (unsigned) i < (VEC_length (gimple, *stmts_to_replace) - 1);
+ for (i = 0; stmts_to_replace->iterate (i, &stmt)
+ && (unsigned) i < (stmts_to_replace->length () - 1);
i++)
{
stmt_info = vinfo_for_stmt (stmt);
@@ -2912,7 +2912,8 @@ vect_pattern_recog (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
gimple_stmt_iterator si;
unsigned int i, j;
vect_recog_func_ptr vect_recog_func;
- VEC (gimple, heap) *stmts_to_replace = VEC_alloc (gimple, heap, 1);
+ vec<gimple> stmts_to_replace;
+ stmts_to_replace.create (1);
gimple stmt;
if (dump_enabled_p ())
@@ -2953,5 +2954,5 @@ vect_pattern_recog (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
}
}
- VEC_free (gimple, heap, stmts_to_replace);
+ stmts_to_replace.release ();
}
diff --git a/gcc/tree-vect-slp.c b/gcc/tree-vect-slp.c
index 7dd5e93ba15..e6f3da9bdfb 100644
--- a/gcc/tree-vect-slp.c
+++ b/gcc/tree-vect-slp.c
@@ -73,14 +73,12 @@ vect_free_slp_tree (slp_tree node)
if (!node)
return;
- FOR_EACH_VEC_ELT (slp_void_p, SLP_TREE_CHILDREN (node), i, child)
+ FOR_EACH_VEC_ELT (SLP_TREE_CHILDREN (node), i, child)
vect_free_slp_tree ((slp_tree) child);
- VEC_free (slp_void_p, heap, SLP_TREE_CHILDREN (node));
- VEC_free (gimple, heap, SLP_TREE_SCALAR_STMTS (node));
-
- if (SLP_TREE_VEC_STMTS (node))
- VEC_free (gimple, heap, SLP_TREE_VEC_STMTS (node));
+ SLP_TREE_CHILDREN (node).release ();
+ SLP_TREE_SCALAR_STMTS (node).release ();
+ SLP_TREE_VEC_STMTS (node).release ();
free (node);
}
@@ -92,9 +90,9 @@ void
vect_free_slp_instance (slp_instance instance)
{
vect_free_slp_tree (SLP_INSTANCE_TREE (instance));
- VEC_free (int, heap, SLP_INSTANCE_LOAD_PERMUTATION (instance));
- VEC_free (slp_tree, heap, SLP_INSTANCE_LOADS (instance));
- VEC_free (stmt_info_for_cost, heap, SLP_INSTANCE_BODY_COST_VEC (instance));
+ SLP_INSTANCE_LOAD_PERMUTATION (instance).release ();
+ SLP_INSTANCE_LOADS (instance).release ();
+ SLP_INSTANCE_BODY_COST_VEC (instance).release ();
free (instance);
}
@@ -102,10 +100,10 @@ vect_free_slp_instance (slp_instance instance)
/* Create an SLP node for SCALAR_STMTS. */
static slp_tree
-vect_create_new_slp_node (VEC (gimple, heap) *scalar_stmts)
+vect_create_new_slp_node (vec<gimple> scalar_stmts)
{
slp_tree node;
- gimple stmt = VEC_index (gimple, scalar_stmts, 0);
+ gimple stmt = scalar_stmts[0];
unsigned int nops;
if (is_gimple_call (stmt))
@@ -121,8 +119,8 @@ vect_create_new_slp_node (VEC (gimple, heap) *scalar_stmts)
node = XNEW (struct _slp_tree);
SLP_TREE_SCALAR_STMTS (node) = scalar_stmts;
- SLP_TREE_VEC_STMTS (node) = NULL;
- SLP_TREE_CHILDREN (node) = VEC_alloc (slp_void_p, heap, nops);
+ SLP_TREE_VEC_STMTS (node).create (0);
+ SLP_TREE_CHILDREN (node).create (nops);
return node;
}
@@ -130,23 +128,23 @@ vect_create_new_slp_node (VEC (gimple, heap) *scalar_stmts)
/* Allocate operands info for NOPS operands, and GROUP_SIZE def-stmts for each
operand. */
-static VEC (slp_oprnd_info, heap) *
+static vec<slp_oprnd_info>
vect_create_oprnd_info (int nops, int group_size)
{
int i;
slp_oprnd_info oprnd_info;
- VEC (slp_oprnd_info, heap) *oprnds_info;
+ vec<slp_oprnd_info> oprnds_info;
- oprnds_info = VEC_alloc (slp_oprnd_info, heap, nops);
+ oprnds_info.create (nops);
for (i = 0; i < nops; i++)
{
oprnd_info = XNEW (struct _slp_oprnd_info);
- oprnd_info->def_stmts = VEC_alloc (gimple, heap, group_size);
+ oprnd_info->def_stmts.create (group_size);
oprnd_info->first_dt = vect_uninitialized_def;
oprnd_info->first_def_type = NULL_TREE;
oprnd_info->first_const_oprnd = NULL_TREE;
oprnd_info->first_pattern = false;
- VEC_quick_push (slp_oprnd_info, oprnds_info, oprnd_info);
+ oprnds_info.quick_push (oprnd_info);
}
return oprnds_info;
@@ -156,18 +154,18 @@ vect_create_oprnd_info (int nops, int group_size)
/* Free operands info. */
static void
-vect_free_oprnd_info (VEC (slp_oprnd_info, heap) **oprnds_info)
+vect_free_oprnd_info (vec<slp_oprnd_info> &oprnds_info)
{
int i;
slp_oprnd_info oprnd_info;
- FOR_EACH_VEC_ELT (slp_oprnd_info, *oprnds_info, i, oprnd_info)
+ FOR_EACH_VEC_ELT (oprnds_info, i, oprnd_info)
{
- VEC_free (gimple, heap, oprnd_info->def_stmts);
+ oprnd_info->def_stmts.release ();
XDELETE (oprnd_info);
}
- VEC_free (slp_oprnd_info, heap, *oprnds_info);
+ oprnds_info.release ();
}
@@ -179,7 +177,7 @@ static bool
vect_get_and_check_slp_defs (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
slp_tree slp_node, gimple stmt,
int ncopies_for_cost, bool first,
- VEC (slp_oprnd_info, heap) **oprnds_info,
+ vec<slp_oprnd_info> *oprnds_info,
stmt_vector_for_cost *prologue_cost_vec,
stmt_vector_for_cost *body_cost_vec)
{
@@ -226,7 +224,7 @@ vect_get_and_check_slp_defs (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
else
oprnd = gimple_op (stmt, op_idx++);
- oprnd_info = VEC_index (slp_oprnd_info, *oprnds_info, i);
+ oprnd_info = (*oprnds_info)[i];
if (COMPARISON_CLASS_P (oprnd))
{
@@ -373,7 +371,7 @@ vect_get_and_check_slp_defs (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
different_types = true;
else
{
- oprnd0_info = VEC_index (slp_oprnd_info, *oprnds_info, 0);
+ oprnd0_info = (*oprnds_info)[0];
if (is_gimple_assign (stmt)
&& (rhs_code = gimple_assign_rhs_code (stmt))
&& TREE_CODE_CLASS (rhs_code) == tcc_binary
@@ -421,15 +419,15 @@ vect_get_and_check_slp_defs (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
case vect_internal_def:
if (different_types)
{
- oprnd0_info = VEC_index (slp_oprnd_info, *oprnds_info, 0);
- oprnd1_info = VEC_index (slp_oprnd_info, *oprnds_info, 0);
+ oprnd0_info = (*oprnds_info)[0];
+ oprnd1_info = (*oprnds_info)[0];
if (i == 0)
- VEC_quick_push (gimple, oprnd1_info->def_stmts, def_stmt);
+ oprnd1_info->def_stmts.quick_push (def_stmt);
else
- VEC_quick_push (gimple, oprnd0_info->def_stmts, def_stmt);
+ oprnd0_info->def_stmts.quick_push (def_stmt);
}
else
- VEC_quick_push (gimple, oprnd_info->def_stmts, def_stmt);
+ oprnd_info->def_stmts.quick_push (def_stmt);
break;
@@ -459,15 +457,15 @@ static bool
vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
slp_tree *node, unsigned int group_size, int *outside_cost,
int ncopies_for_cost, unsigned int *max_nunits,
- VEC (int, heap) **load_permutation,
- VEC (slp_tree, heap) **loads,
+ vec<int> *load_permutation,
+ vec<slp_tree> *loads,
unsigned int vectorization_factor, bool *loads_permuted,
stmt_vector_for_cost *prologue_cost_vec,
stmt_vector_for_cost *body_cost_vec)
{
unsigned int i;
- VEC (gimple, heap) *stmts = SLP_TREE_SCALAR_STMTS (*node);
- gimple stmt = VEC_index (gimple, stmts, 0);
+ vec<gimple> stmts = SLP_TREE_SCALAR_STMTS (*node);
+ gimple stmt = stmts[0];
enum tree_code first_stmt_code = ERROR_MARK, rhs_code = ERROR_MARK;
enum tree_code first_cond_code = ERROR_MARK;
tree lhs;
@@ -483,7 +481,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
bool permutation = false;
unsigned int load_place;
gimple first_load = NULL, prev_first_load = NULL, old_first_load = NULL;
- VEC (slp_oprnd_info, heap) *oprnds_info;
+ vec<slp_oprnd_info> oprnds_info;
unsigned int nops;
slp_oprnd_info oprnd_info;
tree cond;
@@ -502,7 +500,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
oprnds_info = vect_create_oprnd_info (nops, group_size);
/* For every stmt in NODE find its def stmt/s. */
- FOR_EACH_VEC_ELT (gimple, stmts, i, stmt)
+ FOR_EACH_VEC_ELT (stmts, i, stmt)
{
if (dump_enabled_p ())
{
@@ -520,7 +518,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
@@ -535,7 +533,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
@@ -552,7 +550,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
@@ -568,7 +566,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
scalar_type);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
@@ -598,7 +596,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
}
@@ -634,7 +632,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"Build SLP failed: no optab.");
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
icode = (int) optab_handler (optab, vec_mode);
@@ -644,7 +642,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"Build SLP failed: "
"op not supported by target.");
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
optab_op2_mode = insn_data[icode].operand[2].mode;
@@ -682,7 +680,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
@@ -697,13 +695,13 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
if (rhs_code == CALL_EXPR)
{
- gimple first_stmt = VEC_index (gimple, stmts, 0);
+ gimple first_stmt = stmts[0];
if (gimple_call_num_args (stmt) != nops
|| !operand_equal_p (gimple_call_fn (first_stmt),
gimple_call_fn (stmt), 0)
@@ -718,7 +716,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
stmt, 0);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
}
@@ -736,7 +734,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
prologue_cost_vec,
body_cost_vec))
{
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
}
@@ -758,7 +756,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
stmt, 0);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
@@ -777,7 +775,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
stmt, 0);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
@@ -802,7 +800,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
stmt, 0);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
}
@@ -827,7 +825,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
stmt, 0);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
@@ -845,7 +843,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
if (load_place != i)
permutation = true;
- VEC_safe_push (int, heap, *load_permutation, load_place);
+ load_permutation->safe_push (load_place);
/* We stop the tree when we reach a group of loads. */
stop_recursion = true;
@@ -865,7 +863,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
}
/* FORNOW: Not grouped loads are not supported. */
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
@@ -883,7 +881,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
dump_gimple_stmt (MSG_MISSED_OPTIMIZATION, TDF_SLIM, stmt, 0);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
@@ -904,7 +902,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
stmt, 0);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
}
@@ -915,7 +913,7 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
&oprnds_info, prologue_cost_vec,
body_cost_vec))
{
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
}
@@ -924,10 +922,10 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
/* Grouped loads were reached - stop the recursion. */
if (stop_recursion)
{
- VEC_safe_push (slp_tree, heap, *loads, *node);
+ loads->safe_push (*node);
if (permutation)
{
- gimple first_stmt = VEC_index (gimple, stmts, 0);
+ gimple first_stmt = stmts[0];
*loads_permuted = true;
(void) record_stmt_cost (body_cost_vec, group_size, vec_perm,
vinfo_for_stmt (first_stmt), 0, vect_body);
@@ -941,12 +939,12 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
*loads_permuted = true;
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return true;
}
/* Create SLP_TREE nodes for the definition node/s. */
- FOR_EACH_VEC_ELT (slp_oprnd_info, oprnds_info, i, oprnd_info)
+ FOR_EACH_VEC_ELT (oprnds_info, i, oprnd_info)
{
slp_tree child;
@@ -962,17 +960,17 @@ vect_build_slp_tree (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
prologue_cost_vec, body_cost_vec))
{
if (child)
- oprnd_info->def_stmts = NULL;
+ oprnd_info->def_stmts = vec<gimple>();
vect_free_slp_tree (child);
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return false;
}
- oprnd_info->def_stmts = NULL;
- VEC_quick_push (slp_void_p, SLP_TREE_CHILDREN (*node), child);
+ oprnd_info->def_stmts.create (0);
+ SLP_TREE_CHILDREN (*node).quick_push (child);
}
- vect_free_oprnd_info (&oprnds_info);
+ vect_free_oprnd_info (oprnds_info);
return true;
}
@@ -989,14 +987,14 @@ vect_print_slp_tree (int dump_kind, slp_tree node)
return;
dump_printf (dump_kind, "node ");
- FOR_EACH_VEC_ELT (gimple, SLP_TREE_SCALAR_STMTS (node), i, stmt)
+ FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), i, stmt)
{
dump_printf (dump_kind, "\n\tstmt %d ", i);
dump_gimple_stmt (dump_kind, TDF_SLIM, stmt, 0);
}
dump_printf (dump_kind, "\n");
- FOR_EACH_VEC_ELT (slp_void_p, SLP_TREE_CHILDREN (node), i, child)
+ FOR_EACH_VEC_ELT (SLP_TREE_CHILDREN (node), i, child)
vect_print_slp_tree (dump_kind, (slp_tree) child);
}
@@ -1016,11 +1014,11 @@ vect_mark_slp_stmts (slp_tree node, enum slp_vect_type mark, int j)
if (!node)
return;
- FOR_EACH_VEC_ELT (gimple, SLP_TREE_SCALAR_STMTS (node), i, stmt)
+ FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), i, stmt)
if (j < 0 || i == j)
STMT_SLP_TYPE (vinfo_for_stmt (stmt)) = mark;
- FOR_EACH_VEC_ELT (slp_void_p, SLP_TREE_CHILDREN (node), i, child)
+ FOR_EACH_VEC_ELT (SLP_TREE_CHILDREN (node), i, child)
vect_mark_slp_stmts ((slp_tree) child, mark, j);
}
@@ -1038,7 +1036,7 @@ vect_mark_slp_stmts_relevant (slp_tree node)
if (!node)
return;
- FOR_EACH_VEC_ELT (gimple, SLP_TREE_SCALAR_STMTS (node), i, stmt)
+ FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), i, stmt)
{
stmt_info = vinfo_for_stmt (stmt);
gcc_assert (!STMT_VINFO_RELEVANT (stmt_info)
@@ -1046,7 +1044,7 @@ vect_mark_slp_stmts_relevant (slp_tree node)
STMT_VINFO_RELEVANT (stmt_info) = vect_used_in_scope;
}
- FOR_EACH_VEC_ELT (slp_void_p, SLP_TREE_CHILDREN (node), i, child)
+ FOR_EACH_VEC_ELT (SLP_TREE_CHILDREN (node), i, child)
vect_mark_slp_stmts_relevant ((slp_tree) child);
}
@@ -1057,10 +1055,10 @@ vect_mark_slp_stmts_relevant (slp_tree node)
static bool
vect_supported_slp_permutation_p (slp_instance instance)
{
- slp_tree node = VEC_index (slp_tree, SLP_INSTANCE_LOADS (instance), 0);
- gimple stmt = VEC_index (gimple, SLP_TREE_SCALAR_STMTS (node), 0);
+ slp_tree node = SLP_INSTANCE_LOADS (instance)[0];
+ gimple stmt = SLP_TREE_SCALAR_STMTS (node)[0];
gimple first_load = GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt));
- VEC (slp_tree, heap) *sorted_loads = NULL;
+ vec<slp_tree> sorted_loads = vec<slp_tree>();
int index;
slp_tree *tmp_loads = NULL;
int group_size = SLP_INSTANCE_GROUP_SIZE (instance), i, j;
@@ -1072,11 +1070,11 @@ vect_supported_slp_permutation_p (slp_instance instance)
Sort the nodes according to the order of accesses in the chain. */
tmp_loads = (slp_tree *) xmalloc (sizeof (slp_tree) * group_size);
for (i = 0, j = 0;
- VEC_iterate (int, SLP_INSTANCE_LOAD_PERMUTATION (instance), i, index)
- && VEC_iterate (slp_tree, SLP_INSTANCE_LOADS (instance), j, load);
+ SLP_INSTANCE_LOAD_PERMUTATION (instance).iterate (i, &index)
+ && SLP_INSTANCE_LOADS (instance).iterate (j, &load);
i += group_size, j++)
{
- gimple scalar_stmt = VEC_index (gimple, SLP_TREE_SCALAR_STMTS (load), 0);
+ gimple scalar_stmt = SLP_TREE_SCALAR_STMTS (load)[0];
/* Check that the loads are all in the same interleaving chain. */
if (GROUP_FIRST_ELEMENT (vinfo_for_stmt (scalar_stmt)) != first_load)
{
@@ -1096,15 +1094,15 @@ vect_supported_slp_permutation_p (slp_instance instance)
tmp_loads[index] = load;
}
- sorted_loads = VEC_alloc (slp_tree, heap, group_size);
+ sorted_loads.create (group_size);
for (i = 0; i < group_size; i++)
- VEC_safe_push (slp_tree, heap, sorted_loads, tmp_loads[i]);
+ sorted_loads.safe_push (tmp_loads[i]);
- VEC_free (slp_tree, heap, SLP_INSTANCE_LOADS (instance));
+ SLP_INSTANCE_LOADS (instance).release ();
SLP_INSTANCE_LOADS (instance) = sorted_loads;
free (tmp_loads);
- if (!vect_transform_slp_perm_load (stmt, NULL, NULL,
+ if (!vect_transform_slp_perm_load (stmt, vec<tree>(), NULL,
SLP_INSTANCE_UNROLLING_FACTOR (instance),
instance, true))
return false;
@@ -1117,32 +1115,32 @@ vect_supported_slp_permutation_p (slp_instance instance)
static void
vect_slp_rearrange_stmts (slp_tree node, unsigned int group_size,
- VEC (int, heap) *permutation)
+ vec<int> permutation)
{
gimple stmt;
- VEC (gimple, heap) *tmp_stmts;
+ vec<gimple> tmp_stmts;
unsigned int index, i;
slp_void_p child;
if (!node)
return;
- FOR_EACH_VEC_ELT (slp_void_p, SLP_TREE_CHILDREN (node), i, child)
+ FOR_EACH_VEC_ELT (SLP_TREE_CHILDREN (node), i, child)
vect_slp_rearrange_stmts ((slp_tree) child, group_size, permutation);
- gcc_assert (group_size == VEC_length (gimple, SLP_TREE_SCALAR_STMTS (node)));
- tmp_stmts = VEC_alloc (gimple, heap, group_size);
+ gcc_assert (group_size == SLP_TREE_SCALAR_STMTS (node).length ());
+ tmp_stmts.create (group_size);
for (i = 0; i < group_size; i++)
- VEC_safe_push (gimple, heap, tmp_stmts, NULL);
+ tmp_stmts.safe_push (NULL);
- FOR_EACH_VEC_ELT (gimple, SLP_TREE_SCALAR_STMTS (node), i, stmt)
+ FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), i, stmt)
{
- index = VEC_index (int, permutation, i);
- VEC_replace (gimple, tmp_stmts, index, stmt);
+ index = permutation[i];
+ tmp_stmts[index] = stmt;
}
- VEC_free (gimple, heap, SLP_TREE_SCALAR_STMTS (node));
+ SLP_TREE_SCALAR_STMTS (node).release ();
SLP_TREE_SCALAR_STMTS (node) = tmp_stmts;
}
@@ -1154,7 +1152,7 @@ vect_slp_rearrange_stmts (slp_tree node, unsigned int group_size,
static bool
vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
- VEC (int, heap) *load_permutation)
+ vec<int> load_permutation)
{
int i = 0, j, prev = -1, next, k, number_of_groups;
bool supported, bad_permutation = false;
@@ -1172,7 +1170,7 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
if (dump_enabled_p ())
{
dump_printf_loc (MSG_NOTE, vect_location, "Load permutation ");
- FOR_EACH_VEC_ELT (int, load_permutation, i, next)
+ FOR_EACH_VEC_ELT (load_permutation, i, next)
dump_printf (MSG_NOTE, "%d ", next);
}
@@ -1184,13 +1182,12 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
permutation). */
/* Check that all the load nodes are of the same size. */
- FOR_EACH_VEC_ELT (slp_tree, SLP_INSTANCE_LOADS (slp_instn), i, node)
+ FOR_EACH_VEC_ELT (SLP_INSTANCE_LOADS (slp_instn), i, node)
{
- if (VEC_length (gimple, SLP_TREE_SCALAR_STMTS (node))
- != (unsigned) group_size)
+ if (SLP_TREE_SCALAR_STMTS (node).length () != (unsigned) group_size)
return false;
- stmt = VEC_index (gimple, SLP_TREE_SCALAR_STMTS (node), 0);
+ stmt = SLP_TREE_SCALAR_STMTS (node)[0];
if (is_gimple_assign (stmt)
&& (gimple_assign_rhs_code (stmt) == REALPART_EXPR
|| gimple_assign_rhs_code (stmt) == IMAGPART_EXPR))
@@ -1205,9 +1202,9 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
chains are mixed, they match the above pattern. */
if (complex_numbers)
{
- FOR_EACH_VEC_ELT (slp_tree, SLP_INSTANCE_LOADS (slp_instn), i, node)
+ FOR_EACH_VEC_ELT (SLP_INSTANCE_LOADS (slp_instn), i, node)
{
- FOR_EACH_VEC_ELT (gimple, SLP_TREE_SCALAR_STMTS (node), j, stmt)
+ FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), j, stmt)
{
if (j == 0)
first = stmt;
@@ -1223,10 +1220,9 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
else
k = 0;
- other_complex_node = VEC_index (slp_tree,
- SLP_INSTANCE_LOADS (slp_instn), k);
- other_node_first = VEC_index (gimple,
- SLP_TREE_SCALAR_STMTS (other_complex_node), 0);
+ other_complex_node = SLP_INSTANCE_LOADS (slp_instn)[k];
+ other_node_first =
+ SLP_TREE_SCALAR_STMTS (other_complex_node)[0];
if (GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt))
!= other_node_first)
@@ -1240,20 +1236,20 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
/* We checked that this case ok, so there is no need to proceed with
permutation tests. */
if (complex_numbers == 2
- && VEC_length (slp_tree, SLP_INSTANCE_LOADS (slp_instn)) == 2)
+ && SLP_INSTANCE_LOADS (slp_instn).length () == 2)
{
- VEC_free (slp_tree, heap, SLP_INSTANCE_LOADS (slp_instn));
- VEC_free (int, heap, SLP_INSTANCE_LOAD_PERMUTATION (slp_instn));
+ SLP_INSTANCE_LOADS (slp_instn).release ();
+ SLP_INSTANCE_LOAD_PERMUTATION (slp_instn).release ();
return true;
}
node = SLP_INSTANCE_TREE (slp_instn);
- stmt = VEC_index (gimple, SLP_TREE_SCALAR_STMTS (node), 0);
+ stmt = SLP_TREE_SCALAR_STMTS (node)[0];
/* LOAD_PERMUTATION is a list of indices of all the loads of the SLP
instance, not all the loads belong to the same node or interleaving
group. Hence, we need to divide them into groups according to
GROUP_SIZE. */
- number_of_groups = VEC_length (int, load_permutation) / group_size;
+ number_of_groups = load_permutation.length () / group_size;
/* Reduction (there are no data-refs in the root).
In reduction chain the order of the loads is important. */
@@ -1268,8 +1264,8 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
k = 0;
for (j = i * group_size; j < i * group_size + group_size; j++)
{
- next = VEC_index (int, load_permutation, j);
- first_group_load_index = VEC_index (int, load_permutation, k);
+ next = load_permutation[j];
+ first_group_load_index = load_permutation[k];
if (next != first_group_load_index)
{
@@ -1292,7 +1288,7 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
bitmap_clear (load_index);
for (k = 0; k < group_size; k++)
{
- first_group_load_index = VEC_index (int, load_permutation, k);
+ first_group_load_index = load_permutation[k];
if (bitmap_bit_p (load_index, first_group_load_index))
{
bad_permutation = true;
@@ -1321,7 +1317,7 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
according to the order of the loads. */
vect_slp_rearrange_stmts (SLP_INSTANCE_TREE (slp_instn), group_size,
load_permutation);
- VEC_free (int, heap, SLP_INSTANCE_LOAD_PERMUTATION (slp_instn));
+ SLP_INSTANCE_LOAD_PERMUTATION (slp_instn).release ();
return true;
}
}
@@ -1334,11 +1330,11 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
/* Check that for every node in the instance the loads form a subchain. */
if (bb_vinfo)
{
- FOR_EACH_VEC_ELT (slp_tree, SLP_INSTANCE_LOADS (slp_instn), i, node)
+ FOR_EACH_VEC_ELT (SLP_INSTANCE_LOADS (slp_instn), i, node)
{
next_load = NULL;
first_load = NULL;
- FOR_EACH_VEC_ELT (gimple, SLP_TREE_SCALAR_STMTS (node), j, load)
+ FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), j, load)
{
if (!first_load)
first_load = GROUP_FIRST_ELEMENT (vinfo_for_stmt (load));
@@ -1366,9 +1362,9 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
the first statement in every load node, is supported. */
if (!bad_permutation)
{
- FOR_EACH_VEC_ELT (slp_tree, SLP_INSTANCE_LOADS (slp_instn), i, node)
+ FOR_EACH_VEC_ELT (SLP_INSTANCE_LOADS (slp_instn), i, node)
{
- first_load = VEC_index (gimple, SLP_TREE_SCALAR_STMTS (node), 0);
+ first_load = SLP_TREE_SCALAR_STMTS (node)[0];
if (first_load
!= GROUP_FIRST_ELEMENT (vinfo_for_stmt (first_load)))
{
@@ -1392,7 +1388,7 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
if (!bad_permutation)
{
- VEC_free (int, heap, SLP_INSTANCE_LOAD_PERMUTATION (slp_instn));
+ SLP_INSTANCE_LOAD_PERMUTATION (slp_instn).release ();
return true;
}
}
@@ -1401,7 +1397,7 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
/* FORNOW: the only supported permutation is 0..01..1.. of length equal to
GROUP_SIZE and where each sequence of same drs is of GROUP_SIZE length as
well (unless it's reduction). */
- if (VEC_length (int, load_permutation)
+ if (load_permutation.length ()
!= (unsigned int) (group_size * group_size))
return false;
@@ -1411,7 +1407,7 @@ vect_supported_load_permutation_p (slp_instance slp_instn, int group_size,
for (j = 0; j < group_size; j++)
{
for (i = j * group_size, k = 0;
- VEC_iterate (int, load_permutation, i, next) && k < group_size;
+ load_permutation.iterate (i, &next) && k < group_size;
i++, k++)
{
if (i != j * group_size && next != prev)
@@ -1460,8 +1456,8 @@ vect_find_first_load_in_slp_instance (slp_instance instance)
slp_tree load_node;
gimple first_load = NULL, load;
- FOR_EACH_VEC_ELT (slp_tree, SLP_INSTANCE_LOADS (instance), i, load_node)
- FOR_EACH_VEC_ELT (gimple, SLP_TREE_SCALAR_STMTS (load_node), j, load)
+ FOR_EACH_VEC_ELT (SLP_INSTANCE_LOADS (instance), i, load_node)
+ FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (load_node), j, load)
first_load = get_earlier_stmt (load, first_load);
return first_load;
@@ -1478,9 +1474,7 @@ vect_find_last_store_in_slp_instance (slp_instance instance)
gimple last_store = NULL, store;
node = SLP_INSTANCE_TREE (instance);
- for (i = 0;
- VEC_iterate (gimple, SLP_TREE_SCALAR_STMTS (node), i, store);
- i++)
+ for (i = 0; SLP_TREE_SCALAR_STMTS (node).iterate (i, &store); i++)
last_store = get_later_stmt (store, last_store);
return last_store;
@@ -1504,11 +1498,11 @@ vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
unsigned int vectorization_factor = 0;
int outside_cost = 0, ncopies_for_cost, i;
unsigned int max_nunits = 0;
- VEC (int, heap) *load_permutation;
- VEC (slp_tree, heap) *loads;
+ vec<int> load_permutation;
+ vec<slp_tree> loads;
struct data_reference *dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (stmt));
bool loads_permuted = false;
- VEC (gimple, heap) *scalar_stmts;
+ vec<gimple> scalar_stmts;
stmt_vector_for_cost body_cost_vec, prologue_cost_vec;
stmt_info_for_cost *si;
@@ -1531,7 +1525,7 @@ vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
{
gcc_assert (loop_vinfo);
vectype = STMT_VINFO_VECTYPE (vinfo_for_stmt (stmt));
- group_size = VEC_length (gimple, LOOP_VINFO_REDUCTIONS (loop_vinfo));
+ group_size = LOOP_VINFO_REDUCTIONS (loop_vinfo).length ();
}
if (!vectype)
@@ -1565,7 +1559,7 @@ vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
}
/* Create a node (a root of the SLP tree) for the packed grouped stores. */
- scalar_stmts = VEC_alloc (gimple, heap, group_size);
+ scalar_stmts.create (group_size);
next = stmt;
if (GROUP_FIRST_ELEMENT (vinfo_for_stmt (stmt)))
{
@@ -1574,19 +1568,19 @@ vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
{
if (STMT_VINFO_IN_PATTERN_P (vinfo_for_stmt (next))
&& STMT_VINFO_RELATED_STMT (vinfo_for_stmt (next)))
- VEC_safe_push (gimple, heap, scalar_stmts,
- STMT_VINFO_RELATED_STMT (vinfo_for_stmt (next)));
+ scalar_stmts.safe_push (
+ STMT_VINFO_RELATED_STMT (vinfo_for_stmt (next)));
else
- VEC_safe_push (gimple, heap, scalar_stmts, next);
+ scalar_stmts.safe_push (next);
next = GROUP_NEXT_ELEMENT (vinfo_for_stmt (next));
}
}
else
{
/* Collect reduction statements. */
- VEC (gimple, heap) *reductions = LOOP_VINFO_REDUCTIONS (loop_vinfo);
- for (i = 0; VEC_iterate (gimple, reductions, i, next); i++)
- VEC_safe_push (gimple, heap, scalar_stmts, next);
+ vec<gimple> reductions = LOOP_VINFO_REDUCTIONS (loop_vinfo);
+ for (i = 0; reductions.iterate (i, &next); i++)
+ scalar_stmts.safe_push (next);
}
node = vect_create_new_slp_node (scalar_stmts);
@@ -1596,10 +1590,10 @@ vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
GROUP_SIZE / NUNITS otherwise. */
ncopies_for_cost = unrolling_factor * group_size / nunits;
- load_permutation = VEC_alloc (int, heap, group_size * group_size);
- loads = VEC_alloc (slp_tree, heap, group_size);
- prologue_cost_vec = VEC_alloc (stmt_info_for_cost, heap, 10);
- body_cost_vec = VEC_alloc (stmt_info_for_cost, heap, 10);
+ load_permutation.create (group_size * group_size);
+ loads.create (group_size);
+ prologue_cost_vec.create (10);
+ body_cost_vec.create (10);
/* Build the tree for the SLP instance. */
if (vect_build_slp_tree (loop_vinfo, bb_vinfo, &node, group_size,
@@ -1623,10 +1617,10 @@ vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
"Build SLP failed: unrolling required in basic"
" block SLP");
vect_free_slp_tree (node);
- VEC_free (stmt_info_for_cost, heap, body_cost_vec);
- VEC_free (stmt_info_for_cost, heap, prologue_cost_vec);
- VEC_free (int, heap, load_permutation);
- VEC_free (slp_tree, heap, loads);
+ body_cost_vec.release ();
+ prologue_cost_vec.release ();
+ load_permutation.release ();
+ loads.release ();
return false;
}
@@ -1654,7 +1648,7 @@ vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
}
vect_free_slp_instance (new_instance);
- VEC_free (stmt_info_for_cost, heap, prologue_cost_vec);
+ prologue_cost_vec.release ();
return false;
}
@@ -1662,12 +1656,12 @@ vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
= vect_find_first_load_in_slp_instance (new_instance);
}
else
- VEC_free (int, heap, SLP_INSTANCE_LOAD_PERMUTATION (new_instance));
+ SLP_INSTANCE_LOAD_PERMUTATION (new_instance).release ();
/* Record the prologue costs, which were delayed until we were
sure that SLP was successful. Unlike the body costs, we know
the final values now regardless of the loop vectorization factor. */
- FOR_EACH_VEC_ELT (stmt_info_for_cost, prologue_cost_vec, i, si)
+ FOR_EACH_VEC_ELT (prologue_cost_vec, i, si)
{
struct _stmt_vec_info *stmt_info
= si->stmt ? vinfo_for_stmt (si->stmt) : NULL;
@@ -1675,15 +1669,12 @@ vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
si->misalign, vect_prologue);
}
- VEC_free (stmt_info_for_cost, heap, prologue_cost_vec);
+ prologue_cost_vec.release ();
if (loop_vinfo)
- VEC_safe_push (slp_instance, heap,
- LOOP_VINFO_SLP_INSTANCES (loop_vinfo),
- new_instance);
+ LOOP_VINFO_SLP_INSTANCES (loop_vinfo).safe_push (new_instance);
else
- VEC_safe_push (slp_instance, heap, BB_VINFO_SLP_INSTANCES (bb_vinfo),
- new_instance);
+ BB_VINFO_SLP_INSTANCES (bb_vinfo).safe_push (new_instance);
if (dump_enabled_p ())
vect_print_slp_tree (MSG_NOTE, node);
@@ -1692,15 +1683,15 @@ vect_analyze_slp_instance (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo,
}
else
{
- VEC_free (stmt_info_for_cost, heap, body_cost_vec);
- VEC_free (stmt_info_for_cost, heap, prologue_cost_vec);
+ body_cost_vec.release ();
+ prologue_cost_vec.release ();
}
/* Failed to SLP. */
/* Free the allocated memory. */
vect_free_slp_tree (node);
- VEC_free (int, heap, load_permutation);
- VEC_free (slp_tree, heap, loads);
+ load_permutation.release ();
+ loads.release ();
return false;
}
@@ -1713,7 +1704,9 @@ bool
vect_analyze_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
{
unsigned int i;
- VEC (gimple, heap) *grouped_stores, *reductions = NULL, *reduc_chains = NULL;
+ vec<gimple> grouped_stores;
+ vec<gimple> reductions = vec<gimple>();
+ vec<gimple> reduc_chains = vec<gimple>();
gimple first_element;
bool ok = false;
@@ -1730,7 +1723,7 @@ vect_analyze_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
grouped_stores = BB_VINFO_GROUPED_STORES (bb_vinfo);
/* Find SLP sequences starting from groups of grouped stores. */
- FOR_EACH_VEC_ELT (gimple, grouped_stores, i, first_element)
+ FOR_EACH_VEC_ELT (grouped_stores, i, first_element)
if (vect_analyze_slp_instance (loop_vinfo, bb_vinfo, first_element))
ok = true;
@@ -1744,10 +1737,10 @@ vect_analyze_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
}
if (loop_vinfo
- && VEC_length (gimple, LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo)) > 0)
+ && LOOP_VINFO_REDUCTION_CHAINS (loop_vinfo).length () > 0)
{
/* Find SLP sequences starting from reduction chains. */
- FOR_EACH_VEC_ELT (gimple, reduc_chains, i, first_element)
+ FOR_EACH_VEC_ELT (reduc_chains, i, first_element)
if (vect_analyze_slp_instance (loop_vinfo, bb_vinfo, first_element))
ok = true;
else
@@ -1759,9 +1752,8 @@ vect_analyze_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
}
/* Find SLP sequences starting from groups of reductions. */
- if (loop_vinfo && VEC_length (gimple, LOOP_VINFO_REDUCTIONS (loop_vinfo)) > 1
- && vect_analyze_slp_instance (loop_vinfo, bb_vinfo,
- VEC_index (gimple, reductions, 0)))
+ if (loop_vinfo && LOOP_VINFO_REDUCTIONS (loop_vinfo).length () > 1
+ && vect_analyze_slp_instance (loop_vinfo, bb_vinfo, reductions[0]))
ok = true;
return true;
@@ -1776,14 +1768,14 @@ bool
vect_make_slp_decision (loop_vec_info loop_vinfo)
{
unsigned int i, unrolling_factor = 1;
- VEC (slp_instance, heap) *slp_instances = LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
+ vec<slp_instance> slp_instances = LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
slp_instance instance;
int decided_to_slp = 0;
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location, "=== vect_make_slp_decision ===");
- FOR_EACH_VEC_ELT (slp_instance, slp_instances, i, instance)
+ FOR_EACH_VEC_ELT (slp_instances, i, instance)
{
/* FORNOW: SLP if you can. */
if (unrolling_factor < SLP_INSTANCE_UNROLLING_FACTOR (instance))
@@ -1814,8 +1806,8 @@ static void
vect_detect_hybrid_slp_stmts (slp_tree node)
{
int i;
- VEC (gimple, heap) *stmts = SLP_TREE_SCALAR_STMTS (node);
- gimple stmt = VEC_index (gimple, stmts, 0);
+ vec<gimple> stmts = SLP_TREE_SCALAR_STMTS (node);
+ gimple stmt = stmts[0];
imm_use_iterator imm_iter;
gimple use_stmt;
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
@@ -1833,7 +1825,7 @@ vect_detect_hybrid_slp_stmts (slp_tree node)
else
bb = BB_VINFO_BB (bb_vinfo);
- FOR_EACH_VEC_ELT (gimple, SLP_TREE_SCALAR_STMTS (node), i, stmt)
+ FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), i, stmt)
if (PURE_SLP_STMT (vinfo_for_stmt (stmt))
&& TREE_CODE (gimple_op (stmt, 0)) == SSA_NAME)
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, gimple_op (stmt, 0))
@@ -1849,7 +1841,7 @@ vect_detect_hybrid_slp_stmts (slp_tree node)
== vect_reduction_def))
vect_mark_slp_stmts (node, hybrid, i);
- FOR_EACH_VEC_ELT (slp_void_p, SLP_TREE_CHILDREN (node), i, child)
+ FOR_EACH_VEC_ELT (SLP_TREE_CHILDREN (node), i, child)
vect_detect_hybrid_slp_stmts ((slp_tree) child);
}
@@ -1860,13 +1852,13 @@ void
vect_detect_hybrid_slp (loop_vec_info loop_vinfo)
{
unsigned int i;
- VEC (slp_instance, heap) *slp_instances = LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
+ vec<slp_instance> slp_instances = LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
slp_instance instance;
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location, "=== vect_detect_hybrid_slp ===");
- FOR_EACH_VEC_ELT (slp_instance, slp_instances, i, instance)
+ FOR_EACH_VEC_ELT (slp_instances, i, instance)
vect_detect_hybrid_slp_stmts (SLP_INSTANCE_TREE (instance));
}
@@ -1890,8 +1882,8 @@ new_bb_vec_info (basic_block bb)
set_vinfo_for_stmt (stmt, new_stmt_vec_info (stmt, NULL, res));
}
- BB_VINFO_GROUPED_STORES (res) = VEC_alloc (gimple, heap, 10);
- BB_VINFO_SLP_INSTANCES (res) = VEC_alloc (slp_instance, heap, 2);
+ BB_VINFO_GROUPED_STORES (res).create (10);
+ BB_VINFO_SLP_INSTANCES (res).create (2);
BB_VINFO_TARGET_COST_DATA (res) = init_cost (NULL);
bb->aux = res;
@@ -1905,7 +1897,7 @@ new_bb_vec_info (basic_block bb)
static void
destroy_bb_vec_info (bb_vec_info bb_vinfo)
{
- VEC (slp_instance, heap) *slp_instances;
+ vec<slp_instance> slp_instances;
slp_instance instance;
basic_block bb;
gimple_stmt_iterator si;
@@ -1928,11 +1920,11 @@ destroy_bb_vec_info (bb_vec_info bb_vinfo)
free_data_refs (BB_VINFO_DATAREFS (bb_vinfo));
free_dependence_relations (BB_VINFO_DDRS (bb_vinfo));
- VEC_free (gimple, heap, BB_VINFO_GROUPED_STORES (bb_vinfo));
+ BB_VINFO_GROUPED_STORES (bb_vinfo).release ();
slp_instances = BB_VINFO_SLP_INSTANCES (bb_vinfo);
- FOR_EACH_VEC_ELT (slp_instance, slp_instances, i, instance)
+ FOR_EACH_VEC_ELT (slp_instances, i, instance)
vect_free_slp_instance (instance);
- VEC_free (slp_instance, heap, BB_VINFO_SLP_INSTANCES (bb_vinfo));
+ BB_VINFO_SLP_INSTANCES (bb_vinfo).release ();
destroy_cost_data (BB_VINFO_TARGET_COST_DATA (bb_vinfo));
free (bb_vinfo);
bb->aux = NULL;
@@ -1953,11 +1945,11 @@ vect_slp_analyze_node_operations (bb_vec_info bb_vinfo, slp_tree node)
if (!node)
return true;
- FOR_EACH_VEC_ELT (slp_void_p, SLP_TREE_CHILDREN (node), i, child)
+ FOR_EACH_VEC_ELT (SLP_TREE_CHILDREN (node), i, child)
if (!vect_slp_analyze_node_operations (bb_vinfo, (slp_tree) child))
return false;
- FOR_EACH_VEC_ELT (gimple, SLP_TREE_SCALAR_STMTS (node), i, stmt)
+ FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), i, stmt)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
gcc_assert (stmt_info);
@@ -1977,23 +1969,23 @@ vect_slp_analyze_node_operations (bb_vec_info bb_vinfo, slp_tree node)
static bool
vect_slp_analyze_operations (bb_vec_info bb_vinfo)
{
- VEC (slp_instance, heap) *slp_instances = BB_VINFO_SLP_INSTANCES (bb_vinfo);
+ vec<slp_instance> slp_instances = BB_VINFO_SLP_INSTANCES (bb_vinfo);
slp_instance instance;
int i;
- for (i = 0; VEC_iterate (slp_instance, slp_instances, i, instance); )
+ for (i = 0; slp_instances.iterate (i, &instance); )
{
if (!vect_slp_analyze_node_operations (bb_vinfo,
SLP_INSTANCE_TREE (instance)))
{
vect_free_slp_instance (instance);
- VEC_ordered_remove (slp_instance, slp_instances, i);
+ slp_instances.ordered_remove (i);
}
else
i++;
}
- if (!VEC_length (slp_instance, slp_instances))
+ if (!slp_instances.length ())
return false;
return true;
@@ -2004,7 +1996,7 @@ vect_slp_analyze_operations (bb_vec_info bb_vinfo)
static bool
vect_bb_vectorization_profitable_p (bb_vec_info bb_vinfo)
{
- VEC (slp_instance, heap) *slp_instances = BB_VINFO_SLP_INSTANCES (bb_vinfo);
+ vec<slp_instance> slp_instances = BB_VINFO_SLP_INSTANCES (bb_vinfo);
slp_instance instance;
int i, j;
unsigned int vec_inside_cost = 0, vec_outside_cost = 0, scalar_cost = 0;
@@ -2019,11 +2011,11 @@ vect_bb_vectorization_profitable_p (bb_vec_info bb_vinfo)
stmt_info_for_cost *ci;
/* Calculate vector costs. */
- FOR_EACH_VEC_ELT (slp_instance, slp_instances, i, instance)
+ FOR_EACH_VEC_ELT (slp_instances, i, instance)
{
body_cost_vec = SLP_INSTANCE_BODY_COST_VEC (instance);
- FOR_EACH_VEC_ELT (stmt_info_for_cost, body_cost_vec, j, ci)
+ FOR_EACH_VEC_ELT (body_cost_vec, j, ci)
{
stmt_info = ci->stmt ? vinfo_for_stmt (ci->stmt) : NULL;
(void) add_stmt_cost (target_cost_data, ci->count, ci->kind,
@@ -2084,8 +2076,8 @@ static bb_vec_info
vect_slp_analyze_bb_1 (basic_block bb)
{
bb_vec_info bb_vinfo;
- VEC (ddr_p, heap) *ddrs;
- VEC (slp_instance, heap) *slp_instances;
+ vec<ddr_p> ddrs;
+ vec<slp_instance> slp_instances;
slp_instance instance;
int i;
int min_vf = 2;
@@ -2107,7 +2099,7 @@ vect_slp_analyze_bb_1 (basic_block bb)
}
ddrs = BB_VINFO_DDRS (bb_vinfo);
- if (!VEC_length (ddr_p, ddrs))
+ if (!ddrs.length ())
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
@@ -2171,7 +2163,7 @@ vect_slp_analyze_bb_1 (basic_block bb)
/* Mark all the statements that we want to vectorize as pure SLP and
relevant. */
- FOR_EACH_VEC_ELT (slp_instance, slp_instances, i, instance)
+ FOR_EACH_VEC_ELT (slp_instances, i, instance)
{
vect_mark_slp_stmts (SLP_INSTANCE_TREE (instance), pure_slp, -1);
vect_mark_slp_stmts_relevant (SLP_INSTANCE_TREE (instance));
@@ -2286,7 +2278,7 @@ void
vect_update_slp_costs_according_to_vf (loop_vec_info loop_vinfo)
{
unsigned int i, j, vf = LOOP_VINFO_VECT_FACTOR (loop_vinfo);
- VEC (slp_instance, heap) *slp_instances = LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
+ vec<slp_instance> slp_instances = LOOP_VINFO_SLP_INSTANCES (loop_vinfo);
slp_instance instance;
stmt_vector_for_cost body_cost_vec;
stmt_info_for_cost *si;
@@ -2296,7 +2288,7 @@ vect_update_slp_costs_according_to_vf (loop_vec_info loop_vinfo)
dump_printf_loc (MSG_NOTE, vect_location,
"=== vect_update_slp_costs_according_to_vf ===");
- FOR_EACH_VEC_ELT (slp_instance, slp_instances, i, instance)
+ FOR_EACH_VEC_ELT (slp_instances, i, instance)
{
/* We assume that costs are linear in ncopies. */
int ncopies = vf / SLP_INSTANCE_UNROLLING_FACTOR (instance);
@@ -2306,7 +2298,7 @@ vect_update_slp_costs_according_to_vf (loop_vec_info loop_vinfo)
isn't known beforehand. */
body_cost_vec = SLP_INSTANCE_BODY_COST_VEC (instance);
- FOR_EACH_VEC_ELT (stmt_info_for_cost, body_cost_vec, j, si)
+ FOR_EACH_VEC_ELT (body_cost_vec, j, si)
(void) add_stmt_cost (data, si->count * ncopies, si->kind,
vinfo_for_stmt (si->stmt), si->misalign,
vect_body);
@@ -2323,12 +2315,12 @@ vect_update_slp_costs_according_to_vf (loop_vec_info loop_vinfo)
static void
vect_get_constant_vectors (tree op, slp_tree slp_node,
- VEC (tree, heap) **vec_oprnds,
+ vec<tree> *vec_oprnds,
unsigned int op_num, unsigned int number_of_vectors,
int reduc_index)
{
- VEC (gimple, heap) *stmts = SLP_TREE_SCALAR_STMTS (slp_node);
- gimple stmt = VEC_index (gimple, stmts, 0);
+ vec<gimple> stmts = SLP_TREE_SCALAR_STMTS (slp_node);
+ gimple stmt = stmts[0];
stmt_vec_info stmt_vinfo = vinfo_for_stmt (stmt);
unsigned nunits;
tree vec_cst;
@@ -2336,10 +2328,11 @@ vect_get_constant_vectors (tree op, slp_tree slp_node,
unsigned j, number_of_places_left_in_vector;
tree vector_type;
tree vop;
- int group_size = VEC_length (gimple, stmts);
+ int group_size = stmts.length ();
unsigned int vec_num, i;
unsigned number_of_copies = 1;
- VEC (tree, heap) *voprnds = VEC_alloc (tree, heap, number_of_vectors);
+ vec<tree> voprnds;
+ voprnds.create (number_of_vectors);
bool constant_p, is_store;
tree neutral_op = NULL;
enum tree_code code = gimple_expr_code (stmt);
@@ -2436,7 +2429,7 @@ vect_get_constant_vectors (tree op, slp_tree slp_node,
elts = XALLOCAVEC (tree, nunits);
for (j = 0; j < number_of_copies; j++)
{
- for (i = group_size - 1; VEC_iterate (gimple, stmts, i, stmt); i--)
+ for (i = group_size - 1; stmts.iterate (i, &stmt); i--)
{
if (is_store)
op = gimple_assign_rhs1 (stmt);
@@ -2536,20 +2529,18 @@ vect_get_constant_vectors (tree op, slp_tree slp_node,
vec_cst = build_vector (vector_type, elts);
else
{
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *v;
unsigned k;
- v = VEC_alloc (constructor_elt, gc, nunits);
+ vec_alloc (v, nunits);
for (k = 0; k < nunits; ++k)
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[k]);
vec_cst = build_constructor (vector_type, v);
}
- VEC_quick_push (tree, voprnds,
- vect_init_vector (stmt, vec_cst,
- vector_type, NULL));
+ voprnds.quick_push (vect_init_vector (stmt, vec_cst,
+ vector_type, NULL));
if (ctor_seq != NULL)
{
- gimple init_stmt
- = SSA_NAME_DEF_STMT (VEC_last (tree, voprnds));
+ gimple init_stmt = SSA_NAME_DEF_STMT (voprnds.last ());
gimple_stmt_iterator gsi = gsi_for_stmt (init_stmt);
gsi_insert_seq_before_without_update (&gsi, ctor_seq,
GSI_SAME_STMT);
@@ -2561,20 +2552,20 @@ vect_get_constant_vectors (tree op, slp_tree slp_node,
/* Since the vectors are created in the reverse order, we should invert
them. */
- vec_num = VEC_length (tree, voprnds);
+ vec_num = voprnds.length ();
for (j = vec_num; j != 0; j--)
{
- vop = VEC_index (tree, voprnds, j - 1);
- VEC_quick_push (tree, *vec_oprnds, vop);
+ vop = voprnds[j - 1];
+ vec_oprnds->quick_push (vop);
}
- VEC_free (tree, heap, voprnds);
+ voprnds.release ();
/* In case that VF is greater than the unrolling factor needed for the SLP
group of stmts, NUMBER_OF_VECTORS to be created is greater than
NUMBER_OF_SCALARS/NUNITS or NUNITS/NUMBER_OF_SCALARS, and hence we have
to replicate the vectors. */
- while (number_of_vectors > VEC_length (tree, *vec_oprnds))
+ while (number_of_vectors > vec_oprnds->length ())
{
tree neutral_vec = NULL;
@@ -2583,12 +2574,12 @@ vect_get_constant_vectors (tree op, slp_tree slp_node,
if (!neutral_vec)
neutral_vec = build_vector_from_val (vector_type, neutral_op);
- VEC_quick_push (tree, *vec_oprnds, neutral_vec);
+ vec_oprnds->quick_push (neutral_vec);
}
else
{
- for (i = 0; VEC_iterate (tree, *vec_oprnds, i, vop) && i < vec_num; i++)
- VEC_quick_push (tree, *vec_oprnds, vop);
+ for (i = 0; vec_oprnds->iterate (i, &vop) && i < vec_num; i++)
+ vec_oprnds->quick_push (vop);
}
}
}
@@ -2598,19 +2589,19 @@ vect_get_constant_vectors (tree op, slp_tree slp_node,
vectorized def-stmts. */
static void
-vect_get_slp_vect_defs (slp_tree slp_node, VEC (tree,heap) **vec_oprnds)
+vect_get_slp_vect_defs (slp_tree slp_node, vec<tree> *vec_oprnds)
{
tree vec_oprnd;
gimple vec_def_stmt;
unsigned int i;
- gcc_assert (SLP_TREE_VEC_STMTS (slp_node));
+ gcc_assert (SLP_TREE_VEC_STMTS (slp_node).exists ());
- FOR_EACH_VEC_ELT (gimple, SLP_TREE_VEC_STMTS (slp_node), i, vec_def_stmt)
+ FOR_EACH_VEC_ELT (SLP_TREE_VEC_STMTS (slp_node), i, vec_def_stmt)
{
gcc_assert (vec_def_stmt);
vec_oprnd = gimple_get_lhs (vec_def_stmt);
- VEC_quick_push (tree, *vec_oprnds, vec_oprnd);
+ vec_oprnds->quick_push (vec_oprnd);
}
}
@@ -2623,20 +2614,20 @@ vect_get_slp_vect_defs (slp_tree slp_node, VEC (tree,heap) **vec_oprnds)
vect_get_slp_vect_defs () to retrieve them. */
void
-vect_get_slp_defs (VEC (tree, heap) *ops, slp_tree slp_node,
- VEC (slp_void_p, heap) **vec_oprnds, int reduc_index)
+vect_get_slp_defs (vec<tree> ops, slp_tree slp_node,
+ vec<slp_void_p> *vec_oprnds, int reduc_index)
{
gimple first_stmt, first_def;
int number_of_vects = 0, i;
unsigned int child_index = 0;
HOST_WIDE_INT lhs_size_unit, rhs_size_unit;
slp_tree child = NULL;
- VEC (tree, heap) *vec_defs;
+ vec<tree> *vec_defs;
tree oprnd, def_lhs;
bool vectorized_defs;
- first_stmt = VEC_index (gimple, SLP_TREE_SCALAR_STMTS (slp_node), 0);
- FOR_EACH_VEC_ELT (tree, ops, i, oprnd)
+ first_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[0];
+ FOR_EACH_VEC_ELT (ops, i, oprnd)
{
/* For each operand we check if it has vectorized definitions in a child
node or we need to create them (for invariants and constants). We
@@ -2645,12 +2636,10 @@ vect_get_slp_defs (VEC (tree, heap) *ops, slp_tree slp_node,
vect_get_constant_vectors (), and not advance CHILD_INDEX in order
to check this child node for the next operand. */
vectorized_defs = false;
- if (VEC_length (slp_void_p, SLP_TREE_CHILDREN (slp_node)) > child_index)
+ if (SLP_TREE_CHILDREN (slp_node).length () > child_index)
{
- child = (slp_tree) VEC_index (slp_void_p,
- SLP_TREE_CHILDREN (slp_node),
- child_index);
- first_def = VEC_index (gimple, SLP_TREE_SCALAR_STMTS (child), 0);
+ child = (slp_tree) SLP_TREE_CHILDREN (slp_node)[child_index];
+ first_def = SLP_TREE_SCALAR_STMTS (child)[0];
/* In the end of a pattern sequence we have a use of the original stmt,
so we need to compare OPRND with the original def. */
@@ -2695,19 +2684,19 @@ vect_get_slp_defs (VEC (tree, heap) *ops, slp_tree slp_node,
}
/* Allocate memory for vectorized defs. */
- vec_defs = VEC_alloc (tree, heap, number_of_vects);
+ vec_alloc (vec_defs, number_of_vects);
/* For reduction defs we call vect_get_constant_vectors (), since we are
looking for initial loop invariant values. */
if (vectorized_defs && reduc_index == -1)
/* The defs are already vectorized. */
- vect_get_slp_vect_defs (child, &vec_defs);
+ vect_get_slp_vect_defs (child, vec_defs);
else
/* Build vectors from scalar defs. */
- vect_get_constant_vectors (oprnd, slp_node, &vec_defs, i,
+ vect_get_constant_vectors (oprnd, slp_node, vec_defs, i,
number_of_vects, reduc_index);
- VEC_quick_push (slp_void_p, *vec_oprnds, (slp_void_p) vec_defs);
+ vec_oprnds->quick_push ((slp_void_p) vec_defs);
/* For reductions, we only need initial values. */
if (reduc_index != -1)
@@ -2729,7 +2718,7 @@ static inline void
vect_create_mask_and_perm (gimple stmt, gimple next_scalar_stmt,
tree mask, int first_vec_indx, int second_vec_indx,
gimple_stmt_iterator *gsi, slp_tree node,
- tree vectype, VEC(tree,heap) *dr_chain,
+ tree vectype, vec<tree> dr_chain,
int ncopies, int vect_stmts_counter)
{
tree perm_dest;
@@ -2742,15 +2731,15 @@ vect_create_mask_and_perm (gimple stmt, gimple next_scalar_stmt,
/* Initialize the vect stmts of NODE to properly insert the generated
stmts later. */
- for (i = VEC_length (gimple, SLP_TREE_VEC_STMTS (node));
+ for (i = SLP_TREE_VEC_STMTS (node).length ();
i < (int) SLP_TREE_NUMBER_OF_VEC_STMTS (node); i++)
- VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (node), NULL);
+ SLP_TREE_VEC_STMTS (node).quick_push (NULL);
perm_dest = vect_create_destination_var (gimple_assign_lhs (stmt), vectype);
for (i = 0; i < ncopies; i++)
{
- first_vec = VEC_index (tree, dr_chain, first_vec_indx);
- second_vec = VEC_index (tree, dr_chain, second_vec_indx);
+ first_vec = dr_chain[first_vec_indx];
+ second_vec = dr_chain[second_vec_indx];
/* Generate the permute statement. */
perm_stmt = gimple_build_assign_with_ops (VEC_PERM_EXPR, perm_dest,
@@ -2760,8 +2749,7 @@ vect_create_mask_and_perm (gimple stmt, gimple next_scalar_stmt,
vect_finish_stmt_generation (stmt, perm_stmt, gsi);
/* Store the vector statement in NODE. */
- VEC_replace (gimple, SLP_TREE_VEC_STMTS (node),
- stride * i + vect_stmts_counter, perm_stmt);
+ SLP_TREE_VEC_STMTS (node)[stride * i + vect_stmts_counter] = perm_stmt;
first_vec_indx += stride;
second_vec_indx += stride;
@@ -2859,7 +2847,7 @@ vect_get_mask_element (gimple stmt, int first_mask_element, int m,
If ANALYZE_ONLY is TRUE, only check that it is possible to create valid
permute statements for SLP_NODE_INSTANCE. */
bool
-vect_transform_slp_perm_load (gimple stmt, VEC (tree, heap) *dr_chain,
+vect_transform_slp_perm_load (gimple stmt, vec<tree> dr_chain,
gimple_stmt_iterator *gsi, int vf,
slp_instance slp_node_instance, bool analyze_only)
{
@@ -2931,7 +2919,7 @@ vect_transform_slp_perm_load (gimple stmt, VEC (tree, heap) *dr_chain,
we need the second and the third vectors: {b1,c1,a2,b2} and
{c2,a3,b3,c3}. */
- FOR_EACH_VEC_ELT (slp_tree, SLP_INSTANCE_LOADS (slp_node_instance), i, node)
+ FOR_EACH_VEC_ELT (SLP_INSTANCE_LOADS (slp_node_instance), i, node)
{
scalar_index = 0;
index = 0;
@@ -2991,8 +2979,8 @@ vect_transform_slp_perm_load (gimple stmt, VEC (tree, heap) *dr_chain,
second_vec_index = vec_index;
}
- next_scalar_stmt = VEC_index (gimple,
- SLP_TREE_SCALAR_STMTS (node), scalar_index++);
+ next_scalar_stmt
+ = SLP_TREE_SCALAR_STMTS (node)[scalar_index++];
vect_create_mask_and_perm (stmt, next_scalar_stmt,
mask_vec, first_vec_index, second_vec_index,
@@ -3028,11 +3016,11 @@ vect_schedule_slp_instance (slp_tree node, slp_instance instance,
if (!node)
return false;
- FOR_EACH_VEC_ELT (slp_void_p, SLP_TREE_CHILDREN (node), i, child)
+ FOR_EACH_VEC_ELT (SLP_TREE_CHILDREN (node), i, child)
vect_schedule_slp_instance ((slp_tree) child, instance,
vectorization_factor);
- stmt = VEC_index (gimple, SLP_TREE_SCALAR_STMTS (node), 0);
+ stmt = SLP_TREE_SCALAR_STMTS (node)[0];
stmt_info = vinfo_for_stmt (stmt);
/* VECTYPE is the type of the destination. */
@@ -3049,22 +3037,21 @@ vect_schedule_slp_instance (slp_tree node, slp_instance instance,
/* In case of load permutation we have to allocate vectorized statements for
all the nodes that participate in that permutation. */
- if (SLP_INSTANCE_LOAD_PERMUTATION (instance))
+ if (SLP_INSTANCE_LOAD_PERMUTATION (instance).exists ())
{
- FOR_EACH_VEC_ELT (slp_tree, SLP_INSTANCE_LOADS (instance), i, loads_node)
+ FOR_EACH_VEC_ELT (SLP_INSTANCE_LOADS (instance), i, loads_node)
{
- if (!SLP_TREE_VEC_STMTS (loads_node))
+ if (!SLP_TREE_VEC_STMTS (loads_node).exists ())
{
- SLP_TREE_VEC_STMTS (loads_node) = VEC_alloc (gimple, heap,
- vec_stmts_size);
+ SLP_TREE_VEC_STMTS (loads_node).create (vec_stmts_size);
SLP_TREE_NUMBER_OF_VEC_STMTS (loads_node) = vec_stmts_size;
}
}
}
- if (!SLP_TREE_VEC_STMTS (node))
+ if (!SLP_TREE_VEC_STMTS (node).exists ())
{
- SLP_TREE_VEC_STMTS (node) = VEC_alloc (gimple, heap, vec_stmts_size);
+ SLP_TREE_VEC_STMTS (node).create (vec_stmts_size);
SLP_TREE_NUMBER_OF_VEC_STMTS (node) = vec_stmts_size;
}
@@ -3079,7 +3066,7 @@ vect_schedule_slp_instance (slp_tree node, slp_instance instance,
if (SLP_INSTANCE_FIRST_LOAD_STMT (instance)
&& STMT_VINFO_GROUPED_ACCESS (stmt_info)
&& !REFERENCE_CLASS_P (gimple_get_lhs (stmt))
- && SLP_INSTANCE_LOAD_PERMUTATION (instance))
+ && SLP_INSTANCE_LOAD_PERMUTATION (instance).exists ())
si = gsi_for_stmt (SLP_INSTANCE_FIRST_LOAD_STMT (instance));
else if (is_pattern_stmt_p (stmt_info))
si = gsi_for_stmt (STMT_VINFO_RELATED_STMT (stmt_info));
@@ -3128,10 +3115,10 @@ vect_remove_slp_scalar_calls (slp_tree node)
if (!node)
return;
- FOR_EACH_VEC_ELT (slp_void_p, SLP_TREE_CHILDREN (node), i, child)
+ FOR_EACH_VEC_ELT (SLP_TREE_CHILDREN (node), i, child)
vect_remove_slp_scalar_calls ((slp_tree) child);
- FOR_EACH_VEC_ELT (gimple, SLP_TREE_SCALAR_STMTS (node), i, stmt)
+ FOR_EACH_VEC_ELT (SLP_TREE_SCALAR_STMTS (node), i, stmt)
{
if (!is_gimple_call (stmt) || gimple_bb (stmt) == NULL)
continue;
@@ -3156,7 +3143,7 @@ vect_remove_slp_scalar_calls (slp_tree node)
bool
vect_schedule_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
{
- VEC (slp_instance, heap) *slp_instances;
+ vec<slp_instance> slp_instances;
slp_instance instance;
unsigned int i, vf;
bool is_store = false;
@@ -3172,7 +3159,7 @@ vect_schedule_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
vf = 1;
}
- FOR_EACH_VEC_ELT (slp_instance, slp_instances, i, instance)
+ FOR_EACH_VEC_ELT (slp_instances, i, instance)
{
/* Schedule the tree of INSTANCE. */
is_store = vect_schedule_slp_instance (SLP_INSTANCE_TREE (instance),
@@ -3182,7 +3169,7 @@ vect_schedule_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
"vectorizing stmts using SLP.");
}
- FOR_EACH_VEC_ELT (slp_instance, slp_instances, i, instance)
+ FOR_EACH_VEC_ELT (slp_instances, i, instance)
{
slp_tree root = SLP_INSTANCE_TREE (instance);
gimple store;
@@ -3191,7 +3178,7 @@ vect_schedule_slp (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
vect_remove_slp_scalar_calls (root);
- for (j = 0; VEC_iterate (gimple, SLP_TREE_SCALAR_STMTS (root), j, store)
+ for (j = 0; SLP_TREE_SCALAR_STMTS (root).iterate (j, &store)
&& j < SLP_INSTANCE_GROUP_SIZE (instance); j++)
{
if (!STMT_VINFO_DATA_REF (vinfo_for_stmt (store)))
diff --git a/gcc/tree-vect-stmts.c b/gcc/tree-vect-stmts.c
index 2f4be11703b..cfe1275f892 100644
--- a/gcc/tree-vect-stmts.c
+++ b/gcc/tree-vect-stmts.c
@@ -181,7 +181,7 @@ create_array_ref (tree type, tree ptr, struct data_reference *first_dr)
Mark STMT as "relevant for vectorization" and add it to WORKLIST. */
static void
-vect_mark_relevant (VEC(gimple,heap) **worklist, gimple stmt,
+vect_mark_relevant (vec<gimple> *worklist, gimple stmt,
enum vect_relevant relevant, bool live_p,
bool used_in_pattern)
{
@@ -271,7 +271,7 @@ vect_mark_relevant (VEC(gimple,heap) **worklist, gimple stmt,
return;
}
- VEC_safe_push (gimple, heap, *worklist, stmt);
+ worklist->safe_push (stmt);
}
@@ -419,7 +419,7 @@ exist_non_indexing_operands_for_use_p (tree use, gimple stmt)
static bool
process_use (gimple stmt, tree use, loop_vec_info loop_vinfo, bool live_p,
- enum vect_relevant relevant, VEC(gimple,heap) **worklist,
+ enum vect_relevant relevant, vec<gimple> *worklist,
bool force)
{
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
@@ -575,7 +575,7 @@ process_use (gimple stmt, tree use, loop_vec_info loop_vinfo, bool live_p,
bool
vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
{
- VEC(gimple,heap) *worklist;
+ vec<gimple> worklist;
struct loop *loop = LOOP_VINFO_LOOP (loop_vinfo);
basic_block *bbs = LOOP_VINFO_BBS (loop_vinfo);
unsigned int nbbs = loop->num_nodes;
@@ -593,7 +593,7 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
dump_printf_loc (MSG_NOTE, vect_location,
"=== vect_mark_stmts_to_be_vectorized ===");
- worklist = VEC_alloc (gimple, heap, 64);
+ worklist.create (64);
/* 1. Init worklist. */
for (i = 0; i < nbbs; i++)
@@ -626,12 +626,12 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
}
/* 2. Process_worklist */
- while (VEC_length (gimple, worklist) > 0)
+ while (worklist.length () > 0)
{
use_operand_p use_p;
ssa_op_iter iter;
- stmt = VEC_pop (gimple, worklist);
+ stmt = worklist.pop ();
if (dump_enabled_p ())
{
dump_printf_loc (MSG_NOTE, vect_location, "worklist: examine stmt: ");
@@ -680,7 +680,7 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"unsupported use of reduction.");
- VEC_free (gimple, heap, worklist);
+ worklist.release ();
return false;
}
@@ -696,7 +696,7 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"unsupported use of nested cycle.");
- VEC_free (gimple, heap, worklist);
+ worklist.release ();
return false;
}
@@ -711,7 +711,7 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"unsupported use of double reduction.");
- VEC_free (gimple, heap, worklist);
+ worklist.release ();
return false;
}
@@ -740,7 +740,7 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
|| !process_use (stmt, TREE_OPERAND (op, 1), loop_vinfo,
live_p, relevant, &worklist, false))
{
- VEC_free (gimple, heap, worklist);
+ worklist.release ();
return false;
}
i = 2;
@@ -751,7 +751,7 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
if (!process_use (stmt, op, loop_vinfo, live_p, relevant,
&worklist, false))
{
- VEC_free (gimple, heap, worklist);
+ worklist.release ();
return false;
}
}
@@ -764,7 +764,7 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
if (!process_use (stmt, arg, loop_vinfo, live_p, relevant,
&worklist, false))
{
- VEC_free (gimple, heap, worklist);
+ worklist.release ();
return false;
}
}
@@ -777,7 +777,7 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
if (!process_use (stmt, op, loop_vinfo, live_p, relevant,
&worklist, false))
{
- VEC_free (gimple, heap, worklist);
+ worklist.release ();
return false;
}
}
@@ -790,13 +790,13 @@ vect_mark_stmts_to_be_vectorized (loop_vec_info loop_vinfo)
if (!process_use (stmt, off, loop_vinfo, live_p, relevant,
&worklist, true))
{
- VEC_free (gimple, heap, worklist);
+ worklist.release ();
return false;
}
}
} /* while worklist */
- VEC_free (gimple, heap, worklist);
+ worklist.release ();
return true;
}
@@ -930,7 +930,7 @@ vect_model_store_cost (stmt_vec_info stmt_info, int ncopies,
{
if (slp_node)
{
- first_stmt = VEC_index (gimple, SLP_TREE_SCALAR_STMTS (slp_node), 0);
+ first_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[0];
group_size = 1;
}
else
@@ -1552,19 +1552,19 @@ vect_get_vec_def_for_stmt_copy (enum vect_def_type dt, tree vec_oprnd)
static void
vect_get_vec_defs_for_stmt_copy (enum vect_def_type *dt,
- VEC(tree,heap) **vec_oprnds0,
- VEC(tree,heap) **vec_oprnds1)
+ vec<tree> *vec_oprnds0,
+ vec<tree> *vec_oprnds1)
{
- tree vec_oprnd = VEC_pop (tree, *vec_oprnds0);
+ tree vec_oprnd = vec_oprnds0->pop ();
vec_oprnd = vect_get_vec_def_for_stmt_copy (dt[0], vec_oprnd);
- VEC_quick_push (tree, *vec_oprnds0, vec_oprnd);
+ vec_oprnds0->quick_push (vec_oprnd);
- if (vec_oprnds1 && *vec_oprnds1)
+ if (vec_oprnds1 && vec_oprnds1->length ())
{
- vec_oprnd = VEC_pop (tree, *vec_oprnds1);
+ vec_oprnd = vec_oprnds1->pop ();
vec_oprnd = vect_get_vec_def_for_stmt_copy (dt[1], vec_oprnd);
- VEC_quick_push (tree, *vec_oprnds1, vec_oprnd);
+ vec_oprnds1->quick_push (vec_oprnd);
}
}
@@ -1575,42 +1575,44 @@ vect_get_vec_defs_for_stmt_copy (enum vect_def_type *dt,
void
vect_get_vec_defs (tree op0, tree op1, gimple stmt,
- VEC (tree, heap) **vec_oprnds0,
- VEC (tree, heap) **vec_oprnds1,
+ vec<tree> *vec_oprnds0,
+ vec<tree> *vec_oprnds1,
slp_tree slp_node, int reduc_index)
{
if (slp_node)
{
int nops = (op1 == NULL_TREE) ? 1 : 2;
- VEC (tree, heap) *ops = VEC_alloc (tree, heap, nops);
- VEC (slp_void_p, heap) *vec_defs = VEC_alloc (slp_void_p, heap, nops);
+ vec<tree> ops;
+ ops.create (nops);
+ vec<slp_void_p> vec_defs;
+ vec_defs.create (nops);
- VEC_quick_push (tree, ops, op0);
+ ops.quick_push (op0);
if (op1)
- VEC_quick_push (tree, ops, op1);
+ ops.quick_push (op1);
vect_get_slp_defs (ops, slp_node, &vec_defs, reduc_index);
- *vec_oprnds0 = (VEC (tree, heap) *) VEC_index (slp_void_p, vec_defs, 0);
+ *vec_oprnds0 = *((vec<tree> *) vec_defs[0]);
if (op1)
- *vec_oprnds1 = (VEC (tree, heap) *) VEC_index (slp_void_p, vec_defs, 1);
+ *vec_oprnds1 = *((vec<tree> *) vec_defs[1]);
- VEC_free (tree, heap, ops);
- VEC_free (slp_void_p, heap, vec_defs);
+ ops.release ();
+ vec_defs.release ();
}
else
{
tree vec_oprnd;
- *vec_oprnds0 = VEC_alloc (tree, heap, 1);
+ vec_oprnds0->create (1);
vec_oprnd = vect_get_vec_def_for_operand (op0, stmt, NULL);
- VEC_quick_push (tree, *vec_oprnds0, vec_oprnd);
+ vec_oprnds0->quick_push (vec_oprnd);
if (op1)
{
- *vec_oprnds1 = VEC_alloc (tree, heap, 1);
+ vec_oprnds1->create (1);
vec_oprnd = vect_get_vec_def_for_operand (op1, stmt, NULL);
- VEC_quick_push (tree, *vec_oprnds1, vec_oprnd);
+ vec_oprnds1->quick_push (vec_oprnd);
}
}
}
@@ -1720,7 +1722,7 @@ vectorizable_call (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
= {vect_unknown_def_type, vect_unknown_def_type, vect_unknown_def_type};
gimple new_stmt = NULL;
int ncopies, j;
- VEC(tree, heap) *vargs = NULL;
+ vec<tree> vargs = vec<tree>();
enum { NARROW, NONE, WIDEN } modifier;
size_t i, nargs;
tree lhs;
@@ -1875,50 +1877,43 @@ vectorizable_call (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
{
/* Build argument list for the vectorized call. */
if (j == 0)
- vargs = VEC_alloc (tree, heap, nargs);
+ vargs.create (nargs);
else
- VEC_truncate (tree, vargs, 0);
+ vargs.truncate (0);
if (slp_node)
{
- VEC (slp_void_p, heap) *vec_defs
- = VEC_alloc (slp_void_p, heap, nargs);
- VEC (tree, heap) *vec_oprnds0;
+ vec<slp_void_p> vec_defs;
+ vec_defs.create (nargs);
+ vec<tree> vec_oprnds0;
for (i = 0; i < nargs; i++)
- VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
+ vargs.quick_push (gimple_call_arg (stmt, i));
vect_get_slp_defs (vargs, slp_node, &vec_defs, -1);
- vec_oprnds0
- = (VEC (tree, heap) *) VEC_index (slp_void_p, vec_defs, 0);
+ vec_oprnds0 = *((vec<tree> *) vec_defs[0]);
/* Arguments are ready. Create the new vector stmt. */
- FOR_EACH_VEC_ELT (tree, vec_oprnds0, i, vec_oprnd0)
+ FOR_EACH_VEC_ELT (vec_oprnds0, i, vec_oprnd0)
{
size_t k;
for (k = 0; k < nargs; k++)
{
- VEC (tree, heap) *vec_oprndsk
- = (VEC (tree, heap) *)
- VEC_index (slp_void_p, vec_defs, k);
- VEC_replace (tree, vargs, k,
- VEC_index (tree, vec_oprndsk, i));
+ vec<tree> vec_oprndsk = *((vec<tree> *) vec_defs[k]);
+ vargs[k] = vec_oprndsk[i];
}
new_stmt = gimple_build_call_vec (fndecl, vargs);
new_temp = make_ssa_name (vec_dest, new_stmt);
gimple_call_set_lhs (new_stmt, new_temp);
vect_finish_stmt_generation (stmt, new_stmt, gsi);
- VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node),
- new_stmt);
+ SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
}
for (i = 0; i < nargs; i++)
{
- VEC (tree, heap) *vec_oprndsi
- = (VEC (tree, heap) *)
- VEC_index (slp_void_p, vec_defs, i);
- VEC_free (tree, heap, vec_oprndsi);
+ vec<tree> vec_oprndsi = *((vec<tree> *) vec_defs[i]);
+ vec_oprndsi.release ();
}
- VEC_free (slp_void_p, heap, vec_defs);
+ vec_defs.release ();
continue;
}
@@ -1935,7 +1930,7 @@ vectorizable_call (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
= vect_get_vec_def_for_stmt_copy (dt[i], vec_oprnd0);
}
- VEC_quick_push (tree, vargs, vec_oprnd0);
+ vargs.quick_push (vec_oprnd0);
}
new_stmt = gimple_build_call_vec (fndecl, vargs);
@@ -1958,54 +1953,45 @@ vectorizable_call (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
{
/* Build argument list for the vectorized call. */
if (j == 0)
- vargs = VEC_alloc (tree, heap, nargs * 2);
+ vargs.create (nargs * 2);
else
- VEC_truncate (tree, vargs, 0);
+ vargs.truncate (0);
if (slp_node)
{
- VEC (slp_void_p, heap) *vec_defs
- = VEC_alloc (slp_void_p, heap, nargs);
- VEC (tree, heap) *vec_oprnds0;
+ vec<slp_void_p> vec_defs;
+ vec_defs.create (nargs);
+ vec<tree> vec_oprnds0;
for (i = 0; i < nargs; i++)
- VEC_quick_push (tree, vargs, gimple_call_arg (stmt, i));
+ vargs.quick_push (gimple_call_arg (stmt, i));
vect_get_slp_defs (vargs, slp_node, &vec_defs, -1);
- vec_oprnds0
- = (VEC (tree, heap) *) VEC_index (slp_void_p, vec_defs, 0);
+ vec_oprnds0 = *((vec<tree> *) vec_defs[0]);
/* Arguments are ready. Create the new vector stmt. */
- for (i = 0; VEC_iterate (tree, vec_oprnds0, i, vec_oprnd0);
- i += 2)
+ for (i = 0; vec_oprnds0.iterate (i, &vec_oprnd0); i += 2)
{
size_t k;
- VEC_truncate (tree, vargs, 0);
+ vargs.truncate (0);
for (k = 0; k < nargs; k++)
{
- VEC (tree, heap) *vec_oprndsk
- = (VEC (tree, heap) *)
- VEC_index (slp_void_p, vec_defs, k);
- VEC_quick_push (tree, vargs,
- VEC_index (tree, vec_oprndsk, i));
- VEC_quick_push (tree, vargs,
- VEC_index (tree, vec_oprndsk, i + 1));
+ vec<tree> vec_oprndsk = *((vec<tree> *) vec_defs[k]);
+ vargs.quick_push (vec_oprndsk[i]);
+ vargs.quick_push (vec_oprndsk[i + 1]);
}
new_stmt = gimple_build_call_vec (fndecl, vargs);
new_temp = make_ssa_name (vec_dest, new_stmt);
gimple_call_set_lhs (new_stmt, new_temp);
vect_finish_stmt_generation (stmt, new_stmt, gsi);
- VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node),
- new_stmt);
+ SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
}
for (i = 0; i < nargs; i++)
{
- VEC (tree, heap) *vec_oprndsi
- = (VEC (tree, heap) *)
- VEC_index (slp_void_p, vec_defs, i);
- VEC_free (tree, heap, vec_oprndsi);
+ vec<tree> vec_oprndsi = *((vec<tree> *) vec_defs[i]);
+ vec_oprndsi.release ();
}
- VEC_free (slp_void_p, heap, vec_defs);
+ vec_defs.release ();
continue;
}
@@ -2028,8 +2014,8 @@ vectorizable_call (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
= vect_get_vec_def_for_stmt_copy (dt[i], vec_oprnd0);
}
- VEC_quick_push (tree, vargs, vec_oprnd0);
- VEC_quick_push (tree, vargs, vec_oprnd1);
+ vargs.quick_push (vec_oprnd0);
+ vargs.quick_push (vec_oprnd1);
}
new_stmt = gimple_build_call_vec (fndecl, vargs);
@@ -2054,7 +2040,7 @@ vectorizable_call (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
return false;
}
- VEC_free (tree, heap, vargs);
+ vargs.release ();
/* Update the exception handling table with the vector stmt if necessary. */
if (maybe_clean_or_replace_eh_stmt (stmt, *vec_stmt))
@@ -2140,7 +2126,7 @@ vect_gen_widened_results_half (enum tree_code code,
static void
vect_get_loop_based_defs (tree *oprnd, gimple stmt, enum vect_def_type dt,
- VEC (tree, heap) **vec_oprnds, int multi_step_cvt)
+ vec<tree> *vec_oprnds, int multi_step_cvt)
{
tree vec_oprnd;
@@ -2152,11 +2138,11 @@ vect_get_loop_based_defs (tree *oprnd, gimple stmt, enum vect_def_type dt,
else
vec_oprnd = vect_get_vec_def_for_stmt_copy (dt, *oprnd);
- VEC_quick_push (tree, *vec_oprnds, vec_oprnd);
+ vec_oprnds->quick_push (vec_oprnd);
/* Get second vector operand. */
vec_oprnd = vect_get_vec_def_for_stmt_copy (dt, vec_oprnd);
- VEC_quick_push (tree, *vec_oprnds, vec_oprnd);
+ vec_oprnds->quick_push (vec_oprnd);
*oprnd = vec_oprnd;
@@ -2172,9 +2158,9 @@ vect_get_loop_based_defs (tree *oprnd, gimple stmt, enum vect_def_type dt,
recursively. */
static void
-vect_create_vectorized_demotion_stmts (VEC (tree, heap) **vec_oprnds,
+vect_create_vectorized_demotion_stmts (vec<tree> *vec_oprnds,
int multi_step_cvt, gimple stmt,
- VEC (tree, heap) *vec_dsts,
+ vec<tree> vec_dsts,
gimple_stmt_iterator *gsi,
slp_tree slp_node, enum tree_code code,
stmt_vec_info *prev_stmt_info)
@@ -2184,13 +2170,13 @@ vect_create_vectorized_demotion_stmts (VEC (tree, heap) **vec_oprnds,
gimple new_stmt;
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
- vec_dest = VEC_pop (tree, vec_dsts);
+ vec_dest = vec_dsts.pop ();
- for (i = 0; i < VEC_length (tree, *vec_oprnds); i += 2)
+ for (i = 0; i < vec_oprnds->length (); i += 2)
{
/* Create demotion operation. */
- vop0 = VEC_index (tree, *vec_oprnds, i);
- vop1 = VEC_index (tree, *vec_oprnds, i + 1);
+ vop0 = (*vec_oprnds)[i];
+ vop1 = (*vec_oprnds)[i + 1];
new_stmt = gimple_build_assign_with_ops (code, vec_dest, vop0, vop1);
new_tmp = make_ssa_name (vec_dest, new_stmt);
gimple_assign_set_lhs (new_stmt, new_tmp);
@@ -2198,14 +2184,14 @@ vect_create_vectorized_demotion_stmts (VEC (tree, heap) **vec_oprnds,
if (multi_step_cvt)
/* Store the resulting vector for next recursive call. */
- VEC_replace (tree, *vec_oprnds, i/2, new_tmp);
+ (*vec_oprnds)[i/2] = new_tmp;
else
{
/* This is the last step of the conversion sequence. Store the
vectors in SLP_NODE or in vector info of the scalar statement
(or in STMT_VINFO_RELATED_STMT chain). */
if (slp_node)
- VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node), new_stmt);
+ SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
else
{
if (!*prev_stmt_info)
@@ -2226,14 +2212,14 @@ vect_create_vectorized_demotion_stmts (VEC (tree, heap) **vec_oprnds,
{
/* At each level of recursion we have half of the operands we had at the
previous level. */
- VEC_truncate (tree, *vec_oprnds, (i+1)/2);
+ vec_oprnds->truncate ((i+1)/2);
vect_create_vectorized_demotion_stmts (vec_oprnds, multi_step_cvt - 1,
stmt, vec_dsts, gsi, slp_node,
VEC_PACK_TRUNC_EXPR,
prev_stmt_info);
}
- VEC_quick_push (tree, vec_dsts, vec_dest);
+ vec_dsts.quick_push (vec_dest);
}
@@ -2242,8 +2228,8 @@ vect_create_vectorized_demotion_stmts (VEC (tree, heap) **vec_oprnds,
the resulting vectors and call the function recursively. */
static void
-vect_create_vectorized_promotion_stmts (VEC (tree, heap) **vec_oprnds0,
- VEC (tree, heap) **vec_oprnds1,
+vect_create_vectorized_promotion_stmts (vec<tree> *vec_oprnds0,
+ vec<tree> *vec_oprnds1,
gimple stmt, tree vec_dest,
gimple_stmt_iterator *gsi,
enum tree_code code1,
@@ -2253,13 +2239,13 @@ vect_create_vectorized_promotion_stmts (VEC (tree, heap) **vec_oprnds0,
int i;
tree vop0, vop1, new_tmp1, new_tmp2;
gimple new_stmt1, new_stmt2;
- VEC (tree, heap) *vec_tmp = NULL;
+ vec<tree> vec_tmp = vec<tree>();
- vec_tmp = VEC_alloc (tree, heap, VEC_length (tree, *vec_oprnds0) * 2);
- FOR_EACH_VEC_ELT (tree, *vec_oprnds0, i, vop0)
+ vec_tmp.create (vec_oprnds0->length () * 2);
+ FOR_EACH_VEC_ELT (*vec_oprnds0, i, vop0)
{
if (op_type == binary_op)
- vop1 = VEC_index (tree, *vec_oprnds1, i);
+ vop1 = (*vec_oprnds1)[i];
else
vop1 = NULL_TREE;
@@ -2280,11 +2266,11 @@ vect_create_vectorized_promotion_stmts (VEC (tree, heap) **vec_oprnds0,
}
/* Store the results for the next step. */
- VEC_quick_push (tree, vec_tmp, new_tmp1);
- VEC_quick_push (tree, vec_tmp, new_tmp2);
+ vec_tmp.quick_push (new_tmp1);
+ vec_tmp.quick_push (new_tmp2);
}
- VEC_free (tree, heap, *vec_oprnds0);
+ vec_oprnds0->truncate (0);
*vec_oprnds0 = vec_tmp;
}
@@ -2319,11 +2305,13 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
int ncopies, i, j;
tree lhs_type, rhs_type;
enum { NARROW, NONE, WIDEN } modifier;
- VEC (tree,heap) *vec_oprnds0 = NULL, *vec_oprnds1 = NULL;
+ vec<tree> vec_oprnds0 = vec<tree>();
+ vec<tree> vec_oprnds1 = vec<tree>();
tree vop0;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
int multi_step_cvt = 0;
- VEC (tree, heap) *vec_dsts = NULL, *interm_types = NULL;
+ vec<tree> vec_dsts = vec<tree>();
+ vec<tree> interm_types = vec<tree>();
tree last_oprnd, intermediate_type, cvt_type = NULL_TREE;
int op_type;
enum machine_mode rhs_mode;
@@ -2527,7 +2515,7 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
else
{
multi_step_cvt++;
- VEC_safe_push (tree, heap, interm_types, cvt_type);
+ interm_types.safe_push (cvt_type);
cvt_type = NULL_TREE;
}
break;
@@ -2583,7 +2571,7 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
STMT_VINFO_TYPE (stmt_info) = type_promotion_vec_info_type;
vect_model_promotion_demotion_cost (stmt_info, dt, multi_step_cvt);
}
- VEC_free (tree, heap, interm_types);
+ interm_types.release ();
return true;
}
@@ -2605,20 +2593,20 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
We create vector destinations for the intermediate type (TYPES) received
from supportable_*_operation, and store them in the correct order
for future use in vect_create_vectorized_*_stmts (). */
- vec_dsts = VEC_alloc (tree, heap, multi_step_cvt + 1);
+ vec_dsts.create (multi_step_cvt + 1);
vec_dest = vect_create_destination_var (scalar_dest,
(cvt_type && modifier == WIDEN)
? cvt_type : vectype_out);
- VEC_quick_push (tree, vec_dsts, vec_dest);
+ vec_dsts.quick_push (vec_dest);
if (multi_step_cvt)
{
- for (i = VEC_length (tree, interm_types) - 1;
- VEC_iterate (tree, interm_types, i, intermediate_type); i--)
+ for (i = interm_types.length () - 1;
+ interm_types.iterate (i, &intermediate_type); i--)
{
vec_dest = vect_create_destination_var (scalar_dest,
intermediate_type);
- VEC_quick_push (tree, vec_dsts, vec_dest);
+ vec_dsts.quick_push (vec_dest);
}
}
@@ -2630,22 +2618,19 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
if (!slp_node)
{
if (modifier == NONE)
- vec_oprnds0 = VEC_alloc (tree, heap, 1);
+ vec_oprnds0.create (1);
else if (modifier == WIDEN)
{
- vec_oprnds0 = VEC_alloc (tree, heap,
- (multi_step_cvt
- ? vect_pow2 (multi_step_cvt) : 1));
+ vec_oprnds0.create (multi_step_cvt ? vect_pow2(multi_step_cvt) : 1);
if (op_type == binary_op)
- vec_oprnds1 = VEC_alloc (tree, heap, 1);
+ vec_oprnds1.create (1);
}
else
- vec_oprnds0 = VEC_alloc (tree, heap,
- 2 * (multi_step_cvt
- ? vect_pow2 (multi_step_cvt) : 1));
+ vec_oprnds0.create (
+ 2 * (multi_step_cvt ? vect_pow2 (multi_step_cvt) : 1));
}
else if (code == WIDEN_LSHIFT_EXPR)
- vec_oprnds1 = VEC_alloc (tree, heap, slp_node->vec_stmts_size);
+ vec_oprnds1.create (slp_node->vec_stmts_size);
last_oprnd = op0;
prev_stmt_info = NULL;
@@ -2660,7 +2645,7 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
else
vect_get_vec_defs_for_stmt_copy (dt, &vec_oprnds0, NULL);
- FOR_EACH_VEC_ELT (tree, vec_oprnds0, i, vop0)
+ FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)
{
/* Arguments are ready, create the new vector stmt. */
if (code1 == CALL_EXPR)
@@ -2680,8 +2665,7 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
vect_finish_stmt_generation (stmt, new_stmt, gsi);
if (slp_node)
- VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node),
- new_stmt);
+ SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
}
if (j == 0)
@@ -2713,7 +2697,7 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
for SLP_NODE. We check during the analysis that all
the shift arguments are the same. */
for (k = 0; k < slp_node->vec_stmts_size - 1; k++)
- VEC_quick_push (tree, vec_oprnds1, vec_oprnd1);
+ vec_oprnds1.quick_push (vec_oprnd1);
vect_get_vec_defs (op0, NULL_TREE, stmt, &vec_oprnds0, NULL,
slp_node, -1);
@@ -2725,7 +2709,7 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
else
{
vec_oprnd0 = vect_get_vec_def_for_operand (op0, stmt, NULL);
- VEC_quick_push (tree, vec_oprnds0, vec_oprnd0);
+ vec_oprnds0.quick_push (vec_oprnd0);
if (op_type == binary_op)
{
if (code == WIDEN_LSHIFT_EXPR)
@@ -2733,15 +2717,15 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
else
vec_oprnd1 = vect_get_vec_def_for_operand (op1, stmt,
NULL);
- VEC_quick_push (tree, vec_oprnds1, vec_oprnd1);
+ vec_oprnds1.quick_push (vec_oprnd1);
}
}
}
else
{
vec_oprnd0 = vect_get_vec_def_for_stmt_copy (dt[0], vec_oprnd0);
- VEC_truncate (tree, vec_oprnds0, 0);
- VEC_quick_push (tree, vec_oprnds0, vec_oprnd0);
+ vec_oprnds0.truncate (0);
+ vec_oprnds0.quick_push (vec_oprnd0);
if (op_type == binary_op)
{
if (code == WIDEN_LSHIFT_EXPR)
@@ -2749,15 +2733,15 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
else
vec_oprnd1 = vect_get_vec_def_for_stmt_copy (dt[1],
vec_oprnd1);
- VEC_truncate (tree, vec_oprnds1, 0);
- VEC_quick_push (tree, vec_oprnds1, vec_oprnd1);
+ vec_oprnds1.truncate (0);
+ vec_oprnds1.quick_push (vec_oprnd1);
}
}
/* Arguments are ready. Create the new vector stmts. */
for (i = multi_step_cvt; i >= 0; i--)
{
- tree this_dest = VEC_index (tree, vec_dsts, i);
+ tree this_dest = vec_dsts[i];
enum tree_code c1 = code1, c2 = code2;
if (i == 0 && codecvt2 != ERROR_MARK)
{
@@ -2771,7 +2755,7 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
op_type);
}
- FOR_EACH_VEC_ELT (tree, vec_oprnds0, i, vop0)
+ FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)
{
if (cvt_type)
{
@@ -2796,8 +2780,7 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
new_stmt = SSA_NAME_DEF_STMT (vop0);
if (slp_node)
- VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node),
- new_stmt);
+ SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
else
{
if (!prev_stmt_info)
@@ -2825,14 +2808,14 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
slp_node, -1);
else
{
- VEC_truncate (tree, vec_oprnds0, 0);
+ vec_oprnds0.truncate (0);
vect_get_loop_based_defs (&last_oprnd, stmt, dt[0], &vec_oprnds0,
vect_pow2 (multi_step_cvt) - 1);
}
/* Arguments are ready. Create the new vector stmts. */
if (cvt_type)
- FOR_EACH_VEC_ELT (tree, vec_oprnds0, i, vop0)
+ FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)
{
if (codecvt1 == CALL_EXPR)
{
@@ -2849,7 +2832,7 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
}
vect_finish_stmt_generation (stmt, new_stmt, gsi);
- VEC_replace (tree, vec_oprnds0, i, new_temp);
+ vec_oprnds0[i] = new_temp;
}
vect_create_vectorized_demotion_stmts (&vec_oprnds0, multi_step_cvt,
@@ -2862,10 +2845,10 @@ vectorizable_conversion (gimple stmt, gimple_stmt_iterator *gsi,
break;
}
- VEC_free (tree, heap, vec_oprnds0);
- VEC_free (tree, heap, vec_oprnds1);
- VEC_free (tree, heap, vec_dsts);
- VEC_free (tree, heap, interm_types);
+ vec_oprnds0.release ();
+ vec_oprnds1.release ();
+ vec_dsts.release ();
+ interm_types.release ();
return true;
}
@@ -2895,7 +2878,7 @@ vectorizable_assignment (gimple stmt, gimple_stmt_iterator *gsi,
unsigned int nunits = TYPE_VECTOR_SUBPARTS (vectype);
int ncopies;
int i, j;
- VEC(tree,heap) *vec_oprnds = NULL;
+ vec<tree> vec_oprnds = vec<tree>();
tree vop;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
gimple new_stmt = NULL;
@@ -3004,7 +2987,7 @@ vectorizable_assignment (gimple stmt, gimple_stmt_iterator *gsi,
vect_get_vec_defs_for_stmt_copy (dt, &vec_oprnds, NULL);
/* Arguments are ready. create the new vector stmt. */
- FOR_EACH_VEC_ELT (tree, vec_oprnds, i, vop)
+ FOR_EACH_VEC_ELT (vec_oprnds, i, vop)
{
if (CONVERT_EXPR_CODE_P (code)
|| code == VIEW_CONVERT_EXPR)
@@ -3014,7 +2997,7 @@ vectorizable_assignment (gimple stmt, gimple_stmt_iterator *gsi,
gimple_assign_set_lhs (new_stmt, new_temp);
vect_finish_stmt_generation (stmt, new_stmt, gsi);
if (slp_node)
- VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node), new_stmt);
+ SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
}
if (slp_node)
@@ -3028,7 +3011,7 @@ vectorizable_assignment (gimple stmt, gimple_stmt_iterator *gsi,
prev_stmt_info = vinfo_for_stmt (new_stmt);
}
- VEC_free (tree, heap, vec_oprnds);
+ vec_oprnds.release ();
return true;
}
@@ -3104,7 +3087,8 @@ vectorizable_shift (gimple stmt, gimple_stmt_iterator *gsi,
tree op1_vectype;
int ncopies;
int j, i;
- VEC (tree, heap) *vec_oprnds0 = NULL, *vec_oprnds1 = NULL;
+ vec<tree> vec_oprnds0 = vec<tree>();
+ vec<tree> vec_oprnds1 = vec<tree>();
tree vop0, vop1;
unsigned int k;
bool scalar_shift_arg = true;
@@ -3208,10 +3192,10 @@ vectorizable_shift (gimple stmt, gimple_stmt_iterator *gsi,
a scalar shift. */
if (slp_node)
{
- VEC (gimple, heap) *stmts = SLP_TREE_SCALAR_STMTS (slp_node);
+ vec<gimple> stmts = SLP_TREE_SCALAR_STMTS (slp_node);
gimple slpstmt;
- FOR_EACH_VEC_ELT (gimple, stmts, k, slpstmt)
+ FOR_EACH_VEC_ELT (stmts, k, slpstmt)
if (!operand_equal_p (gimple_assign_rhs2 (slpstmt), op1, 0))
scalar_shift_arg = false;
}
@@ -3361,11 +3345,11 @@ vectorizable_shift (gimple stmt, gimple_stmt_iterator *gsi,
allocate VEC_OPRNDS1 only in case of binary operation. */
if (!slp_node)
{
- vec_oprnds0 = VEC_alloc (tree, heap, 1);
- vec_oprnds1 = VEC_alloc (tree, heap, 1);
+ vec_oprnds0.create (1);
+ vec_oprnds1.create (1);
}
else if (scalar_shift_arg)
- vec_oprnds1 = VEC_alloc (tree, heap, slp_node->vec_stmts_size);
+ vec_oprnds1.create (slp_node->vec_stmts_size);
prev_stmt_info = NULL;
for (j = 0; j < ncopies; j++)
@@ -3386,7 +3370,7 @@ vectorizable_shift (gimple stmt, gimple_stmt_iterator *gsi,
dump_printf_loc (MSG_NOTE, vect_location,
"operand 1 using scalar mode.");
vec_oprnd1 = op1;
- VEC_quick_push (tree, vec_oprnds1, vec_oprnd1);
+ vec_oprnds1.quick_push (vec_oprnd1);
if (slp_node)
{
/* Store vec_oprnd1 for every vector stmt to be created
@@ -3395,7 +3379,7 @@ vectorizable_shift (gimple stmt, gimple_stmt_iterator *gsi,
TODO: Allow different constants for different vector
stmts generated for an SLP instance. */
for (k = 0; k < slp_node->vec_stmts_size - 1; k++)
- VEC_quick_push (tree, vec_oprnds1, vec_oprnd1);
+ vec_oprnds1.quick_push (vec_oprnd1);
}
}
}
@@ -3414,15 +3398,15 @@ vectorizable_shift (gimple stmt, gimple_stmt_iterator *gsi,
vect_get_vec_defs_for_stmt_copy (dt, &vec_oprnds0, &vec_oprnds1);
/* Arguments are ready. Create the new vector stmt. */
- FOR_EACH_VEC_ELT (tree, vec_oprnds0, i, vop0)
+ FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)
{
- vop1 = VEC_index (tree, vec_oprnds1, i);
+ vop1 = vec_oprnds1[i];
new_stmt = gimple_build_assign_with_ops (code, vec_dest, vop0, vop1);
new_temp = make_ssa_name (vec_dest, new_stmt);
gimple_assign_set_lhs (new_stmt, new_temp);
vect_finish_stmt_generation (stmt, new_stmt, gsi);
if (slp_node)
- VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node), new_stmt);
+ SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
}
if (slp_node)
@@ -3435,8 +3419,8 @@ vectorizable_shift (gimple stmt, gimple_stmt_iterator *gsi,
prev_stmt_info = vinfo_for_stmt (new_stmt);
}
- VEC_free (tree, heap, vec_oprnds0);
- VEC_free (tree, heap, vec_oprnds1);
+ vec_oprnds0.release ();
+ vec_oprnds1.release ();
return true;
}
@@ -3481,7 +3465,9 @@ vectorizable_operation (gimple stmt, gimple_stmt_iterator *gsi,
tree vectype_out;
int ncopies;
int j, i;
- VEC(tree,heap) *vec_oprnds0 = NULL, *vec_oprnds1 = NULL, *vec_oprnds2 = NULL;
+ vec<tree> vec_oprnds0 = vec<tree>();
+ vec<tree> vec_oprnds1 = vec<tree>();
+ vec<tree> vec_oprnds2 = vec<tree>();
tree vop0, vop1, vop2;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
int vf;
@@ -3746,9 +3732,10 @@ vectorizable_operation (gimple stmt, gimple_stmt_iterator *gsi,
slp_node, -1);
if (op_type == ternary_op)
{
- vec_oprnds2 = VEC_alloc (tree, heap, 1);
- VEC_quick_push (tree, vec_oprnds2,
- vect_get_vec_def_for_operand (op2, stmt, NULL));
+ vec_oprnds2.create (1);
+ vec_oprnds2.quick_push (vect_get_vec_def_for_operand (op2,
+ stmt,
+ NULL));
}
}
else
@@ -3756,27 +3743,26 @@ vectorizable_operation (gimple stmt, gimple_stmt_iterator *gsi,
vect_get_vec_defs_for_stmt_copy (dt, &vec_oprnds0, &vec_oprnds1);
if (op_type == ternary_op)
{
- tree vec_oprnd = VEC_pop (tree, vec_oprnds2);
- VEC_quick_push (tree, vec_oprnds2,
- vect_get_vec_def_for_stmt_copy (dt[2],
- vec_oprnd));
+ tree vec_oprnd = vec_oprnds2.pop ();
+ vec_oprnds2.quick_push (vect_get_vec_def_for_stmt_copy (dt[2],
+ vec_oprnd));
}
}
/* Arguments are ready. Create the new vector stmt. */
- FOR_EACH_VEC_ELT (tree, vec_oprnds0, i, vop0)
+ FOR_EACH_VEC_ELT (vec_oprnds0, i, vop0)
{
vop1 = ((op_type == binary_op || op_type == ternary_op)
- ? VEC_index (tree, vec_oprnds1, i) : NULL_TREE);
+ ? vec_oprnds1[i] : NULL_TREE);
vop2 = ((op_type == ternary_op)
- ? VEC_index (tree, vec_oprnds2, i) : NULL_TREE);
+ ? vec_oprnds2[i] : NULL_TREE);
new_stmt = gimple_build_assign_with_ops (code, vec_dest,
vop0, vop1, vop2);
new_temp = make_ssa_name (vec_dest, new_stmt);
gimple_assign_set_lhs (new_stmt, new_temp);
vect_finish_stmt_generation (stmt, new_stmt, gsi);
if (slp_node)
- VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node), new_stmt);
+ SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
}
if (slp_node)
@@ -3789,11 +3775,9 @@ vectorizable_operation (gimple stmt, gimple_stmt_iterator *gsi,
prev_stmt_info = vinfo_for_stmt (new_stmt);
}
- VEC_free (tree, heap, vec_oprnds0);
- if (vec_oprnds1)
- VEC_free (tree, heap, vec_oprnds1);
- if (vec_oprnds2)
- VEC_free (tree, heap, vec_oprnds2);
+ vec_oprnds0.release ();
+ vec_oprnds1.release ();
+ vec_oprnds2.release ();
return true;
}
@@ -3836,9 +3820,11 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
bool grouped_store = false;
bool store_lanes_p = false;
unsigned int group_size, i;
- VEC(tree,heap) *dr_chain = NULL, *oprnds = NULL, *result_chain = NULL;
+ vec<tree> dr_chain = vec<tree>();
+ vec<tree> oprnds = vec<tree>();
+ vec<tree> result_chain = vec<tree>();
bool inv_p;
- VEC(tree,heap) *vec_oprnds = NULL;
+ vec<tree> vec_oprnds = vec<tree>();
bool slp = (slp_node != NULL);
unsigned int vec_num;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
@@ -3992,7 +3978,7 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
/* VEC_NUM is the number of vect stmts to be created for this
group. */
vec_num = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node);
- first_stmt = VEC_index (gimple, SLP_TREE_SCALAR_STMTS (slp_node), 0);
+ first_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[0];
first_dr = STMT_VINFO_DATA_REF (vinfo_for_stmt (first_stmt));
op = gimple_assign_rhs1 (first_stmt);
}
@@ -4012,8 +3998,8 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
dump_printf_loc (MSG_NOTE, vect_location,
"transform store. ncopies = %d", ncopies);
- dr_chain = VEC_alloc (tree, heap, group_size);
- oprnds = VEC_alloc (tree, heap, group_size);
+ dr_chain.create (group_size);
+ oprnds.create (group_size);
alignment_support_scheme = vect_supportable_dr_alignment (first_dr, false);
gcc_assert (alignment_support_scheme);
@@ -4081,7 +4067,7 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
vect_get_vec_defs (op, NULL_TREE, stmt, &vec_oprnds,
NULL, slp_node, -1);
- vec_oprnd = VEC_index (tree, vec_oprnds, 0);
+ vec_oprnd = vec_oprnds[0];
}
else
{
@@ -4106,8 +4092,8 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
vec_oprnd = vect_get_vec_def_for_operand (op, next_stmt,
NULL);
- VEC_quick_push(tree, dr_chain, vec_oprnd);
- VEC_quick_push(tree, oprnds, vec_oprnd);
+ dr_chain.quick_push (vec_oprnd);
+ oprnds.quick_push (vec_oprnd);
next_stmt = GROUP_NEXT_ELEMENT (vinfo_for_stmt (next_stmt));
}
}
@@ -4131,12 +4117,12 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
OPRNDS are of size 1. */
for (i = 0; i < group_size; i++)
{
- op = VEC_index (tree, oprnds, i);
+ op = oprnds[i];
vect_is_simple_use (op, NULL, loop_vinfo, bb_vinfo, &def_stmt,
&def, &dt);
vec_oprnd = vect_get_vec_def_for_stmt_copy (dt, op);
- VEC_replace(tree, dr_chain, i, vec_oprnd);
- VEC_replace(tree, oprnds, i, vec_oprnd);
+ dr_chain[i] = vec_oprnd;
+ oprnds[i] = vec_oprnd;
}
dataref_ptr = bump_vector_ptr (dataref_ptr, ptr_incr, gsi, stmt,
TYPE_SIZE_UNIT (aggr_type));
@@ -4150,7 +4136,7 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
vec_array = create_vector_array (vectype, vec_num);
for (i = 0; i < vec_num; i++)
{
- vec_oprnd = VEC_index (tree, dr_chain, i);
+ vec_oprnd = dr_chain[i];
write_vector_array (stmt, gsi, vec_oprnd, vec_array, i);
}
@@ -4166,7 +4152,7 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
new_stmt = NULL;
if (grouped_store)
{
- result_chain = VEC_alloc (tree, heap, group_size);
+ result_chain.create (group_size);
/* Permute. */
vect_permute_store_chain (dr_chain, group_size, stmt, gsi,
&result_chain);
@@ -4183,11 +4169,11 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
stmt, NULL_TREE);
if (slp)
- vec_oprnd = VEC_index (tree, vec_oprnds, i);
+ vec_oprnd = vec_oprnds[i];
else if (grouped_store)
/* For grouped stores vectorized defs are interleaved in
vect_permute_store_chain(). */
- vec_oprnd = VEC_index (tree, result_chain, i);
+ vec_oprnd = result_chain[i];
data_ref = build2 (MEM_REF, TREE_TYPE (vec_oprnd), dataref_ptr,
build_int_cst (reference_alias_ptr_type
@@ -4235,12 +4221,10 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
}
}
- VEC_free (tree, heap, dr_chain);
- VEC_free (tree, heap, oprnds);
- if (result_chain)
- VEC_free (tree, heap, result_chain);
- if (vec_oprnds)
- VEC_free (tree, heap, vec_oprnds);
+ dr_chain.release ();
+ oprnds.release ();
+ result_chain.release ();
+ vec_oprnds.release ();
return true;
}
@@ -4353,7 +4337,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
tree offset = NULL_TREE;
tree realignment_token = NULL_TREE;
gimple phi = NULL;
- VEC(tree,heap) *dr_chain = NULL;
+ vec<tree> dr_chain = vec<tree>();
bool grouped_load = false;
bool load_lanes_p = false;
gimple first_stmt;
@@ -4694,7 +4678,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
tree ref = DR_REF (dr);
tree ivstep;
tree running_off;
- VEC(constructor_elt, gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
gimple_seq stmts = NULL;
gcc_assert (stride_base && stride_step);
@@ -4737,7 +4721,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
{
tree vec_inv;
- v = VEC_alloc (constructor_elt, gc, nunits);
+ vec_alloc (v, nunits);
for (i = 0; i < nunits; i++)
{
tree newref, newoff;
@@ -4791,9 +4775,9 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
{
first_stmt = GROUP_FIRST_ELEMENT (stmt_info);
if (slp
- && !SLP_INSTANCE_LOAD_PERMUTATION (slp_node_instance)
- && first_stmt != VEC_index (gimple, SLP_TREE_SCALAR_STMTS (slp_node), 0))
- first_stmt = VEC_index (gimple, SLP_TREE_SCALAR_STMTS (slp_node), 0);
+ && !SLP_INSTANCE_LOAD_PERMUTATION (slp_node_instance).exists ()
+ && first_stmt != SLP_TREE_SCALAR_STMTS (slp_node)[0])
+ first_stmt = SLP_TREE_SCALAR_STMTS (slp_node)[0];
/* Check if the chain of loads is already vectorized. */
if (STMT_VINFO_VEC_STMT (vinfo_for_stmt (first_stmt)))
@@ -4809,7 +4793,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
{
grouped_load = false;
vec_num = SLP_TREE_NUMBER_OF_VEC_STMTS (slp_node);
- if (SLP_INSTANCE_LOAD_PERMUTATION (slp_node_instance))
+ if (SLP_INSTANCE_LOAD_PERMUTATION (slp_node_instance).exists ())
slp_perm = true;
}
else
@@ -4978,7 +4962,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
TYPE_SIZE_UNIT (aggr_type));
if (grouped_load || slp_perm)
- dr_chain = VEC_alloc (tree, heap, vec_num);
+ dr_chain.create (vec_num);
if (load_lanes_p)
{
@@ -4998,7 +4982,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
{
new_temp = read_vector_array (stmt, gsi, scalar_dest,
vec_array, i);
- VEC_quick_push (tree, dr_chain, new_temp);
+ dr_chain.quick_push (new_temp);
}
/* Record the mapping between SSA_NAMEs and statements. */
@@ -5173,12 +5157,11 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
/* Collect vector loads and later create their permutation in
vect_transform_grouped_load (). */
if (grouped_load || slp_perm)
- VEC_quick_push (tree, dr_chain, new_temp);
+ dr_chain.quick_push (new_temp);
/* Store vector loads in the corresponding SLP_NODE. */
if (slp && !slp_perm)
- VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node),
- new_stmt);
+ SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
}
}
@@ -5190,7 +5173,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
if (!vect_transform_slp_perm_load (stmt, dr_chain, gsi, vf,
slp_node_instance, false))
{
- VEC_free (tree, heap, dr_chain);
+ dr_chain.release ();
return false;
}
}
@@ -5211,8 +5194,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
prev_stmt_info = vinfo_for_stmt (new_stmt);
}
}
- if (dr_chain)
- VEC_free (tree, heap, dr_chain);
+ dr_chain.release ();
}
return true;
@@ -5308,8 +5290,10 @@ vectorizable_condition (gimple stmt, gimple_stmt_iterator *gsi,
stmt_vec_info prev_stmt_info = NULL;
int i, j;
bb_vec_info bb_vinfo = STMT_VINFO_BB_VINFO (stmt_info);
- VEC (tree, heap) *vec_oprnds0 = NULL, *vec_oprnds1 = NULL;
- VEC (tree, heap) *vec_oprnds2 = NULL, *vec_oprnds3 = NULL;
+ vec<tree> vec_oprnds0 = vec<tree>();
+ vec<tree> vec_oprnds1 = vec<tree>();
+ vec<tree> vec_oprnds2 = vec<tree>();
+ vec<tree> vec_oprnds3 = vec<tree>();
tree vec_cmp_type = vectype;
if (slp_node || PURE_SLP_STMT (stmt_info))
@@ -5402,10 +5386,10 @@ vectorizable_condition (gimple stmt, gimple_stmt_iterator *gsi,
if (!slp_node)
{
- vec_oprnds0 = VEC_alloc (tree, heap, 1);
- vec_oprnds1 = VEC_alloc (tree, heap, 1);
- vec_oprnds2 = VEC_alloc (tree, heap, 1);
- vec_oprnds3 = VEC_alloc (tree, heap, 1);
+ vec_oprnds0.create (1);
+ vec_oprnds1.create (1);
+ vec_oprnds2.create (1);
+ vec_oprnds3.create (1);
}
/* Handle def. */
@@ -5420,22 +5404,23 @@ vectorizable_condition (gimple stmt, gimple_stmt_iterator *gsi,
{
if (slp_node)
{
- VEC (tree, heap) *ops = VEC_alloc (tree, heap, 4);
- VEC (slp_void_p, heap) *vec_defs;
-
- vec_defs = VEC_alloc (slp_void_p, heap, 4);
- VEC_safe_push (tree, heap, ops, TREE_OPERAND (cond_expr, 0));
- VEC_safe_push (tree, heap, ops, TREE_OPERAND (cond_expr, 1));
- VEC_safe_push (tree, heap, ops, then_clause);
- VEC_safe_push (tree, heap, ops, else_clause);
+ vec<tree> ops;
+ ops.create (4);
+ vec<slp_void_p> vec_defs;
+
+ vec_defs.create (4);
+ ops.safe_push (TREE_OPERAND (cond_expr, 0));
+ ops.safe_push (TREE_OPERAND (cond_expr, 1));
+ ops.safe_push (then_clause);
+ ops.safe_push (else_clause);
vect_get_slp_defs (ops, slp_node, &vec_defs, -1);
- vec_oprnds3 = (VEC (tree, heap) *) VEC_pop (slp_void_p, vec_defs);
- vec_oprnds2 = (VEC (tree, heap) *) VEC_pop (slp_void_p, vec_defs);
- vec_oprnds1 = (VEC (tree, heap) *) VEC_pop (slp_void_p, vec_defs);
- vec_oprnds0 = (VEC (tree, heap) *) VEC_pop (slp_void_p, vec_defs);
+ vec_oprnds3 = *((vec<tree> *) vec_defs.pop ());
+ vec_oprnds2 = *((vec<tree> *) vec_defs.pop ());
+ vec_oprnds1 = *((vec<tree> *) vec_defs.pop ());
+ vec_oprnds0 = *((vec<tree> *) vec_defs.pop ());
- VEC_free (tree, heap, ops);
- VEC_free (slp_void_p, heap, vec_defs);
+ ops.release ();
+ vec_defs.release ();
}
else
{
@@ -5474,29 +5459,29 @@ vectorizable_condition (gimple stmt, gimple_stmt_iterator *gsi,
else
{
vec_cond_lhs = vect_get_vec_def_for_stmt_copy (dts[0],
- VEC_pop (tree, vec_oprnds0));
+ vec_oprnds0.pop ());
vec_cond_rhs = vect_get_vec_def_for_stmt_copy (dts[1],
- VEC_pop (tree, vec_oprnds1));
+ vec_oprnds1.pop ());
vec_then_clause = vect_get_vec_def_for_stmt_copy (dts[2],
- VEC_pop (tree, vec_oprnds2));
+ vec_oprnds2.pop ());
vec_else_clause = vect_get_vec_def_for_stmt_copy (dts[3],
- VEC_pop (tree, vec_oprnds3));
+ vec_oprnds3.pop ());
}
if (!slp_node)
{
- VEC_quick_push (tree, vec_oprnds0, vec_cond_lhs);
- VEC_quick_push (tree, vec_oprnds1, vec_cond_rhs);
- VEC_quick_push (tree, vec_oprnds2, vec_then_clause);
- VEC_quick_push (tree, vec_oprnds3, vec_else_clause);
+ vec_oprnds0.quick_push (vec_cond_lhs);
+ vec_oprnds1.quick_push (vec_cond_rhs);
+ vec_oprnds2.quick_push (vec_then_clause);
+ vec_oprnds3.quick_push (vec_else_clause);
}
/* Arguments are ready. Create the new vector stmt. */
- FOR_EACH_VEC_ELT (tree, vec_oprnds0, i, vec_cond_lhs)
+ FOR_EACH_VEC_ELT (vec_oprnds0, i, vec_cond_lhs)
{
- vec_cond_rhs = VEC_index (tree, vec_oprnds1, i);
- vec_then_clause = VEC_index (tree, vec_oprnds2, i);
- vec_else_clause = VEC_index (tree, vec_oprnds3, i);
+ vec_cond_rhs = vec_oprnds1[i];
+ vec_then_clause = vec_oprnds2[i];
+ vec_else_clause = vec_oprnds3[i];
vec_compare = build2 (TREE_CODE (cond_expr), vec_cmp_type,
vec_cond_lhs, vec_cond_rhs);
@@ -5508,7 +5493,7 @@ vectorizable_condition (gimple stmt, gimple_stmt_iterator *gsi,
gimple_assign_set_lhs (new_stmt, new_temp);
vect_finish_stmt_generation (stmt, new_stmt, gsi);
if (slp_node)
- VEC_quick_push (gimple, SLP_TREE_VEC_STMTS (slp_node), new_stmt);
+ SLP_TREE_VEC_STMTS (slp_node).quick_push (new_stmt);
}
if (slp_node)
@@ -5522,10 +5507,10 @@ vectorizable_condition (gimple stmt, gimple_stmt_iterator *gsi,
prev_stmt_info = vinfo_for_stmt (new_stmt);
}
- VEC_free (tree, heap, vec_oprnds0);
- VEC_free (tree, heap, vec_oprnds1);
- VEC_free (tree, heap, vec_oprnds2);
- VEC_free (tree, heap, vec_oprnds3);
+ vec_oprnds0.release ();
+ vec_oprnds1.release ();
+ vec_oprnds2.release ();
+ vec_oprnds3.release ();
return true;
}
@@ -5984,7 +5969,7 @@ new_stmt_vec_info (gimple stmt, loop_vec_info loop_vinfo,
else
STMT_VINFO_DEF_TYPE (res) = vect_internal_def;
- STMT_VINFO_SAME_ALIGN_REFS (res) = NULL;
+ STMT_VINFO_SAME_ALIGN_REFS (res).create (0);
STMT_SLP_TYPE (res) = loop_vect;
GROUP_FIRST_ELEMENT (res) = NULL;
GROUP_NEXT_ELEMENT (res) = NULL;
@@ -6003,8 +5988,8 @@ new_stmt_vec_info (gimple stmt, loop_vec_info loop_vinfo,
void
init_stmt_vec_info_vec (void)
{
- gcc_assert (!stmt_vec_info_vec);
- stmt_vec_info_vec = VEC_alloc (vec_void_p, heap, 50);
+ gcc_assert (!stmt_vec_info_vec.exists ());
+ stmt_vec_info_vec.create (50);
}
@@ -6013,8 +5998,8 @@ init_stmt_vec_info_vec (void)
void
free_stmt_vec_info_vec (void)
{
- gcc_assert (stmt_vec_info_vec);
- VEC_free (vec_void_p, heap, stmt_vec_info_vec);
+ gcc_assert (stmt_vec_info_vec.exists ());
+ stmt_vec_info_vec.release ();
}
@@ -6049,7 +6034,7 @@ free_stmt_vec_info (gimple stmt)
}
}
- VEC_free (dr_p, heap, STMT_VINFO_SAME_ALIGN_REFS (stmt_info));
+ STMT_VINFO_SAME_ALIGN_REFS (stmt_info).release ();
set_vinfo_for_stmt (stmt, NULL);
free (stmt_info);
}
@@ -6392,7 +6377,7 @@ supportable_widening_operation (enum tree_code code, gimple stmt,
tree vectype_out, tree vectype_in,
enum tree_code *code1, enum tree_code *code2,
int *multi_step_cvt,
- VEC (tree, heap) **interm_types)
+ vec<tree> *interm_types)
{
stmt_vec_info stmt_info = vinfo_for_stmt (stmt);
loop_vec_info loop_info = STMT_VINFO_LOOP_VINFO (stmt_info);
@@ -6538,7 +6523,7 @@ supportable_widening_operation (enum tree_code code, gimple stmt,
intermediate steps in promotion sequence. We try
MAX_INTERM_CVT_STEPS to get to NARROW_VECTYPE, and fail if we do
not. */
- *interm_types = VEC_alloc (tree, heap, MAX_INTERM_CVT_STEPS);
+ interm_types->create (MAX_INTERM_CVT_STEPS);
for (i = 0; i < MAX_INTERM_CVT_STEPS; i++)
{
intermediate_mode = insn_data[icode1].operand[0].mode;
@@ -6559,7 +6544,7 @@ supportable_widening_operation (enum tree_code code, gimple stmt,
== CODE_FOR_nothing))
break;
- VEC_quick_push (tree, *interm_types, intermediate_type);
+ interm_types->quick_push (intermediate_type);
(*multi_step_cvt)++;
if (insn_data[icode1].operand[0].mode == TYPE_MODE (wide_vectype)
@@ -6570,7 +6555,7 @@ supportable_widening_operation (enum tree_code code, gimple stmt,
prev_mode = intermediate_mode;
}
- VEC_free (tree, heap, *interm_types);
+ interm_types->release ();
return false;
}
@@ -6599,7 +6584,7 @@ bool
supportable_narrowing_operation (enum tree_code code,
tree vectype_out, tree vectype_in,
enum tree_code *code1, int *multi_step_cvt,
- VEC (tree, heap) **interm_types)
+ vec<tree> *interm_types)
{
enum machine_mode vec_mode;
enum insn_code icode1;
@@ -6683,7 +6668,7 @@ supportable_narrowing_operation (enum tree_code code,
/* We assume here that there will not be more than MAX_INTERM_CVT_STEPS
intermediate steps in promotion sequence. We try
MAX_INTERM_CVT_STEPS to get to NARROW_VECTYPE, and fail if we do not. */
- *interm_types = VEC_alloc (tree, heap, MAX_INTERM_CVT_STEPS);
+ interm_types->create (MAX_INTERM_CVT_STEPS);
for (i = 0; i < MAX_INTERM_CVT_STEPS; i++)
{
intermediate_mode = insn_data[icode1].operand[0].mode;
@@ -6699,7 +6684,7 @@ supportable_narrowing_operation (enum tree_code code,
== CODE_FOR_nothing))
break;
- VEC_quick_push (tree, *interm_types, intermediate_type);
+ interm_types->quick_push (intermediate_type);
(*multi_step_cvt)++;
if (insn_data[icode1].operand[0].mode == TYPE_MODE (narrow_vectype))
@@ -6709,6 +6694,6 @@ supportable_narrowing_operation (enum tree_code code,
optab1 = interm_optab;
}
- VEC_free (tree, heap, *interm_types);
+ interm_types->release ();
return false;
}
diff --git a/gcc/tree-vectorizer.c b/gcc/tree-vectorizer.c
index b9bc00144f5..ec99663ec7f 100644
--- a/gcc/tree-vectorizer.c
+++ b/gcc/tree-vectorizer.c
@@ -72,7 +72,7 @@ along with GCC; see the file COPYING3. If not see
LOC vect_location;
/* Vector mapping GIMPLE stmt to stmt_vec_info. */
-VEC(vec_void_p,heap) *stmt_vec_info_vec;
+vec<vec_void_p> stmt_vec_info_vec;
/* Function vectorize_loops.
diff --git a/gcc/tree-vectorizer.h b/gcc/tree-vectorizer.h
index 5762e00b69e..e533d2c6086 100644
--- a/gcc/tree-vectorizer.h
+++ b/gcc/tree-vectorizer.h
@@ -80,10 +80,8 @@ typedef struct _stmt_info_for_cost {
int misalign;
} stmt_info_for_cost;
-DEF_VEC_O (stmt_info_for_cost);
-DEF_VEC_ALLOC_O (stmt_info_for_cost, heap);
-typedef VEC(stmt_info_for_cost, heap) *stmt_vector_for_cost;
+typedef vec<stmt_info_for_cost> stmt_vector_for_cost;
static inline void
add_stmt_info_to_vec (stmt_vector_for_cost *stmt_cost_vec, int count,
@@ -94,25 +92,23 @@ add_stmt_info_to_vec (stmt_vector_for_cost *stmt_cost_vec, int count,
si.kind = kind;
si.stmt = stmt;
si.misalign = misalign;
- VEC_safe_push (stmt_info_for_cost, heap, *stmt_cost_vec, si);
+ stmt_cost_vec->safe_push (si);
}
/************************************************************************
SLP
************************************************************************/
typedef void *slp_void_p;
-DEF_VEC_P (slp_void_p);
-DEF_VEC_ALLOC_P (slp_void_p, heap);
/* A computation tree of an SLP instance. Each node corresponds to a group of
stmts to be packed in a SIMD stmt. */
typedef struct _slp_tree {
/* Nodes that contain def-stmts of this node statements operands. */
- VEC (slp_void_p, heap) *children;
+ vec<slp_void_p> children;
/* A group of scalar stmts to be vectorized together. */
- VEC (gimple, heap) *stmts;
+ vec<gimple> stmts;
/* Vectorized stmt/s. */
- VEC (gimple, heap) *vec_stmts;
+ vec<gimple> vec_stmts;
/* Number of vector stmts that are created to replace the group of scalar
stmts. It is calculated during the transformation phase as the number of
scalar elements in one scalar iteration (GROUP_SIZE) multiplied by VF
@@ -120,8 +116,6 @@ typedef struct _slp_tree {
unsigned int vec_stmts_size;
} *slp_tree;
-DEF_VEC_P(slp_tree);
-DEF_VEC_ALLOC_P(slp_tree, heap);
/* SLP instance is a sequence of stmts in a loop that can be packed into
SIMD stmts. */
@@ -140,18 +134,16 @@ typedef struct _slp_instance {
/* Loads permutation relatively to the stores, NULL if there is no
permutation. */
- VEC (int, heap) *load_permutation;
+ vec<int> load_permutation;
/* The group of nodes that contain loads of this SLP instance. */
- VEC (slp_tree, heap) *loads;
+ vec<slp_tree> loads;
/* The first scalar load of the instance. The created vector loads will be
inserted before this statement. */
gimple first_load;
} *slp_instance;
-DEF_VEC_P(slp_instance);
-DEF_VEC_ALLOC_P(slp_instance, heap);
/* Access Functions. */
#define SLP_INSTANCE_TREE(S) (S)->root
@@ -173,7 +165,7 @@ DEF_VEC_ALLOC_P(slp_instance, heap);
typedef struct _slp_oprnd_info
{
/* Def-stmts for the operands. */
- VEC (gimple, heap) *def_stmts;
+ vec<gimple> def_stmts;
/* Information about the first statement, its vector def-type, type, the
operand itself in case it's constant, and an indication if it's a pattern
stmt. */
@@ -183,8 +175,6 @@ typedef struct _slp_oprnd_info
bool first_pattern;
} *slp_oprnd_info;
-DEF_VEC_P(slp_oprnd_info);
-DEF_VEC_ALLOC_P(slp_oprnd_info, heap);
typedef struct _vect_peel_info
@@ -250,40 +240,40 @@ typedef struct _loop_vec_info {
int ptr_mask;
/* The loop nest in which the data dependences are computed. */
- VEC (loop_p, heap) *loop_nest;
+ vec<loop_p> loop_nest;
/* All data references in the loop. */
- VEC (data_reference_p, heap) *datarefs;
+ vec<data_reference_p> datarefs;
/* All data dependences in the loop. */
- VEC (ddr_p, heap) *ddrs;
+ vec<ddr_p> ddrs;
/* Data Dependence Relations defining address ranges that are candidates
for a run-time aliasing check. */
- VEC (ddr_p, heap) *may_alias_ddrs;
+ vec<ddr_p> may_alias_ddrs;
/* Statements in the loop that have data references that are candidates for a
runtime (loop versioning) misalignment check. */
- VEC(gimple,heap) *may_misalign_stmts;
+ vec<gimple> may_misalign_stmts;
/* All interleaving chains of stores in the loop, represented by the first
stmt in the chain. */
- VEC(gimple, heap) *grouped_stores;
+ vec<gimple> grouped_stores;
/* All SLP instances in the loop. This is a subset of the set of GROUP_STORES
of the loop. */
- VEC(slp_instance, heap) *slp_instances;
+ vec<slp_instance> slp_instances;
/* The unrolling factor needed to SLP the loop. In case of that pure SLP is
applied to the loop, i.e., no unrolling is needed, this is 1. */
unsigned slp_unrolling_factor;
/* Reduction cycles detected in the loop. Used in loop-aware SLP. */
- VEC (gimple, heap) *reductions;
+ vec<gimple> reductions;
/* All reduction chains in the loop, represented by the first
stmt in the chain. */
- VEC (gimple, heap) *reduction_chains;
+ vec<gimple> reduction_chains;
/* Hash table used to choose the best peeling option. */
htab_t peeling_htab;
@@ -335,9 +325,9 @@ typedef struct _loop_vec_info {
#define LOOP_VINFO_OPERANDS_SWAPPED(L) (L)->operands_swapped
#define LOOP_REQUIRES_VERSIONING_FOR_ALIGNMENT(L) \
-VEC_length (gimple, (L)->may_misalign_stmts) > 0
+(L)->may_misalign_stmts.length () > 0
#define LOOP_REQUIRES_VERSIONING_FOR_ALIAS(L) \
-VEC_length (ddr_p, (L)->may_alias_ddrs) > 0
+(L)->may_alias_ddrs.length () > 0
#define NITERS_KNOWN_P(n) \
(host_integerp ((n),0) \
@@ -364,17 +354,17 @@ typedef struct _bb_vec_info {
basic_block bb;
/* All interleaving chains of stores in the basic block, represented by the
first stmt in the chain. */
- VEC(gimple, heap) *grouped_stores;
+ vec<gimple> grouped_stores;
/* All SLP instances in the basic block. This is a subset of the set of
GROUP_STORES of the basic block. */
- VEC(slp_instance, heap) *slp_instances;
+ vec<slp_instance> slp_instances;
/* All data references in the basic block. */
- VEC (data_reference_p, heap) *datarefs;
+ vec<data_reference_p> datarefs;
/* All data dependences in the basic block. */
- VEC (ddr_p, heap) *ddrs;
+ vec<ddr_p> ddrs;
/* Cost data used by the target cost model. */
void *target_cost_data;
@@ -460,8 +450,6 @@ enum slp_vect_type {
typedef struct data_reference *dr_p;
-DEF_VEC_P(dr_p);
-DEF_VEC_ALLOC_P(dr_p,heap);
typedef struct _stmt_vec_info {
@@ -530,7 +518,7 @@ typedef struct _stmt_vec_info {
/* List of datarefs that are known to have the same alignment as the dataref
of this stmt. */
- VEC(dr_p,heap) *same_align_refs;
+ vec<dr_p> same_align_refs;
/* Classify the def of this stmt. */
enum vect_def_type def_type;
@@ -632,10 +620,8 @@ typedef struct _stmt_vec_info {
/* Avoid GTY(()) on stmt_vec_info. */
typedef void *vec_void_p;
-DEF_VEC_P (vec_void_p);
-DEF_VEC_ALLOC_P (vec_void_p, heap);
-extern VEC(vec_void_p,heap) *stmt_vec_info_vec;
+extern vec<vec_void_p> stmt_vec_info_vec;
void init_stmt_vec_info_vec (void);
void free_stmt_vec_info_vec (void);
@@ -649,7 +635,7 @@ vinfo_for_stmt (gimple stmt)
if (uid == 0)
return NULL;
- return (stmt_vec_info) VEC_index (vec_void_p, stmt_vec_info_vec, uid - 1);
+ return (stmt_vec_info) stmt_vec_info_vec[uid - 1];
}
/* Set vectorizer information INFO for STMT. */
@@ -661,12 +647,12 @@ set_vinfo_for_stmt (gimple stmt, stmt_vec_info info)
if (uid == 0)
{
gcc_checking_assert (info);
- uid = VEC_length (vec_void_p, stmt_vec_info_vec) + 1;
+ uid = stmt_vec_info_vec.length () + 1;
gimple_set_uid (stmt, uid);
- VEC_safe_push (vec_void_p, heap, stmt_vec_info_vec, (vec_void_p) info);
+ stmt_vec_info_vec.safe_push ((vec_void_p) info);
}
else
- VEC_replace (vec_void_p, stmt_vec_info_vec, uid - 1, (vec_void_p) info);
+ stmt_vec_info_vec[uid - 1] = (vec_void_p) info;
}
/* Return the earlier statement between STMT1 and STMT2. */
@@ -688,8 +674,8 @@ get_earlier_stmt (gimple stmt1, gimple stmt2)
if (uid1 == 0 || uid2 == 0)
return NULL;
- gcc_checking_assert (uid1 <= VEC_length (vec_void_p, stmt_vec_info_vec)
- && uid2 <= VEC_length (vec_void_p, stmt_vec_info_vec));
+ gcc_checking_assert (uid1 <= stmt_vec_info_vec.length ()
+ && uid2 <= stmt_vec_info_vec.length ());
if (uid1 < uid2)
return stmt1;
@@ -716,8 +702,8 @@ get_later_stmt (gimple stmt1, gimple stmt2)
if (uid1 == 0 || uid2 == 0)
return NULL;
- gcc_assert (uid1 <= VEC_length (vec_void_p, stmt_vec_info_vec));
- gcc_assert (uid2 <= VEC_length (vec_void_p, stmt_vec_info_vec));
+ gcc_assert (uid1 <= stmt_vec_info_vec.length ());
+ gcc_assert (uid2 <= stmt_vec_info_vec.length ());
if (uid1 > uid2)
return stmt1;
@@ -878,10 +864,10 @@ extern bool vect_is_simple_use_1 (tree, gimple, loop_vec_info,
tree *, enum vect_def_type *, tree *);
extern bool supportable_widening_operation (enum tree_code, gimple, tree, tree,
enum tree_code *, enum tree_code *,
- int *, VEC (tree, heap) **);
+ int *, vec<tree> *);
extern bool supportable_narrowing_operation (enum tree_code, tree, tree,
enum tree_code *,
- int *, VEC (tree, heap) **);
+ int *, vec<tree> *);
extern stmt_vec_info new_stmt_vec_info (gimple stmt, loop_vec_info,
bb_vec_info);
extern void free_stmt_vec_info (gimple stmt);
@@ -919,8 +905,8 @@ extern void vect_get_load_cost (struct data_reference *, int, bool,
extern void vect_get_store_cost (struct data_reference *, int,
unsigned int *, stmt_vector_for_cost *);
extern bool vect_supportable_shift (enum tree_code, tree);
-extern void vect_get_vec_defs (tree, tree, gimple, VEC (tree, heap) **,
- VEC (tree, heap) **, slp_tree, int);
+extern void vect_get_vec_defs (tree, tree, gimple, vec<tree> *,
+ vec<tree> *, slp_tree, int);
extern tree vect_gen_perm_mask (tree, unsigned char *);
/* In tree-vect-data-refs.c. */
@@ -949,14 +935,14 @@ extern bool vect_grouped_store_supported (tree, unsigned HOST_WIDE_INT);
extern bool vect_store_lanes_supported (tree, unsigned HOST_WIDE_INT);
extern bool vect_grouped_load_supported (tree, unsigned HOST_WIDE_INT);
extern bool vect_load_lanes_supported (tree, unsigned HOST_WIDE_INT);
-extern void vect_permute_store_chain (VEC(tree,heap) *,unsigned int, gimple,
- gimple_stmt_iterator *, VEC(tree,heap) **);
+extern void vect_permute_store_chain (vec<tree> ,unsigned int, gimple,
+ gimple_stmt_iterator *, vec<tree> *);
extern tree vect_setup_realignment (gimple, gimple_stmt_iterator *, tree *,
enum dr_alignment_support, tree,
struct loop **);
-extern void vect_transform_grouped_load (gimple, VEC(tree,heap) *, int,
+extern void vect_transform_grouped_load (gimple, vec<tree> , int,
gimple_stmt_iterator *);
-extern void vect_record_grouped_load_vectors (gimple, VEC(tree,heap) *);
+extern void vect_record_grouped_load_vectors (gimple, vec<tree> );
extern int vect_get_place_in_interleaving_chain (gimple, gimple);
extern tree vect_get_new_vect_var (tree, enum vect_var_kind, const char *);
extern tree vect_create_addr_base_for_vector_ref (gimple, gimple_seq *,
@@ -985,7 +971,7 @@ extern int vect_get_single_scalar_iteration_cost (loop_vec_info);
/* In tree-vect-slp.c. */
extern void vect_free_slp_instance (slp_instance);
-extern bool vect_transform_slp_perm_load (gimple, VEC (tree, heap) *,
+extern bool vect_transform_slp_perm_load (gimple, vec<tree> ,
gimple_stmt_iterator *, int,
slp_instance, bool);
extern bool vect_schedule_slp (loop_vec_info, bb_vec_info);
@@ -993,8 +979,8 @@ extern void vect_update_slp_costs_according_to_vf (loop_vec_info);
extern bool vect_analyze_slp (loop_vec_info, bb_vec_info);
extern bool vect_make_slp_decision (loop_vec_info);
extern void vect_detect_hybrid_slp (loop_vec_info);
-extern void vect_get_slp_defs (VEC (tree, heap) *, slp_tree,
- VEC (slp_void_p, heap) **, int);
+extern void vect_get_slp_defs (vec<tree> , slp_tree,
+ vec<slp_void_p> *, int);
extern LOC find_bb_location (basic_block);
extern bb_vec_info vect_slp_analyze_bb (basic_block);
@@ -1004,7 +990,7 @@ extern void vect_slp_transform_bb (basic_block);
/* Pattern recognition functions.
Additional pattern recognition functions can (and will) be added
in the future. */
-typedef gimple (* vect_recog_func_ptr) (VEC (gimple, heap) **, tree *, tree *);
+typedef gimple (* vect_recog_func_ptr) (vec<gimple> *, tree *, tree *);
#define NUM_PATTERNS 10
void vect_pattern_recog (loop_vec_info, bb_vec_info);
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
index 213c15851a4..7f3e082446a 100644
--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c
@@ -155,10 +155,8 @@ typedef struct {
tree vec;
} switch_update;
-static VEC (edge, heap) *to_remove_edges;
-DEF_VEC_O(switch_update);
-DEF_VEC_ALLOC_O(switch_update, heap);
-static VEC (switch_update, heap) *to_update_switch_stmts;
+static vec<edge> to_remove_edges;
+static vec<switch_update> to_update_switch_stmts;
/* Return the maximum value for TYPE. */
@@ -8568,14 +8566,14 @@ simplify_switch_using_ranges (gimple stmt)
{
fprintf (dump_file, "removing unreachable case label\n");
}
- VEC_safe_push (edge, heap, to_remove_edges, e);
+ to_remove_edges.safe_push (e);
e->flags &= ~EDGE_EXECUTABLE;
}
/* And queue an update for the stmt. */
su.stmt = stmt;
su.vec = vec2;
- VEC_safe_push (switch_update, heap, to_update_switch_stmts, su);
+ to_update_switch_stmts.safe_push (su);
return false;
}
@@ -8918,7 +8916,7 @@ vrp_fold_stmt (gimple_stmt_iterator *si)
A NULL entry is used to mark the end of pairs which need to be
restored. */
-static VEC(tree,heap) *equiv_stack;
+static vec<tree> equiv_stack;
/* A trivial wrapper so that we can present the generic jump threading
code with a simple API for simplifying statements. STMT is the
@@ -8981,12 +8979,12 @@ identify_jump_threads (void)
/* Do not thread across edges we are about to remove. Just marking
them as EDGE_DFS_BACK will do. */
- FOR_EACH_VEC_ELT (edge, to_remove_edges, i, e)
+ FOR_EACH_VEC_ELT (to_remove_edges, i, e)
e->flags |= EDGE_DFS_BACK;
/* Allocate our unwinder stack to unwind any temporary equivalences
that might be recorded. */
- equiv_stack = VEC_alloc (tree, heap, 20);
+ equiv_stack.create (20);
/* To avoid lots of silly node creation, we create a single
conditional and just modify it in-place when attempting to
@@ -9061,7 +9059,7 @@ static void
finalize_jump_threads (void)
{
thread_through_all_blocks (false);
- VEC_free (tree, heap, equiv_stack);
+ equiv_stack.release ();
}
@@ -9166,8 +9164,8 @@ execute_vrp (void)
insert_range_assertions ();
- to_remove_edges = VEC_alloc (edge, heap, 10);
- to_update_switch_stmts = VEC_alloc (switch_update, heap, 5);
+ to_remove_edges.create (10);
+ to_update_switch_stmts.create (5);
threadedge_initialize_values ();
vrp_initialize ();
@@ -9192,10 +9190,10 @@ execute_vrp (void)
/* Remove dead edges from SWITCH_EXPR optimization. This leaves the
CFG in a broken state and requires a cfg_cleanup run. */
- FOR_EACH_VEC_ELT (edge, to_remove_edges, i, e)
+ FOR_EACH_VEC_ELT (to_remove_edges, i, e)
remove_edge (e);
/* Update SWITCH_EXPR case label vector. */
- FOR_EACH_VEC_ELT (switch_update, to_update_switch_stmts, i, su)
+ FOR_EACH_VEC_ELT (to_update_switch_stmts, i, su)
{
size_t j;
size_t n = TREE_VEC_LENGTH (su->vec);
@@ -9211,11 +9209,11 @@ execute_vrp (void)
CASE_HIGH (label) = NULL_TREE;
}
- if (VEC_length (edge, to_remove_edges) > 0)
+ if (to_remove_edges.length () > 0)
free_dominance_info (CDI_DOMINATORS);
- VEC_free (edge, heap, to_remove_edges);
- VEC_free (switch_update, heap, to_update_switch_stmts);
+ to_remove_edges.release ();
+ to_update_switch_stmts.release ();
threadedge_finalize_values ();
scev_finalize ();
diff --git a/gcc/tree.c b/gcc/tree.c
index cb8b81c659c..8b2a4f9f735 100644
--- a/gcc/tree.c
+++ b/gcc/tree.c
@@ -715,7 +715,8 @@ tree_size (const_tree node)
{
case TREE_BINFO:
return (offsetof (struct tree_binfo, base_binfos)
- + VEC_embedded_size (tree, BINFO_N_BASE_BINFOS (node)));
+ + vec<tree, va_gc>
+ ::embedded_size (BINFO_N_BASE_BINFOS (node)));
case TREE_VEC:
return (sizeof (struct tree_vec)
@@ -1357,7 +1358,7 @@ build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
are extracted from V, a vector of CONSTRUCTOR_ELT. */
tree
-build_vector_from_ctor (tree type, VEC(constructor_elt,gc) *v)
+build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
{
tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
unsigned HOST_WIDE_INT idx;
@@ -1398,7 +1399,8 @@ build_vector_from_val (tree vectype, tree sc)
}
else
{
- VEC(constructor_elt, gc) *v = VEC_alloc (constructor_elt, gc, nunits);
+ vec<constructor_elt, va_gc> *v;
+ vec_alloc (v, nunits);
for (i = 0; i < nunits; ++i)
CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
return build_constructor (vectype, v);
@@ -1406,9 +1408,9 @@ build_vector_from_val (tree vectype, tree sc)
}
/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
- are in the VEC pointed to by VALS. */
+ are in the vec pointed to by VALS. */
tree
-build_constructor (tree type, VEC(constructor_elt,gc) *vals)
+build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
{
tree c = make_node (CONSTRUCTOR);
unsigned int i;
@@ -1419,7 +1421,7 @@ build_constructor (tree type, VEC(constructor_elt,gc) *vals)
TREE_TYPE (c) = type;
CONSTRUCTOR_ELTS (c) = vals;
- FOR_EACH_VEC_ELT (constructor_elt, vals, i, elt)
+ FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
{
/* Mostly ctors will have elts that don't have side-effects, so
the usual case is to scan all the elements. Hence a single
@@ -1442,11 +1444,11 @@ build_constructor (tree type, VEC(constructor_elt,gc) *vals)
tree
build_constructor_single (tree type, tree index, tree value)
{
- VEC(constructor_elt,gc) *v;
+ vec<constructor_elt, va_gc> *v;
constructor_elt elt = {index, value};
- v = VEC_alloc (constructor_elt, gc, 1);
- VEC_quick_push (constructor_elt, v, elt);
+ vec_alloc (v, 1);
+ v->quick_push (elt);
return build_constructor (type, v);
}
@@ -1458,11 +1460,11 @@ tree
build_constructor_from_list (tree type, tree vals)
{
tree t;
- VEC(constructor_elt,gc) *v = NULL;
+ vec<constructor_elt, va_gc> *v = NULL;
if (vals)
{
- v = VEC_alloc (constructor_elt, gc, list_length (vals));
+ vec_alloc (v, list_length (vals));
for (t = vals; t; t = TREE_CHAIN (t))
CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t));
}
@@ -1674,7 +1676,7 @@ make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
{
tree t;
size_t length = (offsetof (struct tree_binfo, base_binfos)
- + VEC_embedded_size (tree, base_binfos));
+ + vec<tree, va_gc>::embedded_size (base_binfos));
record_node_allocation_statistics (TREE_BINFO, length);
@@ -1684,7 +1686,7 @@ make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
TREE_SET_CODE (t, TREE_BINFO);
- VEC_embedded_init (tree, BINFO_BASE_BINFOS (t), base_binfos);
+ BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);
return t;
}
@@ -2146,11 +2148,11 @@ purpose_member (const_tree elem, tree list)
/* Return true if ELEM is in V. */
bool
-vec_member (const_tree elem, VEC(tree,gc) *v)
+vec_member (const_tree elem, vec<tree, va_gc> *v)
{
unsigned ix;
tree t;
- FOR_EACH_VEC_ELT (tree, v, ix, t)
+ FOR_EACH_VEC_SAFE_ELT (v, ix, t)
if (elem == t)
return true;
return false;
@@ -2311,13 +2313,13 @@ build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
/* Build a chain of TREE_LIST nodes from a vector. */
tree
-build_tree_list_vec_stat (const VEC(tree,gc) *vec MEM_STAT_DECL)
+build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
{
tree ret = NULL_TREE;
tree *pp = &ret;
unsigned int i;
tree t;
- FOR_EACH_VEC_ELT (tree, vec, i, t)
+ FOR_EACH_VEC_SAFE_ELT (vec, i, t)
{
*pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
pp = &TREE_CHAIN (*pp);
@@ -2350,15 +2352,16 @@ tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
/* Return the values of the elements of a CONSTRUCTOR as a vector of
trees. */
-VEC(tree,gc) *
+vec<tree, va_gc> *
ctor_to_vec (tree ctor)
{
- VEC(tree, gc) *vec = VEC_alloc (tree, gc, CONSTRUCTOR_NELTS (ctor));
+ vec<tree, va_gc> *vec;
+ vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
unsigned int ix;
tree val;
FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
- VEC_quick_push (tree, vec, val);
+ vec->quick_push (val);
return vec;
}
@@ -3093,17 +3096,17 @@ type_contains_placeholder_p (tree type)
/* Push tree EXP onto vector QUEUE if it is not already present. */
static void
-push_without_duplicates (tree exp, VEC (tree, heap) **queue)
+push_without_duplicates (tree exp, vec<tree> *queue)
{
unsigned int i;
tree iter;
- FOR_EACH_VEC_ELT (tree, *queue, i, iter)
+ FOR_EACH_VEC_ELT (*queue, i, iter)
if (simple_cst_equal (iter, exp) == 1)
break;
if (!iter)
- VEC_safe_push (tree, heap, *queue, exp);
+ queue->safe_push (exp);
}
/* Given a tree EXP, find all occurrences of references to fields
@@ -3114,7 +3117,7 @@ push_without_duplicates (tree exp, VEC (tree, heap) **queue)
argument list. */
void
-find_placeholder_in_expr (tree exp, VEC (tree, heap) **refs)
+find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
enum tree_code code = TREE_CODE (exp);
tree inner;
@@ -4110,18 +4113,18 @@ build_nt (enum tree_code code, ...)
}
/* Similar to build_nt, but for creating a CALL_EXPR object with a
- tree VEC. */
+ tree vec. */
tree
-build_nt_call_vec (tree fn, VEC(tree,gc) *args)
+build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
{
tree ret, t;
unsigned int ix;
- ret = build_vl_exp (CALL_EXPR, VEC_length (tree, args) + 3);
+ ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
CALL_EXPR_FN (ret) = fn;
CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
- FOR_EACH_VEC_ELT (tree, args, ix, t)
+ FOR_EACH_VEC_SAFE_ELT (args, ix, t)
CALL_EXPR_ARG (ret, ix) = t;
return ret;
}
@@ -4173,7 +4176,7 @@ build_fn_decl (const char *name, tree type)
return decl;
}
-VEC(tree,gc) *all_translation_units;
+vec<tree, va_gc> *all_translation_units;
/* Builds a new translation-unit decl with name NAME, queues it in the
global list of translation-unit decls and returns it. */
@@ -4184,7 +4187,7 @@ build_translation_unit_decl (tree name)
tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL,
name, NULL_TREE);
TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name;
- VEC_safe_push (tree, gc, all_translation_units, tu);
+ vec_safe_push (all_translation_units, tu);
return tu;
}
@@ -4484,7 +4487,7 @@ free_lang_data_in_binfo (tree binfo)
BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE;
BINFO_SUBVTT_INDEX (binfo) = NULL_TREE;
- FOR_EACH_VEC_ELT (tree, BINFO_BASE_BINFOS (binfo), i, t)
+ FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t)
free_lang_data_in_binfo (t);
}
@@ -4754,16 +4757,16 @@ free_lang_data_in_decl (tree decl)
struct free_lang_data_d
{
/* Worklist to avoid excessive recursion. */
- VEC(tree,heap) *worklist;
+ vec<tree> worklist;
/* Set of traversed objects. Used to avoid duplicate visits. */
struct pointer_set_t *pset;
/* Array of symbols to process with free_lang_data_in_decl. */
- VEC(tree,heap) *decls;
+ vec<tree> decls;
/* Array of types to process with free_lang_data_in_type. */
- VEC(tree,heap) *types;
+ vec<tree> types;
};
@@ -4803,13 +4806,13 @@ add_tree_to_fld_list (tree t, struct free_lang_data_d *fld)
{
if (DECL_P (t))
{
- VEC_safe_push (tree, heap, fld->decls, t);
+ fld->decls.safe_push (t);
if (debug_info_level > DINFO_LEVEL_TERSE)
save_debug_info_for_decl (t);
}
else if (TYPE_P (t))
{
- VEC_safe_push (tree, heap, fld->types, t);
+ fld->types.safe_push (t);
if (debug_info_level > DINFO_LEVEL_TERSE)
save_debug_info_for_type (t);
}
@@ -4823,7 +4826,7 @@ static inline void
fld_worklist_push (tree t, struct free_lang_data_d *fld)
{
if (t && !is_lang_specific (t) && !pointer_set_contains (fld->pset, t))
- VEC_safe_push (tree, heap, fld->worklist, (t));
+ fld->worklist.safe_push ((t));
}
@@ -4939,8 +4942,7 @@ find_decls_types_r (tree *tp, int *ws, void *data)
{
unsigned i;
tree tem;
- for (i = 0; VEC_iterate (tree, BINFO_BASE_BINFOS (TYPE_BINFO (t)),
- i, tem); ++i)
+ FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem)
fld_worklist_push (TREE_TYPE (tem), fld);
tem = BINFO_VIRTUALS (TYPE_BINFO (t));
if (tem
@@ -4998,9 +5000,9 @@ find_decls_types (tree t, struct free_lang_data_d *fld)
{
if (!pointer_set_contains (fld->pset, t))
walk_tree (&t, find_decls_types_r, fld, fld->pset);
- if (VEC_empty (tree, fld->worklist))
+ if (fld->worklist.is_empty ())
break;
- t = VEC_pop (tree, fld->worklist);
+ t = fld->worklist.pop ();
}
}
@@ -5207,15 +5209,15 @@ free_lang_data_in_cgraph (void)
/* Initialize sets and arrays to store referenced decls and types. */
fld.pset = pointer_set_create ();
- fld.worklist = NULL;
- fld.decls = VEC_alloc (tree, heap, 100);
- fld.types = VEC_alloc (tree, heap, 100);
+ fld.worklist.create (0);
+ fld.decls.create (100);
+ fld.types.create (100);
/* Find decls and types in the body of every function in the callgraph. */
FOR_EACH_FUNCTION (n)
find_decls_types_in_node (n, &fld);
- FOR_EACH_VEC_ELT (alias_pair, alias_pairs, i, p)
+ FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
find_decls_types (p->decl, &fld);
/* Find decls and types in every varpool symbol. */
@@ -5225,21 +5227,21 @@ free_lang_data_in_cgraph (void)
/* Set the assembler name on every decl found. We need to do this
now because free_lang_data_in_decl will invalidate data needed
for mangling. This breaks mangling on interdependent decls. */
- FOR_EACH_VEC_ELT (tree, fld.decls, i, t)
+ FOR_EACH_VEC_ELT (fld.decls, i, t)
assign_assembler_name_if_neeeded (t);
/* Traverse every decl found freeing its language data. */
- FOR_EACH_VEC_ELT (tree, fld.decls, i, t)
+ FOR_EACH_VEC_ELT (fld.decls, i, t)
free_lang_data_in_decl (t);
/* Traverse every type found freeing its language data. */
- FOR_EACH_VEC_ELT (tree, fld.types, i, t)
+ FOR_EACH_VEC_ELT (fld.types, i, t)
free_lang_data_in_type (t);
pointer_set_destroy (fld.pset);
- VEC_free (tree, heap, fld.worklist);
- VEC_free (tree, heap, fld.decls);
- VEC_free (tree, heap, fld.types);
+ fld.worklist.release ();
+ fld.decls.release ();
+ fld.types.release ();
}
@@ -6112,7 +6114,7 @@ decl_value_expr_insert (tree from, tree to)
/* Lookup a vector of debug arguments for FROM, and return it if we
find one. */
-VEC(tree, gc) **
+vec<tree, va_gc> **
decl_debug_args_lookup (tree from)
{
struct tree_vec_map *h, in;
@@ -6131,7 +6133,7 @@ decl_debug_args_lookup (tree from)
/* Insert a mapping FROM->empty vector of debug arguments in the value
expression hashtable. */
-VEC(tree, gc) **
+vec<tree, va_gc> **
decl_debug_args_insert (tree from)
{
struct tree_vec_map *h;
@@ -6751,16 +6753,15 @@ simple_cst_equal (const_tree t1, const_tree t2)
case CONSTRUCTOR:
{
unsigned HOST_WIDE_INT idx;
- VEC(constructor_elt, gc) *v1 = CONSTRUCTOR_ELTS (t1);
- VEC(constructor_elt, gc) *v2 = CONSTRUCTOR_ELTS (t2);
+ vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1);
+ vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2);
- if (VEC_length (constructor_elt, v1) != VEC_length (constructor_elt, v2))
+ if (vec_safe_length (v1) != vec_safe_length (v2))
return false;
- for (idx = 0; idx < VEC_length (constructor_elt, v1); ++idx)
+ for (idx = 0; idx < vec_safe_length (v1); ++idx)
/* ??? Should we handle also fields here? */
- if (!simple_cst_equal (VEC_index (constructor_elt, v1, idx).value,
- VEC_index (constructor_elt, v2, idx).value))
+ if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value))
return false;
return true;
}
@@ -10157,16 +10158,16 @@ build_call_array_loc (location_t loc, tree return_type, tree fn,
return t;
}
-/* Like build_call_array, but takes a VEC. */
+/* Like build_call_array, but takes a vec. */
tree
-build_call_vec (tree return_type, tree fn, VEC(tree,gc) *args)
+build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
{
tree ret, t;
unsigned int ix;
- ret = build_call_1 (return_type, fn, VEC_length (tree, args));
- FOR_EACH_VEC_ELT (tree, args, ix, t)
+ ret = build_call_1 (return_type, fn, vec_safe_length (args));
+ FOR_EACH_VEC_SAFE_ELT (args, ix, t)
CALL_EXPR_ARG (ret, ix) = t;
process_call_operands (ret);
return ret;
@@ -10726,9 +10727,7 @@ walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
unsigned HOST_WIDE_INT idx;
constructor_elt *ce;
- for (idx = 0;
- VEC_iterate(constructor_elt, CONSTRUCTOR_ELTS (*tp), idx, ce);
- idx++)
+ for (idx = 0; vec_safe_iterate(CONSTRUCTOR_ELTS (*tp), idx, &ce); idx++)
WALK_SUBTREE (ce->value);
}
break;
diff --git a/gcc/tree.h b/gcc/tree.h
index cb577f0ecce..a830e28d682 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -27,7 +27,6 @@ along with GCC; see the file COPYING3. If not see
#include "input.h"
#include "statistics.h"
#include "vec.h"
-#include "vecir.h"
#include "double-int.h"
#include "real.h"
#include "fixed-value.h"
@@ -250,10 +249,8 @@ typedef struct GTY(()) alias_pair
} alias_pair;
/* Define gc'd vector type. */
-DEF_VEC_O(alias_pair);
-DEF_VEC_ALLOC_O(alias_pair,gc);
-extern GTY(()) VEC(alias_pair,gc) * alias_pairs;
+extern GTY(()) vec<alias_pair, va_gc> *alias_pairs;
/* Classify which part of the compiler has defined a given builtin function.
@@ -1530,17 +1527,17 @@ struct GTY(()) tree_vec {
/* In a CONSTRUCTOR node. */
#define CONSTRUCTOR_ELTS(NODE) (CONSTRUCTOR_CHECK (NODE)->constructor.elts)
#define CONSTRUCTOR_ELT(NODE,IDX) \
- (&VEC_index (constructor_elt, CONSTRUCTOR_ELTS (NODE), IDX))
+ (&(*CONSTRUCTOR_ELTS (NODE))[IDX])
#define CONSTRUCTOR_NELTS(NODE) \
- (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (NODE)))
+ (vec_safe_length (CONSTRUCTOR_ELTS (NODE)))
/* Iterate through the vector V of CONSTRUCTOR_ELT elements, yielding the
value of each element (stored within VAL). IX must be a scratch variable
of unsigned integer type. */
#define FOR_EACH_CONSTRUCTOR_VALUE(V, IX, VAL) \
- for (IX = 0; (IX >= VEC_length (constructor_elt, V)) \
+ for (IX = 0; (IX >= vec_safe_length (V)) \
? false \
- : ((VAL = VEC_index (constructor_elt, V, IX).value), \
+ : ((VAL = (*(V))[IX].value), \
true); \
(IX)++)
@@ -1548,10 +1545,10 @@ struct GTY(()) tree_vec {
the value of each element (stored within VAL) and its index (stored
within INDEX). IX must be a scratch variable of unsigned integer type. */
#define FOR_EACH_CONSTRUCTOR_ELT(V, IX, INDEX, VAL) \
- for (IX = 0; (IX >= VEC_length (constructor_elt, V)) \
+ for (IX = 0; (IX >= vec_safe_length (V)) \
? false \
- : (((void) (VAL = VEC_index (constructor_elt, V, IX).value)), \
- (INDEX = VEC_index (constructor_elt, V, IX).index), \
+ : (((void) (VAL = (*V)[IX].value)), \
+ (INDEX = (*V)[IX].index), \
true); \
(IX)++)
@@ -1559,7 +1556,7 @@ struct GTY(()) tree_vec {
#define CONSTRUCTOR_APPEND_ELT(V, INDEX, VALUE) \
do { \
constructor_elt _ce___ = {INDEX, VALUE}; \
- VEC_safe_push (constructor_elt, gc, V, _ce___); \
+ vec_safe_push ((V), _ce___); \
} while (0)
/* True if NODE, a FIELD_DECL, is to be processed as a bitfield for
@@ -1584,12 +1581,10 @@ typedef struct GTY(()) constructor_elt_d {
tree value;
} constructor_elt;
-DEF_VEC_O(constructor_elt);
-DEF_VEC_ALLOC_O(constructor_elt,gc);
struct GTY(()) tree_constructor {
struct tree_typed typed;
- VEC(constructor_elt,gc) *elts;
+ vec<constructor_elt, va_gc> *elts;
};
/* Define fields and accessors for some nodes that represent expressions. */
@@ -2037,9 +2032,8 @@ struct GTY(()) tree_omp_clause {
#define BLOCK_NONLOCALIZED_VARS(NODE) \
(BLOCK_CHECK (NODE)->block.nonlocalized_vars)
#define BLOCK_NUM_NONLOCALIZED_VARS(NODE) \
- VEC_length (tree, BLOCK_NONLOCALIZED_VARS (NODE))
-#define BLOCK_NONLOCALIZED_VAR(NODE,N) \
- VEC_index (tree, BLOCK_NONLOCALIZED_VARS (NODE), N)
+ vec_safe_length (BLOCK_NONLOCALIZED_VARS (NODE))
+#define BLOCK_NONLOCALIZED_VAR(NODE,N) (*BLOCK_NONLOCALIZED_VARS (NODE))[N]
#define BLOCK_SUBBLOCKS(NODE) (BLOCK_CHECK (NODE)->block.subblocks)
#define BLOCK_SUPERCONTEXT(NODE) (BLOCK_CHECK (NODE)->block.supercontext)
#define BLOCK_CHAIN(NODE) (BLOCK_CHECK (NODE)->block.chain)
@@ -2095,7 +2089,7 @@ struct GTY(()) tree_block {
location_t locus;
tree vars;
- VEC(tree,gc) *nonlocalized_vars;
+ vec<tree, va_gc> *nonlocalized_vars;
tree subblocks;
tree supercontext;
@@ -2512,15 +2506,15 @@ struct GTY(()) tree_type_non_common {
#define BINFO_BASE_BINFOS(NODE) (&TREE_BINFO_CHECK(NODE)->binfo.base_binfos)
/* The number of basetypes for NODE. */
-#define BINFO_N_BASE_BINFOS(NODE) (VEC_length (tree, BINFO_BASE_BINFOS (NODE)))
+#define BINFO_N_BASE_BINFOS(NODE) (BINFO_BASE_BINFOS (NODE)->length ())
/* Accessor macro to get to the Nth base binfo of this binfo. */
#define BINFO_BASE_BINFO(NODE,N) \
- (VEC_index (tree, BINFO_BASE_BINFOS (NODE), (N)))
+ ((*BINFO_BASE_BINFOS (NODE))[(N)])
#define BINFO_BASE_ITERATE(NODE,N,B) \
- (VEC_iterate (tree, BINFO_BASE_BINFOS (NODE), (N), (B)))
+ (BINFO_BASE_BINFOS (NODE)->iterate ((N), &(B)))
#define BINFO_BASE_APPEND(NODE,T) \
- (VEC_quick_push (tree, BINFO_BASE_BINFOS (NODE), (T)))
+ (BINFO_BASE_BINFOS (NODE)->quick_push ((T)))
/* For a BINFO record describing a virtual base class, i.e., one where
TREE_VIA_VIRTUAL is set, this field assists in locating the virtual
@@ -2535,9 +2529,9 @@ struct GTY(()) tree_type_non_common {
#define BINFO_BASE_ACCESSES(NODE) (TREE_BINFO_CHECK(NODE)->binfo.base_accesses)
#define BINFO_BASE_ACCESS(NODE,N) \
- VEC_index (tree, BINFO_BASE_ACCESSES (NODE), (N))
+ (*BINFO_BASE_ACCESSES (NODE))[(N)]
#define BINFO_BASE_ACCESS_APPEND(NODE,T) \
- VEC_quick_push (tree, BINFO_BASE_ACCESSES (NODE), (T))
+ BINFO_BASE_ACCESSES (NODE)->quick_push ((T))
/* The index in the VTT where this subobject's sub-VTT can be found.
NULL_TREE if there is no sub-VTT. */
@@ -2561,13 +2555,13 @@ struct GTY (()) tree_binfo {
tree vtable;
tree virtuals;
tree vptr_field;
- VEC(tree,gc) *base_accesses;
+ vec<tree, va_gc> *base_accesses;
tree inheritance;
tree vtt_subvtt;
tree vtt_vptr;
- VEC(tree,none) base_binfos;
+ vec<tree, va_gc> base_binfos;
};
@@ -3443,8 +3437,8 @@ struct GTY(())
#define DECL_DISREGARD_INLINE_LIMITS(NODE) \
(FUNCTION_DECL_CHECK (NODE)->function_decl.disregard_inline_limits)
-extern VEC(tree, gc) **decl_debug_args_lookup (tree);
-extern VEC(tree, gc) **decl_debug_args_insert (tree);
+extern vec<tree, va_gc> **decl_debug_args_lookup (tree);
+extern vec<tree, va_gc> **decl_debug_args_insert (tree);
/* Nonzero if a FUNCTION_DECL has DEBUG arguments attached to it. */
#define DECL_HAS_DEBUG_ARGS_P(NODE) \
@@ -3549,7 +3543,7 @@ struct GTY(()) tree_translation_unit_decl {
};
/* A vector of all translation-units. */
-extern GTY (()) VEC(tree,gc) *all_translation_units;
+extern GTY (()) vec<tree, va_gc> *all_translation_units;
/* For a TYPE_DECL, holds the "original" type. (TREE_TYPE has the copy.) */
#define DECL_ORIGINAL_TYPE(NODE) \
@@ -4645,7 +4639,7 @@ extern tree maybe_get_identifier (const char *);
/* Construct various types of nodes. */
extern tree build_nt (enum tree_code, ...);
-extern tree build_nt_call_vec (tree, VEC(tree,gc) *);
+extern tree build_nt_call_vec (tree, vec<tree, va_gc> *);
extern tree build0_stat (enum tree_code, tree MEM_STAT_DECL);
#define build0(c,t) build0_stat (c,t MEM_STAT_INFO)
@@ -4754,9 +4748,9 @@ extern tree make_vector_stat (unsigned MEM_STAT_DECL);
#define make_vector(n) make_vector_stat (n MEM_STAT_INFO)
extern tree build_vector_stat (tree, tree * MEM_STAT_DECL);
#define build_vector(t,v) build_vector_stat (t, v MEM_STAT_INFO)
-extern tree build_vector_from_ctor (tree, VEC(constructor_elt,gc) *);
+extern tree build_vector_from_ctor (tree, vec<constructor_elt, va_gc> *);
extern tree build_vector_from_val (tree, tree);
-extern tree build_constructor (tree, VEC(constructor_elt,gc) *);
+extern tree build_constructor (tree, vec<constructor_elt, va_gc> *);
extern tree build_constructor_single (tree, tree, tree);
extern tree build_constructor_from_list (tree, tree);
extern tree build_real_from_int_cst (tree, const_tree);
@@ -4766,7 +4760,7 @@ extern tree build_zero_cst (tree);
extern tree build_string (int, const char *);
extern tree build_tree_list_stat (tree, tree MEM_STAT_DECL);
#define build_tree_list(t,q) build_tree_list_stat(t,q MEM_STAT_INFO)
-extern tree build_tree_list_vec_stat (const VEC(tree,gc) * MEM_STAT_DECL);
+extern tree build_tree_list_vec_stat (const vec<tree, va_gc> *MEM_STAT_DECL);
#define build_tree_list_vec(v) build_tree_list_vec_stat (v MEM_STAT_INFO)
extern tree build_decl_stat (location_t, enum tree_code,
tree, tree MEM_STAT_DECL);
@@ -4785,7 +4779,7 @@ extern tree build_call_valist (tree, tree, int, va_list);
#define build_call_array(T1,T2,N,T3)\
build_call_array_loc (UNKNOWN_LOCATION, T1, T2, N, T3)
extern tree build_call_array_loc (location_t, tree, tree, int, const tree *);
-extern tree build_call_vec (tree, tree, VEC(tree,gc) *);
+extern tree build_call_vec (tree, tree, vec<tree, va_gc> *);
/* Construct various nodes representing data types. */
@@ -4816,10 +4810,10 @@ extern tree build_varargs_function_type_list (tree, ...);
extern tree build_function_type_array (tree, int, tree *);
extern tree build_varargs_function_type_array (tree, int, tree *);
#define build_function_type_vec(RET, V) \
- build_function_type_array (RET, VEC_length (tree, V), VEC_address (tree, V))
+ build_function_type_array (RET, vec_safe_length (V), vec_safe_address (V))
#define build_varargs_function_type_vec(RET, V) \
- build_varargs_function_type_array (RET, VEC_length (tree, V), \
- VEC_address (tree, V))
+ build_varargs_function_type_array (RET, vec_safe_length (V), \
+ vec_safe_address (V))
extern tree build_method_type_directly (tree, tree, tree);
extern tree build_method_type (tree, tree);
extern tree build_offset_type (tree, tree);
@@ -4830,7 +4824,7 @@ extern bool range_in_array_bounds_p (tree);
extern tree value_member (tree, tree);
extern tree purpose_member (const_tree, tree);
-extern bool vec_member (const_tree, VEC(tree,gc) *);
+extern bool vec_member (const_tree, vec<tree, va_gc> *);
extern tree chain_index (int, tree);
extern int attribute_list_equal (const_tree, const_tree);
@@ -5132,7 +5126,7 @@ typedef struct record_layout_info_s
tree prev_field;
/* The static variables (i.e., class variables, as opposed to
instance variables) encountered in T. */
- VEC(tree,gc) *pending_statics;
+ vec<tree, va_gc> *pending_statics;
/* Bits remaining in the current alignment group */
int remaining_in_alignment;
/* True if we've seen a packed field that didn't have normal
@@ -5286,7 +5280,7 @@ extern bool initializer_zerop (const_tree);
/* Given a CONSTRUCTOR CTOR, return the element values as a vector. */
-extern VEC(tree,gc) *ctor_to_vec (tree);
+extern vec<tree, va_gc> *ctor_to_vec (tree);
extern bool categorize_ctor_elements (const_tree, HOST_WIDE_INT *,
HOST_WIDE_INT *, bool *);
@@ -5368,7 +5362,7 @@ extern bool type_contains_placeholder_p (tree);
or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
argument list. */
-extern void find_placeholder_in_expr (tree, VEC (tree, heap) **);
+extern void find_placeholder_in_expr (tree, vec<tree> *);
/* This macro calls the above function but short-circuits the common
case of a constant to save time and also checks for NULL. */
@@ -5866,7 +5860,7 @@ extern bool fold_builtin_next_arg (tree, bool);
extern enum built_in_function builtin_mathfn_code (const_tree);
extern tree fold_builtin_call_array (location_t, tree, tree, int, tree *);
extern tree build_call_expr_loc_array (location_t, tree, int, tree *);
-extern tree build_call_expr_loc_vec (location_t, tree, VEC(tree,gc) *);
+extern tree build_call_expr_loc_vec (location_t, tree, vec<tree, va_gc> *);
extern tree build_call_expr_loc (location_t, tree, int, ...);
extern tree build_call_expr (tree, int, ...);
extern tree mathfn_built_in (tree, enum built_in_function fn);
@@ -5984,11 +5978,11 @@ extern void print_rtl (FILE *, const_rtx);
/* In print-tree.c */
extern void debug_tree (tree);
-extern void debug_vec_tree (VEC(tree,gc) *);
+extern void debug_vec_tree (vec<tree, va_gc> *);
#ifdef BUFSIZ
extern void dump_addr (FILE*, const char *, const void *);
extern void print_node (FILE *, const char *, tree, int);
-extern void print_vec_tree (FILE *, const char *, VEC(tree,gc) *, int);
+extern void print_vec_tree (FILE *, const char *, vec<tree, va_gc> *, int);
extern void print_node_brief (FILE *, const char *, const_tree, int);
extern void indent_to (FILE *, int);
#endif
@@ -6289,7 +6283,7 @@ struct GTY(()) tree_priority_map {
struct GTY(()) tree_vec_map {
struct tree_map_base base;
- VEC(tree,gc) *to;
+ vec<tree, va_gc> *to;
};
#define tree_vec_map_eq tree_map_base_eq
diff --git a/gcc/value-prof.c b/gcc/value-prof.c
index 25445a10924..ac8a0b61458 100644
--- a/gcc/value-prof.c
+++ b/gcc/value-prof.c
@@ -73,7 +73,7 @@ along with GCC; see the file COPYING3. If not see
to profile. There are different histogram types (see HIST_TYPE_* in
value-prof.h) and each transformation can request one or more histogram
types per GIMPLE statement. The function gimple_find_values_to_profile()
- collects the values to profile in a VEC, and adds the number of counters
+ collects the values to profile in a vec, and adds the number of counters
required for the different histogram types.
For a -fprofile-generate run, the statements for which values should be
@@ -1085,7 +1085,8 @@ gimple_mod_subtract_transform (gimple_stmt_iterator *si)
return true;
}
-static VEC(cgraph_node_ptr, heap) *cgraph_node_map = NULL;
+static vec<cgraph_node_ptr> cgraph_node_map
+ = vec<cgraph_node_ptr>();
/* Initialize map from FUNCDEF_NO to CGRAPH_NODE. */
@@ -1095,14 +1096,12 @@ init_node_map (void)
struct cgraph_node *n;
if (get_last_funcdef_no ())
- VEC_safe_grow_cleared (cgraph_node_ptr, heap,
- cgraph_node_map, get_last_funcdef_no ());
+ cgraph_node_map.safe_grow_cleared (get_last_funcdef_no ());
FOR_EACH_FUNCTION (n)
{
if (DECL_STRUCT_FUNCTION (n->symbol.decl))
- VEC_replace (cgraph_node_ptr, cgraph_node_map,
- DECL_STRUCT_FUNCTION (n->symbol.decl)->funcdef_no, n);
+ cgraph_node_map[DECL_STRUCT_FUNCTION (n->symbol.decl)->funcdef_no] = n;
}
}
@@ -1111,8 +1110,7 @@ init_node_map (void)
void
del_node_map (void)
{
- VEC_free (cgraph_node_ptr, heap, cgraph_node_map);
- cgraph_node_map = NULL;
+ cgraph_node_map.release ();
}
/* Return cgraph node for function with pid */
@@ -1121,9 +1119,7 @@ static inline struct cgraph_node*
find_func_by_funcdef_no (int func_id)
{
int max_id = get_last_funcdef_no ();
- if (func_id >= max_id || VEC_index (cgraph_node_ptr,
- cgraph_node_map,
- func_id) == NULL)
+ if (func_id >= max_id || cgraph_node_map[func_id] == NULL)
{
if (flag_profile_correction)
inform (DECL_SOURCE_LOCATION (current_function_decl),
@@ -1134,7 +1130,7 @@ find_func_by_funcdef_no (int func_id)
return NULL;
}
- return VEC_index (cgraph_node_ptr, cgraph_node_map, func_id);
+ return cgraph_node_map[func_id];
}
/* Perform sanity check on the indirect call target. Due to race conditions,
@@ -1667,13 +1663,12 @@ gimple_divmod_values_to_profile (gimple stmt, histogram_values *values)
divisor = gimple_assign_rhs2 (stmt);
op0 = gimple_assign_rhs1 (stmt);
- VEC_reserve (histogram_value, heap, *values, 3);
+ values->reserve (3);
if (TREE_CODE (divisor) == SSA_NAME)
/* Check for the case where the divisor is the same value most
of the time. */
- VEC_quick_push (histogram_value, *values,
- gimple_alloc_histogram_value (cfun,
+ values->quick_push (gimple_alloc_histogram_value (cfun,
HIST_TYPE_SINGLE_VALUE,
stmt, divisor));
@@ -1684,16 +1679,16 @@ gimple_divmod_values_to_profile (gimple stmt, histogram_values *values)
{
tree val;
/* Check for a special case where the divisor is power of 2. */
- VEC_quick_push (histogram_value, *values,
- gimple_alloc_histogram_value (cfun, HIST_TYPE_POW2,
- stmt, divisor));
+ values->quick_push (gimple_alloc_histogram_value (cfun,
+ HIST_TYPE_POW2,
+ stmt, divisor));
val = build2 (TRUNC_DIV_EXPR, type, op0, divisor);
hist = gimple_alloc_histogram_value (cfun, HIST_TYPE_INTERVAL,
stmt, val);
hist->hdata.intvl.int_start = 0;
hist->hdata.intvl.steps = 2;
- VEC_quick_push (histogram_value, *values, hist);
+ values->quick_push (hist);
}
return;
@@ -1717,11 +1712,10 @@ gimple_indirect_call_to_profile (gimple stmt, histogram_values *values)
callee = gimple_call_fn (stmt);
- VEC_reserve (histogram_value, heap, *values, 3);
+ values->reserve (3);
- VEC_quick_push (histogram_value, *values,
- gimple_alloc_histogram_value (cfun, HIST_TYPE_INDIR_CALL,
- stmt, callee));
+ values->quick_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_INDIR_CALL,
+ stmt, callee));
return;
}
@@ -1750,17 +1744,15 @@ gimple_stringops_values_to_profile (gimple stmt, histogram_values *values)
if (TREE_CODE (blck_size) != INTEGER_CST)
{
- VEC_safe_push (histogram_value, heap, *values,
- gimple_alloc_histogram_value (cfun, HIST_TYPE_SINGLE_VALUE,
- stmt, blck_size));
- VEC_safe_push (histogram_value, heap, *values,
- gimple_alloc_histogram_value (cfun, HIST_TYPE_AVERAGE,
- stmt, blck_size));
+ values->safe_push (gimple_alloc_histogram_value (cfun,
+ HIST_TYPE_SINGLE_VALUE,
+ stmt, blck_size));
+ values->safe_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_AVERAGE,
+ stmt, blck_size));
}
if (TREE_CODE (blck_size) != INTEGER_CST)
- VEC_safe_push (histogram_value, heap, *values,
- gimple_alloc_histogram_value (cfun, HIST_TYPE_IOR,
- stmt, dest));
+ values->safe_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_IOR,
+ stmt, dest));
}
/* Find values inside STMT for that we want to measure histograms and adds
@@ -1782,12 +1774,12 @@ gimple_find_values_to_profile (histogram_values *values)
unsigned i;
histogram_value hist = NULL;
- *values = NULL;
+ values->create (0);
FOR_EACH_BB (bb)
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
gimple_values_to_profile (gsi_stmt (gsi), values);
- FOR_EACH_VEC_ELT (histogram_value, *values, i, hist)
+ FOR_EACH_VEC_ELT (*values, i, hist)
{
switch (hist->type)
{
diff --git a/gcc/value-prof.h b/gcc/value-prof.h
index b7215b8be0a..6abe42e6551 100644
--- a/gcc/value-prof.h
+++ b/gcc/value-prof.h
@@ -67,10 +67,8 @@ struct histogram_value_t
typedef struct histogram_value_t *histogram_value;
typedef const struct histogram_value_t *const_histogram_value;
-DEF_VEC_P(histogram_value);
-DEF_VEC_ALLOC_P(histogram_value,heap);
-typedef VEC(histogram_value,heap) *histogram_values;
+typedef vec<histogram_value> histogram_values;
extern void gimple_find_values_to_profile (histogram_values *);
extern bool gimple_value_profile_transformations (void);
diff --git a/gcc/var-tracking.c b/gcc/var-tracking.c
index adccf7caa0d..e95cf87a752 100644
--- a/gcc/var-tracking.c
+++ b/gcc/var-tracking.c
@@ -192,8 +192,6 @@ typedef struct micro_operation_def
} u;
} micro_operation;
-DEF_VEC_O(micro_operation);
-DEF_VEC_ALLOC_O(micro_operation,heap);
/* A declaration of a variable, or an RTL value being handled like a
declaration. */
@@ -264,7 +262,7 @@ typedef struct dataflow_set_def
typedef struct variable_tracking_info_def
{
/* The vector of micro operations. */
- VEC(micro_operation, heap) *mos;
+ vec<micro_operation> mos;
/* The IN and OUT set for dataflow analysis. */
dataflow_set in;
@@ -317,7 +315,6 @@ typedef struct loc_exp_dep_s
struct loc_exp_dep_s **pprev;
} loc_exp_dep;
-DEF_VEC_O (loc_exp_dep);
/* This data structure holds information about the depth of a variable
expansion. */
@@ -352,7 +349,7 @@ struct onepart_aux
/* The depth of the cur_loc expression. */
expand_depth depth;
/* Dependencies actively used when expand FROM into cur_loc. */
- VEC (loc_exp_dep, none) deps;
+ vec<loc_exp_dep, va_heap, vl_embed> deps;
};
/* Structure describing one part of variable. */
@@ -499,11 +496,9 @@ typedef struct GTY(()) parm_reg {
rtx incoming;
} parm_reg_t;
-DEF_VEC_O(parm_reg_t);
-DEF_VEC_ALLOC_O(parm_reg_t, gc);
/* Vector of windowed parameter registers, if any. */
-static VEC(parm_reg_t, gc) *windowed_parm_regs = NULL;
+static vec<parm_reg_t, va_gc> *windowed_parm_regs = NULL;
#endif
/* Variable used to tell whether cselib_process_insn called our hook. */
@@ -1079,11 +1074,11 @@ adjust_insn (basic_block bb, rtx insn)
if (RTX_FRAME_RELATED_P (insn)
&& find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
{
- unsigned int i, nregs = VEC_length(parm_reg_t, windowed_parm_regs);
+ unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
parm_reg_t *p;
- FOR_EACH_VEC_ELT (parm_reg_t, windowed_parm_regs, i, p)
+ FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
{
XVECEXP (rtl, 0, i * 2)
= gen_rtx_SET (VOIDmode, p->incoming, p->outgoing);
@@ -5296,7 +5291,7 @@ log_op_type (rtx x, basic_block bb, rtx insn,
enum micro_operation_type mopt, FILE *out)
{
fprintf (out, "bb %i op %i insn %i %s ",
- bb->index, VEC_length (micro_operation, VTI (bb)->mos),
+ bb->index, VTI (bb)->mos.length (),
INSN_UID (insn), micro_operation_type_name[mopt]);
print_inline_rtx (out, x, 2);
fputc ('\n', out);
@@ -5321,7 +5316,7 @@ log_op_type (rtx x, basic_block bb, rtx insn,
(RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)
/* All preserved VALUEs. */
-static VEC (rtx, heap) *preserved_values;
+static vec<rtx> preserved_values;
/* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */
@@ -5329,7 +5324,7 @@ static void
preserve_value (cselib_val *val)
{
cselib_preserve_value (val);
- VEC_safe_push (rtx, heap, preserved_values, val->val_rtx);
+ preserved_values.safe_push (val->val_rtx);
}
/* Helper function for MO_VAL_LOC handling. Return non-zero if
@@ -5512,7 +5507,7 @@ add_uses (rtx *ploc, void *data)
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
- VEC_safe_push (micro_operation, heap, VTI (bb)->mos, mo);
+ VTI (bb)->mos.safe_push (mo);
}
return 0;
@@ -5801,7 +5796,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip)
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (moa.u.loc, cui->bb, cui->insn,
moa.type, dump_file);
- VEC_safe_push (micro_operation, heap, VTI (bb)->mos, moa);
+ VTI (bb)->mos.safe_push (moa);
}
resolve = false;
@@ -5888,7 +5883,7 @@ add_stores (rtx loc, const_rtx expr, void *cuip)
log_and_return:
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file);
- VEC_safe_push (micro_operation, heap, VTI (bb)->mos, mo);
+ VTI (bb)->mos.safe_push (mo);
}
/* Arguments to the call. */
@@ -6156,15 +6151,15 @@ prepare_call_arguments (basic_block bb, rtx insn)
&& TREE_CODE (fndecl) == FUNCTION_DECL
&& DECL_HAS_DEBUG_ARGS_P (fndecl))
{
- VEC(tree, gc) **debug_args = decl_debug_args_lookup (fndecl);
+ vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
if (debug_args)
{
unsigned int ix;
tree param;
- for (ix = 0; VEC_iterate (tree, *debug_args, ix, param); ix += 2)
+ for (ix = 0; vec_safe_iterate (*debug_args, ix, &param); ix += 2)
{
rtx item;
- tree dtemp = VEC_index (tree, *debug_args, ix + 1);
+ tree dtemp = (**debug_args)[ix + 1];
enum machine_mode mode = DECL_MODE (dtemp);
item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
@@ -6247,11 +6242,11 @@ add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
cui.sets = sets;
cui.n_sets = n_sets;
- n1 = VEC_length (micro_operation, VTI (bb)->mos);
+ n1 = VTI (bb)->mos.length ();
cui.store_p = false;
note_uses (&PATTERN (insn), add_uses_1, &cui);
- n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
- mos = VEC_address (micro_operation, VTI (bb)->mos);
+ n2 = VTI (bb)->mos.length () - 1;
+ mos = VTI (bb)->mos.address ();
/* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and
MO_VAL_LOC last. */
@@ -6271,7 +6266,7 @@ add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
}
}
- n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
+ n2 = VTI (bb)->mos.length () - 1;
while (n1 < n2)
{
while (n1 < n2 && mos[n1].type != MO_VAL_LOC)
@@ -6299,17 +6294,17 @@ add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file);
- VEC_safe_push (micro_operation, heap, VTI (bb)->mos, mo);
+ VTI (bb)->mos.safe_push (mo);
}
- n1 = VEC_length (micro_operation, VTI (bb)->mos);
+ n1 = VTI (bb)->mos.length ();
/* This will record NEXT_INSN (insn), such that we can
insert notes before it without worrying about any
notes that MO_USEs might emit after the insn. */
cui.store_p = true;
note_stores (PATTERN (insn), add_stores, &cui);
- n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
- mos = VEC_address (micro_operation, VTI (bb)->mos);
+ n2 = VTI (bb)->mos.length () - 1;
+ mos = VTI (bb)->mos.address ();
/* Order the MO_VAL_USEs first (note_stores does nothing
on DEBUG_INSNs, so there are no MO_VAL_LOCs from this
@@ -6330,7 +6325,7 @@ add_with_sets (rtx insn, struct cselib_set *sets, int n_sets)
}
}
- n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1;
+ n2 = VTI (bb)->mos.length () - 1;
while (n1 < n2)
{
while (n1 < n2 && mos[n1].type == MO_CLOBBER)
@@ -6426,7 +6421,7 @@ compute_bb_dataflow (basic_block bb)
dataflow_set_copy (&old_out, out);
dataflow_set_copy (out, in);
- FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
+ FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
{
rtx insn = mo->insn;
@@ -7734,11 +7729,6 @@ delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv,
delete_slot_part (set, loc, slot, offset);
}
-DEF_VEC_P (variable);
-DEF_VEC_ALLOC_P (variable, heap);
-
-DEF_VEC_ALLOC_P_STACK (rtx);
-#define VEC_rtx_stack_alloc(alloc) VEC_stack_alloc (rtx, alloc)
/* Structure for passing some other parameters to function
vt_expand_loc_callback. */
@@ -7749,7 +7739,7 @@ struct expand_loc_callback_data
/* Stack of values and debug_exprs under expansion, and their
children. */
- VEC (rtx, stack) *expanding;
+ vec<rtx, va_stack> expanding;
/* Stack of values and debug_exprs whose expansion hit recursion
cycles. They will have VALUE_RECURSED_INTO marked when added to
@@ -7757,7 +7747,7 @@ struct expand_loc_callback_data
resolves to a valid location. So, if the flag remains set at the
end of the search, we know no valid location for this one can
possibly exist. */
- VEC (rtx, stack) *pending;
+ vec<rtx, va_stack> pending;
/* The maximum depth among the sub-expressions under expansion.
Zero indicates no expansion so far. */
@@ -7782,14 +7772,14 @@ loc_exp_dep_alloc (variable var, int count)
in the algorithm, so we instead leave an assertion to catch
errors. */
gcc_checking_assert (!count
- || VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));
+ || VAR_LOC_DEP_VEC (var) == NULL
+ || VAR_LOC_DEP_VEC (var)->is_empty ());
- if (VAR_LOC_1PAUX (var)
- && VEC_space (loc_exp_dep, VAR_LOC_DEP_VEC (var), count))
+ if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count))
return;
allocsize = offsetof (struct onepart_aux, deps)
- + VEC_embedded_size (loc_exp_dep, count);
+ + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count);
if (VAR_LOC_1PAUX (var))
{
@@ -7809,7 +7799,7 @@ loc_exp_dep_alloc (variable var, int count)
VAR_LOC_DEPTH (var).complexity = 0;
VAR_LOC_DEPTH (var).entryvals = 0;
}
- VEC_embedded_init (loc_exp_dep, VAR_LOC_DEP_VEC (var), count);
+ VAR_LOC_DEP_VEC (var)->embedded_init (count);
}
/* Remove all entries from the vector of active dependencies of VAR,
@@ -7818,14 +7808,14 @@ loc_exp_dep_alloc (variable var, int count)
static void
loc_exp_dep_clear (variable var)
{
- while (!VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)))
+ while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ())
{
- loc_exp_dep *led = &VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
+ loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last ();
if (led->next)
led->next->pprev = led->pprev;
if (led->pprev)
*led->pprev = led->next;
- VEC_pop (loc_exp_dep, VAR_LOC_DEP_VEC (var));
+ VAR_LOC_DEP_VEC (var)->pop ();
}
}
@@ -7865,8 +7855,8 @@ loc_exp_insert_dep (variable var, rtx x, htab_t vars)
{
loc_exp_dep empty;
memset (&empty, 0, sizeof (empty));
- VEC_quick_push (loc_exp_dep, VAR_LOC_DEP_VEC (var), empty);
- led = &VEC_last (loc_exp_dep, VAR_LOC_DEP_VEC (var));
+ VAR_LOC_DEP_VEC (var)->quick_push (empty);
+ led = &VAR_LOC_DEP_VEC (var)->last ();
}
led->dv = var->dv;
led->value = x;
@@ -7888,7 +7878,8 @@ loc_exp_dep_set (variable var, rtx result, rtx *value, int count, htab_t vars)
{
bool pending_recursion = false;
- gcc_checking_assert (VEC_empty (loc_exp_dep, VAR_LOC_DEP_VEC (var)));
+ gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL
+ || VAR_LOC_DEP_VEC (var)->is_empty ());
/* Set up all dependencies from last_child (as set up at the end of
the loop above) to the end. */
@@ -8032,7 +8023,7 @@ vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
}
first_child = result_first_child = last_child
- = VEC_length (rtx, elcd->expanding);
+ = elcd->expanding.length ();
wanted_entryvals = found_entryvals;
@@ -8061,7 +8052,7 @@ vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
elcd->depth.complexity = elcd->depth.entryvals = 0;
result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
vt_expand_loc_callback, data);
- last_child = VEC_length (rtx, elcd->expanding);
+ last_child = elcd->expanding.length ();
if (result)
{
@@ -8105,16 +8096,16 @@ vt_expand_var_loc_chain (variable var, bitmap regs, void *data, bool *pendrecp)
attempted locs as dependencies, so that we retry the
expansion should any of them change, in the hope it can give
us a new entry without an ENTRY_VALUE? */
- VEC_truncate (rtx, elcd->expanding, first_child);
+ elcd->expanding.truncate (first_child);
goto retry;
}
/* Register all encountered dependencies as active. */
pending_recursion = loc_exp_dep_set
- (var, result, VEC_address (rtx, elcd->expanding) + result_first_child,
+ (var, result, elcd->expanding.address () + result_first_child,
last_child - result_first_child, elcd->vars);
- VEC_truncate (rtx, elcd->expanding, first_child);
+ elcd->expanding.truncate (first_child);
/* Record where the expansion came from. */
gcc_checking_assert (!result || !pending_recursion);
@@ -8183,7 +8174,7 @@ vt_expand_loc_callback (rtx x, bitmap regs,
return x;
}
- VEC_safe_push (rtx, stack, elcd->expanding, x);
+ elcd->expanding.safe_push (x);
/* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */
gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));
@@ -8227,7 +8218,7 @@ vt_expand_loc_callback (rtx x, bitmap regs,
if (pending_recursion)
{
gcc_checking_assert (!result);
- VEC_safe_push (rtx, stack, elcd->pending, x);
+ elcd->pending.safe_push (x);
}
else
{
@@ -8257,11 +8248,11 @@ vt_expand_loc_callback (rtx x, bitmap regs,
This function performs this finalization of NULL locations. */
static void
-resolve_expansions_pending_recursion (VEC (rtx, stack) *pending)
+resolve_expansions_pending_recursion (vec<rtx, va_stack> pending)
{
- while (!VEC_empty (rtx, pending))
+ while (!pending.is_empty ())
{
- rtx x = VEC_pop (rtx, pending);
+ rtx x = pending.pop ();
decl_or_value dv;
if (!VALUE_RECURSED_INTO (x))
@@ -8276,13 +8267,13 @@ resolve_expansions_pending_recursion (VEC (rtx, stack) *pending)
}
/* Initialize expand_loc_callback_data D with variable hash table V.
- It must be a macro because of alloca (VEC stack). */
+ It must be a macro because of alloca (vec stack). */
#define INIT_ELCD(d, v) \
do \
{ \
(d).vars = (v); \
- (d).expanding = VEC_alloc (rtx, stack, 4); \
- (d).pending = VEC_alloc (rtx, stack, 4); \
+ vec_stack_alloc (rtx, (d).expanding, 4); \
+ vec_stack_alloc (rtx, (d).pending, 4); \
(d).depth.complexity = (d).depth.entryvals = 0; \
} \
while (0)
@@ -8291,8 +8282,8 @@ resolve_expansions_pending_recursion (VEC (rtx, stack) *pending)
do \
{ \
resolve_expansions_pending_recursion ((d).pending); \
- VEC_free (rtx, stack, (d).pending); \
- VEC_free (rtx, stack, (d).expanding); \
+ (d).pending.release (); \
+ (d).expanding.release (); \
\
if ((l) && MEM_P (l)) \
(l) = targetm.delegitimize_address (l); \
@@ -8339,7 +8330,7 @@ vt_expand_1pvar (variable var, htab_t vars)
loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL);
- gcc_checking_assert (VEC_empty (rtx, data.expanding));
+ gcc_checking_assert (data.expanding.is_empty ());
FINI_ELCD (data, loc);
@@ -8585,14 +8576,13 @@ emit_note_insn_var_location (void **varp, void *data)
static int
values_to_stack (void **slot, void *data)
{
- VEC (rtx, stack) **changed_values_stack = (VEC (rtx, stack) **)data;
+ vec<rtx, va_stack> *changed_values_stack = (vec<rtx, va_stack> *) data;
variable var = (variable) *slot;
if (var->onepart == ONEPART_VALUE)
- VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_value (var->dv));
+ changed_values_stack->safe_push (dv_as_value (var->dv));
else if (var->onepart == ONEPART_DEXPR)
- VEC_safe_push (rtx, stack, *changed_values_stack,
- DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
+ changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv)));
return 1;
}
@@ -8620,7 +8610,7 @@ remove_value_from_changed_variables (rtx val)
static void
notify_dependents_of_changed_value (rtx val, htab_t htab,
- VEC (rtx, stack) **changed_values_stack)
+ vec<rtx, va_stack> *changed_values_stack)
{
void **slot;
variable var;
@@ -8661,7 +8651,7 @@ notify_dependents_of_changed_value (rtx val, htab_t htab,
case ONEPART_VALUE:
case ONEPART_DEXPR:
set_dv_changed (ldv, true);
- VEC_safe_push (rtx, stack, *changed_values_stack, dv_as_rtx (ldv));
+ changed_values_stack->safe_push (dv_as_rtx (ldv));
break;
case ONEPART_VDECL:
@@ -8706,17 +8696,19 @@ process_changed_values (htab_t htab)
{
int i, n;
rtx val;
- VEC (rtx, stack) *changed_values_stack = VEC_alloc (rtx, stack, 20);
+ vec<rtx, va_stack> changed_values_stack;
+
+ vec_stack_alloc (rtx, changed_values_stack, 20);
/* Move values from changed_variables to changed_values_stack. */
htab_traverse (changed_variables, values_to_stack, &changed_values_stack);
/* Back-propagate change notifications in values while popping
them from the stack. */
- for (n = i = VEC_length (rtx, changed_values_stack);
- i > 0; i = VEC_length (rtx, changed_values_stack))
+ for (n = i = changed_values_stack.length ();
+ i > 0; i = changed_values_stack.length ())
{
- val = VEC_pop (rtx, changed_values_stack);
+ val = changed_values_stack.pop ();
notify_dependents_of_changed_value (val, htab, &changed_values_stack);
/* This condition will hold when visiting each of the entries
@@ -8730,7 +8722,7 @@ process_changed_values (htab_t htab)
}
}
- VEC_free (rtx, stack, changed_values_stack);
+ changed_values_stack.release ();
}
/* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain
@@ -8899,7 +8891,7 @@ emit_notes_in_bb (basic_block bb, dataflow_set *set)
dataflow_set_clear (set);
dataflow_set_copy (set, &VTI (bb)->in);
- FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo)
+ FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
{
rtx insn = mo->insn;
rtx next_insn = next_non_note_insn_var_location (insn);
@@ -9361,7 +9353,7 @@ vt_add_function_parameter (tree parm)
= gen_rtx_REG_offset (incoming, GET_MODE (incoming),
OUTGOING_REGNO (REGNO (incoming)), 0);
p.outgoing = incoming;
- VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, p);
+ vec_safe_push (windowed_parm_regs, p);
}
else if (MEM_P (incoming)
&& REG_P (XEXP (incoming, 0))
@@ -9374,7 +9366,7 @@ vt_add_function_parameter (tree parm)
p.incoming = reg;
reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
p.outgoing = reg;
- VEC_safe_push (parm_reg_t, gc, windowed_parm_regs, p);
+ vec_safe_push (windowed_parm_regs, p);
incoming = replace_equiv_address_nv (incoming, reg);
}
}
@@ -9649,7 +9641,7 @@ vt_initialize (void)
scratch_regs = BITMAP_ALLOC (NULL);
valvar_pool = create_alloc_pool ("small variable_def pool",
sizeof (struct variable_def), 256);
- preserved_values = VEC_alloc (rtx, heap, 256);
+ preserved_values.create (256);
}
else
{
@@ -9830,8 +9822,7 @@ vt_initialize (void)
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (PATTERN (insn), bb, insn,
MO_ADJUST, dump_file);
- VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
- mo);
+ VTI (bb)->mos.safe_push (mo);
VTI (bb)->out.stack_adjust += pre;
}
}
@@ -9862,8 +9853,7 @@ vt_initialize (void)
if (dump_file && (dump_flags & TDF_DETAILS))
log_op_type (PATTERN (insn), bb, insn,
MO_ADJUST, dump_file);
- VEC_safe_push (micro_operation, heap, VTI (bb)->mos,
- mo);
+ VTI (bb)->mos.safe_push (mo);
VTI (bb)->out.stack_adjust += post;
}
@@ -9971,7 +9961,7 @@ vt_finalize (void)
FOR_EACH_BB (bb)
{
- VEC_free (micro_operation, heap, VTI (bb)->mos);
+ VTI (bb)->mos.release ();
}
FOR_ALL_BB (bb)
@@ -9998,14 +9988,14 @@ vt_finalize (void)
free_alloc_pool (loc_exp_dep_pool);
loc_exp_dep_pool = NULL;
free_alloc_pool (valvar_pool);
- VEC_free (rtx, heap, preserved_values);
+ preserved_values.release ();
cselib_finish ();
BITMAP_FREE (scratch_regs);
scratch_regs = NULL;
}
#ifdef HAVE_window_save
- VEC_free (parm_reg_t, gc, windowed_parm_regs);
+ vec_free (windowed_parm_regs);
#endif
if (vui_vec)
diff --git a/gcc/varasm.c b/gcc/varasm.c
index 641ce0c43e8..3aa58cf71e4 100644
--- a/gcc/varasm.c
+++ b/gcc/varasm.c
@@ -2859,7 +2859,7 @@ compare_constant (const tree t1, const tree t2)
case CONSTRUCTOR:
{
- VEC(constructor_elt, gc) *v1, *v2;
+ vec<constructor_elt, va_gc> *v1, *v2;
unsigned HOST_WIDE_INT idx;
typecode = TREE_CODE (TREE_TYPE (t1));
@@ -2885,14 +2885,13 @@ compare_constant (const tree t1, const tree t2)
v1 = CONSTRUCTOR_ELTS (t1);
v2 = CONSTRUCTOR_ELTS (t2);
- if (VEC_length (constructor_elt, v1)
- != VEC_length (constructor_elt, v2))
- return 0;
+ if (vec_safe_length (v1) != vec_safe_length (v2))
+ return 0;
- for (idx = 0; idx < VEC_length (constructor_elt, v1); ++idx)
+ for (idx = 0; idx < vec_safe_length (v1); ++idx)
{
- constructor_elt *c1 = &VEC_index (constructor_elt, v1, idx);
- constructor_elt *c2 = &VEC_index (constructor_elt, v2, idx);
+ constructor_elt *c1 = &(*v1)[idx];
+ constructor_elt *c2 = &(*v2)[idx];
/* Check that each value is the same... */
if (!compare_constant (c1->value, c2->value))
@@ -3011,16 +3010,15 @@ copy_constant (tree exp)
case CONSTRUCTOR:
{
tree copy = copy_node (exp);
- VEC(constructor_elt, gc) *v;
+ vec<constructor_elt, va_gc> *v;
unsigned HOST_WIDE_INT idx;
tree purpose, value;
- v = VEC_alloc(constructor_elt, gc, VEC_length(constructor_elt,
- CONSTRUCTOR_ELTS (exp)));
+ vec_alloc (v, vec_safe_length (CONSTRUCTOR_ELTS (exp)));
FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, purpose, value)
{
constructor_elt ce = {purpose, copy_constant (value)};
- VEC_quick_push (constructor_elt, v, ce);
+ v->quick_push (ce);
}
CONSTRUCTOR_ELTS (copy) = v;
return copy;
@@ -4532,7 +4530,7 @@ output_constant (tree exp, unsigned HOST_WIDE_INT size, unsigned int align)
/* Allow a constructor with no elements for any data type.
This means to fill the space with zeros. */
if (TREE_CODE (exp) == CONSTRUCTOR
- && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (exp)))
+ && vec_safe_is_empty (CONSTRUCTOR_ELTS (exp)))
{
assemble_zeros (size);
return;
@@ -5067,7 +5065,7 @@ output_constructor (tree exp, unsigned HOST_WIDE_INT size,
local.field = TYPE_FIELDS (local.type);
for (cnt = 0;
- VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), cnt, ce);
+ vec_safe_iterate (CONSTRUCTOR_ELTS (exp), cnt, &ce);
cnt++, local.field = local.field ? DECL_CHAIN (local.field) : 0)
{
local.val = ce->value;
@@ -5410,7 +5408,7 @@ globalize_decl (tree decl)
targetm.asm_out.globalize_decl_name (asm_out_file, decl);
}
-VEC(alias_pair,gc) *alias_pairs;
+vec<alias_pair, va_gc> *alias_pairs;
/* Output the assembler code for a define (equate) using ASM_OUTPUT_DEF
or ASM_OUTPUT_DEF_FROM_DECLS. The function defines the symbol whose
@@ -5583,7 +5581,7 @@ assemble_alias (tree decl, tree target)
else
{
alias_pair p = {decl, target};
- VEC_safe_push (alias_pair, gc, alias_pairs, p);
+ vec_safe_push (alias_pairs, p);
}
}
@@ -5636,8 +5634,6 @@ typedef struct tm_alias_pair
tree to;
} tm_alias_pair;
-DEF_VEC_O(tm_alias_pair);
-DEF_VEC_ALLOC_O(tm_alias_pair,heap);
/* Helper function for finish_tm_clone_pairs. Dump a hash table entry
into a VEC in INFO. */
@@ -5646,22 +5642,22 @@ static int
dump_tm_clone_to_vec (void **slot, void *info)
{
struct tree_map *map = (struct tree_map *) *slot;
- VEC(tm_alias_pair,heap) **tm_alias_pairs = (VEC(tm_alias_pair, heap) **) info;
+ vec<tm_alias_pair> *tm_alias_pairs = (vec<tm_alias_pair> *) info;
tm_alias_pair p = {DECL_UID (map->base.from), map->base.from, map->to};
- VEC_safe_push (tm_alias_pair, heap, *tm_alias_pairs, p);
+ tm_alias_pairs->safe_push (p);
return 1;
}
/* Dump the actual pairs to the .tm_clone_table section. */
static void
-dump_tm_clone_pairs (VEC(tm_alias_pair,heap) *tm_alias_pairs)
+dump_tm_clone_pairs (vec<tm_alias_pair> tm_alias_pairs)
{
unsigned i;
tm_alias_pair *p;
bool switched = false;
- FOR_EACH_VEC_ELT (tm_alias_pair, tm_alias_pairs, i, p)
+ FOR_EACH_VEC_ELT (tm_alias_pairs, i, p)
{
tree src = p->from;
tree dst = p->to;
@@ -5722,7 +5718,7 @@ tm_alias_pair_cmp (const void *x, const void *y)
void
finish_tm_clone_pairs (void)
{
- VEC(tm_alias_pair,heap) *tm_alias_pairs = NULL;
+ vec<tm_alias_pair> tm_alias_pairs = vec<tm_alias_pair>();
if (tm_clone_hash == NULL)
return;
@@ -5735,14 +5731,14 @@ finish_tm_clone_pairs (void)
htab_traverse_noresize (tm_clone_hash, dump_tm_clone_to_vec,
(void *) &tm_alias_pairs);
/* Sort it. */
- VEC_qsort (tm_alias_pair, tm_alias_pairs, tm_alias_pair_cmp);
+ tm_alias_pairs.qsort (tm_alias_pair_cmp);
/* Dump it. */
dump_tm_clone_pairs (tm_alias_pairs);
htab_delete (tm_clone_hash);
tm_clone_hash = NULL;
- VEC_free (tm_alias_pair, heap, tm_alias_pairs);
+ tm_alias_pairs.release ();
}
@@ -6960,7 +6956,7 @@ place_block_symbol (rtx symbol)
block->alignment = MAX (block->alignment, alignment);
block->size = offset + size;
- VEC_safe_push (rtx, gc, block->objects, symbol);
+ vec_safe_push (block->objects, symbol);
}
/* Return the anchor that should be used to address byte offset OFFSET
@@ -7019,11 +7015,11 @@ get_section_anchor (struct object_block *block, HOST_WIDE_INT offset,
/* Do a binary search to see if there's already an anchor we can use.
Set BEGIN to the new anchor's index if not. */
begin = 0;
- end = VEC_length (rtx, block->anchors);
+ end = vec_safe_length (block->anchors);
while (begin != end)
{
middle = (end + begin) / 2;
- anchor = VEC_index (rtx, block->anchors, middle);
+ anchor = (*block->anchors)[middle];
if (SYMBOL_REF_BLOCK_OFFSET (anchor) > offset)
end = middle;
else if (SYMBOL_REF_BLOCK_OFFSET (anchor) < offset)
@@ -7043,7 +7039,7 @@ get_section_anchor (struct object_block *block, HOST_WIDE_INT offset,
SYMBOL_REF_FLAGS (anchor) |= model << SYMBOL_FLAG_TLS_SHIFT;
/* Insert it at index BEGIN. */
- VEC_safe_insert (rtx, gc, block->anchors, begin, anchor);
+ vec_safe_insert (block->anchors, begin, anchor);
return anchor;
}
@@ -7058,7 +7054,7 @@ output_object_block (struct object_block *block)
tree decl;
rtx symbol;
- if (block->objects == NULL)
+ if (!block->objects)
return;
/* Switch to the section and make sure that the first byte is
@@ -7068,12 +7064,12 @@ output_object_block (struct object_block *block)
/* Define the values of all anchors relative to the current section
position. */
- FOR_EACH_VEC_ELT (rtx, block->anchors, i, symbol)
+ FOR_EACH_VEC_SAFE_ELT (block->anchors, i, symbol)
targetm.asm_out.output_anchor (symbol);
/* Output the objects themselves. */
offset = 0;
- FOR_EACH_VEC_ELT (rtx, block->objects, i, symbol)
+ FOR_EACH_VEC_ELT (*block->objects, i, symbol)
{
/* Move to the object's offset, padding with zeros if necessary. */
assemble_zeros (SYMBOL_REF_BLOCK_OFFSET (symbol) - offset);
diff --git a/gcc/varpool.c b/gcc/varpool.c
index b88ec613ff6..87f29a02f09 100644
--- a/gcc/varpool.c
+++ b/gcc/varpool.c
@@ -243,7 +243,7 @@ varpool_analyze_node (struct varpool_node *node)
node->alias = false;
continue;
}
- if (!VEC_length (ipa_ref_t, node->symbol.ref_list.references))
+ if (!vec_safe_length (node->symbol.ref_list.references))
ipa_record_reference ((symtab_node)node, (symtab_node)tgt, IPA_REF_ALIAS, NULL);
if (node->extra_name_alias)
{
diff --git a/gcc/vec.c b/gcc/vec.c
index be9f54a5b57..b213aba852c 100644
--- a/gcc/vec.c
+++ b/gcc/vec.c
@@ -119,20 +119,22 @@ vec_descriptor (const char *name, int line, const char *function)
}
/* Account the overhead. */
-static void
-register_overhead (struct vec_prefix *ptr, size_t size,
- const char *name, int line, const char *function)
+
+void
+vec_prefix::register_overhead (size_t size, const char *name, int line,
+ const char *function)
{
struct vec_descriptor *loc = vec_descriptor (name, line, function);
struct ptr_hash_entry *p = XNEW (struct ptr_hash_entry);
PTR *slot;
- p->ptr = ptr;
+ p->ptr = this;
p->loc = loc;
p->allocated = size;
if (!ptr_hash)
ptr_hash = htab_create (10, hash_ptr, eq_ptr, NULL);
- slot = htab_find_slot_with_hash (ptr_hash, ptr, htab_hash_pointer (ptr), INSERT);
+ slot = htab_find_slot_with_hash (ptr_hash, this, htab_hash_pointer (this),
+ INSERT);
gcc_assert (!*slot);
*slot = p;
@@ -142,49 +144,44 @@ register_overhead (struct vec_prefix *ptr, size_t size,
loc->times++;
}
-/* Notice that the pointer has been freed. */
-static void
-free_overhead (struct vec_prefix *ptr)
+
+/* Notice that the memory allocated for the vector has been freed. */
+
+void
+vec_prefix::release_overhead (void)
{
- PTR *slot = htab_find_slot_with_hash (ptr_hash, ptr, htab_hash_pointer (ptr),
+ PTR *slot = htab_find_slot_with_hash (ptr_hash, this,
+ htab_hash_pointer (this),
NO_INSERT);
struct ptr_hash_entry *p = (struct ptr_hash_entry *) *slot;
p->loc->allocated -= p->allocated;
htab_clear_slot (ptr_hash, slot);
- free (p);
+ ::free (p);
}
-void
-vec_heap_free (void *ptr)
-{
- if (GATHER_STATISTICS)
- free_overhead ((struct vec_prefix *)ptr);
- free (ptr);
-}
-/* Calculate the new ALLOC value, making sure that RESERVE slots are
- free. If EXACT grow exactly, otherwise grow exponentially. */
+/* Calculate the number of slots to reserve for a vector, making sure that
+ RESERVE slots are free. If EXACT grow exactly, otherwise grow
+ exponentially. PFX is the control data for the vector. */
-static inline unsigned
-calculate_allocation (const struct vec_prefix *pfx, int reserve, bool exact)
+unsigned
+vec_prefix::calculate_allocation (vec_prefix *pfx, unsigned reserve, bool exact)
{
unsigned alloc = 0;
unsigned num = 0;
- gcc_assert (reserve >= 0);
-
if (pfx)
{
alloc = pfx->alloc_;
num = pfx->num_;
}
else if (!reserve)
- /* If there's no prefix, and we've not requested anything, then we
+ /* If there's no vector, and we've not requested anything, then we
will create a NULL vector. */
return 0;
/* We must have run out of room. */
- gcc_assert (alloc - num < (unsigned) reserve);
+ gcc_assert (alloc - num < reserve);
if (exact)
/* Exact size. */
@@ -208,79 +205,9 @@ calculate_allocation (const struct vec_prefix *pfx, int reserve, bool exact)
return alloc;
}
-/* Ensure there are at least RESERVE free slots in VEC. If EXACT grow
- exactly, else grow exponentially. As a special case, if VEC is
- NULL and RESERVE is 0, no vector will be created. The vector's
- trailing array is at VEC_OFFSET offset and consists of ELT_SIZE
- sized elements. */
-
-void *
-vec_gc_o_reserve_1 (void *vec, int reserve, size_t vec_offset, size_t elt_size,
- bool exact MEM_STAT_DECL)
-{
- struct vec_prefix *pfx = (struct vec_prefix *) vec;
- unsigned alloc = calculate_allocation (pfx, reserve, exact);
- size_t size;
-
- if (!alloc)
- {
- if (pfx)
- ggc_free (pfx);
- return NULL;
- }
-
- /* Calculate the amount of space we want. */
- size = vec_offset + alloc * elt_size;
- /* Ask the allocator how much space it will really give us. */
- size = ggc_round_alloc_size (size);
- /* Adjust the number of slots accordingly. */
- alloc = (size - vec_offset) / elt_size;
- /* And finally, recalculate the amount of space we ask for. */
- size = vec_offset + alloc * elt_size;
-
- vec = ggc_realloc_stat (vec, size PASS_MEM_STAT);
-
- ((struct vec_prefix *)vec)->alloc_ = alloc;
- if (!pfx)
- ((struct vec_prefix *)vec)->num_ = 0;
-
- return vec;
-}
-
-
-/* As for vec_gc_o_reserve_1, but for heap allocated vectors. */
-
-void *
-vec_heap_o_reserve_1 (void *vec, int reserve, size_t vec_offset,
- size_t elt_size, bool exact MEM_STAT_DECL)
-{
- struct vec_prefix *pfx = (struct vec_prefix *) vec;
- unsigned alloc = calculate_allocation (pfx, reserve, exact);
-
- if (!alloc)
- {
- if (pfx)
- vec_heap_free (pfx);
- return NULL;
- }
-
- if (GATHER_STATISTICS && vec)
- free_overhead (pfx);
-
- vec = xrealloc (vec, vec_offset + alloc * elt_size);
- ((struct vec_prefix *)vec)->alloc_ = alloc;
- if (!pfx)
- ((struct vec_prefix *)vec)->num_ = 0;
- if (GATHER_STATISTICS && vec)
- register_overhead ((struct vec_prefix *)vec,
- vec_offset + alloc * elt_size FINAL_PASS_MEM_STAT);
-
- return vec;
-}
-
/* Stack vectors are a little different. VEC_alloc turns into a call
- to vec_stack_p_reserve_exact1 and passes in space allocated via a
+ to vec<T, A>::stack_reserve and passes in space allocated via a
call to alloca. We record that pointer so that we know that we
shouldn't free it. If the vector is resized, we resize it on the
heap. We record the pointers in a vector and search it in LIFO
@@ -289,128 +216,41 @@ vec_heap_o_reserve_1 (void *vec, int reserve, size_t vec_offset,
the end should normally be efficient even if they are used in a
recursive function. */
-typedef void *void_p;
-DEF_VEC_P(void_p);
-DEF_VEC_ALLOC_P(void_p,heap);
-
-static VEC(void_p,heap) *stack_vecs;
+static vec<void *> stack_vecs;
-/* Allocate a vector which uses alloca for the initial allocation.
- SPACE is space allocated using alloca, ALLOC is the number of
- entries allocated. */
+/* Add a stack vector to STACK_VECS. */
-void *
-vec_stack_p_reserve_exact_1 (int alloc, void *space)
+void
+register_stack_vec (void *vec)
{
- struct vec_prefix *pfx = (struct vec_prefix *) space;
-
- VEC_safe_push (void_p, heap, stack_vecs, space);
-
- pfx->num_ = 0;
- pfx->alloc_ = alloc;
-
- return space;
+ stack_vecs.safe_push (vec);
}
-/* Grow a vector allocated using alloca. When this happens, we switch
- back to heap allocation. We remove the vector from stack_vecs, if
- it is there, since we no longer need to avoid freeing it. */
-
-static void *
-vec_stack_o_reserve_1 (void *vec, int reserve, size_t vec_offset,
- size_t elt_size, bool exact MEM_STAT_DECL)
-{
- bool found;
- unsigned int ix;
- void *newvec;
-
- found = false;
- for (ix = VEC_length (void_p, stack_vecs); ix > 0; --ix)
- {
- if (VEC_index (void_p, stack_vecs, ix - 1) == vec)
- {
- VEC_unordered_remove (void_p, stack_vecs, ix - 1);
- found = true;
- break;
- }
- }
-
- if (!found)
- {
- /* VEC is already on the heap. */
- return vec_heap_o_reserve_1 (vec, reserve, vec_offset, elt_size,
- exact PASS_MEM_STAT);
- }
-
- /* Move VEC to the heap. */
- reserve += ((struct vec_prefix *) vec)->num_;
- newvec = vec_heap_o_reserve_1 (NULL, reserve, vec_offset, elt_size,
- exact PASS_MEM_STAT);
- if (newvec && vec)
- {
- ((struct vec_prefix *) newvec)->num_ = ((struct vec_prefix *) vec)->num_;
- memcpy (((struct vec_prefix *) newvec)+1,
- ((struct vec_prefix *) vec)+1,
- ((struct vec_prefix *) vec)->num_ * elt_size);
- }
- return newvec;
-}
-/* Grow a vector allocated on the stack. */
+/* If VEC is registered in STACK_VECS, return its index.
+ Otherwise, return -1. */
-void *
-vec_stack_o_reserve (void *vec, int reserve, size_t vec_offset,
- size_t elt_size MEM_STAT_DECL)
+int
+stack_vec_register_index (void *vec)
{
- return vec_stack_o_reserve_1 (vec, reserve, vec_offset, elt_size, false
- PASS_MEM_STAT);
+ for (unsigned ix = stack_vecs.length (); ix > 0; --ix)
+ if (stack_vecs[ix - 1] == vec)
+ return static_cast<int> (ix - 1);
+ return -1;
}
-/* Exact version of vec_stack_o_reserve. */
-void *
-vec_stack_o_reserve_exact (void *vec, int reserve, size_t vec_offset,
- size_t elt_size MEM_STAT_DECL)
-{
- return vec_stack_o_reserve_1 (vec, reserve, vec_offset, elt_size, true
- PASS_MEM_STAT);
-}
-
-/* Free a vector allocated on the stack. Don't actually free it if we
- find it in the hash table. */
+/* Remove vector at slot IX from the list of registered stack vectors. */
void
-vec_stack_free (void *vec)
+unregister_stack_vec (unsigned ix)
{
- unsigned int ix;
-
- for (ix = VEC_length (void_p, stack_vecs); ix > 0; --ix)
- {
- if (VEC_index (void_p, stack_vecs, ix - 1) == vec)
- {
- VEC_unordered_remove (void_p, stack_vecs, ix - 1);
- return;
- }
- }
-
- /* VEC was not on the list of vecs allocated on the stack, so it
- must be allocated on the heap. */
- vec_heap_free (vec);
+ stack_vecs.unordered_remove (ix);
}
-#if ENABLE_CHECKING
-/* Issue a vector domain error, and then fall over. */
-
-void
-vec_assert_fail (const char *op, const char *struct_name,
- const char *file, unsigned int line, const char *function)
-{
- internal_error ("vector %s %s domain error, in %s at %s:%u",
- struct_name, op, function, trim_filename (file), line);
-}
-#endif
/* Helper for qsort; sort descriptors by amount of memory consumed. */
+
static int
cmp_statistic (const void *loc1, const void *loc2)
{
@@ -426,7 +266,10 @@ cmp_statistic (const void *loc1, const void *loc2)
diff = l1->times - l2->times;
return diff > 0 ? 1 : diff < 0 ? -1 : 0;
}
+
+
/* Collect array of the descriptors from hashtable. */
+
static struct vec_descriptor **loc_array;
static int
add_statistics (void **slot, void *b)
diff --git a/gcc/vec.h b/gcc/vec.h
index 8858f6afea1..b9be85c293a 100644
--- a/gcc/vec.h
+++ b/gcc/vec.h
@@ -23,7 +23,39 @@ along with GCC; see the file COPYING3. If not see
#ifndef GCC_VEC_H
#define GCC_VEC_H
-#include "statistics.h" /* For MEM_STAT_DECL. */
+/* FIXME - When compiling some of the gen* binaries, we cannot enable GC
+ support because the headers generated by gengtype are still not
+ present. In particular, the header file gtype-desc.h is missing,
+ so compilation may fail if we try to include ggc.h.
+
+ Since we use some of those declarations, we need to provide them
+ (even if the GC-based templates are not used). This is not a
+ problem because the code that runs before gengtype is built will
+ never need to use GC vectors. But it does force us to declare
+ these functions more than once. */
+#ifdef GENERATOR_FILE
+#define VEC_GC_ENABLED 0
+#else
+#define VEC_GC_ENABLED 1
+#endif // GENERATOR_FILE
+
+#include "statistics.h" // For CXX_MEM_STAT_INFO.
+
+#if VEC_GC_ENABLED
+#include "ggc.h"
+#else
+# ifndef GCC_GGC_H
+ /* Even if we think that GC is not enabled, the test that sets it is
+ weak. There are files compiled with -DGENERATOR_FILE that already
+ include ggc.h. We only need to provide these definitions if ggc.h
+ has not been included. Sigh. */
+ extern void ggc_free (void *);
+ extern size_t ggc_round_alloc_size (size_t requested_size);
+ extern void *ggc_internal_cleared_alloc_stat (size_t MEM_STAT_DECL)
+ ATTRIBUTE_MALLOC;
+ extern void *ggc_realloc_stat (void *, size_t MEM_STAT_DECL);
+# endif // GCC_GGC_H
+#endif // VEC_GC_ENABLED
/* Templated vector type and associated interfaces.
@@ -37,18 +69,6 @@ along with GCC; see the file COPYING3. If not see
reference. Because the iterator will be inlined, the address-of
can be optimized away.
- The vectors are implemented using the trailing array idiom, thus
- they are not resizeable without changing the address of the vector
- object itself. This means you cannot have variables or fields of
- vector type -- always use a pointer to a vector. The one exception
- is the final field of a structure, which could be a vector type.
- You will have to use the embedded_size & embedded_init calls to
- create such objects, and they will probably not be resizeable (so
- don't use the 'safe' allocation variants). The trailing array
- idiom is used (rather than a pointer to an array of data), because,
- if we allow NULL to also represent an empty vector, empty vectors
- occupy minimal space in the structure containing them.
-
Each operation that increases the number of active elements is
available in 'quick' and 'safe' variants. The former presumes that
there is sufficient allocated space for the operation to succeed
@@ -75,438 +95,745 @@ along with GCC; see the file COPYING3. If not see
'lower_bound' function will determine where to place an item in the
array using insert that will maintain sorted order.
- When a vector type is defined, first a non-memory managed version
- is created. You can then define either or both garbage collected
- and heap allocated versions. The allocation mechanism is specified
- when the vector is allocated. This can occur via the VEC_alloc
- call or one of the VEC_safe_* functions that add elements to a
- vector. If the vector is NULL, it will be allocated using the
- allocation strategy selected in the call. The valid allocations
- are defined in enum vec_allocation_t.
+ Vectors are template types with three arguments: the type of the
+ elements in the vector, the allocation strategy, and the physical
+ layout to use
+
+ Four allocation strategies are supported:
+
+ - Heap: allocation is done using malloc/free. This is the
+ default allocation strategy.
+
+ - Stack: allocation is done using alloca.
+
+ - GC: allocation is done using ggc_alloc/ggc_free.
+
+ - GC atomic: same as GC with the exception that the elements
+ themselves are assumed to be of an atomic type that does
+ not need to be garbage collected. This means that marking
+ routines do not need to traverse the array marking the
+ individual elements. This increases the performance of
+ GC activities.
+
+ Two physical layouts are supported:
+
+ - Embedded: The vector is structured using the trailing array
+ idiom. The last member of the structure is an array of size
+ 1. When the vector is initially allocated, a single memory
+ block is created to hold the vector's control data and the
+ array of elements. These vectors cannot grow without
+ reallocation (see discussion on embeddable vectors below).
+
+ - Space efficient: The vector is structured as a pointer to an
+ embedded vector. This is the default layout. It means that
+ vectors occupy a single word of storage before initial
+ allocation. Vectors are allowed to grow (the internal
+ pointer is reallocated but the main vector instance does not
+ need to relocate).
+
+ The type, allocation and layout are specified when the vector is
+ declared.
If you need to directly manipulate a vector, then the 'address'
accessor will return the address of the start of the vector. Also
the 'space' predicate will tell you whether there is spare capacity
in the vector. You will not normally need to use these two functions.
- Variables of vector type are of type vec_t<ETYPE> where ETYPE is
- the type of the elements of the vector. Due to the way GTY works,
- you must annotate any structures you wish to insert or reference
- from a vector with a GTY(()) tag. You need to do this even if you
- never use the GC allocated variants.
+ Notes on the different layout strategies
+
+ * Embeddable vectors (vec<T, A, vl_embed>)
+
+ These vectors are suitable to be embedded in other data
+ structures so that they can be pre-allocated in a contiguous
+ memory block.
+
+ Embeddable vectors are implemented using the trailing array
+ idiom, thus they are not resizeable without changing the address
+ of the vector object itself. This means you cannot have
+ variables or fields of embeddable vector type -- always use a
+ pointer to a vector. The one exception is the final field of a
+ structure, which could be a vector type.
+
+ You will have to use the embedded_size & embedded_init calls to
+ create such objects, and they will not be resizeable (so the
+ 'safe' allocation variants are not available).
+
+ Properties of embeddable vectors:
+
+ - The whole vector and control data are allocated in a single
+ contiguous block. It uses the trailing-vector idiom, so
+ allocation must reserve enough space for all the elements
+ in the vector plus its control data.
+ - The vector cannot be re-allocated.
+ - The vector cannot grow nor shrink.
+ - No indirections needed for access/manipulation.
+ - It requires 2 words of storage (prior to vector allocation).
+
+
+ * Space efficient vector (vec<T, A, vl_ptr>)
+
+ These vectors can grow dynamically and are allocated together
+ with their control data. They are suited to be included in data
+ structures. Prior to initial allocation, they only take a single
+ word of storage.
+
+ These vectors are implemented as a pointer to embeddable vectors.
+ The semantics allow for this pointer to be NULL to represent
+ empty vectors. This way, empty vectors occupy minimal space in
+ the structure containing them.
+
+ Properties:
+
+ - The whole vector and control data are allocated in a single
+ contiguous block.
+ - The whole vector may be re-allocated.
+ - Vector data may grow and shrink.
+ - Access and manipulation requires a pointer test and
+ indirection.
+ - It requires 1 word of storage (prior to vector allocation).
An example of their use would be,
struct my_struct {
- vec_t<tree> *v; // A (pointer to) a vector of tree pointers.
+ // A space-efficient vector of tree pointers in GC memory.
+ vec<tree, va_gc, vl_ptr> v;
};
struct my_struct *s;
- if (VEC_length(tree,s->v)) { we have some contents }
- VEC_safe_push(tree,gc,s->v,decl); // append some decl onto the end
- for (ix = 0; VEC_iterate(tree,s->v,ix,elt); ix++)
+ if (s->v.length ()) { we have some contents }
+ s->v.safe_push (decl); // append some decl onto the end
+ for (ix = 0; s->v.iterate (ix, &elt); ix++)
{ do something with elt }
*/
-#if ENABLE_CHECKING
-#define ALONE_VEC_CHECK_INFO __FILE__, __LINE__, __FUNCTION__
-#define VEC_CHECK_INFO , ALONE_VEC_CHECK_INFO
-#define ALONE_VEC_CHECK_DECL const char *file_, unsigned line_, const char *function_
-#define VEC_CHECK_DECL , ALONE_VEC_CHECK_DECL
-#define ALONE_VEC_CHECK_PASS file_, line_, function_
-#define VEC_CHECK_PASS , ALONE_VEC_CHECK_PASS
-
-#define VEC_ASSERT(EXPR,OP,T,A) \
- (void)((EXPR) ? 0 : (VEC_ASSERT_FAIL(OP,VEC(T,A)), 0))
-
-extern void vec_assert_fail (const char *, const char * VEC_CHECK_DECL)
- ATTRIBUTE_NORETURN;
-#define VEC_ASSERT_FAIL(OP,VEC) vec_assert_fail (OP,#VEC VEC_CHECK_PASS)
-#else
-#define ALONE_VEC_CHECK_INFO
-#define VEC_CHECK_INFO
-#define ALONE_VEC_CHECK_DECL void
-#define VEC_CHECK_DECL
-#define ALONE_VEC_CHECK_PASS
-#define VEC_CHECK_PASS
-#define VEC_ASSERT(EXPR,OP,T,A) (void)(EXPR)
-#endif
+/* Support function for statistics. */
+extern void dump_vec_loc_statistics (void);
-#define VEC(T,A) vec_t<T>
-enum vec_allocation_t { heap, gc, stack };
+/* Control data for vectors. This contains the number of allocated
+ and used slots inside a vector. */
-struct vec_prefix
+class vec_prefix
{
- unsigned num_;
+protected:
+ /* Memory allocation support routines in vec.c. */
+ void register_overhead (size_t, const char *, int, const char *);
+ void release_overhead (void);
+ static unsigned calculate_allocation (vec_prefix *, unsigned, bool);
+
+ /* Note that vec_prefix should be a base class for vec, but we use
+ offsetof() on vector fields of tree structures (e.g.,
+ tree_binfo::base_binfos), and offsetof only supports base types.
+
+ To compensate, we make vec_prefix a field inside vec and make
+ vec a friend class of vec_prefix so it can access its fields. */
+ template <typename, typename, typename> friend class vec;
+
+ /* The allocator types also need access to our internals. */
+ friend struct va_gc;
+ friend struct va_gc_atomic;
+ friend struct va_heap;
+ friend struct va_stack;
+
unsigned alloc_;
+ unsigned num_;
};
-/* Vector type, user visible. */
-template<typename T>
-struct GTY(()) vec_t
-{
- unsigned length (void) const;
- bool empty (void) const;
- T *address (void);
- T &last (ALONE_VEC_CHECK_DECL);
- const T &operator[] (unsigned) const;
- T &operator[] (unsigned);
- void embedded_init (int, int = 0);
-
- template<enum vec_allocation_t A>
- vec_t<T> *copy (ALONE_MEM_STAT_DECL);
-
- bool space (int VEC_CHECK_DECL);
- void splice (vec_t<T> * VEC_CHECK_DECL);
- T *quick_push (const T & VEC_CHECK_DECL);
- T &pop (ALONE_VEC_CHECK_DECL);
- void truncate (unsigned VEC_CHECK_DECL);
- void replace (unsigned, const T & VEC_CHECK_DECL);
- void quick_insert (unsigned, const T & VEC_CHECK_DECL);
- void ordered_remove (unsigned VEC_CHECK_DECL);
- void unordered_remove (unsigned VEC_CHECK_DECL);
- void block_remove (unsigned, unsigned VEC_CHECK_DECL);
- unsigned lower_bound (T, bool (*)(const T &, const T &)) const;
+template<typename, typename, typename> class vec;
- /* Class-static member functions. Some of these will become member
- functions of a future handler class wrapping vec_t. */
- static size_t embedded_size (int);
+/* Valid vector layouts
- template<enum vec_allocation_t A>
- static vec_t<T> *alloc (int MEM_STAT_DECL);
+ vl_embed - Embeddable vector that uses the trailing array idiom.
+ vl_ptr - Space efficient vector that uses a pointer to an
+ embeddable vector. */
+struct vl_embed { };
+struct vl_ptr { };
- static vec_t<T> *alloc (int, vec_t<T> *);
- template<enum vec_allocation_t A>
- static void free (vec_t<T> **);
+/* Types of supported allocations
- template<enum vec_allocation_t A>
- static vec_t<T> *reserve_exact (vec_t<T> *, int MEM_STAT_DECL);
+ va_heap - Allocation uses malloc/free.
+ va_gc - Allocation uses ggc_alloc.
+ va_gc_atomic - Same as GC, but individual elements of the array
+ do not need to be marked during collection.
+ va_stack - Allocation uses alloca. */
- template<enum vec_allocation_t A>
- static bool reserve_exact (vec_t<T> **, int VEC_CHECK_DECL MEM_STAT_DECL);
+/* Allocator type for heap vectors. */
+struct va_heap
+{
+ /* Heap vectors are frequently regular instances, so use the vl_ptr
+ layout for them. */
+ typedef vl_ptr default_layout;
- template<enum vec_allocation_t A>
- static vec_t<T> *reserve (vec_t<T> *, int MEM_STAT_DECL);
+ template<typename T>
+ static void reserve (vec<T, va_heap, vl_embed> *&, unsigned, bool
+ CXX_MEM_STAT_INFO);
- template<enum vec_allocation_t A>
- static bool reserve (vec_t<T> **, int VEC_CHECK_DECL MEM_STAT_DECL);
+ template<typename T>
+ static void release (vec<T, va_heap, vl_embed> *&);
+};
- template<enum vec_allocation_t A>
- static void safe_splice (vec_t<T> **, vec_t<T> * VEC_CHECK_DECL
- MEM_STAT_DECL);
- template<enum vec_allocation_t A>
- static T *safe_push (vec_t<T> **, const T & VEC_CHECK_DECL MEM_STAT_DECL);
+/* Allocator for heap memory. Ensure there are at least RESERVE free
+ slots in V. If EXACT is true, grow exactly, else grow
+ exponentially. As a special case, if the vector had not been
+ allocated and RESERVE is 0, no vector will be created. */
- template<enum vec_allocation_t A>
- static void safe_grow (vec_t<T> **, int VEC_CHECK_DECL MEM_STAT_DECL);
+template<typename T>
+inline void
+va_heap::reserve (vec<T, va_heap, vl_embed> *&v, unsigned reserve, bool exact
+ MEM_STAT_DECL)
+{
+ unsigned alloc = vec_prefix::calculate_allocation (v ? &v->pfx_ : 0, reserve,
+ exact);
+ if (!alloc)
+ {
+ release (v);
+ return;
+ }
+
+ if (GATHER_STATISTICS && v)
+ v->pfx_.release_overhead ();
+
+ size_t size = vec<T, va_heap, vl_embed>::embedded_size (alloc);
+ unsigned nelem = v ? v->length () : 0;
+ v = static_cast <vec<T, va_heap, vl_embed> *> (xrealloc (v, size));
+ v->embedded_init (alloc, nelem);
+
+ if (GATHER_STATISTICS)
+ v->pfx_.register_overhead (size FINAL_PASS_MEM_STAT);
+}
+
+
+/* Free the heap space allocated for vector V. */
- template<enum vec_allocation_t A>
- static void safe_grow_cleared (vec_t<T> **, int VEC_CHECK_DECL MEM_STAT_DECL);
+template<typename T>
+void
+va_heap::release (vec<T, va_heap, vl_embed> *&v)
+{
+ if (GATHER_STATISTICS)
+ v->pfx_.release_overhead ();
+ ::free (v);
+ v = NULL;
+}
- template<enum vec_allocation_t A>
- static void safe_insert (vec_t<T> **, unsigned, const T & VEC_CHECK_DECL
- MEM_STAT_DECL);
- static bool iterate (const vec_t<T> *, unsigned, T *);
- static bool iterate (const vec_t<T> *, unsigned, T **);
+/* Allocator type for GC vectors. Notice that we need the structure
+ declaration even if GC is not enabled. */
- vec_prefix prefix_;
- T vec_[1];
+struct va_gc
+{
+ /* Use vl_embed as the default layout for GC vectors. Due to GTY
+ limitations, GC vectors must always be pointers, so it is more
+ efficient to use a pointer to the vl_embed layout, rather than
+ using a pointer to a pointer as would be the case with vl_ptr. */
+ typedef vl_embed default_layout;
+
+ template<typename T, typename A>
+ static void reserve (vec<T, A, vl_embed> *&, unsigned, bool
+ CXX_MEM_STAT_INFO);
+
+ template<typename T, typename A>
+ static void release (vec<T, A, vl_embed> *&v) { v = NULL; }
};
-/* Garbage collection support for vec_t. */
+/* Allocator for GC memory. Ensure there are at least RESERVE free
+ slots in V. If EXACT is true, grow exactly, else grow
+ exponentially. As a special case, if the vector had not been
+ allocated and RESERVE is 0, no vector will be created. */
-template<typename T>
+template<typename T, typename A>
void
-gt_ggc_mx (vec_t<T> *v)
+va_gc::reserve (vec<T, A, vl_embed> *&v, unsigned reserve, bool exact
+ MEM_STAT_DECL)
{
- extern void gt_ggc_mx (T &);
- for (unsigned i = 0; i < v->length (); i++)
- gt_ggc_mx ((*v)[i]);
+ unsigned alloc = vec_prefix::calculate_allocation (v ? &v->pfx_ : 0, reserve,
+ exact);
+ if (!alloc)
+ {
+ ::ggc_free (v);
+ v = NULL;
+ return;
+ }
+
+ /* Calculate the amount of space we want. */
+ size_t size = vec<T, A, vl_embed>::embedded_size (alloc);
+
+ /* Ask the allocator how much space it will really give us. */
+ size = ggc_round_alloc_size (size);
+
+ /* Adjust the number of slots accordingly. */
+ size_t vec_offset = sizeof (vec_prefix);
+ size_t elt_size = sizeof (T);
+ alloc = (size - vec_offset) / elt_size;
+
+ /* And finally, recalculate the amount of space we ask for. */
+ size = vec_offset + alloc * elt_size;
+
+ unsigned nelem = v ? v->length () : 0;
+ v = static_cast <vec<T, A, vl_embed> *> (ggc_realloc_stat (v, size));
+ v->embedded_init (alloc, nelem);
}
-/* PCH support for vec_t. */
+/* Allocator type for GC vectors. This is for vectors of types
+ that are atomic w.r.t. collection, so allocation and
+ deallocation are completely inherited from va_gc. */
+struct va_gc_atomic : va_gc
+{
+};
+
+
+/* Allocator type for stack vectors. */
+struct va_stack
+{
+ /* Use vl_ptr as the default layout for stack vectors. */
+ typedef vl_ptr default_layout;
+
+ template<typename T>
+ static void alloc (vec<T, va_stack, vl_ptr>&, unsigned,
+ vec<T, va_stack, vl_embed> *);
+
+ template <typename T>
+ static void reserve (vec<T, va_stack, vl_embed> *&, unsigned, bool
+ CXX_MEM_STAT_INFO);
+
+ template <typename T>
+ static void release (vec<T, va_stack, vl_embed> *&);
+};
+
+/* Helper functions to keep track of vectors allocated on the stack. */
+void register_stack_vec (void *);
+int stack_vec_register_index (void *);
+void unregister_stack_vec (unsigned);
+
+/* Allocate a vector V which uses alloca for the initial allocation.
+ SPACE is space allocated using alloca. NELEMS is the number of
+ entries allocated. */
template<typename T>
void
-gt_pch_nx (vec_t<T> *v)
+va_stack::alloc (vec<T, va_stack, vl_ptr> &v, unsigned nelems,
+ vec<T, va_stack, vl_embed> *space)
{
- extern void gt_pch_nx (T &);
- for (unsigned i = 0; i < v->length (); i++)
- gt_pch_nx ((*v)[i]);
+ v.vec_ = space;
+ register_stack_vec (static_cast<void *> (v.vec_));
+ v.vec_->embedded_init (nelems, 0);
}
+
+/* Reserve NELEMS slots for a vector initially allocated on the stack.
+ When this happens, we switch back to heap allocation. We remove
+ the vector from stack_vecs, if it is there, since we no longer need
+ to avoid freeing it. If EXACT is true, grow exactly, otherwise
+ grow exponentially. */
+
template<typename T>
void
-gt_pch_nx (vec_t<T *> *v, gt_pointer_operator op, void *cookie)
+va_stack::reserve (vec<T, va_stack, vl_embed> *&v, unsigned nelems, bool exact
+ MEM_STAT_DECL)
{
- for (unsigned i = 0; i < v->length (); i++)
- op (&((*v)[i]), cookie);
+ int ix = stack_vec_register_index (static_cast<void *> (v));
+ if (ix >= 0)
+ unregister_stack_vec (ix);
+ else
+ {
+ /* V is already on the heap. */
+ va_heap::reserve (reinterpret_cast<vec<T, va_heap, vl_embed> *&> (v),
+ nelems, exact);
+ return;
+ }
+
+ /* Move VEC_ to the heap. */
+ nelems += v->pfx_.num_;
+ vec<T, va_stack, vl_embed> *oldvec = v;
+ v = NULL;
+ va_heap::reserve (reinterpret_cast<vec<T, va_heap, vl_embed> *&>(v), nelems,
+ exact);
+ if (v && oldvec)
+ {
+ v->pfx_.num_ = oldvec->length ();
+ memcpy (v->data_, oldvec->data_, oldvec->length () * sizeof (T));
+ }
}
+
+/* Free a vector allocated on the stack. Don't actually free it if we
+ find it in the hash table. */
+
template<typename T>
void
-gt_pch_nx (vec_t<T> *v, gt_pointer_operator op, void *cookie)
+va_stack::release (vec<T, va_stack, vl_embed> *&v)
{
- extern void gt_pch_nx (T *, gt_pointer_operator, void *);
- for (unsigned i = 0; i < v->length (); i++)
- gt_pch_nx (&((*v)[i]), op, cookie);
+ int ix = stack_vec_register_index (static_cast<void *> (v));
+ if (ix >= 0)
+ {
+ unregister_stack_vec (ix);
+ v = NULL;
+ }
+ else
+ {
+ /* The vector was not on the list of vectors allocated on the stack, so it
+ must be allocated on the heap. */
+ va_heap::release (reinterpret_cast<vec<T, va_heap, vl_embed> *&> (v));
+ }
}
-/* FIXME. Remove these definitions and update all calling sites after
- the handler class for vec_t is implemented. */
-
-/* Vector of integer-like object. */
-#define DEF_VEC_I(T) struct vec_swallow_trailing_semi
-#define DEF_VEC_ALLOC_I(T,A) struct vec_swallow_trailing_semi
-
-/* Vector of pointer to object. */
-#define DEF_VEC_P(T) struct vec_swallow_trailing_semi
-#define DEF_VEC_ALLOC_P(T,A) struct vec_swallow_trailing_semi
-
-/* Vector of object. */
-#define DEF_VEC_O(T) struct vec_swallow_trailing_semi
-#define DEF_VEC_ALLOC_O(T,A) struct vec_swallow_trailing_semi
-
-/* Vectors on the stack. */
-#define DEF_VEC_ALLOC_P_STACK(T) struct vec_swallow_trailing_semi
-#define DEF_VEC_ALLOC_O_STACK(T) struct vec_swallow_trailing_semi
-#define DEF_VEC_ALLOC_I_STACK(T) struct vec_swallow_trailing_semi
-
-/* Vectors of atomic types. Atomic types do not need to have its
- elements marked for GC and PCH. To avoid unnecessary traversals,
- we provide template instantiations for the GC/PCH functions that
- do not traverse the vector.
-
- FIXME cxx-conversion - Once vec_t users are converted this can
- be provided in some other way (e.g., adding an additional template
- parameter to the vec_t class). */
-#define DEF_VEC_A(TYPE) \
-template<typename T> \
-void \
-gt_ggc_mx (vec_t<TYPE> *v ATTRIBUTE_UNUSED) \
-{ \
-} \
- \
-template<typename T> \
-void \
-gt_pch_nx (vec_t<TYPE> *v ATTRIBUTE_UNUSED) \
-{ \
-} \
- \
-template<typename T> \
-void \
-gt_pch_nx (vec_t<TYPE> *v ATTRIBUTE_UNUSED, \
- gt_pointer_operator op ATTRIBUTE_UNUSED, \
- void *cookie ATTRIBUTE_UNUSED) \
-{ \
-} \
-struct vec_swallow_trailing_semi
-
-#define DEF_VEC_ALLOC_A(T,A) struct vec_swallow_trailing_semi
-
-/* Support functions for stack vectors. */
-extern void *vec_stack_p_reserve_exact_1 (int, void *);
-extern void *vec_stack_o_reserve (void *, int, size_t, size_t MEM_STAT_DECL);
-extern void *vec_stack_o_reserve_exact (void *, int, size_t, size_t
- MEM_STAT_DECL);
-extern void vec_stack_free (void *);
-
-extern void dump_vec_loc_statistics (void);
-extern void ggc_free (void *);
-extern void vec_heap_free (void *);
+/* Generic vector template. Default values for A and L indicate the
+ most commonly used strategies.
+ FIXME - Ideally, they would all be vl_ptr to encourage using regular
+ instances for vectors, but the existing GTY machinery is limited
+ in that it can only deal with GC objects that are pointers
+ themselves.
-/* API compatibility macros (to be removed). */
-#define VEC_length(T,V) \
- ((V) ? (V)->length () : 0)
+ This means that vector operations that need to deal with
+ potentially NULL pointers, must be provided as free
+ functions (see the vec_safe_* functions above). */
+template<typename T,
+ typename A = va_heap,
+ typename L = typename A::default_layout>
+class GTY((user)) vec
+{
+};
-#define VEC_empty(T,V) \
- ((V) ? (V)->empty () : true)
-#define VEC_address(T,V) \
- vec_address<T> (V)
+/* Embeddable vector. These vectors are suitable to be embedded
+ in other data structures so that they can be pre-allocated in a
+ contiguous memory block.
-/* FIXME. For now, we need to continue expanding VEC_address into a
- function call. Otherwise, the warning machinery for -Wnonnull gets
- confused thinking that VEC_address may return null in calls to
- memcpy and qsort. This will disappear once vec_address becomes
- a member function for a handler class wrapping vec_t. */
+ Embeddable vectors are implemented using the trailing array idiom,
+ thus they are not resizeable without changing the address of the
+ vector object itself. This means you cannot have variables or
+ fields of embeddable vector type -- always use a pointer to a
+ vector. The one exception is the final field of a structure, which
+ could be a vector type.
-template<typename T>
-static inline T *
-vec_address (vec_t<T> *vec)
+ You will have to use the embedded_size & embedded_init calls to
+ create such objects, and they will not be resizeable (so the 'safe'
+ allocation variants are not available).
+
+ Properties:
+
+ - The whole vector and control data are allocated in a single
+ contiguous block. It uses the trailing-vector idiom, so
+ allocation must reserve enough space for all the elements
+ in the vector plus its control data.
+ - The vector cannot be re-allocated.
+ - The vector cannot grow nor shrink.
+ - No indirections needed for access/manipulation.
+ - It requires 2 words of storage (prior to vector allocation). */
+
+template<typename T, typename A>
+class GTY((user)) vec<T, A, vl_embed>
{
- return vec ? vec->address() : NULL;
-}
+public:
+ unsigned allocated (void) const { return pfx_.alloc_; }
+ unsigned length (void) const { return pfx_.num_; }
+ bool is_empty (void) const { return pfx_.num_ == 0; }
+ T *address (void) { return data_; }
+ const T *address (void) const { return data_; }
+ const T &operator[] (unsigned) const;
+ T &operator[] (unsigned);
+ T &last (void);
+ bool space (unsigned) const;
+ bool iterate (unsigned, T *) const;
+ bool iterate (unsigned, T **) const;
+ vec *copy (ALONE_MEM_STAT_DECL) const;
+ void splice (vec &);
+ void splice (vec *src);
+ T *quick_push (const T &);
+ T &pop (void);
+ void truncate (unsigned);
+ void quick_insert (unsigned, const T &);
+ void ordered_remove (unsigned);
+ void unordered_remove (unsigned);
+ void block_remove (unsigned, unsigned);
+ void qsort (int (*) (const void *, const void *));
+ unsigned lower_bound (T, bool (*)(const T &, const T &)) const;
+ static size_t embedded_size (unsigned);
+ void embedded_init (unsigned, unsigned = 0);
+ void quick_grow (unsigned len);
+ void quick_grow_cleared (unsigned len);
+
+ /* vec class can access our internal data and functions. */
+ template <typename, typename, typename> friend class vec;
+
+ /* The allocator types also need access to our internals. */
+ friend struct va_gc;
+ friend struct va_gc_atomic;
+ friend struct va_heap;
+ friend struct va_stack;
+
+private:
+ vec_prefix pfx_;
+ T data_[1];
+};
-#define VEC_last(T,V) \
- ((V)->last (ALONE_VEC_CHECK_INFO))
-#define VEC_index(T,V,I) \
- ((*(V))[I])
+/* Convenience wrapper functions to use when dealing with pointers to
+ embedded vectors. Some functionality for these vectors must be
+ provided via free functions for these reasons:
-#define VEC_iterate(T,V,I,P) \
- (vec_t<T>::iterate(V, I, &(P)))
+ 1- The pointer may be NULL (e.g., before initial allocation).
-#define VEC_embedded_size(T,N) \
- (vec_t<T>::embedded_size (N))
+ 2- When the vector needs to grow, it must be reallocated, so
+ the pointer will change its value.
-#define VEC_embedded_init(T,V,N) \
- ((V)->embedded_init (N))
+ Because of limitations with the current GC machinery, all vectors
+ in GC memory *must* be pointers. */
-#define VEC_free(T,A,V) \
- (vec_t<T>::free<A> (&(V)))
-#define VEC_copy(T,A,V) \
- ((V)->copy<A> (ALONE_MEM_STAT_INFO))
+/* If V contains no room for NELEMS elements, return false. Otherwise,
+ return true. */
+template<typename T, typename A>
+inline bool
+vec_safe_space (const vec<T, A, vl_embed> *v, unsigned nelems)
+{
+ return v ? v->space (nelems) : nelems == 0;
+}
-#define VEC_space(T,V,R) \
- ((V) ? (V)->space (R VEC_CHECK_INFO) : (R) == 0)
-#define VEC_reserve(T,A,V,R) \
- (vec_t<T>::reserve<A> (&(V), (int)(R) VEC_CHECK_INFO MEM_STAT_INFO))
+/* If V is NULL, return 0. Otherwise, return V->length(). */
+template<typename T, typename A>
+inline unsigned
+vec_safe_length (const vec<T, A, vl_embed> *v)
+{
+ return v ? v->length () : 0;
+}
-#define VEC_reserve_exact(T,A,V,R) \
- (vec_t<T>::reserve_exact<A> (&(V), R VEC_CHECK_INFO MEM_STAT_INFO))
-#define VEC_splice(T,DST,SRC) \
- (DST)->splice (SRC VEC_CHECK_INFO)
+/* If V is NULL, return NULL. Otherwise, return V->address(). */
+template<typename T, typename A>
+inline T *
+vec_safe_address (vec<T, A, vl_embed> *v)
+{
+ return v ? v->address () : NULL;
+}
-#define VEC_safe_splice(T,A,DST,SRC) \
- vec_t<T>::safe_splice<A> (&(DST), SRC VEC_CHECK_INFO MEM_STAT_INFO)
-#define VEC_quick_push(T,V,O) \
- ((V)->quick_push (O VEC_CHECK_INFO))
+/* If V is NULL, return true. Otherwise, return V->is_empty(). */
+template<typename T, typename A>
+inline bool
+vec_safe_is_empty (vec<T, A, vl_embed> *v)
+{
+ return v ? v->is_empty () : true;
+}
-#define VEC_safe_push(T,A,V,O) \
- (vec_t<T>::safe_push<A> (&(V), O VEC_CHECK_INFO MEM_STAT_INFO))
-#define VEC_pop(T,V) \
- ((V)->pop (ALONE_VEC_CHECK_INFO))
+/* If V does not have space for NELEMS elements, call
+ V->reserve(NELEMS, EXACT). */
+template<typename T, typename A>
+inline bool
+vec_safe_reserve (vec<T, A, vl_embed> *&v, unsigned nelems, bool exact = false
+ MEM_STAT_DECL)
+{
+ bool extend = nelems ? !vec_safe_space (v, nelems) : false;
+ if (extend)
+ A::reserve (v, nelems, exact PASS_MEM_STAT);
+ return extend;
+}
-#define VEC_truncate(T,V,I) \
- (V \
- ? (V)->truncate ((unsigned)(I) VEC_CHECK_INFO) \
- : gcc_assert ((I) == 0))
+template<typename T, typename A>
+inline bool
+vec_safe_reserve_exact (vec<T, A, vl_embed> *&v, unsigned nelems MEM_STAT_DECL)
+{
+ return vec_safe_reserve (v, nelems, true PASS_MEM_STAT);
+}
-#define VEC_safe_grow(T,A,V,I) \
- (vec_t<T>::safe_grow<A> (&(V), (int)(I) VEC_CHECK_INFO MEM_STAT_INFO))
-#define VEC_safe_grow_cleared(T,A,V,I) \
- (vec_t<T>::safe_grow_cleared<A> (&(V), (int)(I) \
- VEC_CHECK_INFO MEM_STAT_INFO))
+/* Allocate GC memory for V with space for NELEMS slots. If NELEMS
+ is 0, V is initialized to NULL. */
-#define VEC_replace(T,V,I,O) \
- ((V)->replace ((unsigned)(I), O VEC_CHECK_INFO))
+template<typename T, typename A>
+inline void
+vec_alloc (vec<T, A, vl_embed> *&v, unsigned nelems MEM_STAT_DECL)
+{
+ v = NULL;
+ vec_safe_reserve (v, nelems);
+}
-#define VEC_quick_insert(T,V,I,O) \
- ((V)->quick_insert (I,O VEC_CHECK_INFO))
-#define VEC_safe_insert(T,A,V,I,O) \
- (vec_t<T>::safe_insert<A> (&(V), I, O VEC_CHECK_INFO MEM_STAT_INFO))
+/* Free the GC memory allocated by vector V and set it to NULL. */
-#define VEC_ordered_remove(T,V,I) \
- ((V)->ordered_remove (I VEC_CHECK_INFO))
+template<typename T, typename A>
+inline void
+vec_free (vec<T, A, vl_embed> *&v)
+{
+ A::release (v);
+}
-#define VEC_unordered_remove(T,V,I) \
- ((V)->unordered_remove (I VEC_CHECK_INFO))
-#define VEC_block_remove(T,V,I,L) \
- ((V)->block_remove (I, L VEC_CHECK_INFO))
+/* Grow V to length LEN. Allocate it, if necessary. */
+template<typename T, typename A>
+inline void
+vec_safe_grow (vec<T, A, vl_embed> *&v, unsigned len MEM_STAT_DECL)
+{
+ unsigned oldlen = vec_safe_length (v);
+ gcc_checking_assert (len >= oldlen);
+ vec_safe_reserve_exact (v, len - oldlen PASS_MEM_STAT);
+ v->quick_grow (len PASS_MEM_STAT);
+}
-#define VEC_lower_bound(T,V,O,LT) \
- ((V)->lower_bound (O, LT))
+/* Grow V to length LEN, zero-initializing the new elements.
+ Allocate V, if necessary. */
+template<typename T, typename A>
+inline void
+vec_safe_grow_cleared (vec<T, A, vl_embed> *&v, unsigned len MEM_STAT_DECL)
+{
+ unsigned oldlen = vec_safe_length (v);
+ vec_safe_grow (v, len PASS_MEM_STAT);
+ memset (&(v->address()[oldlen]), 0, sizeof (T) * (len - oldlen));
+}
-/* Return the number of active elements in this vector. */
-template<typename T>
-inline unsigned
-vec_t<T>::length (void) const
+/* If V is NULL return false, otherwise return V->iterate(IX, PTR). */
+template<typename T, typename A>
+inline bool
+vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T **ptr)
{
- return prefix_.num_;
+ if (v)
+ return v->iterate (ix, ptr);
+ else
+ {
+ *ptr = 0;
+ return false;
+ }
}
+template<typename T, typename A>
+inline bool
+vec_safe_iterate (const vec<T, A, vl_embed> *v, unsigned ix, T *ptr)
+{
+ if (v)
+ return v->iterate (ix, ptr);
+ else
+ {
+ *ptr = 0;
+ return false;
+ }
+}
-/* Return true if this vector has no active elements. */
-template<typename T>
-inline bool
-vec_t<T>::empty (void) const
+/* If V has no room for one more element, reallocate it. Then call
+ V->quick_push(OBJ). */
+template<typename T, typename A>
+inline T *
+vec_safe_push (vec<T, A, vl_embed> *&v, const T &obj MEM_STAT_DECL)
{
- return length () == 0;
+ vec_safe_reserve (v, 1, false PASS_MEM_STAT);
+ return v->quick_push (obj PASS_MEM_STAT);
}
-/* Return the address of the array of elements. If you need to
- directly manipulate the array (for instance, you want to feed it
- to qsort), use this accessor. */
+/* If V has no room for one more element, reallocate it. Then call
+ V->quick_insert(IX, OBJ). */
+template<typename T, typename A>
+inline void
+vec_safe_insert (vec<T, A, vl_embed> *&v, unsigned ix, const T &obj
+ MEM_STAT_DECL)
+{
+ vec_safe_reserve (v, 1, false PASS_MEM_STAT);
+ v->quick_insert (ix, obj);
+}
+
-template<typename T>
-inline T *
-vec_t<T>::address (void)
+/* If V is NULL, do nothing. Otherwise, call V->truncate(SIZE). */
+template<typename T, typename A>
+inline void
+vec_safe_truncate (vec<T, A, vl_embed> *v, unsigned size)
{
- return vec_;
+ if (v)
+ v->truncate (size);
}
-/* Get the final element of the vector, which must not be empty. */
+/* If SRC is not NULL, return a pointer to a copy of it. */
+template<typename T, typename A>
+inline vec<T, A, vl_embed> *
+vec_safe_copy (vec<T, A, vl_embed> *src)
+{
+ return src ? src->copy () : NULL;
+}
-template<typename T>
-T &
-vec_t<T>::last (ALONE_VEC_CHECK_DECL)
+/* Copy the elements from SRC to the end of DST as if by memcpy.
+ Reallocate DST, if necessary. */
+template<typename T, typename A>
+inline void
+vec_safe_splice (vec<T, A, vl_embed> *&dst, vec<T, A, vl_embed> *src
+ MEM_STAT_DECL)
{
- VEC_ASSERT (prefix_.num_, "last", T, base);
- return (*this)[prefix_.num_ - 1];
+ unsigned src_len = vec_safe_length (src);
+ if (src_len)
+ {
+ vec_safe_reserve_exact (dst, vec_safe_length (dst) + src_len);
+ dst->splice (*src);
+ }
}
/* Index into vector. Return the IX'th element. IX must be in the
domain of the vector. */
-template<typename T>
-const T &
-vec_t<T>::operator[] (unsigned ix) const
+template<typename T, typename A>
+inline const T &
+vec<T, A, vl_embed>::operator[] (unsigned ix) const
{
- gcc_assert (ix < prefix_.num_);
- return vec_[ix];
+ gcc_checking_assert (ix < pfx_.num_);
+ return data_[ix];
}
-template<typename T>
-T &
-vec_t<T>::operator[] (unsigned ix)
+template<typename T, typename A>
+inline T &
+vec<T, A, vl_embed>::operator[] (unsigned ix)
{
- gcc_assert (ix < prefix_.num_);
- return vec_[ix];
+ gcc_checking_assert (ix < pfx_.num_);
+ return data_[ix];
+}
+
+
+/* Get the final element of the vector, which must not be empty. */
+
+template<typename T, typename A>
+inline T &
+vec<T, A, vl_embed>::last (void)
+{
+ gcc_checking_assert (pfx_.num_ > 0);
+ return (*this)[pfx_.num_ - 1];
+}
+
+
+/* If this vector has space for NELEMS additional entries, return
+ true. You usually only need to use this if you are doing your
+ own vector reallocation, for instance on an embedded vector. This
+ returns true in exactly the same circumstances that vec::reserve
+ will. */
+
+template<typename T, typename A>
+inline bool
+vec<T, A, vl_embed>::space (unsigned nelems) const
+{
+ return pfx_.alloc_ - pfx_.num_ >= nelems;
}
/* Return iteration condition and update PTR to point to the IX'th
- element of VEC. Use this to iterate over the elements of a vector
- as follows,
+ element of this vector. Use this to iterate over the elements of a
+ vector as follows,
- for (ix = 0; vec_t<T>::iterate(v, ix, &ptr); ix++)
- continue;
-
- FIXME. This is a static member function because if VEC is NULL,
- PTR should be initialized to NULL. This will become a regular
- member function of the handler class. */
+ for (ix = 0; v->iterate (ix, &ptr); ix++)
+ continue; */
-template<typename T>
-bool
-vec_t<T>::iterate (const vec_t<T> *vec, unsigned ix, T *ptr)
+template<typename T, typename A>
+inline bool
+vec<T, A, vl_embed>::iterate (unsigned ix, T *ptr) const
{
- if (vec && ix < vec->prefix_.num_)
+ if (ix < pfx_.num_)
{
- *ptr = vec->vec_[ix];
+ *ptr = data_[ix];
return true;
}
else
@@ -518,22 +845,21 @@ vec_t<T>::iterate (const vec_t<T> *vec, unsigned ix, T *ptr)
/* Return iteration condition and update *PTR to point to the
- IX'th element of VEC. Use this to iterate over the elements of a
- vector as follows,
+ IX'th element of this vector. Use this to iterate over the
+ elements of a vector as follows,
for (ix = 0; v->iterate(ix, &ptr); ix++)
continue;
- This variant is for vectors of objects. FIXME, to be removed
- once the distinction between vec_t<T> and vec_t<T *> disappears. */
+ This variant is for vectors of objects. */
-template<typename T>
-bool
-vec_t<T>::iterate (const vec_t<T> *vec, unsigned ix, T **ptr)
+template<typename T, typename A>
+inline bool
+vec<T, A, vl_embed>::iterate (unsigned ix, T **ptr) const
{
- if (vec && ix < vec->prefix_.num_)
+ if (ix < pfx_.num_)
{
- *ptr = CONST_CAST (T *, &vec->vec_[ix]);
+ *ptr = CONST_CAST (T *, &data_[ix]);
return true;
}
else
@@ -544,500 +870,835 @@ vec_t<T>::iterate (const vec_t<T> *vec, unsigned ix, T **ptr)
}
-/* Convenience macro for forward iteration. */
+/* Return a pointer to a copy of this vector. */
-#define FOR_EACH_VEC_ELT(T, V, I, P) \
- for (I = 0; VEC_iterate (T, (V), (I), (P)); ++(I))
+template<typename T, typename A>
+inline vec<T, A, vl_embed> *
+vec<T, A, vl_embed>::copy (ALONE_MEM_STAT_DECL) const
+{
+ vec<T, A, vl_embed> *new_vec = NULL;
+ unsigned len = length ();
+ if (len)
+ {
+ vec_alloc (new_vec, len PASS_MEM_STAT);
+ new_vec->embedded_init (len, len);
+ memcpy (new_vec->address(), data_, sizeof (T) * len);
+ }
+ return new_vec;
+}
-/* Likewise, but start from FROM rather than 0. */
-#define FOR_EACH_VEC_ELT_FROM(T, V, I, P, FROM) \
- for (I = (FROM); VEC_iterate (T, (V), (I), (P)); ++(I))
+/* Copy the elements from SRC to the end of this vector as if by memcpy.
+ The vector must have sufficient headroom available. */
-/* Convenience macro for reverse iteration. */
+template<typename T, typename A>
+inline void
+vec<T, A, vl_embed>::splice (vec<T, A, vl_embed> &src)
+{
+ unsigned len = src.length();
+ if (len)
+ {
+ gcc_checking_assert (space (len));
+ memcpy (address() + length(), src.address(), len * sizeof (T));
+ pfx_.num_ += len;
+ }
+}
-#define FOR_EACH_VEC_ELT_REVERSE(T, V, I, P) \
- for (I = VEC_length (T, (V)) - 1; \
- VEC_iterate (T, (V), (I), (P)); \
- (I)--)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_embed>::splice (vec<T, A, vl_embed> *src)
+{
+ if (src)
+ splice (*src);
+}
+
+
+/* Push OBJ (a new element) onto the end of the vector. There must be
+ sufficient space in the vector. Return a pointer to the slot
+ where OBJ was inserted. */
+
+template<typename T, typename A>
+inline T *
+vec<T, A, vl_embed>::quick_push (const T &obj)
+{
+ gcc_checking_assert (space (1));
+ T *slot = &data_[pfx_.num_++];
+ *slot = obj;
+ return slot;
+}
+
+
+/* Pop and return the last element off the end of the vector. */
+
+template<typename T, typename A>
+inline T &
+vec<T, A, vl_embed>::pop (void)
+{
+ gcc_checking_assert (length () > 0);
+ return data_[--pfx_.num_];
+}
+
+
+/* Set the length of the vector to SIZE. The new length must be less
+ than or equal to the current length. This is an O(1) operation. */
+
+template<typename T, typename A>
+inline void
+vec<T, A, vl_embed>::truncate (unsigned size)
+{
+ gcc_checking_assert (length () >= size);
+ pfx_.num_ = size;
+}
+
+
+/* Insert an element, OBJ, at the IXth position of this vector. There
+ must be sufficient space. */
+
+template<typename T, typename A>
+inline void
+vec<T, A, vl_embed>::quick_insert (unsigned ix, const T &obj)
+{
+ gcc_checking_assert (length () < allocated ());
+ gcc_checking_assert (ix <= length ());
+ T *slot = &data_[ix];
+ memmove (slot + 1, slot, (pfx_.num_++ - ix) * sizeof (T));
+ *slot = obj;
+}
+
+
+/* Remove an element from the IXth position of this vector. Ordering of
+ remaining elements is preserved. This is an O(N) operation due to
+ memmove. */
+
+template<typename T, typename A>
+inline void
+vec<T, A, vl_embed>::ordered_remove (unsigned ix)
+{
+ gcc_checking_assert (ix < length());
+ T *slot = &data_[ix];
+ memmove (slot, slot + 1, (--pfx_.num_ - ix) * sizeof (T));
+}
+
+
+/* Remove an element from the IXth position of this vector. Ordering of
+ remaining elements is destroyed. This is an O(1) operation. */
+
+template<typename T, typename A>
+inline void
+vec<T, A, vl_embed>::unordered_remove (unsigned ix)
+{
+ gcc_checking_assert (ix < length());
+ data_[ix] = data_[--pfx_.num_];
+}
+
+
+/* Remove LEN elements starting at the IXth. Ordering is retained.
+ This is an O(N) operation due to memmove. */
+
+template<typename T, typename A>
+inline void
+vec<T, A, vl_embed>::block_remove (unsigned ix, unsigned len)
+{
+ gcc_checking_assert (ix + len <= length());
+ T *slot = &data_[ix];
+ pfx_.num_ -= len;
+ memmove (slot, slot + len, (pfx_.num_ - ix) * sizeof (T));
+}
+
+
+/* Sort the contents of this vector with qsort. CMP is the comparison
+ function to pass to qsort. */
+
+template<typename T, typename A>
+inline void
+vec<T, A, vl_embed>::qsort (int (*cmp) (const void *, const void *))
+{
+ ::qsort (address(), length(), sizeof (T), cmp);
+}
-/* Return the number of bytes needed to embed an instance of vec_t inside
- another data structure.
+/* Find and return the first position in which OBJ could be inserted
+ without changing the ordering of this vector. LESSTHAN is a
+ function that returns true if the first argument is strictly less
+ than the second. */
+
+template<typename T, typename A>
+unsigned
+vec<T, A, vl_embed>::lower_bound (T obj, bool (*lessthan)(const T &, const T &))
+ const
+{
+ unsigned int len = length ();
+ unsigned int half, middle;
+ unsigned int first = 0;
+ while (len > 0)
+ {
+ half = len / 2;
+ middle = first;
+ middle += half;
+ T middle_elem = (*this)[middle];
+ if (lessthan (middle_elem, obj))
+ {
+ first = middle;
+ ++first;
+ len = len - half - 1;
+ }
+ else
+ len = half;
+ }
+ return first;
+}
+
+
+/* Return the number of bytes needed to embed an instance of an
+ embeddable vec inside another data structure.
Use these methods to determine the required size and initialization
of a vector V of type T embedded within another structure (as the
final member):
- size_t vec_t<T>::embedded_size<T> (int reserve);
- void v->embedded_init(int reserve, int active);
+ size_t vec<T, A, vl_embed>::embedded_size (unsigned alloc);
+ void v->embedded_init(unsigned alloc, unsigned num);
These allow the caller to perform the memory allocation. */
-template<typename T>
-size_t
-vec_t<T>::embedded_size (int nelems)
+template<typename T, typename A>
+inline size_t
+vec<T, A, vl_embed>::embedded_size (unsigned alloc)
{
- return offsetof (vec_t<T>, vec_) + nelems * sizeof (T);
+ typedef vec<T, A, vl_embed> vec_embedded;
+ return offsetof (vec_embedded, data_) + alloc * sizeof (T);
}
-/* Initialize the vector to contain room for NELEMS elements and
- ACTIVE active elements. */
+/* Initialize the vector to contain room for ALLOC elements and
+ NUM active elements. */
-template<typename T>
-void
-vec_t<T>::embedded_init (int nelems, int active)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_embed>::embedded_init (unsigned alloc, unsigned num)
{
- prefix_.num_ = active;
- prefix_.alloc_ = nelems;
+ pfx_.alloc_ = alloc;
+ pfx_.num_ = num;
}
-/* Allocate a new vector with space for RESERVE objects. If RESERVE
- is zero, NO vector is created.
+/* Grow the vector to a specific length. LEN must be as long or longer than
+ the current length. The new elements are uninitialized. */
- Note that this allocator must always be a macro:
+template<typename T, typename A>
+inline void
+vec<T, A, vl_embed>::quick_grow (unsigned len)
+{
+ gcc_checking_assert (length () <= len && len <= pfx_.alloc_);
+ pfx_.num_ = len;
+}
- We support a vector which starts out with space on the stack and
- switches to heap space when forced to reallocate. This works a
- little differently. In the case of stack vectors, vec_alloc will
- expand to a call to vec_alloc_1 that calls XALLOCAVAR to request the
- initial allocation. This uses alloca to get the initial space.
- Since alloca can not be usefully called in an inline function,
- vec_alloc must always be a macro.
- Important limitations of stack vectors:
+/* Grow the vector to a specific length. LEN must be as long or longer than
+ the current length. The new elements are initialized to zero. */
- - Only the initial allocation will be made using alloca, so pass a
- reasonable estimate that doesn't use too much stack space; don't
- pass zero.
+template<typename T, typename A>
+inline void
+vec<T, A, vl_embed>::quick_grow_cleared (unsigned len)
+{
+ unsigned oldlen = length ();
+ quick_grow (len);
+ memset (&(address()[oldlen]), 0, sizeof (T) * (len - oldlen));
+}
- - Don't return a stack-allocated vector from the function which
- allocated it. */
-#define VEC_alloc(T,A,N) \
- ((A == stack) \
- ? vec_t<T>::alloc (N, XALLOCAVAR (vec_t<T>, vec_t<T>::embedded_size (N)))\
- : vec_t<T>::alloc<A> (N MEM_STAT_INFO))
+/* Garbage collection support for vec<T, A, vl_embed>. */
template<typename T>
-template<enum vec_allocation_t A>
-vec_t<T> *
-vec_t<T>::alloc (int nelems MEM_STAT_DECL)
+void
+gt_ggc_mx (vec<T, va_gc> *v)
{
- return reserve_exact<A> ((vec_t<T> *) NULL, nelems PASS_MEM_STAT);
+ extern void gt_ggc_mx (T &);
+ for (unsigned i = 0; i < v->length (); i++)
+ gt_ggc_mx ((*v)[i]);
}
template<typename T>
-vec_t<T> *
-vec_t<T>::alloc (int nelems, vec_t<T> *space)
+void
+gt_ggc_mx (vec<T, va_gc_atomic, vl_embed> *v ATTRIBUTE_UNUSED)
{
- return static_cast <vec_t<T> *> (vec_stack_p_reserve_exact_1 (nelems, space));
+ /* Nothing to do. Vectors of atomic types wrt GC do not need to
+ be traversed. */
}
-/* Free vector *V and set it to NULL. */
+/* PCH support for vec<T, A, vl_embed>. */
-template<typename T>
-template<enum vec_allocation_t A>
+template<typename T, typename A>
void
-vec_t<T>::free (vec_t<T> **v)
+gt_pch_nx (vec<T, A, vl_embed> *v)
{
- if (*v)
- {
- if (A == heap)
- vec_heap_free (*v);
- else if (A == gc)
- ggc_free (*v);
- else if (A == stack)
- vec_stack_free (*v);
- }
- *v = NULL;
+ extern void gt_pch_nx (T &);
+ for (unsigned i = 0; i < v->length (); i++)
+ gt_pch_nx ((*v)[i]);
+}
+
+template<typename T, typename A>
+void
+gt_pch_nx (vec<T *, A, vl_embed> *v, gt_pointer_operator op, void *cookie)
+{
+ for (unsigned i = 0; i < v->length (); i++)
+ op (&((*v)[i]), cookie);
+}
+
+template<typename T, typename A>
+void
+gt_pch_nx (vec<T, A, vl_embed> *v, gt_pointer_operator op, void *cookie)
+{
+ extern void gt_pch_nx (T *, gt_pointer_operator, void *);
+ for (unsigned i = 0; i < v->length (); i++)
+ gt_pch_nx (&((*v)[i]), op, cookie);
}
-/* Return a copy of this vector. The new and old vectors need not be
- allocated by the same mechanism. */
+/* Space efficient vector. These vectors can grow dynamically and are
+ allocated together with their control data. They are suited to be
+ included in data structures. Prior to initial allocation, they
+ only take a single word of storage.
+
+ These vectors are implemented as a pointer to an embeddable vector.
+ The semantics allow for this pointer to be NULL to represent empty
+ vectors. This way, empty vectors occupy minimal space in the
+ structure containing them.
+
+ Properties:
+
+ - The whole vector and control data are allocated in a single
+ contiguous block.
+ - The whole vector may be re-allocated.
+ - Vector data may grow and shrink.
+ - Access and manipulation requires a pointer test and
+ indirection.
+ - It requires 1 word of storage (prior to vector allocation).
+
+
+ Limitations:
+
+ These vectors must be PODs because they are stored in unions.
+ (http://en.wikipedia.org/wiki/Plain_old_data_structures).
+ As long as we use C++03, we cannot have constructors nor
+ destructors in classes that are stored in unions. */
+
+template<typename T, typename A>
+class vec<T, A, vl_ptr>
+{
+public:
+ /* Memory allocation and deallocation for the embedded vector.
+ Needed because we cannot have proper ctors/dtors defined. */
+ void create (unsigned nelems CXX_MEM_STAT_INFO);
+ void release (void);
+
+ /* Vector operations. */
+ bool exists (void) const
+ { return vec_ != NULL; }
+
+ bool is_empty (void) const
+ { return vec_ ? vec_->is_empty() : true; }
+
+ unsigned length (void) const
+ { return vec_ ? vec_->length() : 0; }
+
+ T *address (void)
+ { return vec_ ? vec_->data_ : NULL; }
+
+ const T *address (void) const
+ { return vec_ ? vec_->data_ : NULL; }
+
+ const T &operator[] (unsigned ix) const
+ { return (*vec_)[ix]; }
+
+ bool operator!=(const vec &other) const
+ { return !(*this == other); }
+
+ bool operator==(const vec &other) const
+ { return address() == other.address(); }
+
+ T &operator[] (unsigned ix)
+ { return (*vec_)[ix]; }
+
+ T &last (void)
+ { return vec_->last(); }
+
+ bool space (int nelems) const
+ { return vec_ ? vec_->space (nelems) : nelems == 0; }
+
+ bool iterate (unsigned ix, T *p) const;
+ bool iterate (unsigned ix, T **p) const;
+ vec copy (ALONE_CXX_MEM_STAT_INFO) const;
+ bool reserve (unsigned, bool = false CXX_MEM_STAT_INFO);
+ bool reserve_exact (unsigned CXX_MEM_STAT_INFO);
+ void splice (vec &);
+ void safe_splice (vec & CXX_MEM_STAT_INFO);
+ T *quick_push (const T &);
+ T *safe_push (const T &CXX_MEM_STAT_INFO);
+ T &pop (void);
+ void truncate (unsigned);
+ void safe_grow (unsigned CXX_MEM_STAT_INFO);
+ void safe_grow_cleared (unsigned CXX_MEM_STAT_INFO);
+ void quick_grow (unsigned);
+ void quick_grow_cleared (unsigned);
+ void quick_insert (unsigned, const T &);
+ void safe_insert (unsigned, const T & CXX_MEM_STAT_INFO);
+ void ordered_remove (unsigned);
+ void unordered_remove (unsigned);
+ void block_remove (unsigned, unsigned);
+ void qsort (int (*) (const void *, const void *));
+ unsigned lower_bound (T, bool (*)(const T &, const T &)) const;
+
+ template<typename T1>
+ friend void va_stack::alloc(vec<T1, va_stack, vl_ptr>&, unsigned,
+ vec<T1, va_stack, vl_embed> *);
+
+private:
+ vec<T, A, vl_embed> *vec_;
+};
+
+
+/* Empty specialization for GC allocation. This will prevent GC
+ vectors from using the vl_ptr layout. FIXME: This is needed to
+ circumvent limitations in the GTY machinery. */
template<typename T>
-template<enum vec_allocation_t A>
-vec_t<T> *
-vec_t<T>::copy (ALONE_MEM_STAT_DECL)
+class vec<T, va_gc, vl_ptr>
{
- unsigned len = VEC_length (T, this);
- vec_t<T> *new_vec = NULL;
+};
- if (len)
- {
- new_vec = reserve_exact<A> (static_cast<vec_t<T> *> (NULL),
- len PASS_MEM_STAT);
- new_vec->embedded_init (len, len);
- memcpy (new_vec->address (), vec_, sizeof (T) * len);
- }
- return new_vec;
+/* Allocate heap memory for pointer V and create the internal vector
+ with space for NELEMS elements. If NELEMS is 0, the internal
+ vector is initialized to empty. */
+
+template<typename T>
+inline void
+vec_alloc (vec<T> *&v, unsigned nelems MEM_STAT_DECL)
+{
+ v = new vec<T>;
+ v->create (nelems PASS_MEM_STAT);
}
-/* If this vector has space for RESERVE additional entries, return
- true. You usually only need to use this if you are doing your
- own vector reallocation, for instance on an embedded vector. This
- returns true in exactly the same circumstances that vec_reserve
- will. */
+/* Conditionally allocate heap memory for VEC and its internal vector. */
template<typename T>
-bool
-vec_t<T>::space (int nelems VEC_CHECK_DECL)
+inline void
+vec_check_alloc (vec<T, va_heap> *&vec, unsigned nelems MEM_STAT_DECL)
{
- VEC_ASSERT (nelems >= 0, "space", T, base);
- return prefix_.alloc_ - prefix_.num_ >= static_cast <unsigned> (nelems);
+ if (!vec)
+ vec_alloc (vec, nelems PASS_MEM_STAT);
}
-/* Ensure that the vector **VEC has at least RESERVE slots available. This
- will create additional headroom. Note this can cause **VEC to
- be reallocated. Returns true iff reallocation actually occurred. */
+/* Free the heap memory allocated by vector V and set it to NULL. */
template<typename T>
-template<enum vec_allocation_t A>
-bool
-vec_t<T>::reserve (vec_t<T> **vec, int nelems VEC_CHECK_DECL MEM_STAT_DECL)
+inline void
+vec_free (vec<T> *&v)
{
- bool extend = (*vec) ? !(*vec)->space (nelems VEC_CHECK_PASS) : nelems != 0;
+ if (v == NULL)
+ return;
- if (extend)
- *vec = reserve<A> (*vec, nelems PASS_MEM_STAT);
+ v->release ();
+ delete v;
+ v = NULL;
+}
- return extend;
+
+/* Allocate a new stack vector with space for exactly NELEMS objects.
+ If NELEMS is zero, NO vector is created.
+
+ For the stack allocator, no memory is really allocated. The vector
+ is initialized to be at address SPACE and contain NELEMS slots.
+ Memory allocation actually occurs in the expansion of vec_stack_alloc.
+
+ Usage notes:
+
+ * This does not allocate an instance of vec<T, A>. It allocates the
+ actual vector of elements (i.e., vec<T, A, vl_embed>) inside a
+ vec<T, A> instance.
+
+ * This allocator must always be a macro:
+
+ We support a vector which starts out with space on the stack and
+ switches to heap space when forced to reallocate. This works a
+ little differently. In the case of stack vectors, vec_stack_alloc
+ will expand to a call to va_stack::alloc that calls XALLOCAVAR to
+ request the initial allocation. This uses alloca to get the
+ initial space. Since alloca can not be usefully called in an
+ inline function, vec_stack_alloc must always be a macro.
+
+ Important limitations of stack vectors:
+
+ - Only the initial allocation will be made using alloca, so pass
+ a reasonable estimate that doesn't use too much stack space;
+ don't pass zero.
+
+ - Don't return a stack-allocated vector from the function which
+ allocated it. */
+
+#define vec_stack_alloc(T,V,N) \
+ do { \
+ typedef vec<T, va_stack, vl_embed> stackv; \
+ va_stack::alloc (V, N, XALLOCAVAR (stackv, stackv::embedded_size (N)));\
+ } while (0)
+
+
+/* Return iteration condition and update PTR to point to the IX'th
+ element of this vector. Use this to iterate over the elements of a
+ vector as follows,
+
+ for (ix = 0; v.iterate(ix, &ptr); ix++)
+ continue; */
+
+template<typename T, typename A>
+inline bool
+vec<T, A, vl_ptr>::iterate (unsigned ix, T *ptr) const
+{
+ if (vec_)
+ return vec_->iterate (ix, ptr);
+ else
+ {
+ *ptr = 0;
+ return false;
+ }
}
-/* Ensure that **VEC has at least NELEMS slots available. This will not
- create additional headroom. Note this can cause VEC to be
- reallocated. Returns true iff reallocation actually occurred. */
+/* Return iteration condition and update *PTR to point to the
+ IX'th element of this vector. Use this to iterate over the
+ elements of a vector as follows,
-template<typename T>
-template<enum vec_allocation_t A>
-bool
-vec_t<T>::reserve_exact (vec_t<T> **vec, int nelems VEC_CHECK_DECL
- MEM_STAT_DECL)
+ for (ix = 0; v->iterate(ix, &ptr); ix++)
+ continue;
+
+ This variant is for vectors of objects. */
+
+template<typename T, typename A>
+inline bool
+vec<T, A, vl_ptr>::iterate (unsigned ix, T **ptr) const
{
- bool extend = (*vec) ? !(*vec)->space (nelems VEC_CHECK_PASS) : nelems != 0;
+ if (vec_)
+ return vec_->iterate (ix, ptr);
+ else
+ {
+ *ptr = 0;
+ return false;
+ }
+}
- if (extend)
- *vec = reserve_exact<A> (*vec, nelems PASS_MEM_STAT);
+/* Convenience macro for forward iteration. */
+#define FOR_EACH_VEC_ELT(V, I, P) \
+ for (I = 0; (V).iterate ((I), &(P)); ++(I))
+
+#define FOR_EACH_VEC_SAFE_ELT(V, I, P) \
+ for (I = 0; vec_safe_iterate ((V), (I), &(P)); ++(I))
+
+/* Likewise, but start from FROM rather than 0. */
+#define FOR_EACH_VEC_ELT_FROM(V, I, P, FROM) \
+ for (I = (FROM); (V).iterate ((I), &(P)); ++(I))
+
+/* Convenience macro for reverse iteration. */
+#define FOR_EACH_VEC_ELT_REVERSE(V, I, P) \
+ for (I = (V).length () - 1; \
+ (V).iterate ((I), &(P)); \
+ (I)--)
+
+#define FOR_EACH_VEC_SAFE_ELT_REVERSE(V, I, P) \
+ for (I = vec_safe_length (V) - 1; \
+ vec_safe_iterate ((V), (I), &(P)); \
+ (I)--)
+
+
+/* Return a copy of this vector. */
+
+template<typename T, typename A>
+inline vec<T, A, vl_ptr>
+vec<T, A, vl_ptr>::copy (ALONE_MEM_STAT_DECL) const
+{
+ vec<T, A, vl_ptr> new_vec = vec<T, A, vl_ptr>();
+ if (length ())
+ new_vec.vec_ = vec_->copy ();
+ return new_vec;
+}
+
+
+/* Ensure that the vector has at least RESERVE slots available (if
+ EXACT is false), or exactly RESERVE slots available (if EXACT is
+ true).
+
+ This may create additional headroom if EXACT is false.
+
+ Note that this can cause the embedded vector to be reallocated.
+ Returns true iff reallocation actually occurred. */
+
+template<typename T, typename A>
+inline bool
+vec<T, A, vl_ptr>::reserve (unsigned nelems, bool exact MEM_STAT_DECL)
+{
+ bool extend = nelems ? !space (nelems) : false;
+ if (extend)
+ A::reserve (vec_, nelems, exact PASS_MEM_STAT);
return extend;
}
+/* Ensure that this vector has exactly NELEMS slots available. This
+ will not create additional headroom. Note this can cause the
+ embedded vector to be reallocated. Returns true iff reallocation
+ actually occurred. */
+
+template<typename T, typename A>
+inline bool
+vec<T, A, vl_ptr>::reserve_exact (unsigned nelems MEM_STAT_DECL)
+{
+ return reserve (nelems, true PASS_MEM_STAT);
+}
+
+
+/* Create the internal vector and reserve NELEMS for it. This is
+ exactly like vec::reserve, but the internal vector is
+ unconditionally allocated from scratch. The old one, if it
+ existed, is lost. */
+
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::create (unsigned nelems MEM_STAT_DECL)
+{
+ vec_ = NULL;
+ if (nelems > 0)
+ reserve_exact (nelems PASS_MEM_STAT);
+}
+
+
+/* Free the memory occupied by the embedded vector. */
+
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::release (void)
+{
+ if (vec_)
+ A::release (vec_);
+}
+
+
/* Copy the elements from SRC to the end of this vector as if by memcpy.
- SRC and this vector need not be allocated with the same mechanism,
- although they most often will be. This vector is assumed to have
- sufficient headroom available. */
+ SRC and this vector must be allocated with the same memory
+ allocation mechanism. This vector is assumed to have sufficient
+ headroom available. */
-template<typename T>
-void
-vec_t<T>::splice (vec_t<T> *src VEC_CHECK_DECL)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::splice (vec<T, A, vl_ptr> &src)
{
- if (src)
- {
- unsigned len = VEC_length (T, src);
- VEC_ASSERT (VEC_length (T, this) + len <= prefix_.alloc_, "splice", T,
- base);
- memcpy (address () + VEC_length (T, this),
- src->address (),
- len * sizeof (T));
- prefix_.num_ += len;
- }
+ if (src.vec_)
+ vec_->splice (*(src.vec_));
}
-/* Copy the elements in SRC to the end of DST as if by memcpy. DST and
- SRC need not be allocated with the same mechanism, although they most
- often will be. DST need not have sufficient headroom and will be
- reallocated if needed. */
+/* Copy the elements in SRC to the end of this vector as if by memcpy.
+ SRC and this vector must be allocated with the same mechanism.
+ If there is not enough headroom in this vector, it will be reallocated
+ as needed. */
-template<typename T>
-template<enum vec_allocation_t A>
-void
-vec_t<T>::safe_splice (vec_t<T> **dst, vec_t<T> *src VEC_CHECK_DECL
- MEM_STAT_DECL)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::safe_splice (vec<T, A, vl_ptr> &src MEM_STAT_DECL)
{
- if (src)
+ if (src.length())
{
- reserve_exact<A> (dst, VEC_length (T, src) VEC_CHECK_PASS MEM_STAT_INFO);
- (*dst)->splice (src VEC_CHECK_PASS);
+ reserve_exact (src.length());
+ splice (src);
}
}
-
+
/* Push OBJ (a new element) onto the end of the vector. There must be
sufficient space in the vector. Return a pointer to the slot
where OBJ was inserted. */
-
-template<typename T>
-T *
-vec_t<T>::quick_push (const T &obj VEC_CHECK_DECL)
+template<typename T, typename A>
+inline T *
+vec<T, A, vl_ptr>::quick_push (const T &obj)
{
- VEC_ASSERT (prefix_.num_ < prefix_.alloc_, "push", T, base);
- T *slot = &vec_[prefix_.num_++];
- *slot = obj;
- return slot;
+ return vec_->quick_push (obj);
}
-/* Push a new element OBJ onto the end of VEC. Reallocates VEC, if
- needed. Return a pointer to the slot where OBJ was inserted. */
+/* Push a new element OBJ onto the end of this vector. Reallocates
+ the embedded vector, if needed. Return a pointer to the slot where
+ OBJ was inserted. */
-template<typename T>
-template<enum vec_allocation_t A>
-T *
-vec_t<T>::safe_push (vec_t<T> **vec, const T &obj VEC_CHECK_DECL MEM_STAT_DECL)
+template<typename T, typename A>
+inline T *
+vec<T, A, vl_ptr>::safe_push (const T &obj MEM_STAT_DECL)
{
- reserve<A> (vec, 1 VEC_CHECK_PASS PASS_MEM_STAT);
- return (*vec)->quick_push (obj VEC_CHECK_PASS);
+ reserve (1, false PASS_MEM_STAT);
+ return quick_push (obj);
}
/* Pop and return the last element off the end of the vector. */
-
-template<typename T>
-T &
-vec_t<T>::pop (ALONE_VEC_CHECK_DECL)
+template<typename T, typename A>
+inline T &
+vec<T, A, vl_ptr>::pop (void)
{
- VEC_ASSERT (prefix_.num_, "pop", T, base);
- return vec_[--prefix_.num_];
+ return vec_->pop ();
}
/* Set the length of the vector to LEN. The new length must be less
than or equal to the current length. This is an O(1) operation. */
-template<typename T>
-void
-vec_t<T>::truncate (unsigned size VEC_CHECK_DECL)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::truncate (unsigned size)
{
- VEC_ASSERT (prefix_.num_ >= size, "truncate", T, base);
- prefix_.num_ = size;
+ if (vec_)
+ vec_->truncate (size);
+ else
+ gcc_checking_assert (size == 0);
}
-/* Grow the vector VEC to a specific length. The LEN must be as
- long or longer than the current length. The new elements are
- uninitialized. */
+/* Grow the vector to a specific length. LEN must be as long or
+ longer than the current length. The new elements are
+ uninitialized. Reallocate the internal vector, if needed. */
-template<typename T>
-template<enum vec_allocation_t A>
-void
-vec_t<T>::safe_grow (vec_t<T> **vec, int size VEC_CHECK_DECL MEM_STAT_DECL)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::safe_grow (unsigned len MEM_STAT_DECL)
{
- VEC_ASSERT (size >= 0 && VEC_length (T, *vec) <= (unsigned)size,
- "grow", T, A);
- reserve_exact<A> (vec, size - (int)VEC_length (T, *vec)
- VEC_CHECK_PASS PASS_MEM_STAT);
- (*vec)->prefix_.num_ = size;
+ unsigned oldlen = length ();
+ gcc_checking_assert (oldlen <= len);
+ reserve_exact (len - oldlen PASS_MEM_STAT);
+ vec_->quick_grow (len);
}
-/* Grow the vector *VEC to a specific length. The LEN must be as
+/* Grow the embedded vector to a specific length. LEN must be as
long or longer than the current length. The new elements are
- initialized to zero. */
+ initialized to zero. Reallocate the internal vector, if needed. */
-template<typename T>
-template<enum vec_allocation_t A>
-void
-vec_t<T>::safe_grow_cleared (vec_t<T> **vec, int size VEC_CHECK_DECL
- MEM_STAT_DECL)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::safe_grow_cleared (unsigned len MEM_STAT_DECL)
{
- int oldsize = VEC_length (T, *vec);
- safe_grow<A> (vec, size VEC_CHECK_PASS PASS_MEM_STAT);
- memset (&((*vec)->address ()[oldsize]), 0, sizeof (T) * (size - oldsize));
+ unsigned oldlen = length ();
+ safe_grow (len PASS_MEM_STAT);
+ memset (&(address()[oldlen]), 0, sizeof (T) * (len - oldlen));
}
-/* Replace the IXth element of this vector with a new value, VAL. */
+/* Same as vec::safe_grow but without reallocation of the internal vector.
+ If the vector cannot be extended, a runtime assertion will be triggered. */
-template<typename T>
-void
-vec_t<T>::replace (unsigned ix, const T &obj VEC_CHECK_DECL)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::quick_grow (unsigned len)
{
- VEC_ASSERT (ix < prefix_.num_, "replace", T, base);
- vec_[ix] = obj;
+ gcc_checking_assert (vec_);
+ vec_->quick_grow (len);
}
-/* Insert an element, OBJ, at the IXth position of VEC. There must be
- sufficient space. */
+/* Same as vec::safe_grow_cleared but without reallocation of the
+ internal vector. If the vector cannot be extended, a runtime
+ assertion will be triggered. */
-template<typename T>
-void
-vec_t<T>::quick_insert (unsigned ix, const T &obj VEC_CHECK_DECL)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::quick_grow_cleared (unsigned len)
{
- VEC_ASSERT (prefix_.num_ < prefix_.alloc_, "insert", T, base);
- VEC_ASSERT (ix <= prefix_.num_, "insert", T, base);
- T *slot = &vec_[ix];
- memmove (slot + 1, slot, (prefix_.num_++ - ix) * sizeof (T));
- *slot = obj;
+ gcc_checking_assert (vec_);
+ vec_->quick_grow_cleared (len);
}
-/* Insert an element, OBJ, at the IXth position of VEC. Reallocate
- VEC, if necessary. */
+/* Insert an element, OBJ, at the IXth position of this vector. There
+ must be sufficient space. */
-template<typename T>
-template<enum vec_allocation_t A>
-void
-vec_t<T>::safe_insert (vec_t<T> **vec, unsigned ix, const T &obj VEC_CHECK_DECL
- MEM_STAT_DECL)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::quick_insert (unsigned ix, const T &obj)
{
- reserve<A> (vec, 1 VEC_CHECK_PASS PASS_MEM_STAT);
- (*vec)->quick_insert (ix, obj VEC_CHECK_PASS);
+ vec_->quick_insert (ix, obj);
}
-/* Remove an element from the IXth position of this vector. Ordering of
- remaining elements is preserved. This is an O(N) operation due to
- a memmove. */
+/* Insert an element, OBJ, at the IXth position of the vector.
+ Reallocate the embedded vector, if necessary. */
-template<typename T>
-void
-vec_t<T>::ordered_remove (unsigned ix VEC_CHECK_DECL)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::safe_insert (unsigned ix, const T &obj MEM_STAT_DECL)
{
- VEC_ASSERT (ix < prefix_.num_, "remove", T, base);
- T *slot = &vec_[ix];
- memmove (slot, slot + 1, (--prefix_.num_ - ix) * sizeof (T));
+ reserve (1, false PASS_MEM_STAT);
+ quick_insert (ix, obj);
}
-/* Remove an element from the IXth position of VEC. Ordering of
- remaining elements is destroyed. This is an O(1) operation. */
+/* Remove an element from the IXth position of this vector. Ordering of
+ remaining elements is preserved. This is an O(N) operation due to
+ a memmove. */
-template<typename T>
-void
-vec_t<T>::unordered_remove (unsigned ix VEC_CHECK_DECL)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::ordered_remove (unsigned ix)
{
- VEC_ASSERT (ix < prefix_.num_, "remove", T, base);
- vec_[ix] = vec_[--prefix_.num_];
+ vec_->ordered_remove (ix);
}
-/* Remove LEN elements starting at the IXth. Ordering is retained.
- This is an O(N) operation due to memmove. */
+/* Remove an element from the IXth position of this vector. Ordering
+ of remaining elements is destroyed. This is an O(1) operation. */
-template<typename T>
-void
-vec_t<T>::block_remove (unsigned ix, unsigned len VEC_CHECK_DECL)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::unordered_remove (unsigned ix)
{
- VEC_ASSERT (ix + len <= prefix_.num_, "block_remove", T, base);
- T *slot = &vec_[ix];
- prefix_.num_ -= len;
- memmove (slot, slot + len, (prefix_.num_ - ix) * sizeof (T));
+ vec_->unordered_remove (ix);
}
-/* Sort the contents of V with qsort. Use CMP as the comparison function. */
-#define VEC_qsort(T,V,CMP) \
- qsort (VEC_address (T, V), VEC_length (T, V), sizeof (T), CMP)
+/* Remove LEN elements starting at the IXth. Ordering is retained.
+ This is an O(N) operation due to memmove. */
-/* Find and return the first position in which OBJ could be inserted
- without changing the ordering of this vector. LESSTHAN is a
- function that returns true if the first argument is strictly less
- than the second. */
-
-template<typename T>
-unsigned
-vec_t<T>::lower_bound (T obj, bool (*lessthan)(const T &, const T &)) const
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::block_remove (unsigned ix, unsigned len)
{
- unsigned int len = VEC_length (T, this);
- unsigned int half, middle;
- unsigned int first = 0;
- while (len > 0)
- {
- half = len / 2;
- middle = first;
- middle += half;
- T middle_elem = (*this)[middle];
- if (lessthan (middle_elem, obj))
- {
- first = middle;
- ++first;
- len = len - half - 1;
- }
- else
- len = half;
- }
- return first;
+ vec_->block_remove (ix, len);
}
-void *vec_heap_o_reserve_1 (void *, int, size_t, size_t, bool MEM_STAT_DECL);
-void *vec_gc_o_reserve_1 (void *, int, size_t, size_t, bool MEM_STAT_DECL);
-
-/* Ensure there are at least RESERVE free slots in VEC_, growing
- exponentially. If RESERVE < 0 grow exactly, else grow
- exponentially. As a special case, if VEC_ is NULL, and RESERVE is
- 0, no vector will be created. */
+/* Sort the contents of this vector with qsort. CMP is the comparison
+ function to pass to qsort. */
-template<typename T>
-template<enum vec_allocation_t A>
-vec_t<T> *
-vec_t<T>::reserve (vec_t<T> *vec, int reserve MEM_STAT_DECL)
+template<typename T, typename A>
+inline void
+vec<T, A, vl_ptr>::qsort (int (*cmp) (const void *, const void *))
{
- void *res = NULL;
- size_t off = offsetof (vec_t<T>, vec_);
- size_t sz = sizeof (T);
-
- switch (A)
- {
- case gc:
- res = vec_gc_o_reserve_1 (vec, reserve, off, sz, false PASS_MEM_STAT);
- break;
- case heap:
- res = vec_heap_o_reserve_1 (vec, reserve, off, sz, false PASS_MEM_STAT);
- break;
- case stack:
- res = vec_stack_o_reserve (vec, reserve, off, sz PASS_MEM_STAT);
- break;
- }
-
- return static_cast <vec_t<T> *> (res);
+ if (vec_)
+ vec_->qsort (cmp);
}
-/* Ensure there are at least RESERVE free slots in VEC, growing
- exactly. If RESERVE < 0 grow exactly, else grow exponentially. As
- a special case, if VEC is NULL, and RESERVE is 0, no vector will be
- created. */
+/* Find and return the first position in which OBJ could be inserted
+ without changing the ordering of this vector. LESSTHAN is a
+ function that returns true if the first argument is strictly less
+ than the second. */
-template<typename T>
-template<enum vec_allocation_t A>
-vec_t<T> *
-vec_t<T>::reserve_exact (vec_t<T> *vec, int reserve MEM_STAT_DECL)
+template<typename T, typename A>
+inline unsigned
+vec<T, A, vl_ptr>::lower_bound (T obj, bool (*lessthan)(const T &, const T &)) const
{
- void *res = NULL;
- size_t off = sizeof (struct vec_prefix);
- size_t sz = sizeof (T);
-
- gcc_assert (offsetof (vec_t<T>, vec_) == sizeof (struct vec_prefix));
-
- switch (A)
- {
- case gc:
- res = vec_gc_o_reserve_1 (vec, reserve, off, sz, true PASS_MEM_STAT);
- break;
- case heap:
- res = vec_heap_o_reserve_1 (vec, reserve, off, sz, true PASS_MEM_STAT);
- break;
- case stack:
- res = vec_stack_o_reserve_exact (vec, reserve, off, sz PASS_MEM_STAT);
- break;
- }
-
- return static_cast <vec_t<T> *> (res);
+ return vec_ ? vec_->lower_bound (obj, lessthan) : 0;
}
-#endif /* GCC_VEC_H */
+#endif // GCC_VEC_H
diff --git a/gcc/vecir.h b/gcc/vecir.h
deleted file mode 100644
index e0d911a579e..00000000000
--- a/gcc/vecir.h
+++ /dev/null
@@ -1,57 +0,0 @@
-/* VEC types for basic types of the intermediate representations.
- Copyright (C) 2010 Free Software Foundation, Inc.
-
-This file is part of GCC.
-
-GCC is free software; you can redistribute it and/or modify it under
-the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 3, or (at your option) any later
-version.
-
-GCC is distributed in the hope that it will be useful, but WITHOUT ANY
-WARRANTY; without even the implied warranty of MERCHANTABILITY or
-FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
-for more details.
-
-You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING3. If not see
-<http://www.gnu.org/licenses/>. */
-
-#ifndef GCC_VECIR_H
-#define GCC_VECIR_H
-
-#ifndef GCC_CORETYPES_H
-#error "vecir.h must be included after coretypes.h"
-#endif
-
-/* A varray of trees. */
-DEF_VEC_P(tree);
-DEF_VEC_ALLOC_P(tree,gc);
-DEF_VEC_ALLOC_P(tree,heap);
-
-/* A varray of gimple statements. */
-DEF_VEC_P(gimple);
-DEF_VEC_ALLOC_P(gimple,heap);
-DEF_VEC_ALLOC_P(gimple,gc);
-
-/* A varray of pointers to gimple statements. */
-typedef gimple *gimple_p;
-DEF_VEC_P(gimple_p);
-DEF_VEC_ALLOC_P(gimple_p,heap);
-
-/* A varray gimple statement sequences. */
-DEF_VEC_P(gimple_seq);
-DEF_VEC_ALLOC_P(gimple_seq,gc);
-DEF_VEC_ALLOC_P(gimple_seq,heap);
-
-/* A varray of RTX objects. */
-DEF_VEC_P(rtx);
-DEF_VEC_ALLOC_P(rtx,heap);
-DEF_VEC_ALLOC_P(rtx,gc);
-
-/* A varray of call graph nodes. */
-typedef struct cgraph_node *cgraph_node_p;
-DEF_VEC_P (cgraph_node_p);
-DEF_VEC_ALLOC_P (cgraph_node_p, heap);
-
-#endif /* GCC_VECIR_H */
diff --git a/gcc/vecprim.h b/gcc/vecprim.h
deleted file mode 100644
index e9ccc52bcb0..00000000000
--- a/gcc/vecprim.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/* VEC types for primitive types
- Copyright (C) 2006, 2007, 2008 Free Software Foundation, Inc.
-
-This file is part of GCC.
-
-GCC is free software; you can redistribute it and/or modify it under
-the terms of the GNU General Public License as published by the Free
-Software Foundation; either version 3, or (at your option) any later
-version.
-
-GCC is distributed in the hope that it will be useful, but WITHOUT ANY
-WARRANTY; without even the implied warranty of MERCHANTABILITY or
-FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
-for more details.
-
-You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING3. If not see
-<http://www.gnu.org/licenses/>. */
-
-#ifndef GCC_VECPRIM_H
-#define GCC_VECPRIM_H
-
-DEF_VEC_I(char);
-DEF_VEC_ALLOC_I(char,heap);
-
-typedef unsigned char uchar;
-DEF_VEC_I(uchar);
-DEF_VEC_ALLOC_I(uchar,heap);
-DEF_VEC_ALLOC_I(uchar,gc);
-
-DEF_VEC_I(int);
-DEF_VEC_ALLOC_I(int,heap);
-
-DEF_VEC_I(unsigned);
-DEF_VEC_ALLOC_I(unsigned,heap);
-
-#endif /* GCC_VECPRIM_H */
diff --git a/gcc/vmsdbgout.c b/gcc/vmsdbgout.c
index 5472ac54ee9..cb8479ff24a 100644
--- a/gcc/vmsdbgout.c
+++ b/gcc/vmsdbgout.c
@@ -101,11 +101,9 @@ static unsigned int file_info_table_in_use;
#define FILE_TABLE_INCREMENT 64
typedef char *char_p;
-DEF_VEC_P(char_p);
-DEF_VEC_ALLOC_P(char_p,heap);
-static VEC(char_p,heap) *funcnam_table;
-static VEC(unsigned,heap) *funcnum_table;
+static vec<char_p> funcnam_table;
+static vec<unsigned> funcnum_table;
#define FUNC_TABLE_INITIAL 256
/* Local pointer to the name of the main input file. Initialized in
@@ -641,7 +639,7 @@ write_rtnbeg (int rtnnum, int dosizeonly)
DST_ROUTINE_BEGIN rtnbeg;
DST_PROLOG prolog;
- rtnname = VEC_index (char_p, funcnam_table, rtnnum);
+ rtnname = funcnam_table[rtnnum];
rtnnamelen = strlen (rtnname);
rtnentryname = concat (rtnname, "..en", NULL);
@@ -714,7 +712,7 @@ write_rtnbeg (int rtnnum, int dosizeonly)
ASM_GENERATE_INTERNAL_LABEL
(label, FUNC_PROLOG_LABEL,
- VEC_index (unsigned, funcnum_table, rtnnum));
+ funcnum_table[rtnnum]);
totsize += write_debug_addr (label, "prolog breakpoint addr",
dosizeonly);
}
@@ -748,10 +746,10 @@ write_rtnend (int rtnnum, int dosizeonly)
ASM_GENERATE_INTERNAL_LABEL
(label1, FUNC_BEGIN_LABEL,
- VEC_index (unsigned, funcnum_table, rtnnum));
+ funcnum_table[rtnnum]);
ASM_GENERATE_INTERNAL_LABEL
(label2, FUNC_END_LABEL,
- VEC_index (unsigned, funcnum_table, rtnnum));
+ funcnum_table[rtnnum]);
totsize += write_debug_delta4 (label2, label1, "routine size", dosizeonly);
return totsize;
@@ -1264,9 +1262,8 @@ vmsdbgout_begin_function (tree decl)
(*dwarf2_debug_hooks.begin_function) (decl);
/* Add the new entry to the end of the function name table. */
- VEC_safe_push (char_p, heap, funcnam_table, xstrdup (name));
- VEC_safe_push (unsigned, heap, funcnum_table,
- current_function_funcdef_no);
+ funcnam_table.safe_push (xstrdup (name));
+ funcnum_table.safe_push (current_function_funcdef_no);
}
static char fullname_buff [4096];
@@ -1447,8 +1444,8 @@ vmsdbgout_init (const char *filename)
/* Skip the first entry - file numbers begin at 1. */
file_info_table_in_use = 1;
- funcnam_table = VEC_alloc (char_p, heap, FUNC_TABLE_INITIAL);
- funcnum_table = VEC_alloc (unsigned, heap, FUNC_TABLE_INITIAL);
+ funcnam_table.create (FUNC_TABLE_INITIAL);
+ funcnum_table.create (FUNC_TABLE_INITIAL);
/* Allocate the initial hunk of the line_info_table. */
line_info_table = XCNEWVEC (dst_line_info_entry, LINE_INFO_TABLE_INCREMENT);
@@ -1564,7 +1561,7 @@ vmsdbgout_finish (const char *filename ATTRIBUTE_UNUSED)
ASM_OUTPUT_ALIGN (asm_out_file, 0);
totsize = write_modbeg (1);
- FOR_EACH_VEC_ELT (unsigned, funcnum_table, i, ifunc)
+ FOR_EACH_VEC_ELT (funcnum_table, i, ifunc)
{
totsize += write_rtnbeg (i, 1);
totsize += write_rtnend (i, 1);
@@ -1572,7 +1569,7 @@ vmsdbgout_finish (const char *filename ATTRIBUTE_UNUSED)
totsize += write_pclines (1);
write_modbeg (0);
- FOR_EACH_VEC_ELT (unsigned, funcnum_table, i, ifunc)
+ FOR_EACH_VEC_ELT (funcnum_table, i, ifunc)
{
write_rtnbeg (i, 0);
write_rtnend (i, 0);