[8/8] aarch64: update for move of FINAL and OVERRIDE
Commit Message
gcc/ChangeLog:
* config/aarch64/aarch64-sve-builtins-base.cc: Replace uses of
"FINAL" and "OVERRIDE" with "final" and "override".
* config/aarch64/aarch64-sve-builtins-functions.h: Likewise.
* config/aarch64/aarch64-sve-builtins-shapes.cc: Likewise.
* config/aarch64/aarch64-sve-builtins-sve2.cc: Likewise.
Signed-off-by: David Malcolm <dmalcolm@redhat.com>
---
.../aarch64/aarch64-sve-builtins-base.cc | 260 +++++------
.../aarch64/aarch64-sve-builtins-functions.h | 48 +-
.../aarch64/aarch64-sve-builtins-shapes.cc | 416 +++++++++---------
.../aarch64/aarch64-sve-builtins-sve2.cc | 44 +-
4 files changed, 384 insertions(+), 384 deletions(-)
Comments
David Malcolm via Gcc-patches <gcc-patches@gcc.gnu.org> writes:
> gcc/ChangeLog:
> * config/aarch64/aarch64-sve-builtins-base.cc: Replace uses of
> "FINAL" and "OVERRIDE" with "final" and "override".
> * config/aarch64/aarch64-sve-builtins-functions.h: Likewise.
> * config/aarch64/aarch64-sve-builtins-shapes.cc: Likewise.
> * config/aarch64/aarch64-sve-builtins-sve2.cc: Likewise.
OK, thanks.
It was a pity that this work predated the move to C++11 by
just one release :-)
Richard
> Signed-off-by: David Malcolm <dmalcolm@redhat.com>
> ---
> .../aarch64/aarch64-sve-builtins-base.cc | 260 +++++------
> .../aarch64/aarch64-sve-builtins-functions.h | 48 +-
> .../aarch64/aarch64-sve-builtins-shapes.cc | 416 +++++++++---------
> .../aarch64/aarch64-sve-builtins-sve2.cc | 44 +-
> 4 files changed, 384 insertions(+), 384 deletions(-)
>
> diff --git a/gcc/config/aarch64/aarch64-sve-builtins-base.cc b/gcc/config/aarch64/aarch64-sve-builtins-base.cc
> index c24c0548724..bee410929bd 100644
> --- a/gcc/config/aarch64/aarch64-sve-builtins-base.cc
> +++ b/gcc/config/aarch64/aarch64-sve-builtins-base.cc
> @@ -148,7 +148,7 @@ class svabd_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* The integer operations are represented as the subtraction of the
> minimum from the maximum, with the signedness of the instruction
> @@ -179,7 +179,7 @@ public:
> CONSTEXPR svac_impl (int unspec) : m_unspec (unspec) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.add_ptrue_hint (0, e.gp_mode (0));
> insn_code icode = code_for_aarch64_pred_fac (m_unspec, e.vector_mode (0));
> @@ -194,7 +194,7 @@ class svadda_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Put the predicate last, as required by mask_fold_left_plus_optab. */
> e.rotate_inputs_left (0, 3);
> @@ -211,7 +211,7 @@ public:
> CONSTEXPR svadr_bhwd_impl (unsigned int shift) : m_shift (shift) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = GET_MODE (e.args[0]);
> if (m_shift == 0)
> @@ -231,7 +231,7 @@ class svbic_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Convert svbic of a constant into svand of its inverse. */
> if (CONST_INT_P (e.args[2]))
> @@ -261,7 +261,7 @@ public:
> CONSTEXPR svbrk_binary_impl (int unspec) : m_unspec (unspec) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return e.use_exact_insn (code_for_aarch64_brk (m_unspec));
> }
> @@ -277,7 +277,7 @@ public:
> CONSTEXPR svbrk_unary_impl (int unspec) : m_unspec (unspec) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return e.use_cond_insn (code_for_aarch64_brk (m_unspec));
> }
> @@ -290,7 +290,7 @@ class svcadd_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Convert the rotation amount into a specific unspec. */
> int rot = INTVAL (e.args.pop ());
> @@ -311,7 +311,7 @@ public:
> CONSTEXPR svclast_impl (int unspec) : m_unspec (unspec) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Match the fold_extract_optab order. */
> std::swap (e.args[0], e.args[1]);
> @@ -332,7 +332,7 @@ class svcmla_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Convert the rotation amount into a specific unspec. */
> int rot = INTVAL (e.args.pop ());
> @@ -355,7 +355,7 @@ class svcmla_lane_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Convert the rotation amount into a specific unspec. */
> int rot = INTVAL (e.args.pop ());
> @@ -384,7 +384,7 @@ public:
> : m_code (code), m_unspec_for_fp (unspec_for_fp) {}
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> tree pg = gimple_call_arg (f.call, 0);
> tree rhs1 = gimple_call_arg (f.call, 1);
> @@ -406,7 +406,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = e.vector_mode (0);
>
> @@ -442,7 +442,7 @@ public:
> m_unspec_for_uint (unspec_for_uint) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = e.vector_mode (0);
> bool unsigned_p = e.type_suffix (0).unsigned_p;
> @@ -480,7 +480,7 @@ class svcmpuo_impl : public quiet<function_base>
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.add_ptrue_hint (0, e.gp_mode (0));
> return e.use_exact_insn (code_for_aarch64_pred_fcmuo (e.vector_mode (0)));
> @@ -491,7 +491,7 @@ class svcnot_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = e.vector_mode (0);
> if (e.pred == PRED_x)
> @@ -514,7 +514,7 @@ public:
> CONSTEXPR svcnt_bhwd_impl (machine_mode ref_mode) : m_ref_mode (ref_mode) {}
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> tree count = build_int_cstu (TREE_TYPE (f.lhs),
> GET_MODE_NUNITS (m_ref_mode));
> @@ -522,7 +522,7 @@ public:
> }
>
> rtx
> - expand (function_expander &) const OVERRIDE
> + expand (function_expander &) const override
> {
> return gen_int_mode (GET_MODE_NUNITS (m_ref_mode), DImode);
> }
> @@ -539,7 +539,7 @@ public:
> : svcnt_bhwd_impl (ref_mode) {}
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> tree pattern_arg = gimple_call_arg (f.call, 0);
> aarch64_svpattern pattern = (aarch64_svpattern) tree_to_shwi (pattern_arg);
> @@ -562,7 +562,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> unsigned int elements_per_vq = 128 / GET_MODE_UNIT_BITSIZE (m_ref_mode);
> e.args.quick_push (gen_int_mode (elements_per_vq, DImode));
> @@ -575,7 +575,7 @@ class svcntp_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = e.vector_mode (0);
> e.add_ptrue_hint (0, mode);
> @@ -591,7 +591,7 @@ public:
> : quiet<multi_vector_function> (vectors_per_tuple) {}
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> unsigned int nargs = gimple_call_num_args (f.call);
> tree lhs_type = TREE_TYPE (f.lhs);
> @@ -621,7 +621,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> rtx lhs_tuple = e.get_nonoverlapping_reg_target ();
>
> @@ -643,7 +643,7 @@ class svcvt_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode0 = e.vector_mode (0);
> machine_mode mode1 = e.vector_mode (1);
> @@ -706,7 +706,7 @@ class svdot_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* In the optab, the multiplication operands come before the accumulator
> operand. The optab is keyed off the multiplication mode. */
> @@ -729,7 +729,7 @@ public:
> unspec_for_float) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Use the same ordering as the dot_prod_optab, with the
> accumulator last. */
> @@ -744,7 +744,7 @@ class svdup_impl : public quiet<function_base>
> {
> public:
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> tree vec_type = TREE_TYPE (f.lhs);
> tree rhs = gimple_call_arg (f.call, f.pred == PRED_none ? 0 : 1);
> @@ -784,7 +784,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> if (e.pred == PRED_none || e.pred == PRED_x)
> /* There's no benefit to using predicated instructions for _x here. */
> @@ -812,7 +812,7 @@ class svdup_lane_impl : public quiet<function_base>
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* The native DUP lane has an index range of 64 bytes. */
> machine_mode mode = e.vector_mode (0);
> @@ -829,7 +829,7 @@ class svdupq_impl : public quiet<function_base>
> {
> public:
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> tree vec_type = TREE_TYPE (f.lhs);
> unsigned int nargs = gimple_call_num_args (f.call);
> @@ -851,7 +851,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = e.vector_mode (0);
> unsigned int elements_per_vq = e.args.length ();
> @@ -900,7 +900,7 @@ class svdupq_lane_impl : public quiet<function_base>
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = e.vector_mode (0);
> rtx index = e.args[1];
> @@ -964,7 +964,7 @@ public:
> : m_from_mode (from_mode) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> if (e.type_suffix (0).unsigned_p)
> {
> @@ -1006,7 +1006,7 @@ public:
> : quiet<multi_vector_function> (vectors_per_tuple) {}
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> /* Fold into a normal gimple component access. */
> tree rhs_tuple = gimple_call_arg (f.call, 0);
> @@ -1020,7 +1020,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Fold the access into a subreg rvalue. */
> return simplify_gen_subreg (e.vector_mode (0), e.args[0],
> @@ -1033,7 +1033,7 @@ class svindex_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return e.use_exact_insn (e.direct_optab_handler (vec_series_optab));
> }
> @@ -1043,7 +1043,7 @@ class svinsr_impl : public quiet<function_base>
> {
> public:
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> gcall *new_call = gimple_build_call_internal (IFN_VEC_SHL_INSERT, 2,
> gimple_call_arg (f.call, 0),
> @@ -1053,7 +1053,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> insn_code icode = direct_optab_handler (vec_shl_insert_optab,
> e.vector_mode (0));
> @@ -1068,7 +1068,7 @@ public:
> CONSTEXPR svlast_impl (int unspec) : m_unspec (unspec) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return e.use_exact_insn (code_for_extract (m_unspec, e.vector_mode (0)));
> }
> @@ -1081,13 +1081,13 @@ class svld1_impl : public full_width_access
> {
> public:
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_READ_MEMORY;
> }
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> tree vectype = f.vector_type (0);
>
> @@ -1105,7 +1105,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> insn_code icode = convert_optab_handler (maskload_optab,
> e.vector_mode (0), e.gp_mode (0));
> @@ -1121,7 +1121,7 @@ public:
> : extending_load (memory_type) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> insn_code icode = code_for_aarch64_load (UNSPEC_LD1_SVE, extend_rtx_code (),
> e.vector_mode (0),
> @@ -1134,13 +1134,13 @@ class svld1_gather_impl : public full_width_access
> {
> public:
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_READ_MEMORY;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.prepare_gather_address_operands (1);
> /* Put the predicate last, as required by mask_gather_load_optab. */
> @@ -1161,7 +1161,7 @@ public:
> : extending_load (memory_type) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.prepare_gather_address_operands (1);
> /* Put the predicate last, since the extending gathers use the same
> @@ -1180,13 +1180,13 @@ class load_replicate : public function_base
> {
> public:
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_READ_MEMORY;
> }
>
> tree
> - memory_scalar_type (const function_instance &fi) const OVERRIDE
> + memory_scalar_type (const function_instance &fi) const override
> {
> return fi.scalar_type (0);
> }
> @@ -1196,13 +1196,13 @@ class svld1rq_impl : public load_replicate
> {
> public:
> machine_mode
> - memory_vector_mode (const function_instance &fi) const OVERRIDE
> + memory_vector_mode (const function_instance &fi) const override
> {
> return aarch64_vq_mode (GET_MODE_INNER (fi.vector_mode (0))).require ();
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> insn_code icode = code_for_aarch64_sve_ld1rq (e.vector_mode (0));
> return e.use_contiguous_load_insn (icode);
> @@ -1213,13 +1213,13 @@ class svld1ro_impl : public load_replicate
> {
> public:
> machine_mode
> - memory_vector_mode (const function_instance &) const OVERRIDE
> + memory_vector_mode (const function_instance &) const override
> {
> return OImode;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> insn_code icode = code_for_aarch64_sve_ld1ro (e.vector_mode (0));
> return e.use_contiguous_load_insn (icode);
> @@ -1234,13 +1234,13 @@ public:
> : full_width_access (vectors_per_tuple) {}
>
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_READ_MEMORY;
> }
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> tree tuple_type = TREE_TYPE (f.lhs);
> tree vectype = f.vector_type (0);
> @@ -1275,7 +1275,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode tuple_mode = TYPE_MODE (TREE_TYPE (e.call_expr));
> insn_code icode = convert_optab_handler (vec_mask_load_lanes_optab,
> @@ -1288,13 +1288,13 @@ class svldff1_gather_impl : public full_width_access
> {
> public:
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_READ_MEMORY | CP_READ_FFR | CP_WRITE_FFR;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* See the block comment in aarch64-sve.md for details about the
> FFR handling. */
> @@ -1317,7 +1317,7 @@ public:
> : extending_load (memory_type) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* See the block comment in aarch64-sve.md for details about the
> FFR handling. */
> @@ -1340,13 +1340,13 @@ class svldnt1_impl : public full_width_access
> {
> public:
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_READ_MEMORY;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> insn_code icode = code_for_aarch64_ldnt1 (e.vector_mode (0));
> return e.use_contiguous_load_insn (icode);
> @@ -1360,13 +1360,13 @@ public:
> CONSTEXPR svldxf1_impl (int unspec) : m_unspec (unspec) {}
>
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_READ_MEMORY | CP_READ_FFR | CP_WRITE_FFR;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* See the block comment in aarch64-sve.md for details about the
> FFR handling. */
> @@ -1388,13 +1388,13 @@ public:
> : extending_load (memory_type), m_unspec (unspec) {}
>
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_READ_MEMORY | CP_READ_FFR | CP_WRITE_FFR;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* See the block comment in aarch64-sve.md for details about the
> FFR handling. */
> @@ -1414,7 +1414,7 @@ class svlen_impl : public quiet<function_base>
> {
> public:
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> /* The argument only exists for its type. */
> tree rhs_type = TREE_TYPE (gimple_call_arg (f.call, 0));
> @@ -1424,7 +1424,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* The argument only exists for its type. */
> return gen_int_mode (GET_MODE_NUNITS (e.vector_mode (0)), DImode);
> @@ -1435,7 +1435,7 @@ class svmad_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return expand_mad (e);
> }
> @@ -1445,7 +1445,7 @@ class svmla_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Put the accumulator at the end (argument 3), but keep it as the
> merge input for _m functions. */
> @@ -1458,7 +1458,7 @@ class svmla_lane_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> if (e.type_suffix (0).integer_p)
> {
> @@ -1473,7 +1473,7 @@ class svmls_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Put the accumulator at the end (argument 3), but keep it as the
> merge input for _m functions. */
> @@ -1486,7 +1486,7 @@ class svmov_impl : public function_base
> {
> public:
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> return gimple_build_assign (f.lhs, BIT_AND_EXPR,
> gimple_call_arg (f.call, 0),
> @@ -1494,7 +1494,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* The canonical form for the assembler alias "MOV Pa.B, Pb/Z, Pc.B"
> is "AND Pa.B, Pb/Z, Pc.B, Pc.B". */
> @@ -1508,7 +1508,7 @@ class svmls_lane_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> if (e.type_suffix (0).integer_p)
> {
> @@ -1523,7 +1523,7 @@ class svmmla_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> insn_code icode;
> if (e.type_suffix (0).integer_p)
> @@ -1543,7 +1543,7 @@ class svmsb_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return expand_msb (e);
> }
> @@ -1553,7 +1553,7 @@ class svnand_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> gcc_assert (e.pred == PRED_z);
> return e.use_exact_insn (CODE_FOR_aarch64_pred_nandvnx16bi_z);
> @@ -1564,7 +1564,7 @@ class svnor_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> gcc_assert (e.pred == PRED_z);
> return e.use_exact_insn (CODE_FOR_aarch64_pred_norvnx16bi_z);
> @@ -1577,7 +1577,7 @@ public:
> CONSTEXPR svnot_impl () : rtx_code_function (NOT, NOT, -1) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> if (e.type_suffix_ids[0] == TYPE_SUFFIX_b)
> {
> @@ -1595,7 +1595,7 @@ class svorn_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> gcc_assert (e.pred == PRED_z);
> return e.use_exact_insn (CODE_FOR_aarch64_pred_ornvnx16bi_z);
> @@ -1606,13 +1606,13 @@ class svpfalse_impl : public function_base
> {
> public:
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> return f.fold_to_pfalse ();
> }
>
> rtx
> - expand (function_expander &) const OVERRIDE
> + expand (function_expander &) const override
> {
> return CONST0_RTX (VNx16BImode);
> }
> @@ -1625,7 +1625,7 @@ public:
> CONSTEXPR svpfirst_svpnext_impl (int unspec) : m_unspec (unspec) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = e.vector_mode (0);
> e.add_ptrue_hint (0, mode);
> @@ -1643,13 +1643,13 @@ public:
> CONSTEXPR svprf_bhwd_impl (machine_mode mode) : m_mode (mode) {}
>
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_PREFETCH_MEMORY;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.prepare_prefetch_operands ();
> insn_code icode = code_for_aarch64_sve_prefetch (m_mode);
> @@ -1667,19 +1667,19 @@ public:
> CONSTEXPR svprf_bhwd_gather_impl (machine_mode mode) : m_mode (mode) {}
>
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_PREFETCH_MEMORY;
> }
>
> machine_mode
> - memory_vector_mode (const function_instance &) const OVERRIDE
> + memory_vector_mode (const function_instance &) const override
> {
> return m_mode;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.prepare_prefetch_operands ();
> e.prepare_gather_address_operands (1);
> @@ -1705,7 +1705,7 @@ public:
> CONSTEXPR svptest_impl (rtx_code compare) : m_compare (compare) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* See whether GP is an exact ptrue for some predicate mode;
> i.e. whether converting the GP to that mode will not drop
> @@ -1751,13 +1751,13 @@ class svptrue_impl : public function_base
> {
> public:
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> return f.fold_to_ptrue ();
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return aarch64_ptrue_all (e.type_suffix (0).element_bytes);
> }
> @@ -1767,7 +1767,7 @@ class svptrue_pat_impl : public function_base
> {
> public:
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> tree pattern_arg = gimple_call_arg (f.call, 0);
> aarch64_svpattern pattern = (aarch64_svpattern) tree_to_shwi (pattern_arg);
> @@ -1788,7 +1788,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* In rtl, the predicate is represented as the constant:
>
> @@ -1816,7 +1816,7 @@ public:
> {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Treat non-_pat functions in the same way as _pat functions with
> an SV_ALL argument. */
> @@ -1877,7 +1877,7 @@ public:
> {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> rtx_code code = (e.type_suffix (0).unsigned_p
> ? m_code_for_uint
> @@ -1908,13 +1908,13 @@ class svrdffr_impl : public function_base
> {
> public:
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_READ_FFR;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* See the block comment in aarch64-sve.md for details about the
> FFR handling. */
> @@ -1931,7 +1931,7 @@ class svreinterpret_impl : public quiet<function_base>
> {
> public:
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> /* Punt to rtl if the effect of the reinterpret on registers does not
> conform to GCC's endianness model. */
> @@ -1947,7 +1947,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = e.vector_mode (0);
> return e.use_exact_insn (code_for_aarch64_sve_reinterpret (mode));
> @@ -1958,7 +1958,7 @@ class svrev_impl : public permute
> {
> public:
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> /* Punt for now on _b16 and wider; we'd need more complex evpc logic
> to rerecognize the result. */
> @@ -1974,7 +1974,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return e.use_exact_insn (code_for_aarch64_sve_rev (e.vector_mode (0)));
> }
> @@ -1984,7 +1984,7 @@ class svsel_impl : public quiet<function_base>
> {
> public:
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> /* svsel corresponds exactly to VEC_COND_EXPR. */
> gimple_seq stmts = NULL;
> @@ -1996,7 +1996,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* svsel (cond, truev, falsev) is vcond_mask (truev, falsev, cond). */
> e.rotate_inputs_left (0, 3);
> @@ -2015,7 +2015,7 @@ public:
> : quiet<multi_vector_function> (vectors_per_tuple) {}
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> tree rhs_tuple = gimple_call_arg (f.call, 0);
> tree index = gimple_call_arg (f.call, 1);
> @@ -2042,7 +2042,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> rtx rhs_tuple = e.args[0];
> unsigned int index = INTVAL (e.args[1]);
> @@ -2065,13 +2065,13 @@ class svsetffr_impl : public function_base
> {
> public:
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_WRITE_FFR;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.args.quick_push (CONSTM1_RTX (VNx16BImode));
> return e.use_exact_insn (CODE_FOR_aarch64_wrffr);
> @@ -2082,13 +2082,13 @@ class svst1_impl : public full_width_access
> {
> public:
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_WRITE_MEMORY;
> }
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> tree vectype = f.vector_type (0);
>
> @@ -2105,7 +2105,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> insn_code icode = convert_optab_handler (maskstore_optab,
> e.vector_mode (0), e.gp_mode (0));
> @@ -2117,13 +2117,13 @@ class svst1_scatter_impl : public full_width_access
> {
> public:
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_WRITE_MEMORY;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.prepare_gather_address_operands (1);
> /* Put the predicate last, as required by mask_scatter_store_optab. */
> @@ -2144,7 +2144,7 @@ public:
> : truncating_store (to_mode) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.prepare_gather_address_operands (1);
> /* Put the predicate last, since the truncating scatters use the same
> @@ -2164,7 +2164,7 @@ public:
> : truncating_store (to_mode) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> insn_code icode = code_for_aarch64_store_trunc (e.memory_vector_mode (),
> e.vector_mode (0));
> @@ -2180,13 +2180,13 @@ public:
> : full_width_access (vectors_per_tuple) {}
>
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_WRITE_MEMORY;
> }
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> tree vectype = f.vector_type (0);
>
> @@ -2208,7 +2208,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode tuple_mode = GET_MODE (e.args.last ());
> insn_code icode = convert_optab_handler (vec_mask_store_lanes_optab,
> @@ -2221,13 +2221,13 @@ class svstnt1_impl : public full_width_access
> {
> public:
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_WRITE_MEMORY;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> insn_code icode = code_for_aarch64_stnt1 (e.vector_mode (0));
> return e.use_contiguous_store_insn (icode);
> @@ -2241,7 +2241,7 @@ public:
> : rtx_code_function (MINUS, MINUS, UNSPEC_COND_FSUB) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Canonicalize subtractions of constants to additions. */
> machine_mode mode = e.vector_mode (0);
> @@ -2256,7 +2256,7 @@ class svtbl_impl : public permute
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return e.use_exact_insn (code_for_aarch64_sve_tbl (e.vector_mode (0)));
> }
> @@ -2270,7 +2270,7 @@ public:
> : binary_permute (base ? UNSPEC_TRN2 : UNSPEC_TRN1), m_base (base) {}
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> /* svtrn1: { 0, nelts, 2, nelts + 2, 4, nelts + 4, ... }
> svtrn2: as for svtrn1, but with 1 added to each index. */
> @@ -2296,7 +2296,7 @@ public:
> : quiet<multi_vector_function> (vectors_per_tuple) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> rtx target = e.get_reg_target ();
> emit_clobber (copy_rtx (target));
> @@ -2311,7 +2311,7 @@ public:
> CONSTEXPR svunpk_impl (bool high_p) : m_high_p (high_p) {}
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> /* Don't fold the predicate ops, since every bit of the svbool_t
> result is significant. */
> @@ -2326,7 +2326,7 @@ public:
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = GET_MODE (e.args[0]);
> unsigned int unpacku = m_high_p ? UNSPEC_UNPACKUHI : UNSPEC_UNPACKULO;
> @@ -2353,7 +2353,7 @@ public:
> CONSTEXPR svusdot_impl (bool su) : m_su (su) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* The implementation of the ACLE function svsudot (for the non-lane
> version) is through the USDOT instruction but with the second and third
> @@ -2382,7 +2382,7 @@ public:
> : binary_permute (base ? UNSPEC_UZP2 : UNSPEC_UZP1), m_base (base) {}
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> /* svuzp1: { 0, 2, 4, 6, ... }
> svuzp2: { 1, 3, 5, 7, ... }. */
> @@ -2456,7 +2456,7 @@ public:
> }
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> if (f.type_suffix (1).unsigned_p)
> return fold_type<poly_uint64> (f);
> @@ -2472,13 +2472,13 @@ class svwrffr_impl : public function_base
> {
> public:
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_WRITE_FFR;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return e.use_exact_insn (CODE_FOR_aarch64_wrffr);
> }
> @@ -2492,7 +2492,7 @@ public:
> : binary_permute (base ? UNSPEC_ZIP2 : UNSPEC_ZIP1), m_base (base) {}
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> /* svzip1: { 0, nelts, 1, nelts + 1, 2, nelts + 2, ... }
> svzip2: as for svzip1, but with nelts / 2 added to each index. */
> diff --git a/gcc/config/aarch64/aarch64-sve-builtins-functions.h b/gcc/config/aarch64/aarch64-sve-builtins-functions.h
> index 9d346b6ffa7..b8a86e33d5c 100644
> --- a/gcc/config/aarch64/aarch64-sve-builtins-functions.h
> +++ b/gcc/config/aarch64/aarch64-sve-builtins-functions.h
> @@ -44,7 +44,7 @@ public:
> : T (t1, t2, t3) {}
>
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return 0;
> }
> @@ -59,7 +59,7 @@ public:
> : m_vectors_per_tuple (vectors_per_tuple) {}
>
> unsigned int
> - vectors_per_tuple () const OVERRIDE
> + vectors_per_tuple () const override
> {
> return m_vectors_per_tuple;
> }
> @@ -78,13 +78,13 @@ public:
> : multi_vector_function (vectors_per_tuple) {}
>
> tree
> - memory_scalar_type (const function_instance &fi) const OVERRIDE
> + memory_scalar_type (const function_instance &fi) const override
> {
> return fi.scalar_type (0);
> }
>
> machine_mode
> - memory_vector_mode (const function_instance &fi) const OVERRIDE
> + memory_vector_mode (const function_instance &fi) const override
> {
> machine_mode mode = fi.vector_mode (0);
> if (m_vectors_per_tuple != 1)
> @@ -103,19 +103,19 @@ public:
> : m_memory_type (memory_type) {}
>
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_READ_MEMORY;
> }
>
> tree
> - memory_scalar_type (const function_instance &) const OVERRIDE
> + memory_scalar_type (const function_instance &) const override
> {
> return scalar_types[type_suffixes[m_memory_type].vector_type];
> }
>
> machine_mode
> - memory_vector_mode (const function_instance &fi) const OVERRIDE
> + memory_vector_mode (const function_instance &fi) const override
> {
> machine_mode mem_mode = type_suffixes[m_memory_type].vector_mode;
> machine_mode reg_mode = fi.vector_mode (0);
> @@ -145,13 +145,13 @@ public:
> CONSTEXPR truncating_store (scalar_int_mode to_mode) : m_to_mode (to_mode) {}
>
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_WRITE_MEMORY;
> }
>
> tree
> - memory_scalar_type (const function_instance &fi) const OVERRIDE
> + memory_scalar_type (const function_instance &fi) const override
> {
> /* In truncating stores, the signedness of the memory element is defined
> to be the same as the signedness of the vector element. The signedness
> @@ -163,7 +163,7 @@ public:
> }
>
> machine_mode
> - memory_vector_mode (const function_instance &fi) const OVERRIDE
> + memory_vector_mode (const function_instance &fi) const override
> {
> poly_uint64 nunits = GET_MODE_NUNITS (fi.vector_mode (0));
> return aarch64_sve_data_mode (m_to_mode, nunits).require ();
> @@ -205,7 +205,7 @@ public:
> : rtx_code_function_base (code_for_sint, code_for_uint, unspec_for_fp) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return e.map_to_rtx_codes (m_code_for_sint, m_code_for_uint,
> m_unspec_for_fp);
> @@ -225,7 +225,7 @@ public:
> : rtx_code_function_base (code_for_sint, code_for_uint, unspec_for_fp) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Rotate the inputs into their normal order, but continue to make _m
> functions merge with what was originally the first vector argument. */
> @@ -279,7 +279,7 @@ public:
> {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return e.map_to_unspecs (m_unspec_for_sint, m_unspec_for_uint,
> m_unspec_for_fp);
> @@ -301,7 +301,7 @@ public:
> {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Rotate the inputs into their normal order, but continue to make _m
> functions merge with what was originally the first vector argument. */
> @@ -329,7 +329,7 @@ public:
> {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return e.use_exact_insn (CODE (unspec_for (e), e.vector_mode (0)));
> }
> @@ -386,7 +386,7 @@ public:
> {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> int unspec = unspec_for (e);
> insn_code icode;
> @@ -421,7 +421,7 @@ public:
> {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> int unspec = unspec_for (e);
> insn_code icode;
> @@ -451,7 +451,7 @@ class code_for_mode_function : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return e.use_exact_insn (CODE_FOR_MODE (e.vector_mode (N)));
> }
> @@ -477,7 +477,7 @@ public:
> CONSTEXPR fixed_insn_function (insn_code code) : m_code (code) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return e.use_exact_insn (m_code);
> }
> @@ -519,7 +519,7 @@ public:
> CONSTEXPR binary_permute (int unspec) : m_unspec (unspec) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> insn_code icode = code_for_aarch64_sve (m_unspec, e.vector_mode (0));
> return e.use_exact_insn (icode);
> @@ -547,7 +547,7 @@ public:
> {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = e.vector_mode (0);
> int unspec = (!e.type_suffix (0).integer_p ? m_unspec_for_fp
> @@ -576,7 +576,7 @@ public:
> : m_code (code), m_wide_unspec (wide_unspec) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = e.vector_mode (0);
> machine_mode elem_mode = GET_MODE_INNER (mode);
> @@ -610,7 +610,7 @@ public:
> CONSTEXPR unary_count (rtx_code code) : m_code (code) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* The md patterns treat the operand as an integer. */
> machine_mode mode = aarch64_sve_int_mode (e.vector_mode (0));
> @@ -636,7 +636,7 @@ public:
> {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Suffix 0 determines the predicate mode, suffix 1 determines the
> scalar mode and signedness. */
> diff --git a/gcc/config/aarch64/aarch64-sve-builtins-shapes.cc b/gcc/config/aarch64/aarch64-sve-builtins-shapes.cc
> index f57f926980d..8e26bd8a60f 100644
> --- a/gcc/config/aarch64/aarch64-sve-builtins-shapes.cc
> +++ b/gcc/config/aarch64/aarch64-sve-builtins-shapes.cc
> @@ -454,13 +454,13 @@ long_type_suffix (function_resolver &r, type_suffix_index type)
> struct nonoverloaded_base : public function_shape
> {
> bool
> - explicit_type_suffix_p (unsigned int) const OVERRIDE
> + explicit_type_suffix_p (unsigned int) const override
> {
> return true;
> }
>
> tree
> - resolve (function_resolver &) const OVERRIDE
> + resolve (function_resolver &) const override
> {
> gcc_unreachable ();
> }
> @@ -472,7 +472,7 @@ template<unsigned int EXPLICIT_MASK>
> struct overloaded_base : public function_shape
> {
> bool
> - explicit_type_suffix_p (unsigned int i) const OVERRIDE
> + explicit_type_suffix_p (unsigned int i) const override
> {
> return (EXPLICIT_MASK >> i) & 1;
> }
> @@ -484,7 +484,7 @@ struct adr_base : public overloaded_base<0>
> /* The function takes two arguments: a vector base and a vector displacement
> (either an index or an offset). Resolve based on them both. */
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> mode_suffix_index mode;
> @@ -503,7 +503,7 @@ template<type_class_index CLASS = function_resolver::SAME_TYPE_CLASS>
> struct binary_imm_narrowb_base : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_n);
> STATIC_ASSERT (CLASS == function_resolver::SAME_TYPE_CLASS
> @@ -515,7 +515,7 @@ struct binary_imm_narrowb_base : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (1, 1);
> }
> @@ -528,7 +528,7 @@ template<type_class_index CLASS = function_resolver::SAME_TYPE_CLASS>
> struct binary_imm_narrowt_base : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_n);
> STATIC_ASSERT (CLASS == function_resolver::SAME_TYPE_CLASS
> @@ -540,7 +540,7 @@ struct binary_imm_narrowt_base : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -560,14 +560,14 @@ struct binary_imm_narrowt_base : public overloaded_base<0>
> struct binary_imm_long_base : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_n);
> build_all (b, "v0,vh0,su64", group, MODE_n);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type, result_type;
> @@ -623,7 +623,7 @@ struct inc_dec_base : public overloaded_base<0>
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> return c.require_immediate_range (m_pat_p ? 2 : 1, 1, 16);
> }
> @@ -637,7 +637,7 @@ struct load_contiguous_base : public overloaded_base<0>
> /* Resolve a call based purely on a pointer argument. The other arguments
> are a governing predicate and (for MODE_vnum) a vnum offset. */
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> bool vnum_p = r.mode_suffix_id == MODE_vnum;
> gcc_assert (r.mode_suffix_id == MODE_none || vnum_p);
> @@ -658,7 +658,7 @@ struct load_contiguous_base : public overloaded_base<0>
> struct load_gather_sv_base : public overloaded_base<0>
> {
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> mode_suffix_index mode;
> @@ -686,7 +686,7 @@ struct load_ext_gather_base : public overloaded_base<1>
> The function has an explicit type suffix that determines the type
> of the loaded data. */
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> /* No resolution is needed for a vector base with no displacement;
> there's a one-to-one mapping between short and long names. */
> @@ -713,7 +713,7 @@ struct load_ext_gather_base : public overloaded_base<1>
> struct mmla_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> /* svmmla is distributed over several extensions. Allow the common
> @@ -729,7 +729,7 @@ struct mmla_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -769,7 +769,7 @@ struct prefetch_gather_base : public overloaded_base<0>
> The prefetch operation is the final argument. This is purely a
> mode-based resolution; there are no type suffixes. */
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> bool has_displacement_p = r.displacement_units () != UNITS_none;
>
> @@ -791,7 +791,7 @@ template<typename BASE, unsigned int N>
> struct shift_right_imm_narrow_wrapper : public BASE
> {
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> unsigned int bits = c.type_suffix (0).element_bits / 2;
> return c.require_immediate_range (N, 1, bits);
> @@ -811,7 +811,7 @@ struct store_scatter_base : public overloaded_base<0>
> The stored data is the final argument, and it determines the
> type suffix. */
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> bool has_displacement_p = r.displacement_units () != UNITS_none;
>
> @@ -832,14 +832,14 @@ struct store_scatter_base : public overloaded_base<0>
> struct ternary_shift_imm_base : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_n);
> build_all (b, "v0,v0,v0,su64", group, MODE_n);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (2, 1);
> }
> @@ -862,7 +862,7 @@ template<unsigned int MODIFIER,
> struct ternary_resize2_opt_n_base : public overloaded_base<0>
> {
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -884,7 +884,7 @@ template<unsigned int MODIFIER,
> struct ternary_resize2_base : public overloaded_base<0>
> {
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -908,7 +908,7 @@ template<unsigned int MODIFIER,
> struct ternary_resize2_lane_base : public overloaded_base<0>
> {
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -932,14 +932,14 @@ struct ternary_bfloat_lane_base
> : public ternary_resize2_lane_base<16, TYPE_bfloat, TYPE_bfloat>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vB,vB,su64", group, MODE_none);
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> return c.require_immediate_lane_index (3, N);
> }
> @@ -954,7 +954,7 @@ struct ternary_qq_lane_base
> TYPE_CLASS2, TYPE_CLASS3>
> {
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> return c.require_immediate_lane_index (3, 4);
> }
> @@ -966,7 +966,7 @@ template<type_class_index CLASS = function_resolver::SAME_TYPE_CLASS>
> struct unary_narrowb_base : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> STATIC_ASSERT (CLASS == function_resolver::SAME_TYPE_CLASS
> @@ -978,7 +978,7 @@ struct unary_narrowb_base : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_unary (CLASS, r.HALF_SIZE);
> }
> @@ -991,7 +991,7 @@ template<type_class_index CLASS = function_resolver::SAME_TYPE_CLASS>
> struct unary_narrowt_base : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> STATIC_ASSERT (CLASS == function_resolver::SAME_TYPE_CLASS
> @@ -1003,7 +1003,7 @@ struct unary_narrowt_base : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1023,7 +1023,7 @@ struct unary_narrowt_base : public overloaded_base<0>
> struct adr_index_def : public adr_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_index);
> build_all (b, "b,b,d", group, MODE_u32base_s32index);
> @@ -1041,7 +1041,7 @@ SHAPE (adr_index)
> struct adr_offset_def : public adr_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_offset);
> build_all (b, "b,b,d", group, MODE_u32base_s32offset);
> @@ -1058,14 +1058,14 @@ SHAPE (adr_offset)
> struct binary_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,v0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (2);
> }
> @@ -1080,7 +1080,7 @@ SHAPE (binary)
> struct binary_int_opt_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vs0", group, MODE_none);
> @@ -1088,7 +1088,7 @@ struct binary_int_opt_n_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1108,20 +1108,20 @@ SHAPE (binary_int_opt_n)
> struct binary_lane_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,v0,su64", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (2, 1);
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> return c.require_immediate_lane_index (2);
> }
> @@ -1135,14 +1135,14 @@ SHAPE (binary_lane)
> struct binary_long_lane_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,vh0,vh0,su64", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type, result_type;
> @@ -1160,7 +1160,7 @@ struct binary_long_lane_def : public overloaded_base<0>
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> return c.require_immediate_lane_index (2);
> }
> @@ -1172,7 +1172,7 @@ SHAPE (binary_long_lane)
> struct binary_long_opt_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,vh0,vh0", group, MODE_none);
> @@ -1180,7 +1180,7 @@ struct binary_long_opt_n_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type, result_type;
> @@ -1202,14 +1202,14 @@ SHAPE (binary_long_opt_n)
> struct binary_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_n);
> build_all (b, "v0,v0,s0", group, MODE_n);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1231,7 +1231,7 @@ SHAPE (binary_n)
> struct binary_narrowb_opt_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "vh0,v0,v0", group, MODE_none);
> @@ -1239,7 +1239,7 @@ struct binary_narrowb_opt_n_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform_opt_n (2);
> }
> @@ -1253,7 +1253,7 @@ SHAPE (binary_narrowb_opt_n)
> struct binary_narrowt_opt_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "vh0,vh0,v0,v0", group, MODE_none);
> @@ -1261,7 +1261,7 @@ struct binary_narrowt_opt_n_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1284,7 +1284,7 @@ SHAPE (binary_narrowt_opt_n)
> struct binary_opt_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,v0", group, MODE_none);
> @@ -1298,7 +1298,7 @@ struct binary_opt_n_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform_opt_n (2);
> }
> @@ -1309,7 +1309,7 @@ SHAPE (binary_opt_n)
> struct binary_pred_def : public nonoverloaded_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> build_all (b, "v0,v0,v0", group, MODE_none);
> }
> @@ -1322,20 +1322,20 @@ SHAPE (binary_pred)
> struct binary_rotate_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,v0,su64", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (2, 1);
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> return c.require_immediate_either_or (2, 90, 270);
> }
> @@ -1349,7 +1349,7 @@ SHAPE (binary_rotate)
> struct binary_scalar_def : public nonoverloaded_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> build_all (b, "v0,s0,s0", group, MODE_none);
> }
> @@ -1362,14 +1362,14 @@ SHAPE (binary_scalar)
> struct binary_to_uint_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "vu0,v0,v0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (2);
> }
> @@ -1383,14 +1383,14 @@ SHAPE (binary_to_uint)
> struct binary_uint_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vu0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1411,14 +1411,14 @@ SHAPE (binary_uint)
> struct binary_uint_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,su0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1440,7 +1440,7 @@ SHAPE (binary_uint_n)
> struct binary_uint_opt_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vu0", group, MODE_none);
> @@ -1448,7 +1448,7 @@ struct binary_uint_opt_n_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1468,14 +1468,14 @@ SHAPE (binary_uint_opt_n)
> struct binary_uint64_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,su64", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1497,7 +1497,7 @@ SHAPE (binary_uint64_n)
> struct binary_uint64_opt_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vu64", group, MODE_none);
> @@ -1505,7 +1505,7 @@ struct binary_uint64_opt_n_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1522,14 +1522,14 @@ SHAPE (binary_uint64_opt_n)
> struct binary_wide_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vh0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1549,7 +1549,7 @@ SHAPE (binary_wide)
> struct binary_wide_opt_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vh0", group, MODE_none);
> @@ -1557,7 +1557,7 @@ struct binary_wide_opt_n_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1576,7 +1576,7 @@ SHAPE (binary_wide_opt_n)
> struct clast_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,v0", group, MODE_none);
> @@ -1584,7 +1584,7 @@ struct clast_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> if (!r.check_gp_argument (2, i, nargs)
> @@ -1615,14 +1615,14 @@ SHAPE (clast)
> struct compare_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "vp,v0,v0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (2);
> }
> @@ -1636,7 +1636,7 @@ SHAPE (compare)
> struct compare_opt_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "vp,v0,v0", group, MODE_none);
> @@ -1644,7 +1644,7 @@ struct compare_opt_n_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform_opt_n (2);
> }
> @@ -1655,14 +1655,14 @@ SHAPE (compare_opt_n)
> struct compare_ptr_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "vp,al,al", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1684,14 +1684,14 @@ SHAPE (compare_ptr)
> struct compare_scalar_def : public overloaded_base<1>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "vp,s1,s1", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1714,7 +1714,7 @@ SHAPE (compare_scalar)
> struct compare_wide_opt_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "vp,v0,vw0", group, MODE_none);
> @@ -1722,7 +1722,7 @@ struct compare_wide_opt_n_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1739,7 +1739,7 @@ SHAPE (compare_wide_opt_n)
> struct count_inherent_def : public nonoverloaded_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> build_all (b, "su64", group, MODE_none);
> }
> @@ -1750,7 +1750,7 @@ SHAPE (count_inherent)
> struct count_pat_def : public nonoverloaded_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> build_all (b, "su64,epattern", group, MODE_none);
> }
> @@ -1761,7 +1761,7 @@ SHAPE (count_pat)
> struct count_pred_def : public nonoverloaded_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> build_all (b, "su64,vp", group, MODE_none);
> }
> @@ -1772,14 +1772,14 @@ SHAPE (count_pred)
> struct count_vector_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "su64,v0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (1);
> }
> @@ -1792,14 +1792,14 @@ SHAPE (count_vector)
> struct create_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "t0,v0*t", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (r.vectors_per_tuple ());
> }
> @@ -1813,7 +1813,7 @@ SHAPE (create)
> struct dupq_def : public overloaded_base<1>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> /* The "_n" suffix is optional; the full name has it, but the short
> name doesn't. */
> @@ -1821,7 +1821,7 @@ struct dupq_def : public overloaded_base<1>
> }
>
> tree
> - resolve (function_resolver &) const OVERRIDE
> + resolve (function_resolver &) const override
> {
> /* The short forms just make "_n" implicit, so no resolution is needed. */
> gcc_unreachable ();
> @@ -1836,20 +1836,20 @@ SHAPE (dupq)
> struct ext_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,v0,su64", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (2, 1);
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> unsigned int bytes = c.type_suffix (0).element_bytes;
> return c.require_immediate_range (2, 0, 256 / bytes - 1);
> @@ -1861,14 +1861,14 @@ SHAPE (ext)
> struct fold_left_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "s0,s0,v0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1889,14 +1889,14 @@ SHAPE (fold_left)
> struct get_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,t0,su64", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1909,7 +1909,7 @@ struct get_def : public overloaded_base<0>
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> unsigned int nvectors = c.vectors_per_tuple ();
> return c.require_immediate_range (1, 0, nvectors - 1);
> @@ -1927,7 +1927,7 @@ struct inc_dec_def : public inc_dec_base
> CONSTEXPR inc_dec_def () : inc_dec_base (false) {}
>
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> /* These functions are unusual in that the type suffixes for
> @@ -1952,7 +1952,7 @@ struct inc_dec_pat_def : public inc_dec_base
> CONSTEXPR inc_dec_pat_def () : inc_dec_base (true) {}
>
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> /* These functions are unusual in that the type suffixes for
> @@ -1971,14 +1971,14 @@ SHAPE (inc_dec_pat)
> struct inc_dec_pred_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vp", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -1998,14 +1998,14 @@ SHAPE (inc_dec_pred)
> struct inc_dec_pred_scalar_def : public overloaded_base<2>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_n);
> build_all (b, "s0,s0,vp", group, MODE_n);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -2023,7 +2023,7 @@ SHAPE (inc_dec_pred_scalar)
> struct inherent_def : public nonoverloaded_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> build_all (b, "t0", group, MODE_none);
> }
> @@ -2034,7 +2034,7 @@ SHAPE (inherent)
> struct inherent_b_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> /* The "_b" suffix is optional; the full name has it, but the short
> name doesn't. */
> @@ -2042,7 +2042,7 @@ struct inherent_b_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &) const OVERRIDE
> + resolve (function_resolver &) const override
> {
> /* The short forms just make "_b" implicit, so no resolution is needed. */
> gcc_unreachable ();
> @@ -2055,7 +2055,7 @@ SHAPE (inherent_b)
> struct load_def : public load_contiguous_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> b.add_overloaded_functions (group, MODE_vnum);
> @@ -2072,7 +2072,7 @@ SHAPE (load)
> struct load_ext_def : public nonoverloaded_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> build_all (b, "t0,al", group, MODE_none);
> build_all (b, "t0,al,ss64", group, MODE_vnum);
> @@ -2092,7 +2092,7 @@ SHAPE (load_ext)
> struct load_ext_gather_index_def : public load_ext_gather_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_index);
> build_sv_index (b, "t0,al,d", group);
> @@ -2112,7 +2112,7 @@ SHAPE (load_ext_gather_index)
> struct load_ext_gather_index_restricted_def : public load_ext_gather_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_index);
> build_sv_index64 (b, "t0,al,d", group);
> @@ -2136,7 +2136,7 @@ SHAPE (load_ext_gather_index_restricted)
> struct load_ext_gather_offset_def : public load_ext_gather_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_offset);
> build_sv_offset (b, "t0,al,d", group);
> @@ -2161,7 +2161,7 @@ SHAPE (load_ext_gather_offset)
> struct load_ext_gather_offset_restricted_def : public load_ext_gather_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_offset);
> build_sv_uint_offset (b, "t0,al,d", group);
> @@ -2183,7 +2183,7 @@ SHAPE (load_ext_gather_offset_restricted)
> struct load_gather_sv_def : public load_gather_sv_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_index);
> b.add_overloaded_functions (group, MODE_offset);
> @@ -2205,7 +2205,7 @@ SHAPE (load_gather_sv)
> struct load_gather_sv_restricted_def : public load_gather_sv_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_index);
> b.add_overloaded_functions (group, MODE_offset);
> @@ -2226,7 +2226,7 @@ SHAPE (load_gather_sv_restricted)
> struct load_gather_vs_def : public overloaded_base<1>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> /* The base vector mode is optional; the full name has it but the
> short name doesn't. There is no ambiguity with SHAPE_load_gather_sv
> @@ -2237,7 +2237,7 @@ struct load_gather_vs_def : public overloaded_base<1>
> }
>
> tree
> - resolve (function_resolver &) const OVERRIDE
> + resolve (function_resolver &) const override
> {
> /* The short name just makes the base vector mode implicit;
> no resolution is needed. */
> @@ -2252,7 +2252,7 @@ SHAPE (load_gather_vs)
> struct load_replicate_def : public load_contiguous_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "t0,al", group, MODE_none);
> @@ -2264,7 +2264,7 @@ SHAPE (load_replicate)
> struct pattern_pred_def : public nonoverloaded_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> build_all (b, "vp,epattern", group, MODE_none);
> }
> @@ -2276,7 +2276,7 @@ SHAPE (pattern_pred)
> struct prefetch_def : public nonoverloaded_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> build_all (b, "_,ap,eprfop", group, MODE_none);
> build_all (b, "_,ap,ss64,eprfop", group, MODE_vnum);
> @@ -2297,7 +2297,7 @@ SHAPE (prefetch)
> struct prefetch_gather_index_def : public prefetch_gather_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> b.add_overloaded_functions (group, MODE_index);
> @@ -2321,7 +2321,7 @@ SHAPE (prefetch_gather_index)
> struct prefetch_gather_offset_def : public prefetch_gather_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> b.add_overloaded_functions (group, MODE_offset);
> @@ -2336,7 +2336,7 @@ SHAPE (prefetch_gather_offset)
> struct ptest_def : public nonoverloaded_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> build_all (b, "sp,vp", group, MODE_none);
> }
> @@ -2347,7 +2347,7 @@ SHAPE (ptest)
> struct rdffr_def : public nonoverloaded_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> build_all (b, "vp", group, MODE_none);
> }
> @@ -2358,14 +2358,14 @@ SHAPE (rdffr)
> struct reduction_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "s0,v0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (1);
> }
> @@ -2381,14 +2381,14 @@ SHAPE (reduction)
> struct reduction_wide_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "sw0,v0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (1);
> }
> @@ -2402,14 +2402,14 @@ SHAPE (reduction_wide)
> struct set_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "t0,t0,su64,v0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -2423,7 +2423,7 @@ struct set_def : public overloaded_base<0>
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> unsigned int nvectors = c.vectors_per_tuple ();
> return c.require_immediate_range (1, 0, nvectors - 1);
> @@ -2435,7 +2435,7 @@ SHAPE (set)
> struct setffr_def : public nonoverloaded_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> build_all (b, "_", group, MODE_none);
> }
> @@ -2449,20 +2449,20 @@ SHAPE (setffr)
> struct shift_left_imm_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_n);
> build_all (b, "v0,v0,su64", group, MODE_n);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (1, 1);
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> unsigned int bits = c.type_suffix (0).element_bits;
> return c.require_immediate_range (1, 0, bits - 1);
> @@ -2477,7 +2477,7 @@ SHAPE (shift_left_imm)
> struct shift_left_imm_long_def : public binary_imm_long_base
> {
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> unsigned int bits = c.type_suffix (0).element_bits / 2;
> return c.require_immediate_range (1, 0, bits - 1);
> @@ -2492,7 +2492,7 @@ SHAPE (shift_left_imm_long)
> struct shift_left_imm_to_uint_def : public shift_left_imm_def
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_n);
> build_all (b, "vu0,v0,su64", group, MODE_n);
> @@ -2507,20 +2507,20 @@ SHAPE (shift_left_imm_to_uint)
> struct shift_right_imm_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_n);
> build_all (b, "v0,v0,su64", group, MODE_n);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (1, 1);
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> unsigned int bits = c.type_suffix (0).element_bits;
> return c.require_immediate_range (1, 1, bits);
> @@ -2572,7 +2572,7 @@ SHAPE (shift_right_imm_narrowt_to_uint)
> struct store_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> b.add_overloaded_functions (group, MODE_vnum);
> @@ -2581,7 +2581,7 @@ struct store_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> bool vnum_p = r.mode_suffix_id == MODE_vnum;
> gcc_assert (r.mode_suffix_id == MODE_none || vnum_p);
> @@ -2612,7 +2612,7 @@ SHAPE (store)
> struct store_scatter_index_def : public store_scatter_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_index);
> build_sv_index (b, "_,as,d,t0", group);
> @@ -2632,7 +2632,7 @@ SHAPE (store_scatter_index)
> struct store_scatter_index_restricted_def : public store_scatter_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_index);
> build_sv_index64 (b, "_,as,d,t0", group);
> @@ -2657,7 +2657,7 @@ SHAPE (store_scatter_index_restricted)
> struct store_scatter_offset_def : public store_scatter_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> b.add_overloaded_functions (group, MODE_offset);
> @@ -2683,7 +2683,7 @@ SHAPE (store_scatter_offset)
> struct store_scatter_offset_restricted_def : public store_scatter_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> b.add_overloaded_functions (group, MODE_offset);
> @@ -2698,14 +2698,14 @@ SHAPE (store_scatter_offset_restricted)
> struct tbl_tuple_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,t0,vu0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -2724,7 +2724,7 @@ struct ternary_bfloat_def
> : public ternary_resize2_base<16, TYPE_bfloat, TYPE_bfloat>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vB,vB", group, MODE_none);
> @@ -2752,7 +2752,7 @@ struct ternary_bfloat_opt_n_def
> : public ternary_resize2_opt_n_base<16, TYPE_bfloat, TYPE_bfloat>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vB,vB", group, MODE_none);
> @@ -2770,7 +2770,7 @@ struct ternary_intq_uintq_lane_def
> : public ternary_qq_lane_base<TYPE_signed, TYPE_unsigned>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vqs0,vqu0,su64", group, MODE_none);
> @@ -2786,7 +2786,7 @@ struct ternary_intq_uintq_opt_n_def
> TYPE_signed, TYPE_unsigned>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vqs0,vqu0", group, MODE_none);
> @@ -2802,20 +2802,20 @@ SHAPE (ternary_intq_uintq_opt_n)
> struct ternary_lane_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,v0,v0,su64", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (3, 1);
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> return c.require_immediate_lane_index (3);
> }
> @@ -2830,20 +2830,20 @@ SHAPE (ternary_lane)
> struct ternary_lane_rotate_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,v0,v0,su64,su64", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (3, 2);
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> return (c.require_immediate_lane_index (3, 2)
> && c.require_immediate_one_of (4, 0, 90, 180, 270));
> @@ -2859,14 +2859,14 @@ struct ternary_long_lane_def
> : public ternary_resize2_lane_base<function_resolver::HALF_SIZE>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vh0,vh0,su64", group, MODE_none);
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> return c.require_immediate_lane_index (3);
> }
> @@ -2883,7 +2883,7 @@ struct ternary_long_opt_n_def
> : public ternary_resize2_opt_n_base<function_resolver::HALF_SIZE>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vh0,vh0", group, MODE_none);
> @@ -2900,7 +2900,7 @@ SHAPE (ternary_long_opt_n)
> struct ternary_opt_n_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,v0,v0", group, MODE_none);
> @@ -2908,7 +2908,7 @@ struct ternary_opt_n_def : public overloaded_base<0>
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform_opt_n (3);
> }
> @@ -2922,7 +2922,7 @@ SHAPE (ternary_opt_n)
> struct ternary_qq_lane_def : public ternary_qq_lane_base<>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vq0,vq0,su64", group, MODE_none);
> @@ -2938,14 +2938,14 @@ SHAPE (ternary_qq_lane)
> struct ternary_qq_lane_rotate_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vq0,vq0,su64,su64", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -2963,7 +2963,7 @@ struct ternary_qq_lane_rotate_def : public overloaded_base<0>
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> return (c.require_immediate_lane_index (3, 4)
> && c.require_immediate_one_of (4, 0, 90, 180, 270));
> @@ -2981,7 +2981,7 @@ struct ternary_qq_opt_n_def
> : public ternary_resize2_opt_n_base<function_resolver::QUARTER_SIZE>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vq0,vq0", group, MODE_none);
> @@ -2998,14 +2998,14 @@ SHAPE (ternary_qq_opt_n)
> struct ternary_qq_rotate_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vq0,vq0,su64", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -3022,7 +3022,7 @@ struct ternary_qq_rotate_def : public overloaded_base<0>
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> return c.require_immediate_one_of (3, 0, 90, 180, 270);
> }
> @@ -3036,20 +3036,20 @@ SHAPE (ternary_qq_rotate)
> struct ternary_rotate_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,v0,v0,su64", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (3, 1);
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> return c.require_immediate_one_of (3, 0, 90, 180, 270);
> }
> @@ -3063,7 +3063,7 @@ SHAPE (ternary_rotate)
> struct ternary_shift_left_imm_def : public ternary_shift_imm_base
> {
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> unsigned int bits = c.type_suffix (0).element_bits;
> return c.require_immediate_range (2, 0, bits - 1);
> @@ -3078,7 +3078,7 @@ SHAPE (ternary_shift_left_imm)
> struct ternary_shift_right_imm_def : public ternary_shift_imm_base
> {
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> unsigned int bits = c.type_suffix (0).element_bits;
> return c.require_immediate_range (2, 1, bits);
> @@ -3090,14 +3090,14 @@ SHAPE (ternary_shift_right_imm)
> struct ternary_uint_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,v0,vu0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -3119,7 +3119,7 @@ struct ternary_uintq_intq_def
> TYPE_unsigned, TYPE_signed>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vqu0,vqs0", group, MODE_none);
> @@ -3136,7 +3136,7 @@ struct ternary_uintq_intq_lane_def
> : public ternary_qq_lane_base<TYPE_unsigned, TYPE_signed>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vqu0,vqs0,su64", group, MODE_none);
> @@ -3152,7 +3152,7 @@ struct ternary_uintq_intq_opt_n_def
> TYPE_unsigned, TYPE_signed>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,vqu0,vqs0", group, MODE_none);
> @@ -3168,20 +3168,20 @@ SHAPE (ternary_uintq_intq_opt_n)
> struct tmad_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0,v0,su64", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_uniform (2, 1);
> }
>
> bool
> - check (function_checker &c) const OVERRIDE
> + check (function_checker &c) const override
> {
> return c.require_immediate_range (2, 0, 7);
> }
> @@ -3195,14 +3195,14 @@ SHAPE (tmad)
> struct unary_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_unary ();
> }
> @@ -3216,14 +3216,14 @@ SHAPE (unary)
> struct unary_convert_def : public overloaded_base<1>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v1", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_unary (r.type_suffix (0).tclass,
> r.type_suffix (0).element_bits);
> @@ -3239,14 +3239,14 @@ SHAPE (unary_convert)
> struct unary_convert_narrowt_def : public overloaded_base<1>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,v1", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_unary (r.type_suffix (0).tclass,
> r.type_suffix (0).element_bits, true);
> @@ -3258,14 +3258,14 @@ SHAPE (unary_convert_narrowt)
> struct unary_long_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,vh0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type, result_type;
> @@ -3286,7 +3286,7 @@ SHAPE (unary_long)
> struct unary_n_def : public overloaded_base<1>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> /* The "_n" suffix is optional; the full name has it, but the short
> name doesn't. */
> @@ -3294,7 +3294,7 @@ struct unary_n_def : public overloaded_base<1>
> }
>
> tree
> - resolve (function_resolver &) const OVERRIDE
> + resolve (function_resolver &) const override
> {
> /* The short forms just make "_n" implicit, so no resolution is needed. */
> gcc_unreachable ();
> @@ -3322,7 +3322,7 @@ SHAPE (unary_narrowt_to_uint)
> struct unary_pred_def : public nonoverloaded_base
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> build_all (b, "v0,v0", group, MODE_none);
> }
> @@ -3336,14 +3336,14 @@ SHAPE (unary_pred)
> struct unary_to_int_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "vs0,v0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_unary (TYPE_signed);
> }
> @@ -3357,14 +3357,14 @@ SHAPE (unary_to_int)
> struct unary_to_uint_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "vu0,v0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> return r.resolve_unary (TYPE_unsigned);
> }
> @@ -3378,14 +3378,14 @@ SHAPE (unary_to_uint)
> struct unary_uint_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,vu0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> @@ -3414,14 +3414,14 @@ SHAPE (unary_uint)
> struct unary_widen_def : public overloaded_base<0>
> {
> void
> - build (function_builder &b, const function_group_info &group) const OVERRIDE
> + build (function_builder &b, const function_group_info &group) const override
> {
> b.add_overloaded_functions (group, MODE_none);
> build_all (b, "v0,vh0", group, MODE_none);
> }
>
> tree
> - resolve (function_resolver &r) const OVERRIDE
> + resolve (function_resolver &r) const override
> {
> unsigned int i, nargs;
> type_suffix_index type;
> diff --git a/gcc/config/aarch64/aarch64-sve-builtins-sve2.cc b/gcc/config/aarch64/aarch64-sve-builtins-sve2.cc
> index e066f096dad..c0104371a22 100644
> --- a/gcc/config/aarch64/aarch64-sve-builtins-sve2.cc
> +++ b/gcc/config/aarch64/aarch64-sve-builtins-sve2.cc
> @@ -82,7 +82,7 @@ class svaba_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> rtx_code max_code = e.type_suffix (0).unsigned_p ? UMAX : SMAX;
> machine_mode mode = e.vector_mode (0);
> @@ -94,7 +94,7 @@ class svcdot_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Convert the rotation amount into a specific unspec. */
> int rot = INTVAL (e.args.pop ());
> @@ -107,7 +107,7 @@ class svcdot_lane_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Convert the rotation amount into a specific unspec. */
> int rot = INTVAL (e.args.pop ());
> @@ -120,13 +120,13 @@ class svldnt1_gather_impl : public full_width_access
> {
> public:
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_READ_MEMORY;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.prepare_gather_address_operands (1, false);
> machine_mode mem_mode = e.memory_vector_mode ();
> @@ -142,7 +142,7 @@ public:
> : extending_load (memory_type) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.prepare_gather_address_operands (1, false);
> /* Add a constant predicate for the extension rtx. */
> @@ -162,7 +162,7 @@ public:
> CONSTEXPR svmatch_svnmatch_impl (int unspec) : m_unspec (unspec) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* These are UNSPEC_PRED_Z operations and so need a hint operand. */
> e.add_ptrue_hint (0, e.gp_mode (0));
> @@ -185,7 +185,7 @@ public:
> {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.args.quick_push (const0_rtx);
> return e.map_to_unspecs (m_unspec_for_sint, m_unspec_for_uint,
> @@ -197,7 +197,7 @@ class svqcadd_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Convert the rotation amount into a specific unspec. */
> int rot = INTVAL (e.args.pop ());
> @@ -213,7 +213,7 @@ class svqrdcmlah_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Convert the rotation amount into a specific unspec. */
> int rot = INTVAL (e.args.pop ());
> @@ -226,7 +226,7 @@ class svqrdcmlah_lane_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> /* Convert the rotation amount into a specific unspec. */
> int rot = INTVAL (e.args.pop ());
> @@ -242,7 +242,7 @@ public:
> : unspec_based_function (UNSPEC_SQRSHL, UNSPEC_UQRSHL, -1) {}
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
> {
> @@ -276,7 +276,7 @@ public:
> : unspec_based_function (UNSPEC_SQSHL, UNSPEC_UQSHL, -1) {}
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
> {
> @@ -312,7 +312,7 @@ public:
> : unspec_based_function (UNSPEC_SRSHL, UNSPEC_URSHL, -1) {}
>
> gimple *
> - fold (gimple_folder &f) const OVERRIDE
> + fold (gimple_folder &f) const override
> {
> if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
> {
> @@ -349,7 +349,7 @@ class svsqadd_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = e.vector_mode (0);
> if (e.pred == PRED_x
> @@ -363,7 +363,7 @@ class svsra_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> rtx_code shift_code = e.type_suffix (0).unsigned_p ? LSHIFTRT : ASHIFTRT;
> machine_mode mode = e.vector_mode (0);
> @@ -375,13 +375,13 @@ class svstnt1_scatter_impl : public full_width_access
> {
> public:
> unsigned int
> - call_properties (const function_instance &) const OVERRIDE
> + call_properties (const function_instance &) const override
> {
> return CP_WRITE_MEMORY;
> }
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.prepare_gather_address_operands (1, false);
> machine_mode mem_mode = e.memory_vector_mode ();
> @@ -397,7 +397,7 @@ public:
> : truncating_store (to_mode) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> e.prepare_gather_address_operands (1, false);
> insn_code icode = code_for_aarch64_scatter_stnt (e.vector_mode (0),
> @@ -412,7 +412,7 @@ public:
> CONSTEXPR svtbl2_impl () : quiet<multi_vector_function> (2) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> return e.use_exact_insn (code_for_aarch64_sve2_tbl2 (e.vector_mode (0)));
> }
> @@ -422,7 +422,7 @@ class svuqadd_impl : public function_base
> {
> public:
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> machine_mode mode = e.vector_mode (0);
> if (e.pred == PRED_x
> @@ -440,7 +440,7 @@ public:
> CONSTEXPR svwhilerw_svwhilewr_impl (int unspec) : m_unspec (unspec) {}
>
> rtx
> - expand (function_expander &e) const OVERRIDE
> + expand (function_expander &e) const override
> {
> for (unsigned int i = 0; i < 2; ++i)
> e.args[i] = e.convert_to_pmode (e.args[i]);
@@ -148,7 +148,7 @@ class svabd_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* The integer operations are represented as the subtraction of the
minimum from the maximum, with the signedness of the instruction
@@ -179,7 +179,7 @@ public:
CONSTEXPR svac_impl (int unspec) : m_unspec (unspec) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.add_ptrue_hint (0, e.gp_mode (0));
insn_code icode = code_for_aarch64_pred_fac (m_unspec, e.vector_mode (0));
@@ -194,7 +194,7 @@ class svadda_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Put the predicate last, as required by mask_fold_left_plus_optab. */
e.rotate_inputs_left (0, 3);
@@ -211,7 +211,7 @@ public:
CONSTEXPR svadr_bhwd_impl (unsigned int shift) : m_shift (shift) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = GET_MODE (e.args[0]);
if (m_shift == 0)
@@ -231,7 +231,7 @@ class svbic_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Convert svbic of a constant into svand of its inverse. */
if (CONST_INT_P (e.args[2]))
@@ -261,7 +261,7 @@ public:
CONSTEXPR svbrk_binary_impl (int unspec) : m_unspec (unspec) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return e.use_exact_insn (code_for_aarch64_brk (m_unspec));
}
@@ -277,7 +277,7 @@ public:
CONSTEXPR svbrk_unary_impl (int unspec) : m_unspec (unspec) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return e.use_cond_insn (code_for_aarch64_brk (m_unspec));
}
@@ -290,7 +290,7 @@ class svcadd_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Convert the rotation amount into a specific unspec. */
int rot = INTVAL (e.args.pop ());
@@ -311,7 +311,7 @@ public:
CONSTEXPR svclast_impl (int unspec) : m_unspec (unspec) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Match the fold_extract_optab order. */
std::swap (e.args[0], e.args[1]);
@@ -332,7 +332,7 @@ class svcmla_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Convert the rotation amount into a specific unspec. */
int rot = INTVAL (e.args.pop ());
@@ -355,7 +355,7 @@ class svcmla_lane_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Convert the rotation amount into a specific unspec. */
int rot = INTVAL (e.args.pop ());
@@ -384,7 +384,7 @@ public:
: m_code (code), m_unspec_for_fp (unspec_for_fp) {}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
tree pg = gimple_call_arg (f.call, 0);
tree rhs1 = gimple_call_arg (f.call, 1);
@@ -406,7 +406,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = e.vector_mode (0);
@@ -442,7 +442,7 @@ public:
m_unspec_for_uint (unspec_for_uint) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = e.vector_mode (0);
bool unsigned_p = e.type_suffix (0).unsigned_p;
@@ -480,7 +480,7 @@ class svcmpuo_impl : public quiet<function_base>
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.add_ptrue_hint (0, e.gp_mode (0));
return e.use_exact_insn (code_for_aarch64_pred_fcmuo (e.vector_mode (0)));
@@ -491,7 +491,7 @@ class svcnot_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = e.vector_mode (0);
if (e.pred == PRED_x)
@@ -514,7 +514,7 @@ public:
CONSTEXPR svcnt_bhwd_impl (machine_mode ref_mode) : m_ref_mode (ref_mode) {}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
tree count = build_int_cstu (TREE_TYPE (f.lhs),
GET_MODE_NUNITS (m_ref_mode));
@@ -522,7 +522,7 @@ public:
}
rtx
- expand (function_expander &) const OVERRIDE
+ expand (function_expander &) const override
{
return gen_int_mode (GET_MODE_NUNITS (m_ref_mode), DImode);
}
@@ -539,7 +539,7 @@ public:
: svcnt_bhwd_impl (ref_mode) {}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
tree pattern_arg = gimple_call_arg (f.call, 0);
aarch64_svpattern pattern = (aarch64_svpattern) tree_to_shwi (pattern_arg);
@@ -562,7 +562,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
unsigned int elements_per_vq = 128 / GET_MODE_UNIT_BITSIZE (m_ref_mode);
e.args.quick_push (gen_int_mode (elements_per_vq, DImode));
@@ -575,7 +575,7 @@ class svcntp_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = e.vector_mode (0);
e.add_ptrue_hint (0, mode);
@@ -591,7 +591,7 @@ public:
: quiet<multi_vector_function> (vectors_per_tuple) {}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
unsigned int nargs = gimple_call_num_args (f.call);
tree lhs_type = TREE_TYPE (f.lhs);
@@ -621,7 +621,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
rtx lhs_tuple = e.get_nonoverlapping_reg_target ();
@@ -643,7 +643,7 @@ class svcvt_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode0 = e.vector_mode (0);
machine_mode mode1 = e.vector_mode (1);
@@ -706,7 +706,7 @@ class svdot_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* In the optab, the multiplication operands come before the accumulator
operand. The optab is keyed off the multiplication mode. */
@@ -729,7 +729,7 @@ public:
unspec_for_float) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Use the same ordering as the dot_prod_optab, with the
accumulator last. */
@@ -744,7 +744,7 @@ class svdup_impl : public quiet<function_base>
{
public:
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
tree vec_type = TREE_TYPE (f.lhs);
tree rhs = gimple_call_arg (f.call, f.pred == PRED_none ? 0 : 1);
@@ -784,7 +784,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
if (e.pred == PRED_none || e.pred == PRED_x)
/* There's no benefit to using predicated instructions for _x here. */
@@ -812,7 +812,7 @@ class svdup_lane_impl : public quiet<function_base>
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* The native DUP lane has an index range of 64 bytes. */
machine_mode mode = e.vector_mode (0);
@@ -829,7 +829,7 @@ class svdupq_impl : public quiet<function_base>
{
public:
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
tree vec_type = TREE_TYPE (f.lhs);
unsigned int nargs = gimple_call_num_args (f.call);
@@ -851,7 +851,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = e.vector_mode (0);
unsigned int elements_per_vq = e.args.length ();
@@ -900,7 +900,7 @@ class svdupq_lane_impl : public quiet<function_base>
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = e.vector_mode (0);
rtx index = e.args[1];
@@ -964,7 +964,7 @@ public:
: m_from_mode (from_mode) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
if (e.type_suffix (0).unsigned_p)
{
@@ -1006,7 +1006,7 @@ public:
: quiet<multi_vector_function> (vectors_per_tuple) {}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
/* Fold into a normal gimple component access. */
tree rhs_tuple = gimple_call_arg (f.call, 0);
@@ -1020,7 +1020,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Fold the access into a subreg rvalue. */
return simplify_gen_subreg (e.vector_mode (0), e.args[0],
@@ -1033,7 +1033,7 @@ class svindex_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return e.use_exact_insn (e.direct_optab_handler (vec_series_optab));
}
@@ -1043,7 +1043,7 @@ class svinsr_impl : public quiet<function_base>
{
public:
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
gcall *new_call = gimple_build_call_internal (IFN_VEC_SHL_INSERT, 2,
gimple_call_arg (f.call, 0),
@@ -1053,7 +1053,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
insn_code icode = direct_optab_handler (vec_shl_insert_optab,
e.vector_mode (0));
@@ -1068,7 +1068,7 @@ public:
CONSTEXPR svlast_impl (int unspec) : m_unspec (unspec) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return e.use_exact_insn (code_for_extract (m_unspec, e.vector_mode (0)));
}
@@ -1081,13 +1081,13 @@ class svld1_impl : public full_width_access
{
public:
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_READ_MEMORY;
}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
tree vectype = f.vector_type (0);
@@ -1105,7 +1105,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
insn_code icode = convert_optab_handler (maskload_optab,
e.vector_mode (0), e.gp_mode (0));
@@ -1121,7 +1121,7 @@ public:
: extending_load (memory_type) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
insn_code icode = code_for_aarch64_load (UNSPEC_LD1_SVE, extend_rtx_code (),
e.vector_mode (0),
@@ -1134,13 +1134,13 @@ class svld1_gather_impl : public full_width_access
{
public:
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_READ_MEMORY;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.prepare_gather_address_operands (1);
/* Put the predicate last, as required by mask_gather_load_optab. */
@@ -1161,7 +1161,7 @@ public:
: extending_load (memory_type) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.prepare_gather_address_operands (1);
/* Put the predicate last, since the extending gathers use the same
@@ -1180,13 +1180,13 @@ class load_replicate : public function_base
{
public:
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_READ_MEMORY;
}
tree
- memory_scalar_type (const function_instance &fi) const OVERRIDE
+ memory_scalar_type (const function_instance &fi) const override
{
return fi.scalar_type (0);
}
@@ -1196,13 +1196,13 @@ class svld1rq_impl : public load_replicate
{
public:
machine_mode
- memory_vector_mode (const function_instance &fi) const OVERRIDE
+ memory_vector_mode (const function_instance &fi) const override
{
return aarch64_vq_mode (GET_MODE_INNER (fi.vector_mode (0))).require ();
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
insn_code icode = code_for_aarch64_sve_ld1rq (e.vector_mode (0));
return e.use_contiguous_load_insn (icode);
@@ -1213,13 +1213,13 @@ class svld1ro_impl : public load_replicate
{
public:
machine_mode
- memory_vector_mode (const function_instance &) const OVERRIDE
+ memory_vector_mode (const function_instance &) const override
{
return OImode;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
insn_code icode = code_for_aarch64_sve_ld1ro (e.vector_mode (0));
return e.use_contiguous_load_insn (icode);
@@ -1234,13 +1234,13 @@ public:
: full_width_access (vectors_per_tuple) {}
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_READ_MEMORY;
}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
tree tuple_type = TREE_TYPE (f.lhs);
tree vectype = f.vector_type (0);
@@ -1275,7 +1275,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode tuple_mode = TYPE_MODE (TREE_TYPE (e.call_expr));
insn_code icode = convert_optab_handler (vec_mask_load_lanes_optab,
@@ -1288,13 +1288,13 @@ class svldff1_gather_impl : public full_width_access
{
public:
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_READ_MEMORY | CP_READ_FFR | CP_WRITE_FFR;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* See the block comment in aarch64-sve.md for details about the
FFR handling. */
@@ -1317,7 +1317,7 @@ public:
: extending_load (memory_type) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* See the block comment in aarch64-sve.md for details about the
FFR handling. */
@@ -1340,13 +1340,13 @@ class svldnt1_impl : public full_width_access
{
public:
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_READ_MEMORY;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
insn_code icode = code_for_aarch64_ldnt1 (e.vector_mode (0));
return e.use_contiguous_load_insn (icode);
@@ -1360,13 +1360,13 @@ public:
CONSTEXPR svldxf1_impl (int unspec) : m_unspec (unspec) {}
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_READ_MEMORY | CP_READ_FFR | CP_WRITE_FFR;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* See the block comment in aarch64-sve.md for details about the
FFR handling. */
@@ -1388,13 +1388,13 @@ public:
: extending_load (memory_type), m_unspec (unspec) {}
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_READ_MEMORY | CP_READ_FFR | CP_WRITE_FFR;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* See the block comment in aarch64-sve.md for details about the
FFR handling. */
@@ -1414,7 +1414,7 @@ class svlen_impl : public quiet<function_base>
{
public:
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
/* The argument only exists for its type. */
tree rhs_type = TREE_TYPE (gimple_call_arg (f.call, 0));
@@ -1424,7 +1424,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* The argument only exists for its type. */
return gen_int_mode (GET_MODE_NUNITS (e.vector_mode (0)), DImode);
@@ -1435,7 +1435,7 @@ class svmad_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return expand_mad (e);
}
@@ -1445,7 +1445,7 @@ class svmla_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Put the accumulator at the end (argument 3), but keep it as the
merge input for _m functions. */
@@ -1458,7 +1458,7 @@ class svmla_lane_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
if (e.type_suffix (0).integer_p)
{
@@ -1473,7 +1473,7 @@ class svmls_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Put the accumulator at the end (argument 3), but keep it as the
merge input for _m functions. */
@@ -1486,7 +1486,7 @@ class svmov_impl : public function_base
{
public:
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
return gimple_build_assign (f.lhs, BIT_AND_EXPR,
gimple_call_arg (f.call, 0),
@@ -1494,7 +1494,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* The canonical form for the assembler alias "MOV Pa.B, Pb/Z, Pc.B"
is "AND Pa.B, Pb/Z, Pc.B, Pc.B". */
@@ -1508,7 +1508,7 @@ class svmls_lane_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
if (e.type_suffix (0).integer_p)
{
@@ -1523,7 +1523,7 @@ class svmmla_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
insn_code icode;
if (e.type_suffix (0).integer_p)
@@ -1543,7 +1543,7 @@ class svmsb_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return expand_msb (e);
}
@@ -1553,7 +1553,7 @@ class svnand_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
gcc_assert (e.pred == PRED_z);
return e.use_exact_insn (CODE_FOR_aarch64_pred_nandvnx16bi_z);
@@ -1564,7 +1564,7 @@ class svnor_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
gcc_assert (e.pred == PRED_z);
return e.use_exact_insn (CODE_FOR_aarch64_pred_norvnx16bi_z);
@@ -1577,7 +1577,7 @@ public:
CONSTEXPR svnot_impl () : rtx_code_function (NOT, NOT, -1) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
if (e.type_suffix_ids[0] == TYPE_SUFFIX_b)
{
@@ -1595,7 +1595,7 @@ class svorn_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
gcc_assert (e.pred == PRED_z);
return e.use_exact_insn (CODE_FOR_aarch64_pred_ornvnx16bi_z);
@@ -1606,13 +1606,13 @@ class svpfalse_impl : public function_base
{
public:
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
return f.fold_to_pfalse ();
}
rtx
- expand (function_expander &) const OVERRIDE
+ expand (function_expander &) const override
{
return CONST0_RTX (VNx16BImode);
}
@@ -1625,7 +1625,7 @@ public:
CONSTEXPR svpfirst_svpnext_impl (int unspec) : m_unspec (unspec) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = e.vector_mode (0);
e.add_ptrue_hint (0, mode);
@@ -1643,13 +1643,13 @@ public:
CONSTEXPR svprf_bhwd_impl (machine_mode mode) : m_mode (mode) {}
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_PREFETCH_MEMORY;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.prepare_prefetch_operands ();
insn_code icode = code_for_aarch64_sve_prefetch (m_mode);
@@ -1667,19 +1667,19 @@ public:
CONSTEXPR svprf_bhwd_gather_impl (machine_mode mode) : m_mode (mode) {}
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_PREFETCH_MEMORY;
}
machine_mode
- memory_vector_mode (const function_instance &) const OVERRIDE
+ memory_vector_mode (const function_instance &) const override
{
return m_mode;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.prepare_prefetch_operands ();
e.prepare_gather_address_operands (1);
@@ -1705,7 +1705,7 @@ public:
CONSTEXPR svptest_impl (rtx_code compare) : m_compare (compare) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* See whether GP is an exact ptrue for some predicate mode;
i.e. whether converting the GP to that mode will not drop
@@ -1751,13 +1751,13 @@ class svptrue_impl : public function_base
{
public:
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
return f.fold_to_ptrue ();
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return aarch64_ptrue_all (e.type_suffix (0).element_bytes);
}
@@ -1767,7 +1767,7 @@ class svptrue_pat_impl : public function_base
{
public:
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
tree pattern_arg = gimple_call_arg (f.call, 0);
aarch64_svpattern pattern = (aarch64_svpattern) tree_to_shwi (pattern_arg);
@@ -1788,7 +1788,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* In rtl, the predicate is represented as the constant:
@@ -1816,7 +1816,7 @@ public:
{}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Treat non-_pat functions in the same way as _pat functions with
an SV_ALL argument. */
@@ -1877,7 +1877,7 @@ public:
{}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
rtx_code code = (e.type_suffix (0).unsigned_p
? m_code_for_uint
@@ -1908,13 +1908,13 @@ class svrdffr_impl : public function_base
{
public:
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_READ_FFR;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* See the block comment in aarch64-sve.md for details about the
FFR handling. */
@@ -1931,7 +1931,7 @@ class svreinterpret_impl : public quiet<function_base>
{
public:
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
/* Punt to rtl if the effect of the reinterpret on registers does not
conform to GCC's endianness model. */
@@ -1947,7 +1947,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = e.vector_mode (0);
return e.use_exact_insn (code_for_aarch64_sve_reinterpret (mode));
@@ -1958,7 +1958,7 @@ class svrev_impl : public permute
{
public:
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
/* Punt for now on _b16 and wider; we'd need more complex evpc logic
to rerecognize the result. */
@@ -1974,7 +1974,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return e.use_exact_insn (code_for_aarch64_sve_rev (e.vector_mode (0)));
}
@@ -1984,7 +1984,7 @@ class svsel_impl : public quiet<function_base>
{
public:
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
/* svsel corresponds exactly to VEC_COND_EXPR. */
gimple_seq stmts = NULL;
@@ -1996,7 +1996,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* svsel (cond, truev, falsev) is vcond_mask (truev, falsev, cond). */
e.rotate_inputs_left (0, 3);
@@ -2015,7 +2015,7 @@ public:
: quiet<multi_vector_function> (vectors_per_tuple) {}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
tree rhs_tuple = gimple_call_arg (f.call, 0);
tree index = gimple_call_arg (f.call, 1);
@@ -2042,7 +2042,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
rtx rhs_tuple = e.args[0];
unsigned int index = INTVAL (e.args[1]);
@@ -2065,13 +2065,13 @@ class svsetffr_impl : public function_base
{
public:
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_WRITE_FFR;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.args.quick_push (CONSTM1_RTX (VNx16BImode));
return e.use_exact_insn (CODE_FOR_aarch64_wrffr);
@@ -2082,13 +2082,13 @@ class svst1_impl : public full_width_access
{
public:
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_WRITE_MEMORY;
}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
tree vectype = f.vector_type (0);
@@ -2105,7 +2105,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
insn_code icode = convert_optab_handler (maskstore_optab,
e.vector_mode (0), e.gp_mode (0));
@@ -2117,13 +2117,13 @@ class svst1_scatter_impl : public full_width_access
{
public:
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_WRITE_MEMORY;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.prepare_gather_address_operands (1);
/* Put the predicate last, as required by mask_scatter_store_optab. */
@@ -2144,7 +2144,7 @@ public:
: truncating_store (to_mode) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.prepare_gather_address_operands (1);
/* Put the predicate last, since the truncating scatters use the same
@@ -2164,7 +2164,7 @@ public:
: truncating_store (to_mode) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
insn_code icode = code_for_aarch64_store_trunc (e.memory_vector_mode (),
e.vector_mode (0));
@@ -2180,13 +2180,13 @@ public:
: full_width_access (vectors_per_tuple) {}
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_WRITE_MEMORY;
}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
tree vectype = f.vector_type (0);
@@ -2208,7 +2208,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode tuple_mode = GET_MODE (e.args.last ());
insn_code icode = convert_optab_handler (vec_mask_store_lanes_optab,
@@ -2221,13 +2221,13 @@ class svstnt1_impl : public full_width_access
{
public:
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_WRITE_MEMORY;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
insn_code icode = code_for_aarch64_stnt1 (e.vector_mode (0));
return e.use_contiguous_store_insn (icode);
@@ -2241,7 +2241,7 @@ public:
: rtx_code_function (MINUS, MINUS, UNSPEC_COND_FSUB) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Canonicalize subtractions of constants to additions. */
machine_mode mode = e.vector_mode (0);
@@ -2256,7 +2256,7 @@ class svtbl_impl : public permute
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return e.use_exact_insn (code_for_aarch64_sve_tbl (e.vector_mode (0)));
}
@@ -2270,7 +2270,7 @@ public:
: binary_permute (base ? UNSPEC_TRN2 : UNSPEC_TRN1), m_base (base) {}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
/* svtrn1: { 0, nelts, 2, nelts + 2, 4, nelts + 4, ... }
svtrn2: as for svtrn1, but with 1 added to each index. */
@@ -2296,7 +2296,7 @@ public:
: quiet<multi_vector_function> (vectors_per_tuple) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
rtx target = e.get_reg_target ();
emit_clobber (copy_rtx (target));
@@ -2311,7 +2311,7 @@ public:
CONSTEXPR svunpk_impl (bool high_p) : m_high_p (high_p) {}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
/* Don't fold the predicate ops, since every bit of the svbool_t
result is significant. */
@@ -2326,7 +2326,7 @@ public:
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = GET_MODE (e.args[0]);
unsigned int unpacku = m_high_p ? UNSPEC_UNPACKUHI : UNSPEC_UNPACKULO;
@@ -2353,7 +2353,7 @@ public:
CONSTEXPR svusdot_impl (bool su) : m_su (su) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* The implementation of the ACLE function svsudot (for the non-lane
version) is through the USDOT instruction but with the second and third
@@ -2382,7 +2382,7 @@ public:
: binary_permute (base ? UNSPEC_UZP2 : UNSPEC_UZP1), m_base (base) {}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
/* svuzp1: { 0, 2, 4, 6, ... }
svuzp2: { 1, 3, 5, 7, ... }. */
@@ -2456,7 +2456,7 @@ public:
}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
if (f.type_suffix (1).unsigned_p)
return fold_type<poly_uint64> (f);
@@ -2472,13 +2472,13 @@ class svwrffr_impl : public function_base
{
public:
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_WRITE_FFR;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return e.use_exact_insn (CODE_FOR_aarch64_wrffr);
}
@@ -2492,7 +2492,7 @@ public:
: binary_permute (base ? UNSPEC_ZIP2 : UNSPEC_ZIP1), m_base (base) {}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
/* svzip1: { 0, nelts, 1, nelts + 1, 2, nelts + 2, ... }
svzip2: as for svzip1, but with nelts / 2 added to each index. */
@@ -44,7 +44,7 @@ public:
: T (t1, t2, t3) {}
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return 0;
}
@@ -59,7 +59,7 @@ public:
: m_vectors_per_tuple (vectors_per_tuple) {}
unsigned int
- vectors_per_tuple () const OVERRIDE
+ vectors_per_tuple () const override
{
return m_vectors_per_tuple;
}
@@ -78,13 +78,13 @@ public:
: multi_vector_function (vectors_per_tuple) {}
tree
- memory_scalar_type (const function_instance &fi) const OVERRIDE
+ memory_scalar_type (const function_instance &fi) const override
{
return fi.scalar_type (0);
}
machine_mode
- memory_vector_mode (const function_instance &fi) const OVERRIDE
+ memory_vector_mode (const function_instance &fi) const override
{
machine_mode mode = fi.vector_mode (0);
if (m_vectors_per_tuple != 1)
@@ -103,19 +103,19 @@ public:
: m_memory_type (memory_type) {}
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_READ_MEMORY;
}
tree
- memory_scalar_type (const function_instance &) const OVERRIDE
+ memory_scalar_type (const function_instance &) const override
{
return scalar_types[type_suffixes[m_memory_type].vector_type];
}
machine_mode
- memory_vector_mode (const function_instance &fi) const OVERRIDE
+ memory_vector_mode (const function_instance &fi) const override
{
machine_mode mem_mode = type_suffixes[m_memory_type].vector_mode;
machine_mode reg_mode = fi.vector_mode (0);
@@ -145,13 +145,13 @@ public:
CONSTEXPR truncating_store (scalar_int_mode to_mode) : m_to_mode (to_mode) {}
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_WRITE_MEMORY;
}
tree
- memory_scalar_type (const function_instance &fi) const OVERRIDE
+ memory_scalar_type (const function_instance &fi) const override
{
/* In truncating stores, the signedness of the memory element is defined
to be the same as the signedness of the vector element. The signedness
@@ -163,7 +163,7 @@ public:
}
machine_mode
- memory_vector_mode (const function_instance &fi) const OVERRIDE
+ memory_vector_mode (const function_instance &fi) const override
{
poly_uint64 nunits = GET_MODE_NUNITS (fi.vector_mode (0));
return aarch64_sve_data_mode (m_to_mode, nunits).require ();
@@ -205,7 +205,7 @@ public:
: rtx_code_function_base (code_for_sint, code_for_uint, unspec_for_fp) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return e.map_to_rtx_codes (m_code_for_sint, m_code_for_uint,
m_unspec_for_fp);
@@ -225,7 +225,7 @@ public:
: rtx_code_function_base (code_for_sint, code_for_uint, unspec_for_fp) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Rotate the inputs into their normal order, but continue to make _m
functions merge with what was originally the first vector argument. */
@@ -279,7 +279,7 @@ public:
{}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return e.map_to_unspecs (m_unspec_for_sint, m_unspec_for_uint,
m_unspec_for_fp);
@@ -301,7 +301,7 @@ public:
{}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Rotate the inputs into their normal order, but continue to make _m
functions merge with what was originally the first vector argument. */
@@ -329,7 +329,7 @@ public:
{}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return e.use_exact_insn (CODE (unspec_for (e), e.vector_mode (0)));
}
@@ -386,7 +386,7 @@ public:
{}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
int unspec = unspec_for (e);
insn_code icode;
@@ -421,7 +421,7 @@ public:
{}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
int unspec = unspec_for (e);
insn_code icode;
@@ -451,7 +451,7 @@ class code_for_mode_function : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return e.use_exact_insn (CODE_FOR_MODE (e.vector_mode (N)));
}
@@ -477,7 +477,7 @@ public:
CONSTEXPR fixed_insn_function (insn_code code) : m_code (code) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return e.use_exact_insn (m_code);
}
@@ -519,7 +519,7 @@ public:
CONSTEXPR binary_permute (int unspec) : m_unspec (unspec) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
insn_code icode = code_for_aarch64_sve (m_unspec, e.vector_mode (0));
return e.use_exact_insn (icode);
@@ -547,7 +547,7 @@ public:
{}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = e.vector_mode (0);
int unspec = (!e.type_suffix (0).integer_p ? m_unspec_for_fp
@@ -576,7 +576,7 @@ public:
: m_code (code), m_wide_unspec (wide_unspec) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = e.vector_mode (0);
machine_mode elem_mode = GET_MODE_INNER (mode);
@@ -610,7 +610,7 @@ public:
CONSTEXPR unary_count (rtx_code code) : m_code (code) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* The md patterns treat the operand as an integer. */
machine_mode mode = aarch64_sve_int_mode (e.vector_mode (0));
@@ -636,7 +636,7 @@ public:
{}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Suffix 0 determines the predicate mode, suffix 1 determines the
scalar mode and signedness. */
@@ -454,13 +454,13 @@ long_type_suffix (function_resolver &r, type_suffix_index type)
struct nonoverloaded_base : public function_shape
{
bool
- explicit_type_suffix_p (unsigned int) const OVERRIDE
+ explicit_type_suffix_p (unsigned int) const override
{
return true;
}
tree
- resolve (function_resolver &) const OVERRIDE
+ resolve (function_resolver &) const override
{
gcc_unreachable ();
}
@@ -472,7 +472,7 @@ template<unsigned int EXPLICIT_MASK>
struct overloaded_base : public function_shape
{
bool
- explicit_type_suffix_p (unsigned int i) const OVERRIDE
+ explicit_type_suffix_p (unsigned int i) const override
{
return (EXPLICIT_MASK >> i) & 1;
}
@@ -484,7 +484,7 @@ struct adr_base : public overloaded_base<0>
/* The function takes two arguments: a vector base and a vector displacement
(either an index or an offset). Resolve based on them both. */
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
mode_suffix_index mode;
@@ -503,7 +503,7 @@ template<type_class_index CLASS = function_resolver::SAME_TYPE_CLASS>
struct binary_imm_narrowb_base : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_n);
STATIC_ASSERT (CLASS == function_resolver::SAME_TYPE_CLASS
@@ -515,7 +515,7 @@ struct binary_imm_narrowb_base : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (1, 1);
}
@@ -528,7 +528,7 @@ template<type_class_index CLASS = function_resolver::SAME_TYPE_CLASS>
struct binary_imm_narrowt_base : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_n);
STATIC_ASSERT (CLASS == function_resolver::SAME_TYPE_CLASS
@@ -540,7 +540,7 @@ struct binary_imm_narrowt_base : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -560,14 +560,14 @@ struct binary_imm_narrowt_base : public overloaded_base<0>
struct binary_imm_long_base : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_n);
build_all (b, "v0,vh0,su64", group, MODE_n);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type, result_type;
@@ -623,7 +623,7 @@ struct inc_dec_base : public overloaded_base<0>
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
return c.require_immediate_range (m_pat_p ? 2 : 1, 1, 16);
}
@@ -637,7 +637,7 @@ struct load_contiguous_base : public overloaded_base<0>
/* Resolve a call based purely on a pointer argument. The other arguments
are a governing predicate and (for MODE_vnum) a vnum offset. */
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
bool vnum_p = r.mode_suffix_id == MODE_vnum;
gcc_assert (r.mode_suffix_id == MODE_none || vnum_p);
@@ -658,7 +658,7 @@ struct load_contiguous_base : public overloaded_base<0>
struct load_gather_sv_base : public overloaded_base<0>
{
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
mode_suffix_index mode;
@@ -686,7 +686,7 @@ struct load_ext_gather_base : public overloaded_base<1>
The function has an explicit type suffix that determines the type
of the loaded data. */
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
/* No resolution is needed for a vector base with no displacement;
there's a one-to-one mapping between short and long names. */
@@ -713,7 +713,7 @@ struct load_ext_gather_base : public overloaded_base<1>
struct mmla_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
/* svmmla is distributed over several extensions. Allow the common
@@ -729,7 +729,7 @@ struct mmla_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -769,7 +769,7 @@ struct prefetch_gather_base : public overloaded_base<0>
The prefetch operation is the final argument. This is purely a
mode-based resolution; there are no type suffixes. */
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
bool has_displacement_p = r.displacement_units () != UNITS_none;
@@ -791,7 +791,7 @@ template<typename BASE, unsigned int N>
struct shift_right_imm_narrow_wrapper : public BASE
{
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
unsigned int bits = c.type_suffix (0).element_bits / 2;
return c.require_immediate_range (N, 1, bits);
@@ -811,7 +811,7 @@ struct store_scatter_base : public overloaded_base<0>
The stored data is the final argument, and it determines the
type suffix. */
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
bool has_displacement_p = r.displacement_units () != UNITS_none;
@@ -832,14 +832,14 @@ struct store_scatter_base : public overloaded_base<0>
struct ternary_shift_imm_base : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_n);
build_all (b, "v0,v0,v0,su64", group, MODE_n);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (2, 1);
}
@@ -862,7 +862,7 @@ template<unsigned int MODIFIER,
struct ternary_resize2_opt_n_base : public overloaded_base<0>
{
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -884,7 +884,7 @@ template<unsigned int MODIFIER,
struct ternary_resize2_base : public overloaded_base<0>
{
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -908,7 +908,7 @@ template<unsigned int MODIFIER,
struct ternary_resize2_lane_base : public overloaded_base<0>
{
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -932,14 +932,14 @@ struct ternary_bfloat_lane_base
: public ternary_resize2_lane_base<16, TYPE_bfloat, TYPE_bfloat>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vB,vB,su64", group, MODE_none);
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
return c.require_immediate_lane_index (3, N);
}
@@ -954,7 +954,7 @@ struct ternary_qq_lane_base
TYPE_CLASS2, TYPE_CLASS3>
{
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
return c.require_immediate_lane_index (3, 4);
}
@@ -966,7 +966,7 @@ template<type_class_index CLASS = function_resolver::SAME_TYPE_CLASS>
struct unary_narrowb_base : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
STATIC_ASSERT (CLASS == function_resolver::SAME_TYPE_CLASS
@@ -978,7 +978,7 @@ struct unary_narrowb_base : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_unary (CLASS, r.HALF_SIZE);
}
@@ -991,7 +991,7 @@ template<type_class_index CLASS = function_resolver::SAME_TYPE_CLASS>
struct unary_narrowt_base : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
STATIC_ASSERT (CLASS == function_resolver::SAME_TYPE_CLASS
@@ -1003,7 +1003,7 @@ struct unary_narrowt_base : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1023,7 +1023,7 @@ struct unary_narrowt_base : public overloaded_base<0>
struct adr_index_def : public adr_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_index);
build_all (b, "b,b,d", group, MODE_u32base_s32index);
@@ -1041,7 +1041,7 @@ SHAPE (adr_index)
struct adr_offset_def : public adr_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_offset);
build_all (b, "b,b,d", group, MODE_u32base_s32offset);
@@ -1058,14 +1058,14 @@ SHAPE (adr_offset)
struct binary_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,v0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (2);
}
@@ -1080,7 +1080,7 @@ SHAPE (binary)
struct binary_int_opt_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vs0", group, MODE_none);
@@ -1088,7 +1088,7 @@ struct binary_int_opt_n_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1108,20 +1108,20 @@ SHAPE (binary_int_opt_n)
struct binary_lane_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,v0,su64", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (2, 1);
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
return c.require_immediate_lane_index (2);
}
@@ -1135,14 +1135,14 @@ SHAPE (binary_lane)
struct binary_long_lane_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,vh0,vh0,su64", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type, result_type;
@@ -1160,7 +1160,7 @@ struct binary_long_lane_def : public overloaded_base<0>
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
return c.require_immediate_lane_index (2);
}
@@ -1172,7 +1172,7 @@ SHAPE (binary_long_lane)
struct binary_long_opt_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,vh0,vh0", group, MODE_none);
@@ -1180,7 +1180,7 @@ struct binary_long_opt_n_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type, result_type;
@@ -1202,14 +1202,14 @@ SHAPE (binary_long_opt_n)
struct binary_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_n);
build_all (b, "v0,v0,s0", group, MODE_n);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1231,7 +1231,7 @@ SHAPE (binary_n)
struct binary_narrowb_opt_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "vh0,v0,v0", group, MODE_none);
@@ -1239,7 +1239,7 @@ struct binary_narrowb_opt_n_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform_opt_n (2);
}
@@ -1253,7 +1253,7 @@ SHAPE (binary_narrowb_opt_n)
struct binary_narrowt_opt_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "vh0,vh0,v0,v0", group, MODE_none);
@@ -1261,7 +1261,7 @@ struct binary_narrowt_opt_n_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1284,7 +1284,7 @@ SHAPE (binary_narrowt_opt_n)
struct binary_opt_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,v0", group, MODE_none);
@@ -1298,7 +1298,7 @@ struct binary_opt_n_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform_opt_n (2);
}
@@ -1309,7 +1309,7 @@ SHAPE (binary_opt_n)
struct binary_pred_def : public nonoverloaded_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
build_all (b, "v0,v0,v0", group, MODE_none);
}
@@ -1322,20 +1322,20 @@ SHAPE (binary_pred)
struct binary_rotate_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,v0,su64", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (2, 1);
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
return c.require_immediate_either_or (2, 90, 270);
}
@@ -1349,7 +1349,7 @@ SHAPE (binary_rotate)
struct binary_scalar_def : public nonoverloaded_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
build_all (b, "v0,s0,s0", group, MODE_none);
}
@@ -1362,14 +1362,14 @@ SHAPE (binary_scalar)
struct binary_to_uint_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "vu0,v0,v0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (2);
}
@@ -1383,14 +1383,14 @@ SHAPE (binary_to_uint)
struct binary_uint_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vu0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1411,14 +1411,14 @@ SHAPE (binary_uint)
struct binary_uint_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,su0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1440,7 +1440,7 @@ SHAPE (binary_uint_n)
struct binary_uint_opt_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vu0", group, MODE_none);
@@ -1448,7 +1448,7 @@ struct binary_uint_opt_n_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1468,14 +1468,14 @@ SHAPE (binary_uint_opt_n)
struct binary_uint64_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,su64", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1497,7 +1497,7 @@ SHAPE (binary_uint64_n)
struct binary_uint64_opt_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vu64", group, MODE_none);
@@ -1505,7 +1505,7 @@ struct binary_uint64_opt_n_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1522,14 +1522,14 @@ SHAPE (binary_uint64_opt_n)
struct binary_wide_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vh0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1549,7 +1549,7 @@ SHAPE (binary_wide)
struct binary_wide_opt_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vh0", group, MODE_none);
@@ -1557,7 +1557,7 @@ struct binary_wide_opt_n_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1576,7 +1576,7 @@ SHAPE (binary_wide_opt_n)
struct clast_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,v0", group, MODE_none);
@@ -1584,7 +1584,7 @@ struct clast_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
if (!r.check_gp_argument (2, i, nargs)
@@ -1615,14 +1615,14 @@ SHAPE (clast)
struct compare_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "vp,v0,v0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (2);
}
@@ -1636,7 +1636,7 @@ SHAPE (compare)
struct compare_opt_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "vp,v0,v0", group, MODE_none);
@@ -1644,7 +1644,7 @@ struct compare_opt_n_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform_opt_n (2);
}
@@ -1655,14 +1655,14 @@ SHAPE (compare_opt_n)
struct compare_ptr_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "vp,al,al", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1684,14 +1684,14 @@ SHAPE (compare_ptr)
struct compare_scalar_def : public overloaded_base<1>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "vp,s1,s1", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1714,7 +1714,7 @@ SHAPE (compare_scalar)
struct compare_wide_opt_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "vp,v0,vw0", group, MODE_none);
@@ -1722,7 +1722,7 @@ struct compare_wide_opt_n_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1739,7 +1739,7 @@ SHAPE (compare_wide_opt_n)
struct count_inherent_def : public nonoverloaded_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
build_all (b, "su64", group, MODE_none);
}
@@ -1750,7 +1750,7 @@ SHAPE (count_inherent)
struct count_pat_def : public nonoverloaded_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
build_all (b, "su64,epattern", group, MODE_none);
}
@@ -1761,7 +1761,7 @@ SHAPE (count_pat)
struct count_pred_def : public nonoverloaded_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
build_all (b, "su64,vp", group, MODE_none);
}
@@ -1772,14 +1772,14 @@ SHAPE (count_pred)
struct count_vector_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "su64,v0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (1);
}
@@ -1792,14 +1792,14 @@ SHAPE (count_vector)
struct create_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "t0,v0*t", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (r.vectors_per_tuple ());
}
@@ -1813,7 +1813,7 @@ SHAPE (create)
struct dupq_def : public overloaded_base<1>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
/* The "_n" suffix is optional; the full name has it, but the short
name doesn't. */
@@ -1821,7 +1821,7 @@ struct dupq_def : public overloaded_base<1>
}
tree
- resolve (function_resolver &) const OVERRIDE
+ resolve (function_resolver &) const override
{
/* The short forms just make "_n" implicit, so no resolution is needed. */
gcc_unreachable ();
@@ -1836,20 +1836,20 @@ SHAPE (dupq)
struct ext_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,v0,su64", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (2, 1);
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
unsigned int bytes = c.type_suffix (0).element_bytes;
return c.require_immediate_range (2, 0, 256 / bytes - 1);
@@ -1861,14 +1861,14 @@ SHAPE (ext)
struct fold_left_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "s0,s0,v0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1889,14 +1889,14 @@ SHAPE (fold_left)
struct get_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,t0,su64", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1909,7 +1909,7 @@ struct get_def : public overloaded_base<0>
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
unsigned int nvectors = c.vectors_per_tuple ();
return c.require_immediate_range (1, 0, nvectors - 1);
@@ -1927,7 +1927,7 @@ struct inc_dec_def : public inc_dec_base
CONSTEXPR inc_dec_def () : inc_dec_base (false) {}
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
/* These functions are unusual in that the type suffixes for
@@ -1952,7 +1952,7 @@ struct inc_dec_pat_def : public inc_dec_base
CONSTEXPR inc_dec_pat_def () : inc_dec_base (true) {}
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
/* These functions are unusual in that the type suffixes for
@@ -1971,14 +1971,14 @@ SHAPE (inc_dec_pat)
struct inc_dec_pred_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vp", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -1998,14 +1998,14 @@ SHAPE (inc_dec_pred)
struct inc_dec_pred_scalar_def : public overloaded_base<2>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_n);
build_all (b, "s0,s0,vp", group, MODE_n);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -2023,7 +2023,7 @@ SHAPE (inc_dec_pred_scalar)
struct inherent_def : public nonoverloaded_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
build_all (b, "t0", group, MODE_none);
}
@@ -2034,7 +2034,7 @@ SHAPE (inherent)
struct inherent_b_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
/* The "_b" suffix is optional; the full name has it, but the short
name doesn't. */
@@ -2042,7 +2042,7 @@ struct inherent_b_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &) const OVERRIDE
+ resolve (function_resolver &) const override
{
/* The short forms just make "_b" implicit, so no resolution is needed. */
gcc_unreachable ();
@@ -2055,7 +2055,7 @@ SHAPE (inherent_b)
struct load_def : public load_contiguous_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
b.add_overloaded_functions (group, MODE_vnum);
@@ -2072,7 +2072,7 @@ SHAPE (load)
struct load_ext_def : public nonoverloaded_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
build_all (b, "t0,al", group, MODE_none);
build_all (b, "t0,al,ss64", group, MODE_vnum);
@@ -2092,7 +2092,7 @@ SHAPE (load_ext)
struct load_ext_gather_index_def : public load_ext_gather_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_index);
build_sv_index (b, "t0,al,d", group);
@@ -2112,7 +2112,7 @@ SHAPE (load_ext_gather_index)
struct load_ext_gather_index_restricted_def : public load_ext_gather_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_index);
build_sv_index64 (b, "t0,al,d", group);
@@ -2136,7 +2136,7 @@ SHAPE (load_ext_gather_index_restricted)
struct load_ext_gather_offset_def : public load_ext_gather_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_offset);
build_sv_offset (b, "t0,al,d", group);
@@ -2161,7 +2161,7 @@ SHAPE (load_ext_gather_offset)
struct load_ext_gather_offset_restricted_def : public load_ext_gather_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_offset);
build_sv_uint_offset (b, "t0,al,d", group);
@@ -2183,7 +2183,7 @@ SHAPE (load_ext_gather_offset_restricted)
struct load_gather_sv_def : public load_gather_sv_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_index);
b.add_overloaded_functions (group, MODE_offset);
@@ -2205,7 +2205,7 @@ SHAPE (load_gather_sv)
struct load_gather_sv_restricted_def : public load_gather_sv_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_index);
b.add_overloaded_functions (group, MODE_offset);
@@ -2226,7 +2226,7 @@ SHAPE (load_gather_sv_restricted)
struct load_gather_vs_def : public overloaded_base<1>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
/* The base vector mode is optional; the full name has it but the
short name doesn't. There is no ambiguity with SHAPE_load_gather_sv
@@ -2237,7 +2237,7 @@ struct load_gather_vs_def : public overloaded_base<1>
}
tree
- resolve (function_resolver &) const OVERRIDE
+ resolve (function_resolver &) const override
{
/* The short name just makes the base vector mode implicit;
no resolution is needed. */
@@ -2252,7 +2252,7 @@ SHAPE (load_gather_vs)
struct load_replicate_def : public load_contiguous_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "t0,al", group, MODE_none);
@@ -2264,7 +2264,7 @@ SHAPE (load_replicate)
struct pattern_pred_def : public nonoverloaded_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
build_all (b, "vp,epattern", group, MODE_none);
}
@@ -2276,7 +2276,7 @@ SHAPE (pattern_pred)
struct prefetch_def : public nonoverloaded_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
build_all (b, "_,ap,eprfop", group, MODE_none);
build_all (b, "_,ap,ss64,eprfop", group, MODE_vnum);
@@ -2297,7 +2297,7 @@ SHAPE (prefetch)
struct prefetch_gather_index_def : public prefetch_gather_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
b.add_overloaded_functions (group, MODE_index);
@@ -2321,7 +2321,7 @@ SHAPE (prefetch_gather_index)
struct prefetch_gather_offset_def : public prefetch_gather_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
b.add_overloaded_functions (group, MODE_offset);
@@ -2336,7 +2336,7 @@ SHAPE (prefetch_gather_offset)
struct ptest_def : public nonoverloaded_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
build_all (b, "sp,vp", group, MODE_none);
}
@@ -2347,7 +2347,7 @@ SHAPE (ptest)
struct rdffr_def : public nonoverloaded_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
build_all (b, "vp", group, MODE_none);
}
@@ -2358,14 +2358,14 @@ SHAPE (rdffr)
struct reduction_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "s0,v0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (1);
}
@@ -2381,14 +2381,14 @@ SHAPE (reduction)
struct reduction_wide_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "sw0,v0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (1);
}
@@ -2402,14 +2402,14 @@ SHAPE (reduction_wide)
struct set_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "t0,t0,su64,v0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -2423,7 +2423,7 @@ struct set_def : public overloaded_base<0>
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
unsigned int nvectors = c.vectors_per_tuple ();
return c.require_immediate_range (1, 0, nvectors - 1);
@@ -2435,7 +2435,7 @@ SHAPE (set)
struct setffr_def : public nonoverloaded_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
build_all (b, "_", group, MODE_none);
}
@@ -2449,20 +2449,20 @@ SHAPE (setffr)
struct shift_left_imm_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_n);
build_all (b, "v0,v0,su64", group, MODE_n);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (1, 1);
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
unsigned int bits = c.type_suffix (0).element_bits;
return c.require_immediate_range (1, 0, bits - 1);
@@ -2477,7 +2477,7 @@ SHAPE (shift_left_imm)
struct shift_left_imm_long_def : public binary_imm_long_base
{
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
unsigned int bits = c.type_suffix (0).element_bits / 2;
return c.require_immediate_range (1, 0, bits - 1);
@@ -2492,7 +2492,7 @@ SHAPE (shift_left_imm_long)
struct shift_left_imm_to_uint_def : public shift_left_imm_def
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_n);
build_all (b, "vu0,v0,su64", group, MODE_n);
@@ -2507,20 +2507,20 @@ SHAPE (shift_left_imm_to_uint)
struct shift_right_imm_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_n);
build_all (b, "v0,v0,su64", group, MODE_n);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (1, 1);
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
unsigned int bits = c.type_suffix (0).element_bits;
return c.require_immediate_range (1, 1, bits);
@@ -2572,7 +2572,7 @@ SHAPE (shift_right_imm_narrowt_to_uint)
struct store_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
b.add_overloaded_functions (group, MODE_vnum);
@@ -2581,7 +2581,7 @@ struct store_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
bool vnum_p = r.mode_suffix_id == MODE_vnum;
gcc_assert (r.mode_suffix_id == MODE_none || vnum_p);
@@ -2612,7 +2612,7 @@ SHAPE (store)
struct store_scatter_index_def : public store_scatter_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_index);
build_sv_index (b, "_,as,d,t0", group);
@@ -2632,7 +2632,7 @@ SHAPE (store_scatter_index)
struct store_scatter_index_restricted_def : public store_scatter_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_index);
build_sv_index64 (b, "_,as,d,t0", group);
@@ -2657,7 +2657,7 @@ SHAPE (store_scatter_index_restricted)
struct store_scatter_offset_def : public store_scatter_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
b.add_overloaded_functions (group, MODE_offset);
@@ -2683,7 +2683,7 @@ SHAPE (store_scatter_offset)
struct store_scatter_offset_restricted_def : public store_scatter_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
b.add_overloaded_functions (group, MODE_offset);
@@ -2698,14 +2698,14 @@ SHAPE (store_scatter_offset_restricted)
struct tbl_tuple_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,t0,vu0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -2724,7 +2724,7 @@ struct ternary_bfloat_def
: public ternary_resize2_base<16, TYPE_bfloat, TYPE_bfloat>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vB,vB", group, MODE_none);
@@ -2752,7 +2752,7 @@ struct ternary_bfloat_opt_n_def
: public ternary_resize2_opt_n_base<16, TYPE_bfloat, TYPE_bfloat>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vB,vB", group, MODE_none);
@@ -2770,7 +2770,7 @@ struct ternary_intq_uintq_lane_def
: public ternary_qq_lane_base<TYPE_signed, TYPE_unsigned>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vqs0,vqu0,su64", group, MODE_none);
@@ -2786,7 +2786,7 @@ struct ternary_intq_uintq_opt_n_def
TYPE_signed, TYPE_unsigned>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vqs0,vqu0", group, MODE_none);
@@ -2802,20 +2802,20 @@ SHAPE (ternary_intq_uintq_opt_n)
struct ternary_lane_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,v0,v0,su64", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (3, 1);
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
return c.require_immediate_lane_index (3);
}
@@ -2830,20 +2830,20 @@ SHAPE (ternary_lane)
struct ternary_lane_rotate_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,v0,v0,su64,su64", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (3, 2);
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
return (c.require_immediate_lane_index (3, 2)
&& c.require_immediate_one_of (4, 0, 90, 180, 270));
@@ -2859,14 +2859,14 @@ struct ternary_long_lane_def
: public ternary_resize2_lane_base<function_resolver::HALF_SIZE>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vh0,vh0,su64", group, MODE_none);
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
return c.require_immediate_lane_index (3);
}
@@ -2883,7 +2883,7 @@ struct ternary_long_opt_n_def
: public ternary_resize2_opt_n_base<function_resolver::HALF_SIZE>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vh0,vh0", group, MODE_none);
@@ -2900,7 +2900,7 @@ SHAPE (ternary_long_opt_n)
struct ternary_opt_n_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,v0,v0", group, MODE_none);
@@ -2908,7 +2908,7 @@ struct ternary_opt_n_def : public overloaded_base<0>
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform_opt_n (3);
}
@@ -2922,7 +2922,7 @@ SHAPE (ternary_opt_n)
struct ternary_qq_lane_def : public ternary_qq_lane_base<>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vq0,vq0,su64", group, MODE_none);
@@ -2938,14 +2938,14 @@ SHAPE (ternary_qq_lane)
struct ternary_qq_lane_rotate_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vq0,vq0,su64,su64", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -2963,7 +2963,7 @@ struct ternary_qq_lane_rotate_def : public overloaded_base<0>
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
return (c.require_immediate_lane_index (3, 4)
&& c.require_immediate_one_of (4, 0, 90, 180, 270));
@@ -2981,7 +2981,7 @@ struct ternary_qq_opt_n_def
: public ternary_resize2_opt_n_base<function_resolver::QUARTER_SIZE>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vq0,vq0", group, MODE_none);
@@ -2998,14 +2998,14 @@ SHAPE (ternary_qq_opt_n)
struct ternary_qq_rotate_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vq0,vq0,su64", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -3022,7 +3022,7 @@ struct ternary_qq_rotate_def : public overloaded_base<0>
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
return c.require_immediate_one_of (3, 0, 90, 180, 270);
}
@@ -3036,20 +3036,20 @@ SHAPE (ternary_qq_rotate)
struct ternary_rotate_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,v0,v0,su64", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (3, 1);
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
return c.require_immediate_one_of (3, 0, 90, 180, 270);
}
@@ -3063,7 +3063,7 @@ SHAPE (ternary_rotate)
struct ternary_shift_left_imm_def : public ternary_shift_imm_base
{
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
unsigned int bits = c.type_suffix (0).element_bits;
return c.require_immediate_range (2, 0, bits - 1);
@@ -3078,7 +3078,7 @@ SHAPE (ternary_shift_left_imm)
struct ternary_shift_right_imm_def : public ternary_shift_imm_base
{
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
unsigned int bits = c.type_suffix (0).element_bits;
return c.require_immediate_range (2, 1, bits);
@@ -3090,14 +3090,14 @@ SHAPE (ternary_shift_right_imm)
struct ternary_uint_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,v0,vu0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -3119,7 +3119,7 @@ struct ternary_uintq_intq_def
TYPE_unsigned, TYPE_signed>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vqu0,vqs0", group, MODE_none);
@@ -3136,7 +3136,7 @@ struct ternary_uintq_intq_lane_def
: public ternary_qq_lane_base<TYPE_unsigned, TYPE_signed>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vqu0,vqs0,su64", group, MODE_none);
@@ -3152,7 +3152,7 @@ struct ternary_uintq_intq_opt_n_def
TYPE_unsigned, TYPE_signed>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,vqu0,vqs0", group, MODE_none);
@@ -3168,20 +3168,20 @@ SHAPE (ternary_uintq_intq_opt_n)
struct tmad_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0,v0,su64", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_uniform (2, 1);
}
bool
- check (function_checker &c) const OVERRIDE
+ check (function_checker &c) const override
{
return c.require_immediate_range (2, 0, 7);
}
@@ -3195,14 +3195,14 @@ SHAPE (tmad)
struct unary_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_unary ();
}
@@ -3216,14 +3216,14 @@ SHAPE (unary)
struct unary_convert_def : public overloaded_base<1>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v1", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_unary (r.type_suffix (0).tclass,
r.type_suffix (0).element_bits);
@@ -3239,14 +3239,14 @@ SHAPE (unary_convert)
struct unary_convert_narrowt_def : public overloaded_base<1>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,v1", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_unary (r.type_suffix (0).tclass,
r.type_suffix (0).element_bits, true);
@@ -3258,14 +3258,14 @@ SHAPE (unary_convert_narrowt)
struct unary_long_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,vh0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type, result_type;
@@ -3286,7 +3286,7 @@ SHAPE (unary_long)
struct unary_n_def : public overloaded_base<1>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
/* The "_n" suffix is optional; the full name has it, but the short
name doesn't. */
@@ -3294,7 +3294,7 @@ struct unary_n_def : public overloaded_base<1>
}
tree
- resolve (function_resolver &) const OVERRIDE
+ resolve (function_resolver &) const override
{
/* The short forms just make "_n" implicit, so no resolution is needed. */
gcc_unreachable ();
@@ -3322,7 +3322,7 @@ SHAPE (unary_narrowt_to_uint)
struct unary_pred_def : public nonoverloaded_base
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
build_all (b, "v0,v0", group, MODE_none);
}
@@ -3336,14 +3336,14 @@ SHAPE (unary_pred)
struct unary_to_int_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "vs0,v0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_unary (TYPE_signed);
}
@@ -3357,14 +3357,14 @@ SHAPE (unary_to_int)
struct unary_to_uint_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "vu0,v0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
return r.resolve_unary (TYPE_unsigned);
}
@@ -3378,14 +3378,14 @@ SHAPE (unary_to_uint)
struct unary_uint_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,vu0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -3414,14 +3414,14 @@ SHAPE (unary_uint)
struct unary_widen_def : public overloaded_base<0>
{
void
- build (function_builder &b, const function_group_info &group) const OVERRIDE
+ build (function_builder &b, const function_group_info &group) const override
{
b.add_overloaded_functions (group, MODE_none);
build_all (b, "v0,vh0", group, MODE_none);
}
tree
- resolve (function_resolver &r) const OVERRIDE
+ resolve (function_resolver &r) const override
{
unsigned int i, nargs;
type_suffix_index type;
@@ -82,7 +82,7 @@ class svaba_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
rtx_code max_code = e.type_suffix (0).unsigned_p ? UMAX : SMAX;
machine_mode mode = e.vector_mode (0);
@@ -94,7 +94,7 @@ class svcdot_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Convert the rotation amount into a specific unspec. */
int rot = INTVAL (e.args.pop ());
@@ -107,7 +107,7 @@ class svcdot_lane_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Convert the rotation amount into a specific unspec. */
int rot = INTVAL (e.args.pop ());
@@ -120,13 +120,13 @@ class svldnt1_gather_impl : public full_width_access
{
public:
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_READ_MEMORY;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.prepare_gather_address_operands (1, false);
machine_mode mem_mode = e.memory_vector_mode ();
@@ -142,7 +142,7 @@ public:
: extending_load (memory_type) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.prepare_gather_address_operands (1, false);
/* Add a constant predicate for the extension rtx. */
@@ -162,7 +162,7 @@ public:
CONSTEXPR svmatch_svnmatch_impl (int unspec) : m_unspec (unspec) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* These are UNSPEC_PRED_Z operations and so need a hint operand. */
e.add_ptrue_hint (0, e.gp_mode (0));
@@ -185,7 +185,7 @@ public:
{}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.args.quick_push (const0_rtx);
return e.map_to_unspecs (m_unspec_for_sint, m_unspec_for_uint,
@@ -197,7 +197,7 @@ class svqcadd_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Convert the rotation amount into a specific unspec. */
int rot = INTVAL (e.args.pop ());
@@ -213,7 +213,7 @@ class svqrdcmlah_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Convert the rotation amount into a specific unspec. */
int rot = INTVAL (e.args.pop ());
@@ -226,7 +226,7 @@ class svqrdcmlah_lane_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
/* Convert the rotation amount into a specific unspec. */
int rot = INTVAL (e.args.pop ());
@@ -242,7 +242,7 @@ public:
: unspec_based_function (UNSPEC_SQRSHL, UNSPEC_UQRSHL, -1) {}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
{
@@ -276,7 +276,7 @@ public:
: unspec_based_function (UNSPEC_SQSHL, UNSPEC_UQSHL, -1) {}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
{
@@ -312,7 +312,7 @@ public:
: unspec_based_function (UNSPEC_SRSHL, UNSPEC_URSHL, -1) {}
gimple *
- fold (gimple_folder &f) const OVERRIDE
+ fold (gimple_folder &f) const override
{
if (tree amount = uniform_integer_cst_p (gimple_call_arg (f.call, 2)))
{
@@ -349,7 +349,7 @@ class svsqadd_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = e.vector_mode (0);
if (e.pred == PRED_x
@@ -363,7 +363,7 @@ class svsra_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
rtx_code shift_code = e.type_suffix (0).unsigned_p ? LSHIFTRT : ASHIFTRT;
machine_mode mode = e.vector_mode (0);
@@ -375,13 +375,13 @@ class svstnt1_scatter_impl : public full_width_access
{
public:
unsigned int
- call_properties (const function_instance &) const OVERRIDE
+ call_properties (const function_instance &) const override
{
return CP_WRITE_MEMORY;
}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.prepare_gather_address_operands (1, false);
machine_mode mem_mode = e.memory_vector_mode ();
@@ -397,7 +397,7 @@ public:
: truncating_store (to_mode) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
e.prepare_gather_address_operands (1, false);
insn_code icode = code_for_aarch64_scatter_stnt (e.vector_mode (0),
@@ -412,7 +412,7 @@ public:
CONSTEXPR svtbl2_impl () : quiet<multi_vector_function> (2) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
return e.use_exact_insn (code_for_aarch64_sve2_tbl2 (e.vector_mode (0)));
}
@@ -422,7 +422,7 @@ class svuqadd_impl : public function_base
{
public:
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
machine_mode mode = e.vector_mode (0);
if (e.pred == PRED_x
@@ -440,7 +440,7 @@ public:
CONSTEXPR svwhilerw_svwhilewr_impl (int unspec) : m_unspec (unspec) {}
rtx
- expand (function_expander &e) const OVERRIDE
+ expand (function_expander &e) const override
{
for (unsigned int i = 0; i < 2; ++i)
e.args[i] = e.convert_to_pmode (e.args[i]);