From patchwork Thu Apr  5 21:15:06 2018
X-Patchwork-Submitter: Tom Tromey
X-Patchwork-Id: 26619
From: Tom Tromey
To: gdb-patches@sourceware.org
Cc: Tom Tromey
Subject: [RFA 11/12] Remove range_s VEC
Date: Thu,  5 Apr 2018 15:15:06 -0600
Message-Id: <20180405211507.6103-12-tom@tromey.com>
In-Reply-To: <20180405211507.6103-1-tom@tromey.com>
References: <20180405211507.6103-1-tom@tromey.com>

This changes the "optimized_out" and "unavailable" VECs in struct
value to be std::vectors, and then fixes up all the uses.

ChangeLog
2018-04-05  Tom Tromey

	* value.c (range_s): Remove typedef, VEC.
	(struct range): Add operator<.
	(range_lessthan): Remove.
	(ranges_contain): Change type.
	(~value): Update.
	(struct value) <unavailable, optimized_out>: Now std::vector.
	(value_entirely_available)
	(value_entirely_covered_by_range_vector)
	(value_entirely_unavailable, value_entirely_optimized_out):
	Update.
	(insert_into_bit_range_vector): Change argument type.
	(find_first_range_overlap): Likewise.
	(struct ranges_and_idx, value_contents_bits_eq)
	(require_not_optimized_out, require_available): Update.
	(ranges_copy_adjusted): Change argument types.
	(value_optimized_out, value_copy, value_fetch_lazy): Update.
---
 gdb/ChangeLog |  19 ++++
 gdb/value.c   | 207 ++++++++++++++++++++++++++++------------------------
 2 files changed, 119 insertions(+), 107 deletions(-)

diff --git a/gdb/value.c b/gdb/value.c
index e25934b9e3..e39a3f0aac 100644
--- a/gdb/value.c
+++ b/gdb/value.c
@@ -66,11 +66,17 @@ struct range

   /* Length of the range.  */
   LONGEST length;
-};

-typedef struct range range_s;
+  /* Returns true if THIS is strictly less than OTHER, useful for
+     searching.  We keep ranges sorted by offset and coalesce
+     overlapping and contiguous ranges, so this just compares the
+     starting offset.  */

-DEF_VEC_O(range_s);
+  bool operator< (const range &other) const
+  {
+    return offset < other.offset;
+  }
+};

 /* Returns true if the ranges defined by [offset1, offset1+len1) and
    [offset2, offset2+len2) overlap.  */
@@ -86,25 +92,14 @@ ranges_overlap (LONGEST offset1, LONGEST len1,
   return (l < h);
 }

-/* Returns true if the first argument is strictly less than the
-   second, useful for VEC_lower_bound.  We keep ranges sorted by
-   offset and coalesce overlapping and contiguous ranges, so this just
-   compares the starting offset.  */
-
-static int
-range_lessthan (const range_s *r1, const range_s *r2)
-{
-  return r1->offset < r2->offset;
-}
-
 /* Returns true if RANGES contains any range that overlaps [OFFSET,
    OFFSET+LENGTH).  */

 static int
-ranges_contain (VEC(range_s) *ranges, LONGEST offset, LONGEST length)
+ranges_contain (const std::vector<range> &ranges, LONGEST offset,
+                LONGEST length)
 {
-  range_s what;
-  LONGEST i;
+  range what;

   what.offset = offset;
   what.length = length;
@@ -140,21 +135,22 @@

        I=1

  */

-  i = VEC_lower_bound (range_s, ranges, &what, range_lessthan);
-  if (i > 0)
+  auto i = std::lower_bound (ranges.begin (), ranges.end (), what);
+
+  if (i > ranges.begin ())
     {
-      struct range *bef = VEC_index (range_s, ranges, i - 1);
+      const struct range &bef = *(i - 1);

-      if (ranges_overlap (bef->offset, bef->length, offset, length))
+      if (ranges_overlap (bef.offset, bef.length, offset, length))
        return 1;
     }

-  if (i < VEC_length (range_s, ranges))
+  if (i < ranges.end ())
     {
-      struct range *r = VEC_index (range_s, ranges, i);
+      const struct range &r = *i;

-      if (ranges_overlap (r->offset, r->length, offset, length))
+      if (ranges_overlap (r.offset, r.length, offset, length))
        return 1;
     }

@@ -192,8 +188,6 @@ struct value
       delete location.xm_worker;

     xfree (contents);
-    VEC_free (range_s, unavailable);
-    VEC_free (range_s, optimized_out);
   }

   DISABLE_COPY_AND_ASSIGN (value);
@@ -346,7 +340,7 @@ struct value
      The unavailable ranges are tracked in bits.  Note that a contents
      bit that has been optimized out doesn't really exist in the
      program, so it can't be marked unavailable either.  */
-  VEC(range_s) *unavailable = nullptr;
+  std::vector<range> unavailable;

   /* Likewise, but for optimized out contents (a chunk of the value of
      a variable that does not actually exist in the program).  If LVAL
@@ -355,7 +349,7 @@ struct value
      saved registers and optimized-out program variables values are
      treated pretty much the same, except not-saved registers have a
      different string representation and related error strings.  */
-  VEC(range_s) *optimized_out = nullptr;
+  std::vector<range> optimized_out;
 };

 /* See value.h.  */
@@ -399,7 +393,7 @@ value_entirely_available (struct value *value)
   if (value->lazy)
     value_fetch_lazy (value);

-  if (VEC_empty (range_s, value->unavailable))
+  if (value->unavailable.empty ())
     return 1;
   return 0;
 }
@@ -410,20 +404,20 @@ value_entirely_available (struct value *value)

 static int
 value_entirely_covered_by_range_vector (struct value *value,
-                                        VEC(range_s) **ranges)
+                                        const std::vector<range> &ranges)
 {
   /* We can only tell whether the whole value is optimized out /
      unavailable when we try to read it.  */
   if (value->lazy)
     value_fetch_lazy (value);

-  if (VEC_length (range_s, *ranges) == 1)
+  if (ranges.size () == 1)
     {
-      struct range *t = VEC_index (range_s, *ranges, 0);
+      const struct range &t = ranges[0];

-      if (t->offset == 0
-          && t->length == (TARGET_CHAR_BIT
-                           * TYPE_LENGTH (value_enclosing_type (value))))
+      if (t.offset == 0
+          && t.length == (TARGET_CHAR_BIT
+                          * TYPE_LENGTH (value_enclosing_type (value))))
        return 1;
     }

@@ -433,24 +427,23 @@ value_entirely_covered_by_range_vector (struct value *value,
 int
 value_entirely_unavailable (struct value *value)
 {
-  return value_entirely_covered_by_range_vector (value, &value->unavailable);
+  return value_entirely_covered_by_range_vector (value, value->unavailable);
 }

 int
 value_entirely_optimized_out (struct value *value)
 {
-  return value_entirely_covered_by_range_vector (value, &value->optimized_out);
+  return value_entirely_covered_by_range_vector (value, value->optimized_out);
 }

 /* Insert into the vector pointed to by VECTORP the bit range starting
    of OFFSET bits, and extending for the next LENGTH bits.  */

 static void
-insert_into_bit_range_vector (VEC(range_s) **vectorp,
+insert_into_bit_range_vector (std::vector<range> *vectorp,
                               LONGEST offset, LONGEST length)
 {
-  range_s newr;
-  int i;
+  range newr;

   /* Insert the range sorted.  If there's overlap or the new range
      would be contiguous with an existing range, merge.  */
@@ -538,76 +531,77 @@ insert_into_bit_range_vector (VEC(range_s) **vectorp,

  */

-  i = VEC_lower_bound (range_s, *vectorp, &newr, range_lessthan);
-  if (i > 0)
+  auto i = std::lower_bound (vectorp->begin (), vectorp->end (), newr);
+  if (i > vectorp->begin ())
     {
-      struct range *bef = VEC_index (range_s, *vectorp, i - 1);
+      struct range &bef = *(i - 1);

-      if (ranges_overlap (bef->offset, bef->length, offset, length))
+      if (ranges_overlap (bef.offset, bef.length, offset, length))
        {
          /* #1 */
-         ULONGEST l = std::min (bef->offset, offset);
-         ULONGEST h = std::max (bef->offset + bef->length, offset + length);
+         ULONGEST l = std::min (bef.offset, offset);
+         ULONGEST h = std::max (bef.offset + bef.length, offset + length);

-         bef->offset = l;
-         bef->length = h - l;
+         bef.offset = l;
+         bef.length = h - l;
          i--;
        }
-      else if (offset == bef->offset + bef->length)
+      else if (offset == bef.offset + bef.length)
        {
          /* #2 */
-         bef->length += length;
+         bef.length += length;
          i--;
        }
       else
        {
          /* #3 */
-         VEC_safe_insert (range_s, *vectorp, i, &newr);
+         i = vectorp->insert (i, newr);
        }
     }
   else
     {
       /* #4 */
-      VEC_safe_insert (range_s, *vectorp, i, &newr);
+      i = vectorp->insert (i, newr);
     }

   /* Check whether the ranges following the one we've just added or
      touched can be folded in (#5 above).  */
-  if (i + 1 < VEC_length (range_s, *vectorp))
+  if (i != vectorp->end () && i + 1 < vectorp->end ())
     {
-      struct range *t;
-      struct range *r;
       int removed = 0;
-      int next = i + 1;
+      auto next = i + 1;

       /* Get the range we just touched.  */
-      t = VEC_index (range_s, *vectorp, i);
+      struct range &t = *i;
       removed = 0;

       i = next;
-      for (; VEC_iterate (range_s, *vectorp, i, r); i++)
-       if (r->offset <= t->offset + t->length)
-         {
-           ULONGEST l, h;
-
-           l = std::min (t->offset, r->offset);
-           h = std::max (t->offset + t->length, r->offset + r->length);
-
-           t->offset = l;
-           t->length = h - l;
-
-           removed++;
-         }
-       else
-         {
-           /* If we couldn't merge this one, we won't be able to
-              merge following ones either, since the ranges are
-              always sorted by OFFSET.  */
-           break;
-         }
+      for (; i < vectorp->end (); i++)
+       {
+         struct range &r = *i;
+         if (r.offset <= t.offset + t.length)
+           {
+             ULONGEST l, h;
+
+             l = std::min (t.offset, r.offset);
+             h = std::max (t.offset + t.length, r.offset + r.length);
+
+             t.offset = l;
+             t.length = h - l;
+
+             removed++;
+           }
+         else
+           {
+             /* If we couldn't merge this one, we won't be able to
+                merge following ones either, since the ranges are
+                always sorted by OFFSET.  */
+             break;
+           }
+       }

       if (removed != 0)
-       VEC_block_remove (range_s, *vectorp, next, removed);
+       vectorp->erase (next, next + removed);
     }
 }
@@ -633,15 +627,17 @@ mark_value_bytes_unavailable (struct value *value,
    found, or -1 if none was found.  */

 static int
-find_first_range_overlap (VEC(range_s) *ranges, int pos,
+find_first_range_overlap (const std::vector<range> *ranges, int pos,
                           LONGEST offset, LONGEST length)
 {
-  range_s *r;
   int i;

-  for (i = pos; VEC_iterate (range_s, ranges, i, r); i++)
-    if (ranges_overlap (r->offset, r->length, offset, length))
-      return i;
+  for (i = pos; i < ranges->size (); i++)
+    {
+      const range &r = (*ranges)[i];
+      if (ranges_overlap (r.offset, r.length, offset, length))
+       return i;
+    }

   return -1;
 }
@@ -754,7 +750,7 @@ memcmp_with_bit_offsets (const gdb_byte *ptr1, size_t offset1_bits,
 struct ranges_and_idx
 {
   /* The ranges.  */
-  VEC(range_s) *ranges;
+  const std::vector<range> *ranges;

   /* The range we've last found in RANGES.  Given ranges are sorted,
      we can start the next lookup here.  */
@@ -788,12 +784,12 @@ find_first_range_overlap_and_match (struct ranges_and_idx *rp1,
     return 0;
   else
     {
-      range_s *r1, *r2;
+      const range *r1, *r2;
       ULONGEST l1, h1;
       ULONGEST l2, h2;

-      r1 = VEC_index (range_s, rp1->ranges, rp1->idx);
-      r2 = VEC_index (range_s, rp2->ranges, rp2->idx);
+      r1 = &(*rp1->ranges)[rp1->idx];
+      r2 = &(*rp2->ranges)[rp2->idx];

       /* Get the unavailable windows intersected by the incoming
         ranges.  The first and last ranges that overlap the argument
@@ -849,10 +845,10 @@ value_contents_bits_eq (const struct value *val1, int offset1,

   memset (&rp1, 0, sizeof (rp1));
   memset (&rp2, 0, sizeof (rp2));
-  rp1[0].ranges = val1->unavailable;
-  rp2[0].ranges = val2->unavailable;
-  rp1[1].ranges = val1->optimized_out;
-  rp2[1].ranges = val2->optimized_out;
+  rp1[0].ranges = &val1->unavailable;
+  rp2[0].ranges = &val2->unavailable;
+  rp1[1].ranges = &val1->optimized_out;
+  rp2[1].ranges = &val2->optimized_out;

   while (length > 0)
     {
@@ -1210,7 +1206,7 @@ error_value_optimized_out (void)
 static void
 require_not_optimized_out (const struct value *value)
 {
-  if (!VEC_empty (range_s, value->optimized_out))
+  if (!value->optimized_out.empty ())
     {
       if (value->lval == lval_register)
        error (_("register has not been saved in frame"));
@@ -1222,7 +1218,7 @@ require_not_optimized_out (const struct value *value)
 static void
 require_available (const struct value *value)
 {
-  if (!VEC_empty (range_s, value->unavailable))
+  if (!value->unavailable.empty ())
     throw_error (NOT_AVAILABLE_ERROR, _("value is not available"));
 }

@@ -1254,19 +1250,16 @@ value_contents_all (struct value *value)
    SRC_BIT_OFFSET+BIT_LENGTH) ranges into *DST_RANGE, adjusted.  */

 static void
-ranges_copy_adjusted (VEC (range_s) **dst_range, int dst_bit_offset,
-                      VEC (range_s) *src_range, int src_bit_offset,
+ranges_copy_adjusted (std::vector<range> *dst_range, int dst_bit_offset,
+                      const std::vector<range> &src_range, int src_bit_offset,
                       int bit_length)
 {
-  range_s *r;
-  int i;
-
-  for (i = 0; VEC_iterate (range_s, src_range, i, r); i++)
+  for (const range &r : src_range)
     {
       ULONGEST h, l;

-      l = std::max (r->offset, (LONGEST) src_bit_offset);
-      h = std::min (r->offset + r->length,
+      l = std::max (r.offset, (LONGEST) src_bit_offset);
+      h = std::min (r.offset + r.length,
                     (LONGEST) src_bit_offset + bit_length);

       if (l < h)
@@ -1405,7 +1398,7 @@ value_optimized_out (struct value *value)
 {
   /* We can only know if a value is optimized out once we have tried to
      fetch it.  */
-  if (VEC_empty (range_s, value->optimized_out) && value->lazy)
+  if (value->optimized_out.empty () && value->lazy)
     {
       TRY
        {
@@ -1418,7 +1411,7 @@
       END_CATCH
     }

-  return !VEC_empty (range_s, value->optimized_out);
+  return !value->optimized_out.empty ();
 }

 /* Mark contents of VALUE as optimized out, starting at OFFSET bytes, and
@@ -1687,8 +1680,8 @@ value_copy (struct value *arg)
              TYPE_LENGTH (value_enclosing_type (arg)));
     }

-  val->unavailable = VEC_copy (range_s, arg->unavailable);
-  val->optimized_out = VEC_copy (range_s, arg->optimized_out);
+  val->unavailable = arg->unavailable;
+  val->optimized_out = arg->optimized_out;
   val->parent = arg->parent;
   if (VALUE_LVAL (val) == lval_computed)
     {
@@ -3738,8 +3731,8 @@ value_fetch_lazy (struct value *val)
   /* A value is either lazy, or fully fetched.  The availability/validity
      is only established as we try to fetch a value.  */
-  gdb_assert (VEC_empty (range_s, val->optimized_out));
-  gdb_assert (VEC_empty (range_s, val->unavailable));
+  gdb_assert (val->optimized_out.empty ());
+  gdb_assert (val->unavailable.empty ());

   if (value_bitsize (val))
     {
       /* To read a lazy bitfield, read the entire enclosing value.  This
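
(Not part of the patch: for readers less familiar with the idiom, the
following is a minimal, self-contained sketch of how the new
range::operator< combines with std::lower_bound over a sorted
std::vector<range>, mirroring what the converted ranges_contain does
above.  The helper name ranges_contain_sketch and the inline overlap
test are illustrative only; the real code calls its ranges_overlap
helper instead.)

/* Standalone sketch (illustrative only): a vector of ranges kept sorted
   by offset, searched with std::lower_bound via range::operator<.  */
#include <algorithm>
#include <cstdio>
#include <vector>

struct range
{
  long offset;
  long length;

  /* Compare by starting offset only, as in the patch.  */
  bool operator< (const range &other) const
  { return offset < other.offset; }
};

/* Return true if [offset, offset+length) overlaps any element of RANGES.
   RANGES must be sorted by offset and contain no overlapping elements.  */
static bool
ranges_contain_sketch (const std::vector<range> &ranges,
		       long offset, long length)
{
  range what { offset, length };

  /* Find the first range whose offset is not less than OFFSET.  */
  auto i = std::lower_bound (ranges.begin (), ranges.end (), what);

  /* The previous range may still reach into [offset, offset+length).  */
  if (i != ranges.begin ())
    {
      const range &bef = *(i - 1);
      if (offset < bef.offset + bef.length && bef.offset < offset + length)
	return true;
    }

  /* The found range may start inside [offset, offset+length).  */
  if (i != ranges.end ())
    {
      const range &r = *i;
      if (offset < r.offset + r.length && r.offset < offset + length)
	return true;
    }

  return false;
}

int
main ()
{
  std::vector<range> v = { { 0, 4 }, { 10, 2 } };
  std::printf ("%d %d\n",
	       ranges_contain_sketch (v, 3, 2),   /* overlaps {0, 4} -> 1 */
	       ranges_contain_sketch (v, 5, 3));  /* falls in the gap -> 0 */
  return 0;
}

Compiling and running the sketch prints "1 0": the first query overlaps
the {0, 4} range, the second lands entirely in the gap between ranges.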