diff mbox series

[4/8] tree-dynamic-object-size: Support ADDR_EXPR

Message ID 20211007221432.1029249-5-siddhesh@gotplt.org
State Superseded
Headers show
Series __builtin_dynamic_object_size and more | expand

Commit Message

Siddhesh Poyarekar Oct. 7, 2021, 10:14 p.m. UTC
This is the beginnings of support for ADDR_EXPR objects and is partly
based on the implementation in tree-object-size, splitting out
functions for readability.

One key difference from constant-size ADDR_EXPR computation is the
computation of residual sizes based on offset.  This pass attempts to
compute an expression with guards to try and not overflow.  This is
however still rudimentary and a subsequent patch for subobject support
makes it more comprehensive by handling negative offsets as well.

The size expressions based on offsets may look arbitrarily complex but
in practice most parts of the expression tend to fold away due to
being constants.  Despite that it is still a potential bottleneck and
may need some artificial backstop (such as bailing out on computation
if the size expression nests more than an arbitrarily chosen N levels)
to reduce compile time as well as avoid adding too much of a
performance overhead to generated code.

gcc/ChangeLog:

	* builtins.c (fold_builtin_dyn_object_size): Handle ADDR_EXPR.
	* tree-dynamic-object-size.c (compute_object_offset,
	size_for_offset, get_whole_var, whole_var_size,
	addr_dyn_object_size): New functions.
	(compute_builtin_dyn_object_size): Handle ADDR_EXPR.
	(expr_object_size): New function.
	(collect_object_sizes_for): Use it.

gcc/testsuite/ChangeLog:

	* gcc.dg/builtin-dynamic-object-size-0.c
	(test_builtin_malloc_condphi4, test_builtin_malloc_condphi5,
	test_passthrough_nonssa): New tests.
	(main): Call them.

Signed-off-by: Siddhesh Poyarekar <siddhesh@gotplt.org>
---
 gcc/builtins.c                                |   2 +-
 .../gcc.dg/builtin-dynamic-object-size-0.c    |  37 +++
 gcc/tree-dynamic-object-size.c                | 255 +++++++++++++++++-
 3 files changed, 287 insertions(+), 7 deletions(-)
diff mbox series

Patch

diff --git a/gcc/builtins.c b/gcc/builtins.c
index d015029765b..c1e23324552 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -10339,7 +10339,7 @@  fold_builtin_dyn_object_size (tree ptr, tree ost)
 
   /* If object size expression cannot be evaluated yet, delay folding until
      later.  Maybe subsequent passes will help determining it.  */
-  if (TREE_CODE (ptr) == SSA_NAME
+  if ((TREE_CODE (ptr) == SSA_NAME || TREE_CODE (ptr) == ADDR_EXPR)
       && compute_builtin_dyn_object_size (ptr, object_size_type, &bytes))
     return bytes;
 
diff --git a/gcc/testsuite/gcc.dg/builtin-dynamic-object-size-0.c b/gcc/testsuite/gcc.dg/builtin-dynamic-object-size-0.c
index 7098ef485c6..22c86190341 100644
--- a/gcc/testsuite/gcc.dg/builtin-dynamic-object-size-0.c
+++ b/gcc/testsuite/gcc.dg/builtin-dynamic-object-size-0.c
@@ -105,6 +105,25 @@  test_builtin_malloc_condphi3 (int cond, size_t in, size_t in2)
   return __builtin_dynamic_object_size (ret, 0);
 }
 
+size_t
+__attribute__ ((noinline))
+test_builtin_malloc_condphi4 (size_t sz, int cond)
+{
+  char *a = __builtin_malloc (sz);
+  char b[sz / 2];
+
+  return __builtin_dynamic_object_size (cond ? b : (void *) &a, 0);
+}
+
+size_t
+__attribute__ ((noinline))
+test_builtin_malloc_condphi5 (size_t sz, int cond, char *c)
+{
+  char *a = __builtin_malloc (sz);
+
+  return __builtin_dynamic_object_size (cond ? c : (void *) &a, 0);
+}
+
 /* Calloc-like allocator.  */
 
 size_t
@@ -158,6 +177,16 @@  test_passthrough (size_t sz, char *in)
   return __builtin_dynamic_object_size (dest, 0);
 }
 
+size_t
+__attribute__ ((noinline))
+test_passthrough_nonssa (char *in)
+{
+  char bin[__builtin_strlen (in) + 1];
+  char *dest = __builtin_memcpy (bin, in, __builtin_strlen (in) + 1);
+
+  return __builtin_dynamic_object_size (dest, 0);
+}
+
 /* Variable length arrays.  */
 size_t
 __attribute__ ((noinline))
@@ -223,6 +252,12 @@  main (int argc, char **argv)
     FAIL ();
   if (test_builtin_malloc_condphi3 (0, 128, 256) != 256)
     FAIL ();
+  if (test_builtin_malloc_condphi4 (128, 1) != 64)
+    FAIL ();
+  if (test_builtin_malloc_condphi4 (128, 0) != sizeof (void *))
+    FAIL ();
+  if (test_builtin_malloc_condphi5 (128, 0, argv[0]) != -1)
+    FAIL ();
   if (test_calloc (2048, 4) != 2048 * 4)
     FAIL ();
   if (test_builtin_calloc (2048, 8) != 2048 * 8)
@@ -240,6 +275,8 @@  main (int argc, char **argv)
   if (test_passthrough (__builtin_strlen (argv[0]) + 1, argv[0])
       != __builtin_strlen (argv[0]) + 1)
     FAIL ();
+  if (test_passthrough_nonssa (argv[0]) != __builtin_strlen (argv[0]) + 1)
+     FAIL ();
   if (test_dynarray (__builtin_strlen (argv[0])) != __builtin_strlen (argv[0]))
     FAIL ();
   if (test_dynarray_cond (0) != 16)
diff --git a/gcc/tree-dynamic-object-size.c b/gcc/tree-dynamic-object-size.c
index 483d6782d49..6cc63abd010 100644
--- a/gcc/tree-dynamic-object-size.c
+++ b/gcc/tree-dynamic-object-size.c
@@ -93,6 +93,212 @@  static vec<tree *>object_size_exprs[4];
 /* Bitmaps what object sizes have been computed already.  */
 static bitmap computed[4];
 
+bool compute_builtin_dyn_object_size (tree, int, tree *,
+				      struct function *fun = NULL);
+
+/* Compute offset of EXPR within VAR.  Return error_mark_node if unknown.  */
+
+static tree
+compute_object_offset (const_tree expr, const_tree var)
+{
+  enum tree_code code = PLUS_EXPR;
+  tree base, off, t;
+
+  if (expr == var)
+    return size_zero_node;
+
+  switch (TREE_CODE (expr))
+    {
+    case COMPONENT_REF:
+      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
+      if (base == error_mark_node)
+	return base;
+
+      t = TREE_OPERAND (expr, 1);
+      off = fold_build2 (PLUS_EXPR, sizetype, DECL_FIELD_OFFSET (t),
+			 size_int (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (t))
+				   / BITS_PER_UNIT));
+      break;
+
+    case REALPART_EXPR:
+    CASE_CONVERT:
+    case VIEW_CONVERT_EXPR:
+    case NON_LVALUE_EXPR:
+      return compute_object_offset (TREE_OPERAND (expr, 0), var);
+
+    case IMAGPART_EXPR:
+      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
+      if (base == error_mark_node)
+	return base;
+
+      off = TYPE_SIZE_UNIT (TREE_TYPE (expr));
+      break;
+
+    case ARRAY_REF:
+      base = compute_object_offset (TREE_OPERAND (expr, 0), var);
+      if (base == error_mark_node)
+	return base;
+
+      t = TREE_OPERAND (expr, 1);
+      tree low_bound, unit_size;
+      low_bound = array_ref_low_bound (CONST_CAST_TREE (expr));
+      unit_size = array_ref_element_size (CONST_CAST_TREE (expr));
+      if (! integer_zerop (low_bound))
+	t = fold_build2 (MINUS_EXPR, TREE_TYPE (t), t, low_bound);
+      if (TREE_CODE (t) == INTEGER_CST && tree_int_cst_sgn (t) < 0)
+	{
+	  code = MINUS_EXPR;
+	  t = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
+	}
+      t = fold_convert (sizetype, t);
+      off = fold_build2 (MULT_EXPR, sizetype, unit_size, t);
+      break;
+
+    case MEM_REF:
+      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
+      return wide_int_to_tree (sizetype, mem_ref_offset (expr));
+
+    default:
+      return error_mark_node;
+    }
+
+  return fold_build2 (code, sizetype, base, off);
+}
+
+/* Given an object size SZ and offset OFF into it, compute the usable object
+   size.  The expression returns 0 for all offsets that invoke undefined
+   behaviour.  */
+
+static tree
+size_for_offset (tree sz, tree off)
+{
+  /* A MEM_REF offset may be a pointer, where we need to figure out the
+     multiplier based on the base type.  */
+  if (POINTER_TYPE_P (TREE_TYPE (off)))
+    {
+      tree typesize = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (off)));
+
+      if (typesize)
+	{
+	  /*  SZ < TYPESIZE ? SZ : TYPESIZE * MIN (SZ / TYPESIZE, OFF)  */
+	  tree cond = fold_build2 (LT_EXPR, sizetype, sz, typesize);
+
+	  tree tmp = fold_build2 (EXACT_DIV_EXPR, sizetype, sz, typesize);
+	  tmp = fold_build2 (MIN_EXPR, sizetype, tmp, off);
+	  tmp = fold_build2 (MULT_EXPR, sizetype, tmp, typesize);
+
+	  off = fold_build3 (COND_EXPR, sizetype, cond, sz, tmp);
+
+	  return fold_build2 (MINUS_EXPR, sizetype, sz, off);
+	}
+      else
+	off = fold_convert (sizetype, off);
+    }
+
+  off = fold_build2 (MIN_EXPR, sizetype, sz, off);
+  return fold_build2 (MINUS_EXPR, sizetype, sz, off);
+}
+
+/* Strip handled component references off the operand of the ADDR_EXPR PTR to
+   get at the whole variable that PTR points into.  Return the whole variable
+   tree.  */
+
+static tree
+get_whole_var (const_tree ptr)
+{
+  tree pt_var;
+
+  pt_var = TREE_OPERAND (ptr, 0);
+  while (handled_component_p (pt_var))
+    pt_var = TREE_OPERAND (pt_var, 0);
+
+  return pt_var;
+}
+
+static bool
+whole_var_size (struct object_size_info *osi, tree pt_var,
+		int object_size_type, tree *pt_var_sizep)
+{
+  tree pt_var_size = NULL_TREE;
+  int subobject = object_size_type & 1;
+  int min = object_size_type & 2;
+
+  if (TREE_CODE (pt_var) == MEM_REF)
+    {
+      tree var = TREE_OPERAND (pt_var, 0);
+      if (!osi || subobject || TREE_CODE (var) != SSA_NAME)
+	compute_builtin_dyn_object_size (var, min, &pt_var_size);
+      else
+	{
+	  collect_object_sizes_for (osi, var);
+	  pt_var_size = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
+	}
+
+      if (pt_var_size != NULL_TREE)
+	{
+	  tree offset = wide_int_to_tree (size_type_node,
+					  mem_ref_offset (pt_var));
+
+	  pt_var_size = size_for_offset (pt_var_size, offset);
+	}
+    }
+  else if (DECL_P (pt_var))
+    {
+      pt_var_size = decl_init_size (pt_var, min);
+      if (!pt_var_size)
+	return false;
+    }
+  else if (TREE_CODE (pt_var) == STRING_CST)
+    pt_var_size = TYPE_SIZE_UNIT (TREE_TYPE (pt_var));
+  else
+    return false;
+
+  *pt_var_sizep = pt_var_size;
+  return true;
+}
+
+/* Compute an object size estimate for PTR, which is an ADDR_EXPR.
+   OBJECT_SIZE_TYPE is the second argument from __builtin_dynamic_object_size.
+   If unknown, return false, setting PSIZE to NULL_TREE.  */
+
+static bool
+addr_dyn_object_size (struct object_size_info *osi, const_tree ptr,
+		      int object_size_type, tree *psize)
+{
+  tree pt_var, pt_var_size = NULL_TREE, bytes;
+
+  gcc_assert (TREE_CODE (ptr) == ADDR_EXPR);
+
+  /* Set to unknown and overwrite just before returning if the size
+     could be determined.  */
+  *psize = NULL_TREE;
+
+  pt_var = get_whole_var (ptr);
+
+  if (!pt_var)
+    return false;
+
+  if (!whole_var_size (osi, pt_var, object_size_type, &pt_var_size))
+    return false;
+
+  if (!pt_var_size)
+    return false;
+
+  /* PTR points to a subobject of whole variable PT_VAR.  */
+  if (pt_var != TREE_OPERAND (ptr, 0))
+    {
+      bytes = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
+      if (bytes != error_mark_node)
+	bytes = size_for_offset (pt_var_size, bytes);
+    }
+  else
+    bytes = pt_var_size;
+
+  *psize = bytes == error_mark_node ? NULL_TREE : bytes;
+  return true;
+}
+
+
 /* Compute __builtin_dynamic_object_size for CALL, which is a GIMPLE_CALL.
    Handles calls to functions declared with attribute alloc_size.
    OBJECT_SIZE_TYPE is the second argument from __builtin_dynamic_object_size.
@@ -299,6 +505,13 @@  compute_builtin_dyn_object_size (tree ptr, int object_size_type, tree *psize,
      could be determined.  */
   *psize = NULL_TREE;
 
+  if (TREE_CODE (ptr) == ADDR_EXPR)
+    /* We assume that the caller will gimplify the expression.  If the
+       expression depends on any SSA objects, its size expression is gimplified
+       and returned, so the expression will definitely not depend on the cached
+       objects.  */
+    return addr_dyn_object_size (NULL, ptr, object_size_type, psize);
+
   if (TREE_CODE (ptr) != SSA_NAME
       || !POINTER_TYPE_P (TREE_TYPE (ptr)))
       return false;
@@ -359,6 +572,27 @@  out:
   return *psize != NULL_TREE;
 }
 
+/* Compute object_sizes for an object defined to VALUE, which is not an
+   SSA_NAME.  */
+
+static void
+expr_object_size (struct object_size_info *osi, tree value, tree *sz)
+{
+  int object_size_type = osi->object_size_type;
+  tree bytes = NULL_TREE;
+
+  if (TREE_CODE (value) == WITH_SIZE_EXPR)
+    value = TREE_OPERAND (value, 0);
+
+  if (TREE_CODE (value) == ADDR_EXPR)
+    addr_dyn_object_size (osi, value, object_size_type, &bytes);
+
+  if (bytes)
+    STRIP_NOPS (bytes);
+
+  *sz = bytes;
+}
+
 static void
 maybe_update_dependency_loop (struct object_size_info *osi, unsigned name,
 			      tree sz)
@@ -506,6 +740,12 @@  collect_object_sizes_for (struct object_size_info *osi, tree var)
 	  {
 	    if (TREE_CODE (arg) == SSA_NAME)
 	      set_object_size_ssa (osi, var, arg);
+	    else
+	      {
+		tree ret;
+		expr_object_size (osi, arg, &ret);
+		cache_object_size_copy (osi, varno, ret);
+	      }
 	  }
 	else
 	  call_object_size (osi, var, call_stmt);
@@ -517,17 +757,20 @@  collect_object_sizes_for (struct object_size_info *osi, tree var)
 	unsigned i, num_args = gimple_phi_num_args (stmt);
 	tree *sizes = new tree[num_args] ();
 
-	/* Bail out if any of the PHI arguments are non-SSA expressions or
-	   if size of an argument cannot be determined.  */
+	/* Bail out if the size of any of the PHI arguments cannot be
+	   determined.  */
 	for (i = 0; i < gimple_phi_num_args (stmt); i++)
 	  {
 	    tree rhs = gimple_phi_arg_def (stmt, i);
+	    tree sz;
 
 	    if (TREE_CODE (rhs) != SSA_NAME)
-	      break;
-
-	    collect_object_sizes_for (osi, rhs);
-	    tree sz = object_sizes[object_size_type][SSA_NAME_VERSION (rhs)];
+	      expr_object_size (osi, rhs, &sz);
+	    else
+	      {
+		collect_object_sizes_for (osi, rhs);
+		sz = object_sizes[object_size_type][SSA_NAME_VERSION (rhs)];
+	      }
 
 	    if (sz == NULL_TREE)
 	      break;