crypt: Remove alloca usage from __sha512_crypt_r

Message ID 20230918203213.2830034-1-josimmon@redhat.com
State Superseded
Headers
Series crypt: Remove alloca usage from __sha512_crypt_r |

Checks

Context Check Description
redhat-pt-bot/TryBot-apply_patch success Patch applied to master at the time it was sent
redhat-pt-bot/TryBot-32bit success Build for i686
linaro-tcwg-bot/tcwg_glibc_build--master-arm success Testing passed
linaro-tcwg-bot/tcwg_glibc_build--master-aarch64 success Testing passed
linaro-tcwg-bot/tcwg_glibc_check--master-arm success Testing passed
linaro-tcwg-bot/tcwg_glibc_check--master-aarch64 success Testing passed

Commit Message

Joe Simmons-Talbott Sept. 18, 2023, 8:32 p.m. UTC
  Replace alloca usage with scratch_buffers to avoid potential stack
overflow.
---
 crypt/sha512-crypt.c | 63 ++++++++++++++++++++++++++++----------------
 1 file changed, 40 insertions(+), 23 deletions(-)
  

Patch

diff --git a/crypt/sha512-crypt.c b/crypt/sha512-crypt.c
index d9c0de14d2..8a4cc380e3 100644
--- a/crypt/sha512-crypt.c
+++ b/crypt/sha512-crypt.c
@@ -18,6 +18,7 @@ 
 
 #include <assert.h>
 #include <errno.h>
+#include <scratch_buffer.h>
 #include <stdbool.h>
 #include <stdlib.h>
 #include <string.h>
@@ -115,9 +116,16 @@  __sha512_crypt_r (const char *key, const char *salt, char *buffer, int buflen)
   /* Default number of rounds.  */
   size_t rounds = ROUNDS_DEFAULT;
   bool rounds_custom = false;
-  size_t alloca_used = 0;
   char *free_key = NULL;
   char *free_pbytes = NULL;
+  struct scratch_buffer sbuf;
+  scratch_buffer_init (&sbuf);
+  struct scratch_buffer salt_buf;
+  scratch_buffer_init (&salt_buf);
+  struct scratch_buffer p_bytes_buf;
+  scratch_buffer_init (&p_bytes_buf);
+  struct scratch_buffer s_bytes_buf;
+  scratch_buffer_init (&s_bytes_buf);
 
   /* Find beginning of salt string.  The prefix should normally always
      be present.  Just in case it is not.  */
@@ -146,14 +154,11 @@  __sha512_crypt_r (const char *key, const char *salt, char *buffer, int buflen)
     {
       char *tmp;
 
-      if (__libc_use_alloca (alloca_used + key_len + __alignof__ (uint64_t)))
-	tmp = alloca_account (key_len + __alignof__ (uint64_t), alloca_used);
-      else
-	{
-	  free_key = tmp = (char *) malloc (key_len + __alignof__ (uint64_t));
-	  if (tmp == NULL)
-	    return NULL;
-	}
+      if (!scratch_buffer_set_array_size (
+		&sbuf, 1, key_len + __alignof__ (uint64_t)))
+	return NULL;
+
+      tmp = sbuf.data;
 
       key = copied_key =
 	memcpy (tmp + __alignof__ (uint64_t)
@@ -164,7 +169,13 @@  __sha512_crypt_r (const char *key, const char *salt, char *buffer, int buflen)
 
   if (((uintptr_t) salt) % __alignof__ (uint64_t) != 0)
     {
-      char *tmp = (char *) alloca (salt_len + __alignof__ (uint64_t));
+      if (!scratch_buffer_set_array_size (
+		&salt_buf, 1, salt_len + __alignof__ (uint64_t)))
+	{
+	  scratch_buffer_free (&sbuf);
+	  return NULL;
+	}
+      char *tmp = salt_buf.data;
       salt = copied_salt =
 	memcpy (tmp + __alignof__ (uint64_t)
 		- ((uintptr_t) tmp) % __alignof__ (uint64_t),
@@ -177,7 +188,8 @@  __sha512_crypt_r (const char *key, const char *salt, char *buffer, int buflen)
   NSSLOWInitContext *nss_ictx = NSSLOW_Init ();
   if (nss_ictx == NULL)
     {
-      free (free_key);
+      scratch_buffer_free (&sbuf);
+      scratch_buffer_free (&salt_buf);
       return NULL;
     }
   NSSLOWHASHContext *nss_ctx = NULL;
@@ -242,17 +254,13 @@  __sha512_crypt_r (const char *key, const char *salt, char *buffer, int buflen)
   sha512_finish_ctx (&alt_ctx, nss_alt_ctx, temp_result);
 
   /* Create byte sequence P.  */
-  if (__libc_use_alloca (alloca_used + key_len))
-    cp = p_bytes = (char *) alloca (key_len);
-  else
+  if (!scratch_buffer_set_array_size (&p_bytes_buf, 1, key_len))
     {
-      free_pbytes = cp = p_bytes = (char *)malloc (key_len);
-      if (free_pbytes == NULL)
-	{
-	  free (free_key);
-	  return NULL;
-	}
+      scratch_buffer_free (&sbuf);
+      scratch_buffer_free (&salt_buf);
+      return NULL;
     }
+  cp = p_bytes = p_bytes_buf.data;
 
   for (cnt = key_len; cnt >= 64; cnt -= 64)
     cp = mempcpy (cp, temp_result, 64);
@@ -269,7 +277,15 @@  __sha512_crypt_r (const char *key, const char *salt, char *buffer, int buflen)
   sha512_finish_ctx (&alt_ctx, nss_alt_ctx, temp_result);
 
   /* Create byte sequence S.  */
-  cp = s_bytes = alloca (salt_len);
+  if (!scratch_buffer_set_array_size (&s_bytes_buf, 1, salt_len))
+    {
+      scratch_buffer_free (&sbuf);
+      scratch_buffer_free (&salt_buf);
+      scratch_buffer_free (&p_bytes_buf);
+      return NULL;
+    }
+  cp = s_bytes = s_bytes_buf.data;
+
   for (cnt = salt_len; cnt >= 64; cnt -= 64)
     cp = mempcpy (cp, temp_result, 64);
   memcpy (cp, temp_result, cnt);
@@ -403,8 +418,10 @@  __sha512_crypt_r (const char *key, const char *salt, char *buffer, int buflen)
   if (copied_salt != NULL)
     explicit_bzero (copied_salt, salt_len);
 
-  free (free_key);
-  free (free_pbytes);
+  scratch_buffer_free (&sbuf);
+  scratch_buffer_free (&salt_buf);
+  scratch_buffer_free (&p_bytes_buf);
+  scratch_buffer_free (&s_bytes_buf);
   return buffer;
 }