From: Simon McVittie <smcv@debian.org>
Date: Mon, 25 Aug 2025 11:11:22 +0100
Subject: tests: Gracefully skip test if a large memory allocation fails

On resource-constrained 32-bit machines, it might not be possible to
allocate 1G of buffer space. Catch this and skip the test that uses
very large buffers, instead of having it fail.
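
The GLib pattern used here is g_try_new(), which returns NULL on
allocation failure instead of aborting the process like g_new(), and
g_test_skip() to mark the test as skipped. A minimal sketch of that
pattern, outside the context of this test (the function name and
LARGE_SIZE below are illustrative only, not part of this change):

    #include <glib.h>

    #define LARGE_SIZE (64 * 1024 * 1024)   /* illustrative size */

    static void
    do_large_alloc_test (void)
    {
            /* Fallible allocation: NULL on failure rather than abort */
            guint8 *buf = g_try_new (guint8, LARGE_SIZE);

            if (buf == NULL) {
                    g_test_skip ("large memory allocation failed");
                    return;
            }

            /* ... exercise the large buffer here ... */

            g_free (buf);
    }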

Signed-off-by: Simon McVittie <smcv@debian.org>
Forwarded: https://gitlab.gnome.org/GNOME/libsoup/-/merge_requests/478
Applied-upstream: 3.7.0, commit:ee7635695a5c9da15c2e25624ce6297366b813a2
---
 tests/http2-body-stream-test.c | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/tests/http2-body-stream-test.c b/tests/http2-body-stream-test.c
index 540beb3..cbe8407 100644
--- a/tests/http2-body-stream-test.c
+++ b/tests/http2-body-stream-test.c
@@ -29,8 +29,13 @@ do_large_data_test (void)
         GInputStream *stream = soup_body_input_stream_http2_new ();
         SoupBodyInputStreamHttp2 *mem_stream = SOUP_BODY_INPUT_STREAM_HTTP2 (stream);
         gsize data_needed = TEST_SIZE;
-        guint8 *memory_chunk = g_new (guint8, CHUNK_SIZE); 
-        guint8 *trash_buffer = g_new (guint8, CHUNK_SIZE);
+        guint8 *memory_chunk = g_try_new (guint8, CHUNK_SIZE);
+        guint8 *trash_buffer = g_try_new (guint8, CHUNK_SIZE);
+
+        if (memory_chunk == NULL || trash_buffer == NULL) {
+                g_test_skip ("large memory allocation failed");
+                goto out;
+        }
 
         /* We can add unlimited data and as long as its read the data will
          * be freed, so this should work fine even though its reading GB of data */
@@ -55,6 +60,7 @@ do_large_data_test (void)
                 data_needed -= CHUNK_SIZE;
         }
 
+out:
         g_free (trash_buffer);
         g_free (memory_chunk);
         g_object_unref (stream);
