Commit 4a9aba1

fix loh
1 parent cd8c9be

File tree: 1 file changed (+5, -4 lines)


sdk/src/Services/S3/Custom/Transfer/Internal/ChunkedBufferStream.cs

Lines changed: 5 additions & 4 deletions
@@ -44,7 +44,7 @@ namespace Amazon.S3.Transfer.Internal
     ///
     /// <para><strong>Size Limits:</strong></para>
     /// <para>
-    /// Maximum supported stream size is approximately 175TB (int.MaxValue * CHUNK_SIZE bytes).
+    /// Maximum supported stream size is approximately 140TB (int.MaxValue * CHUNK_SIZE bytes).
     /// This limit exists because chunk indexing uses int for List indexing.
     /// </para>
     ///
@@ -68,13 +68,14 @@ namespace Amazon.S3.Transfer.Internal
     internal class ChunkedBufferStream : Stream
     {
         /// <summary>
-        /// Size of each buffer chunk. Set to 80KB to safely stay below the 85KB Large Object Heap threshold.
+        /// Size of each buffer chunk. Set to 64KB to match the ArrayPool bucket size and stay below the 85KB Large Object Heap threshold.
+        /// If we chose anything higher than 64KB, ArrayPool would round up to 128KB (which would go to the LOH).
         /// </summary>
-        private const int CHUNK_SIZE = 81920; // 80KB - safely below 85KB LOH threshold
+        private const int CHUNK_SIZE = 65536; // 64KB - matches ArrayPool bucket, safely below 85KB LOH threshold

         /// <summary>
         /// Maximum supported stream size. This limit exists because chunk indexing uses int for List indexing.
-        /// With 80KB chunks, this allows approximately 175TB of data.
+        /// With 64KB chunks, this allows approximately 140TB of data.
         /// </summary>
         private const long MAX_STREAM_SIZE = (long)int.MaxValue * CHUNK_SIZE;
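
For context, a minimal sketch (not part of the commit) of why 64KB is the cutoff, assuming the chunks are rented from ArrayPool<byte>.Shared as the new comment implies: the shared pool serves buffers from power-of-two buckets, so a request just above 64KB is satisfied from the 128KB bucket, and a 128KB byte[] exceeds the ~85,000-byte Large Object Heap threshold. That means the previous 80KB CHUNK_SIZE still produced LOH allocations whenever the buffer was pooled.

using System;
using System.Buffers;

class ArrayPoolBucketDemo
{
    static void Main()
    {
        // ArrayPool<byte>.Shared uses power-of-two buckets, so Rent() returns
        // the smallest bucket that can hold the requested length.
        byte[] ok  = ArrayPool<byte>.Shared.Rent(65536); // 64KB request -> 64KB buffer
        byte[] loh = ArrayPool<byte>.Shared.Rent(81920); // 80KB request -> 128KB buffer

        Console.WriteLine(ok.Length);  // 65536  (below the ~85,000-byte LOH threshold)
        Console.WriteLine(loh.Length); // 131072 (above the threshold: allocated on the LOH)

        ArrayPool<byte>.Shared.Return(ok);
        ArrayPool<byte>.Shared.Return(loh);
    }
}

The new 140TB figure also checks out: (long)int.MaxValue * 65536 = 140,737,488,289,792 bytes, roughly 140.7TB, down from roughly 175.9TB with the old 80KB chunks.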
8081
