Skip to content

Commit

Permalink
pool the internal arrays to lower GC pressure
Browse files Browse the repository at this point in the history
  • Loading branch information
jakubmisek committed Feb 20, 2025
1 parent 5eaa3b0 commit b44ea49
Showing 1 changed file with 67 additions and 23 deletions.
90 changes: 67 additions & 23 deletions src/Peachpie.Runtime/OrderedDictionary.cs
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
using System;
using System.Buffers;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using Pchp.Core.Utilities;
Expand Down Expand Up @@ -188,6 +188,27 @@ public int CompareTo(IntStringKey other)
[DebuggerDisplay("dictionary (count = {Count})")]
public sealed class OrderedDictionary/*<TValue>*/ : IEnumerable<KeyValuePair<IntStringKey, TValue>>, IEnumerable<TValue>
{
#region BucketPool

/// <summary>
/// Thin wrapper over <see cref="ArrayPool{T}"/> for the dictionary's internal arrays.
/// IMPORTANT: <see cref="ArrayPool{T}.Rent"/> may return an array LARGER than the requested
/// size, so callers must track the logical element count (<c>_size</c>) themselves and
/// never rely on the array's <c>Length</c>.
/// </summary>
static class BucketPool
{
    /// <summary>Rents a buckets array with at least <paramref name="size"/> elements.</summary>
    public static Bucket[] RentData(uint size) => ArrayPool<Bucket>.Shared.Rent(unchecked((int)size));

    /// <summary>Rents a hash array with at least <paramref name="size"/> elements.</summary>
    public static int[] RentHash(uint size) => ArrayPool<int>.Shared.Rent(unchecked((int)size));

    /// <summary>
    /// Returns a buckets array to the pool. Safe to call with <c>null</c> (no-op),
    /// matching the <see cref="Return(int[])"/> overload.
    /// The array is cleared on return because buckets may hold object references
    /// that would otherwise be kept alive by the pool.
    /// </summary>
    public static void Return(Bucket[] array)
    {
        if (array != null)
        {
            ArrayPool<Bucket>.Shared.Return(array, clearArray: true);
        }
    }

    /// <summary>
    /// Returns a hash array to the pool. Safe to call with <c>null</c> (no-op).
    /// No clearing is needed — <see cref="int"/> holds no references for the GC.
    /// </summary>
    public static void Return(int[] array)
    {
        if (array != null)
        {
            ArrayPool<int>.Shared.Return(array, clearArray: false);
        }
    }
}

#endregion

[DebuggerDisplay("{DebugDisplay,nq}")]
internal struct Bucket
{
Expand Down Expand Up @@ -302,12 +323,19 @@ private OrderedDictionary(OrderedDictionary/*<TValue>*/ from)
if (from == null) throw new ArgumentNullException(nameof(from));

_mask = from._mask;
_data = from._data.AsSpan().ToArray();
if (from._hash != null)
_hash = from._hash.AsSpan().ToArray();
_size = from._size;
_dataUsed = from._dataUsed;

_data = BucketPool.RentData(_size);
Array.Copy(from._data, 0, _data, 0, _dataUsed);

if (from._hash != null)
{
_hash = BucketPool.RentHash(_size);
Array.Copy(from._hash, 0, _hash, 0, _size);
}

_dataDeleted = from._dataDeleted;
_size = from._size;
//nInternalPointer = from.nInternalPointer;
_maxIntKey = from._maxIntKey;
}
Expand Down Expand Up @@ -376,7 +404,7 @@ private void _initialize(uint mask)
//

_mask = mask;
_data = new Bucket[size];
_data = BucketPool.RentData(size);
_hash = null; // no keys
_dataUsed = 0;
_dataDeleted = 0;
Expand All @@ -392,9 +420,13 @@ private void _resize(uint size)

//Array.Resize(ref this._data, (int)size); // slower

var newdata = new Bucket[size];
var newdata = BucketPool.RentData(size);

Array.Copy(_data, 0, newdata, 0, _dataUsed); // faster than Memory<T>.CopyTo() and Array.Resize<T>
_data = newdata;

BucketPool.Return(
Interlocked.Exchange(ref _data, newdata)
);

_mask = size - 1;
_size = size;
Expand All @@ -409,13 +441,13 @@ private void _resize(uint size)

private void _rehash()
{
var data = this._data;
var hash = this._hash;
var data = this._data.AsSpan(0, (int)_size);
var hash = this._hash.AsSpan(0, (int)_size);

Debug.Assert(hash != null, "no hash");
Debug.Assert(hash.Length != 0, "no hash");
Debug.Assert(data.Length == hash.Length, "internal array size mismatch");

hash.AsSpan().Fill(_invalidIndex); // some optimizations
hash.Fill(_invalidIndex); // some optimizations
//Array.Fill(hash, _invalidIndex); // simple for-loop

for (int i = this._dataUsed - 1; i >= 0; i--)
Expand All @@ -441,7 +473,10 @@ private void _rehash()

private void _createhash()
{
this._hash = new int[this._size];
BucketPool.Return(
Interlocked.Exchange(ref this._hash, BucketPool.RentHash(this._size))
);

_rehash();
}

Expand Down Expand Up @@ -1069,7 +1104,7 @@ public void CopyTo(TValue[] array, int arrayIndex)
}
// FastEnumerator does not have to be disposed
}

/// <summary>
/// Copy values into given array.
/// </summary>
Expand Down Expand Up @@ -1107,7 +1142,7 @@ public void Shuffle(Random/*!*/generator)

// shuffle and compact elements:

var newData = new Bucket[_size];
var newData = BucketPool.RentData(_size);
var i = 0; // where to put next element

var enumerator = GetEnumerator();
Expand All @@ -1131,7 +1166,10 @@ public void Shuffle(Random/*!*/generator)
i++;
}

_data = newData;
BucketPool.Return(
Interlocked.Exchange(ref _data, newData) // _data = newData;
);

_dataDeleted = 0;
_dataUsed = i;
//nInternalPointer = 0;
Expand All @@ -1140,7 +1178,7 @@ public void Shuffle(Random/*!*/generator)

if (this._hash == null)
{
this._hash = new int[this._size];
this._hash = BucketPool.RentHash(this._size);
}

_rehash();
Expand All @@ -1158,7 +1196,7 @@ public void Reverse()

// copy elements in reverse order and compact:

var newData = new Bucket[_size];
var newData = BucketPool.RentData(_size);
var i = Count; // where to put next element

var enumerator = GetEnumerator();
Expand All @@ -1171,7 +1209,10 @@ public void Reverse()
bucket.Value = current.Value;
}

_data = newData;
BucketPool.Return(
Interlocked.Exchange(ref _data, newData)
);

_dataUsed = Count; // before changing _dataDeleted !!
_dataDeleted = 0;
//nInternalPointer = 0;
Expand All @@ -1180,7 +1221,7 @@ public void Reverse()

if (this._hash == null)
{
this._hash = new int[this._size];
this._hash = BucketPool.RentHash(this._size);
}

_rehash();
Expand Down Expand Up @@ -1236,7 +1277,7 @@ public void Sort(IComparer<KeyValuePair<IntStringKey, TValue>>/*!*/comparer)
{
if (this._hash == null)
{
this._hash = new int[this._size];
this._hash = BucketPool.RentHash(this._size);
}

_rehash();
Expand All @@ -1251,9 +1292,12 @@ sealed class MultisortComparer : IComparer<Bucket[]>
{
readonly IComparer<KeyValuePair<IntStringKey, TValue>>[]/*!*/ comparers;

public MultisortComparer(IComparer<KeyValuePair<IntStringKey, TValue>>[]/*!*/ comparers)
readonly int length;

public MultisortComparer(IComparer<KeyValuePair<IntStringKey, TValue>>[]/*!*/ comparers, int length)
{
this.comparers = comparers;
this.length = length;
}

public int Compare(Bucket[] x, Bucket[] y)
Expand Down Expand Up @@ -1310,7 +1354,7 @@ public static void Sort(
}

// sort indices
Array.Sort(idx, comparer: new MultisortComparer(comparers));
Array.Sort(idx, comparer: new MultisortComparer(comparers, hashtables.Length));

//
for (int h = 0; h < hashtables.Length; h++)
Expand Down

0 comments on commit b44ea49

Please sign in to comment.