diff --git a/src/libraries/Common/src/Interop/Windows/Advapi32/Interop.PERF_INFO.cs b/src/libraries/Common/src/Interop/Windows/Advapi32/Interop.PERF_INFO.cs index 97d6de64b831f..a5436fbe0f22c 100644 --- a/src/libraries/Common/src/Interop/Windows/Advapi32/Interop.PERF_INFO.cs +++ b/src/libraries/Common/src/Interop/Windows/Advapi32/Interop.PERF_INFO.cs @@ -2,6 +2,7 @@ // The .NET Foundation licenses this file to you under the MIT license. using System; +using System.Diagnostics.CodeAnalysis; using System.Globalization; using System.Runtime.InteropServices; @@ -13,6 +14,17 @@ internal static partial class Advapi32 internal struct PERF_COUNTER_BLOCK { internal int ByteLength; + + internal static readonly int SizeOf = Marshal.SizeOf(); + + public readonly void Validate(int bufferSize) + { + if (ByteLength < SizeOf || + ByteLength > bufferSize) + { + ThrowInvalidOperationException(typeof(PERF_COUNTER_BLOCK)); + } + } } [StructLayout(LayoutKind.Sequential)] @@ -28,6 +40,20 @@ internal struct PERF_COUNTER_DEFINITION internal int CounterType; internal int CounterSize; internal int CounterOffset; + + internal static readonly int SizeOf = Marshal.SizeOf(); + + public readonly void Validate(int bufferSize) + { + if (ByteLength < SizeOf || + ByteLength > bufferSize || + CounterSize < 0 || + CounterOffset < 0 || + CounterOffset > bufferSize) + { + ThrowInvalidOperationException(typeof(PERF_COUNTER_DEFINITION)); + } + } } [StructLayout(LayoutKind.Sequential)] @@ -49,6 +75,23 @@ internal struct PERF_DATA_BLOCK internal long PerfTime100nSec; internal int SystemNameLength; internal int SystemNameOffset; + + internal const int Signature1Int = (int)'P' + ('E' << 16); + internal const int Signature2Int = (int)'R' + ('F' << 16); + internal static readonly int SizeOf = Marshal.SizeOf(); + + public readonly void Validate(int bufferSize) + { + if (Signature1 != Signature1Int || + Signature2 != Signature2Int || + TotalByteLength < SizeOf || + TotalByteLength > bufferSize || + HeaderLength < SizeOf || + HeaderLength > TotalByteLength) + { + ThrowInvalidOperationException(typeof(PERF_DATA_BLOCK)); + } + } } [StructLayout(LayoutKind.Sequential)] @@ -61,9 +104,23 @@ internal struct PERF_INSTANCE_DEFINITION internal int NameOffset; internal int NameLength; + internal static readonly int SizeOf = Marshal.SizeOf(); + internal static ReadOnlySpan GetName(in PERF_INSTANCE_DEFINITION instance, ReadOnlySpan data) => (instance.NameLength == 0) ? default : MemoryMarshal.Cast(data.Slice(instance.NameOffset, instance.NameLength - sizeof(char))); // NameLength includes the null-terminator + + public readonly void Validate(int bufferSize) + { + if (ByteLength < SizeOf || + ByteLength > bufferSize || + NameOffset < 0 || + NameLength < 0 || + checked (NameOffset + NameLength) > ByteLength) + { + ThrowInvalidOperationException(typeof(PERF_INSTANCE_DEFINITION)); + } + } } [StructLayout(LayoutKind.Sequential)] @@ -83,6 +140,29 @@ internal struct PERF_OBJECT_TYPE internal int CodePage; internal long PerfTime; internal long PerfFreq; + + internal static readonly int SizeOf = Marshal.SizeOf(); + + public readonly void Validate(int bufferSize) + { + if (HeaderLength < SizeOf || + HeaderLength > TotalByteLength || + HeaderLength > DefinitionLength || + DefinitionLength < SizeOf || + DefinitionLength > TotalByteLength || + TotalByteLength > bufferSize || + NumCounters < 0 || + checked + ( + // This is a simple check, not exact, since it depends on how instances are specified. + (NumInstances <= 0 ? 
0 : NumInstances * PERF_INSTANCE_DEFINITION.SizeOf) + + NumCounters * PERF_COUNTER_DEFINITION.SizeOf + ) > bufferSize + ) + { + ThrowInvalidOperationException(typeof(PERF_OBJECT_TYPE)); + } + } } [StructLayout(LayoutKind.Sequential)] @@ -106,4 +186,8 @@ public override string ToString() } } } + + [DoesNotReturn] + public static void ThrowInvalidOperationException(Type type) => + throw new InvalidOperationException(SR.Format(SR.InvalidPerfData, type.Name)); } diff --git a/src/libraries/Common/src/System/HashCodeRandomization.cs b/src/libraries/Common/src/System/HashCodeRandomization.cs index 8a7273e51eaeb..dd0027caa4b83 100644 --- a/src/libraries/Common/src/System/HashCodeRandomization.cs +++ b/src/libraries/Common/src/System/HashCodeRandomization.cs @@ -1,6 +1,9 @@ // Licensed to the .NET Foundation under one or more agreements. // The .NET Foundation licenses this file to you under the MIT license. +using System.Runtime.InteropServices; +using System.Security.Cryptography; + namespace System { // Contains helpers for calculating randomized hash codes of common types. @@ -21,32 +24,11 @@ public static int GetRandomizedOrdinalHashCode(this string value) return value.GetHashCode(); #else - // Downlevel, we need to perform randomization ourselves. There's still - // the potential for limited collisions ("Hello!" and "Hello!\0"), but - // this shouldn't be a problem in practice. If we need to address it, - // we can mix the string length into the accumulator before running the - // string contents through. - // - // We'll pull out pairs of chars and write 32 bits at a time. + // Downlevel, we need to perform randomization ourselves. - HashCode hashCode = default; - int pair = 0; - for (int i = 0; i < value.Length; i++) - { - int ch = value[i]; - if ((i & 1) == 0) - { - pair = ch << 16; // first member of pair - } - else - { - pair |= ch; // second member of pair - hashCode.Add(pair); // write pair as single unit - pair = 0; - } - } - hashCode.Add(pair); // flush any leftover data (could be 0 or 1 chars) - return hashCode.ToHashCode(); + ReadOnlySpan charSpan = value.AsSpan(); + ReadOnlySpan byteSpan = MemoryMarshal.AsBytes(charSpan); + return Marvin.ComputeHash32(byteSpan, Marvin.DefaultSeed); #endif } diff --git a/src/libraries/Microsoft.Extensions.Caching.Memory/src/MemoryCache.cs b/src/libraries/Microsoft.Extensions.Caching.Memory/src/MemoryCache.cs index b4892d7137adc..f6ceaa43934f7 100644 --- a/src/libraries/Microsoft.Extensions.Caching.Memory/src/MemoryCache.cs +++ b/src/libraries/Microsoft.Extensions.Caching.Memory/src/MemoryCache.cs @@ -2,11 +2,13 @@ // The .NET Foundation licenses this file to you under the MIT license. using System; +using System.Collections; using System.Collections.Concurrent; using System.Collections.Generic; using System.Diagnostics; using System.Diagnostics.CodeAnalysis; using System.Runtime.CompilerServices; +using System.Runtime.InteropServices; using System.Threading; using System.Threading.Tasks; using Microsoft.Extensions.Logging; @@ -81,15 +83,7 @@ public MemoryCache(IOptions optionsAccessor, ILoggerFactory /// Gets an enumerable of the all the keys in the . /// public IEnumerable Keys - { - get - { - foreach (KeyValuePair pairs in _coherentState._entries) - { - yield return pairs.Key; - } - } - } + => _coherentState.GetAllKeys(); /// /// Internal accessor for Size for testing only. 
@@ -141,7 +135,7 @@ internal void SetEntry(CacheEntry entry) entry.LastAccessed = utcNow; CoherentState coherentState = _coherentState; // Clear() can update the reference in the meantime - if (coherentState._entries.TryGetValue(entry.Key, out CacheEntry? priorEntry)) + if (coherentState.TryGetValue(entry.Key, out CacheEntry? priorEntry)) { priorEntry.SetExpired(EvictionReason.Replaced); } @@ -160,19 +154,19 @@ internal void SetEntry(CacheEntry entry) if (priorEntry == null) { // Try to add the new entry if no previous entries exist. - entryAdded = coherentState._entries.TryAdd(entry.Key, entry); + entryAdded = coherentState.TryAdd(entry.Key, entry); } else { // Try to update with the new entry if a previous entries exist. - entryAdded = coherentState._entries.TryUpdate(entry.Key, entry, priorEntry); + entryAdded = coherentState.TryUpdate(entry.Key, entry, priorEntry); if (!entryAdded) { // The update will fail if the previous entry was removed after retrieval. // Adding the new entry will succeed only if no entry has been added since. // This guarantees removing an old entry does not prevent adding a new entry. - entryAdded = coherentState._entries.TryAdd(entry.Key, entry); + entryAdded = coherentState.TryAdd(entry.Key, entry); } } @@ -217,7 +211,7 @@ public bool TryGetValue(object key, out object? result) DateTime utcNow = UtcNow; CoherentState coherentState = _coherentState; // Clear() can update the reference in the meantime - if (coherentState._entries.TryGetValue(key, out CacheEntry? tmp)) + if (coherentState.TryGetValue(key, out CacheEntry? tmp)) { CacheEntry entry = tmp; // Check if expired due to expiration tokens, timers, etc. and if so, remove it. @@ -276,7 +270,8 @@ public void Remove(object key) CheckDisposed(); CoherentState coherentState = _coherentState; // Clear() can update the reference in the meantime - if (coherentState._entries.TryRemove(key, out CacheEntry? entry)) + + if (coherentState.TryRemove(key, out CacheEntry? 
entry)) { if (_options.HasSizeLimit) { @@ -298,10 +293,10 @@ public void Clear() CheckDisposed(); CoherentState oldState = Interlocked.Exchange(ref _coherentState, new CoherentState()); - foreach (KeyValuePair entry in oldState._entries) + foreach (CacheEntry entry in oldState.GetAllValues()) { - entry.Value.SetExpired(EvictionReason.Removed); - entry.Value.InvokeEvictionCallbacks(); + entry.SetExpired(EvictionReason.Removed); + entry.InvokeEvictionCallbacks(); } } @@ -422,10 +417,9 @@ private void ScanForExpiredItems() DateTime utcNow = _lastExpirationScan = UtcNow; CoherentState coherentState = _coherentState; // Clear() can update the reference in the meantime - foreach (KeyValuePair item in coherentState._entries) - { - CacheEntry entry = item.Value; + foreach (CacheEntry entry in coherentState.GetAllValues()) + { if (entry.CheckExpired(utcNow)) { coherentState.RemoveEntry(entry, _options); @@ -547,9 +541,8 @@ private void Compact(long removalSizeTarget, Func computeEntry // Sort items by expired & priority status DateTime utcNow = UtcNow; - foreach (KeyValuePair item in coherentState._entries) + foreach (CacheEntry entry in coherentState.GetAllValues()) { - CacheEntry entry = item.Value; if (entry.CheckExpired(utcNow)) { entriesToRemove.Add(entry); @@ -676,18 +669,71 @@ private static void ValidateCacheKey(object key) /// private sealed class CoherentState { - internal ConcurrentDictionary _entries = new ConcurrentDictionary(); + private readonly ConcurrentDictionary _stringEntries = new ConcurrentDictionary(StringKeyComparer.Instance); + private readonly ConcurrentDictionary _nonStringEntries = new ConcurrentDictionary(); internal long _cacheSize; - private ICollection> EntriesCollection => _entries; + internal bool TryGetValue(object key, [NotNullWhen(true)] out CacheEntry? entry) + => key is string s ? _stringEntries.TryGetValue(s, out entry) : _nonStringEntries.TryGetValue(key, out entry); + + internal bool TryRemove(object key, [NotNullWhen(true)] out CacheEntry? entry) + => key is string s ? _stringEntries.TryRemove(s, out entry) : _nonStringEntries.TryRemove(key, out entry); + + internal bool TryAdd(object key, CacheEntry entry) + => key is string s ? _stringEntries.TryAdd(s, entry) : _nonStringEntries.TryAdd(key, entry); + + internal bool TryUpdate(object key, CacheEntry entry, CacheEntry comparison) + => key is string s ? 
_stringEntries.TryUpdate(s, entry, comparison) : _nonStringEntries.TryUpdate(key, entry, comparison); + + public IEnumerable GetAllValues() + { + // note this mimics the outgoing code in that we don't just access + // .Values, which has additional overheads; this is only used for rare + // calls - compaction, clear, etc - so the additional overhead of a + // generated enumerator is not alarming + foreach (KeyValuePair entry in _stringEntries) + { + yield return entry.Value; + } + foreach (KeyValuePair entry in _nonStringEntries) + { + yield return entry.Value; + } + } + + public IEnumerable GetAllKeys() + { + foreach (KeyValuePair pairs in _stringEntries) + { + yield return pairs.Key; + } + foreach (KeyValuePair pairs in _nonStringEntries) + { + yield return pairs.Key; + } + } + + private ICollection> StringEntriesCollection => _stringEntries; + private ICollection> NonStringEntriesCollection => _nonStringEntries; - internal int Count => _entries.Count; + internal int Count => _stringEntries.Count + _nonStringEntries.Count; internal long Size => Volatile.Read(ref _cacheSize); internal void RemoveEntry(CacheEntry entry, MemoryCacheOptions options) { - if (EntriesCollection.Remove(new KeyValuePair(entry.Key, entry))) + if (entry.Key is string s) + { + if (StringEntriesCollection.Remove(new KeyValuePair(s, entry))) + { + if (options.SizeLimit.HasValue) + { + Interlocked.Add(ref _cacheSize, -entry.Size); + } + entry.InvokeEvictionCallbacks(); + } + } + else if (NonStringEntriesCollection.Remove(new KeyValuePair(entry.Key, entry))) { if (options.SizeLimit.HasValue) { @@ -696,6 +742,35 @@ internal void RemoveEntry(CacheEntry entry, MemoryCacheOptions options) entry.InvokeEvictionCallbacks(); } } + +#if NETCOREAPP + // on .NET Core, the inbuilt comparer has Marvin built in; no need to intercept + private static class StringKeyComparer + { + internal static IEqualityComparer Instance => EqualityComparer.Default; + } +#else + // otherwise, we need a custom comparer that manually implements Marvin + private sealed class StringKeyComparer : IEqualityComparer, IEqualityComparer + { + private StringKeyComparer() { } + + internal static readonly IEqualityComparer Instance = new StringKeyComparer(); + + // special-case string keys and use Marvin hashing + public int GetHashCode(string? s) => s is null ? 0 + : Marvin.ComputeHash32(MemoryMarshal.AsBytes(s.AsSpan()), Marvin.DefaultSeed); + + public bool Equals(string? x, string? y) + => string.Equals(x, y); + + bool IEqualityComparer.Equals(object x, object y) + => object.Equals(x, y); + + int IEqualityComparer.GetHashCode(object obj) + => obj is string s ? GetHashCode(s) : 0; + } +#endif } } } diff --git a/src/libraries/Microsoft.Extensions.Caching.Memory/src/Microsoft.Extensions.Caching.Memory.csproj b/src/libraries/Microsoft.Extensions.Caching.Memory/src/Microsoft.Extensions.Caching.Memory.csproj index 469e6e572536e..a69e95d7bf202 100644 --- a/src/libraries/Microsoft.Extensions.Caching.Memory/src/Microsoft.Extensions.Caching.Memory.csproj +++ b/src/libraries/Microsoft.Extensions.Caching.Memory/src/Microsoft.Extensions.Caching.Memory.csproj @@ -5,6 +5,7 @@ true true In-memory cache implementation of Microsoft.Extensions.Caching.Memory.IMemoryCache. 
+ true @@ -21,4 +22,8 @@ + + + + diff --git a/src/libraries/Microsoft.Extensions.Caching.Memory/tests/MemoryCacheSetAndRemoveTests.cs b/src/libraries/Microsoft.Extensions.Caching.Memory/tests/MemoryCacheSetAndRemoveTests.cs index cebc0a374b310..57c3f56913b60 100644 --- a/src/libraries/Microsoft.Extensions.Caching.Memory/tests/MemoryCacheSetAndRemoveTests.cs +++ b/src/libraries/Microsoft.Extensions.Caching.Memory/tests/MemoryCacheSetAndRemoveTests.cs @@ -850,6 +850,21 @@ public async Task GetOrCreateAsyncWithCacheEntryOptions() Assert.False(cache.TryGetValue(cacheKey, out _)); } + [Fact] + public void MixedKeysUsage() + { + // keys are split internally into 2 separate chunks + var cache = CreateCache(); + var typed = Assert.IsType(cache); + object key0 = 123.45M, key1 = "123.45"; + cache.Set(key0, "string value"); + cache.Set(key1, "decimal value"); + + Assert.Equal(2, typed.Count); + Assert.Equal("string value", cache.Get(key0)); + Assert.Equal("decimal value", cache.Get(key1)); + } + private class TestKey { public override bool Equals(object obj) => true; diff --git a/src/libraries/System.Diagnostics.PerformanceCounter/src/Properties/InternalsVisibleTo.cs b/src/libraries/System.Diagnostics.PerformanceCounter/src/Properties/InternalsVisibleTo.cs new file mode 100644 index 0000000000000..ca32a806401c7 --- /dev/null +++ b/src/libraries/System.Diagnostics.PerformanceCounter/src/Properties/InternalsVisibleTo.cs @@ -0,0 +1,6 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. + +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("System.Diagnostics.PerformanceCounter.Tests, PublicKey=00240000048000009400000006020000002400005253413100040000010001004b86c4cb78549b34bab61a3b1800e23bfeb5b3ec390074041536a7e3cbd97f5f04cf0f857155a8928eaa29ebfd11cfbbad3ba70efea7bda3226c6a8d370a4cd303f714486b6ebc225985a638471e6ef571cc92a4613c00b8fa65d61ccee0cbe5f36330c9a01f4183559f1bef24cc2917c6d913e3a541333a1d05d9bed22b38cb")] diff --git a/src/libraries/System.Diagnostics.PerformanceCounter/src/Resources/Strings.resx b/src/libraries/System.Diagnostics.PerformanceCounter/src/Resources/Strings.resx index 829c04125b373..187cc802b358e 100644 --- a/src/libraries/System.Diagnostics.PerformanceCounter/src/Resources/Strings.resx +++ b/src/libraries/System.Diagnostics.PerformanceCounter/src/Resources/Strings.resx @@ -1,4 +1,5 @@ - + + + \ No newline at end of file diff --git a/src/libraries/System.Diagnostics.PerformanceCounter/tests/ValidationTests.cs b/src/libraries/System.Diagnostics.PerformanceCounter/tests/ValidationTests.cs new file mode 100644 index 0000000000000..6bc3a9614c7e7 --- /dev/null +++ b/src/libraries/System.Diagnostics.PerformanceCounter/tests/ValidationTests.cs @@ -0,0 +1,428 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. 
+ +using System.Collections; +using System.Collections.Generic; +using System.Reflection; +using System.Runtime.InteropServices; +using Xunit; + +using static Interop.Advapi32; + +namespace System.Diagnostics.Tests +{ + public static class ValidationTests + { + [Theory] + [MemberData(nameof(InvalidDataBlocksToTest))] + public static void ValidateDataBlock(byte[] data) + { + using (PerformanceCounter counter = GetPerformanceCounterLib(out PerformanceCounterLib lib)) + { + InvalidOperationException ex = Assert.Throws(() => GetCategorySample(lib, data)); + Assert.Contains(nameof(PERF_DATA_BLOCK), ex.Message); + } + } + + public static IEnumerable InvalidDataBlocksToTest() + { + int validSize = PERF_DATA_BLOCK.SizeOf; + + yield return Create(0, validSize); + yield return Create(1, validSize); + yield return Create(-1, validSize); + yield return Create(validSize, 0); + yield return Create(validSize, 1); + yield return Create(validSize, -1); + yield return Create(validSize, validSize + 1); + yield return Create(validSize - 1, validSize); + + static object[] Create(int totalByteLength, int headerLength) + { + PERF_DATA_BLOCK perfDataBlock = new() + { + TotalByteLength = totalByteLength, + HeaderLength = headerLength, + Signature1 = PERF_DATA_BLOCK.Signature1Int, + Signature2 = PERF_DATA_BLOCK.Signature2Int + }; + + return new object[] { StructToByteArray(perfDataBlock) }; + } + } + + [Theory] + [MemberData(nameof(InvalidObjectTypesToTest))] + public static void ValidateObjectType(byte[] data) + { + using (PerformanceCounter counter = GetPerformanceCounterLib(out PerformanceCounterLib lib)) + { + InvalidOperationException ex = Assert.Throws(() => GetCategorySample(lib, data)); + Assert.Contains(nameof(PERF_OBJECT_TYPE), ex.Message); + } + } + + public static IEnumerable InvalidObjectTypesToTest() + { + VerifyInitialized(); + + int validSize = PERF_OBJECT_TYPE.SizeOf; + yield return new object[] { Create(0, validSize, validSize) }; + yield return new object[] { Create(1, validSize, validSize) }; + yield return new object[] { Create(-1, validSize, validSize) }; + yield return new object[] { Create(validSize, 0, validSize) }; + yield return new object[] { Create(validSize, 1, validSize) }; + yield return new object[] { Create(validSize, -1, validSize) }; + yield return new object[] { Create(validSize, validSize, 0) }; + yield return new object[] { Create(validSize, validSize, 1) }; + yield return new object[] { Create(validSize, validSize, -1) }; + yield return new object[] { Create(validSize - 1, validSize, validSize) }; + yield return new object[] { Create(validSize, validSize - 1, validSize) }; + yield return new object[] { Create(validSize, validSize + 1, validSize) }; + yield return new object[] { Create(validSize, validSize, validSize - 1) }; + yield return new object[] { Create(validSize, validSize, validSize + 1) }; + + static byte[] Create(int totalByteLength, int headerLength, int definitionLength) + { + PERF_DATA_BLOCK perfDataBlock = CreatePerfDataBlock(); + perfDataBlock.TotalByteLength = PERF_DATA_BLOCK.SizeOf + PERF_OBJECT_TYPE.SizeOf; + + PERF_OBJECT_TYPE perfObjectType = new() + { + TotalByteLength = totalByteLength, + HeaderLength = headerLength, + DefinitionLength = definitionLength, + ObjectNameTitleIndex = s_ObjectNameTitleIndex + }; + + return StructsToByteArray(perfDataBlock, perfObjectType); + } + } + + [Theory] + [MemberData(nameof(ObjectTypeWithHighCountsToTest))] + public static void ValidateObjectTypeWithHighCounts(byte[] data) + { + using (PerformanceCounter counter = 
GetPerformanceCounterLib(out PerformanceCounterLib lib)) + { + Exception ex = Assert.ThrowsAny(() => GetCategorySample(lib, data)); + Assert.True(ex is InvalidOperationException || ex is OverflowException, $"Type:{ex.GetType().Name}."); + } + } + + public static IEnumerable ObjectTypeWithHighCountsToTest() + { + VerifyInitialized(); + + yield return new object[] { Create(Array.MaxLength, 1) }; + yield return new object[] { Create(1, -1) }; // numInstances with -1 is supported, but numCounters is not. + yield return new object[] { Create(1, Array.MaxLength) }; + yield return new object[] { Create(Array.MaxLength / 1000, 1) }; + yield return new object[] { Create(1, Array.MaxLength / 1000) }; + yield return new object[] { Create(Array.MaxLength, Array.MaxLength) }; + + static byte[] Create(int numInstances, int numCounters) + { + PERF_DATA_BLOCK perfDataBlock = CreatePerfDataBlock(); + PERF_OBJECT_TYPE perfObjectType = CreatePerfObjectType(numInstances, numCounters); + + // Add a single instance definition. + PERF_COUNTER_DEFINITION perfCounterDefinition = CreatePerfCounterDefinition(); + + return StructsToByteArray(perfDataBlock, perfObjectType, perfCounterDefinition); + } + } + + [Theory] + [MemberData(nameof(InvalidCounterDefinitionsToTest))] + public static void ValidateCounterDefinition(byte[] data) + { + using (PerformanceCounter counter = GetPerformanceCounterLib(out PerformanceCounterLib lib)) + { + InvalidOperationException ex = Assert.Throws(() => GetCategorySample(lib, data)); + Assert.Contains(nameof(PERF_COUNTER_DEFINITION), ex.Message); + } + } + + public static IEnumerable InvalidCounterDefinitionsToTest() + { + VerifyInitialized(); + + int validSize = PERF_INSTANCE_DEFINITION.SizeOf; + + yield return new object[] { Create(0, 4) }; + yield return new object[] { Create(1, 4) }; + yield return new object[] { Create(-1, 4) }; + yield return new object[] { Create(validSize, -1) }; + yield return new object[] { Create(validSize - 1, 4) }; + yield return new object[] { Create(validSize, 1000) }; + + static byte[] Create(int byteLength, int counterOffset) + { + PERF_DATA_BLOCK perfDataBlock = CreatePerfDataBlock(); + PERF_OBJECT_TYPE perfObjectType = CreatePerfObjectType(numInstances: 0, numCounters: 1); + PERF_COUNTER_DEFINITION perfCounterDefinition = new() + { + ByteLength = byteLength, + CounterOffset = counterOffset, + CounterSize = 4, + CounterNameTitleIndex = s_ObjectNameTitleIndex + }; + + return StructsToByteArray(perfDataBlock, perfObjectType, perfCounterDefinition); + } + } + + [Theory] + [MemberData(nameof(InvalidInstancesToTest))] + public static void ValidateInstanceDefinition(byte[] data) + { + using (PerformanceCounter counter = GetPerformanceCounterLib(out PerformanceCounterLib lib)) + { + InvalidOperationException ex = Assert.Throws(() => GetCategorySample(lib, data)); + Assert.Contains(nameof(PERF_INSTANCE_DEFINITION), ex.Message); + } + } + + public static IEnumerable InvalidInstancesToTest() + { + VerifyInitialized(); + + int validSize = PERF_INSTANCE_DEFINITION.SizeOf; + + yield return new object[] { Create(0, 0, 0) }; + yield return new object[] { Create(1, 0, 0) }; + yield return new object[] { Create(-1, 0, 0) }; + yield return new object[] { Create(validSize, -1, 0) }; + yield return new object[] { Create(validSize, 0, -1) }; + yield return new object[] { Create(validSize, 1000, 0) }; + yield return new object[] { Create(validSize, 0, 1000) }; + + static byte[] Create(int byteLength, int nameOffset, int nameLength) + { + PERF_DATA_BLOCK perfDataBlock = 
CreatePerfDataBlock(); + PERF_OBJECT_TYPE perfObjectType = CreatePerfObjectType(numInstances: 1, numCounters: 1); + PERF_COUNTER_DEFINITION perfCounterDefinition = CreatePerfCounterDefinition(); + PERF_INSTANCE_DEFINITION perfInstanceDefinition = new() + { + ByteLength = byteLength, + NameOffset = nameOffset, + NameLength = nameLength, + }; + + return StructsToByteArray(perfDataBlock, perfObjectType, perfCounterDefinition, perfInstanceDefinition); + } + } + + [Theory] + [MemberData(nameof(InvalidCounterBlocksToTest))] + public static void ValidateCounterBlock(byte[] data) + { + using (PerformanceCounter counter = GetPerformanceCounterLib(out PerformanceCounterLib lib)) + { + InvalidOperationException ex = Assert.Throws(() => GetCategorySample(lib, data)); + Assert.Contains(nameof(PERF_COUNTER_BLOCK), ex.Message); + } + } + + public static IEnumerable InvalidCounterBlocksToTest() + { + VerifyInitialized(); + + yield return new object[] { Create(-1) }; + yield return new object[] { Create(0) }; + yield return new object[] { Create(1) }; + + static byte[] Create(int byteLength) + { + PERF_DATA_BLOCK perfDataBlock = CreatePerfDataBlock(); + PERF_OBJECT_TYPE perfObjectType = CreatePerfObjectType(numInstances: 1, numCounters: 1); + PERF_COUNTER_DEFINITION perfCounterDefinition = CreatePerfCounterDefinition(); + PERF_INSTANCE_DEFINITION perfInstanceDefinition = CreatePerfInstanceDefinition(); + PERF_COUNTER_BLOCK perfCounterBlock = new() + { + ByteLength = byteLength + }; + + int value = 0; + + return StructsToByteArray(perfDataBlock, perfObjectType, perfCounterDefinition, perfInstanceDefinition, perfCounterBlock, value); + } + } + + private static byte[] StructToByteArray(T value) where T : struct + { + int size = Marshal.SizeOf(value); + byte[] arr = new byte[size]; + CopyStruct(value, arr, 0, size); + return arr; + } + + private static byte[] StructsToByteArray(T1 value1, T2 value2) + where T1 : struct where T2 : struct + { + int size1 = Marshal.SizeOf(value1); + int size2 = Marshal.SizeOf(value2); + byte[] arr = new byte[size1 + size2]; + CopyStruct(value1, arr, 0, size1); + CopyStruct(value2, arr, size1, size2); + return arr; + } + + private static byte[] StructsToByteArray(T1 value1, T2 value2, T3 value3) + where T1 : struct where T2 : struct where T3 : struct + { + int size1 = Marshal.SizeOf(value1); + int size2 = Marshal.SizeOf(value2); + int size3 = Marshal.SizeOf(value3); + byte[] arr = new byte[size1 + size2 + size3]; + CopyStruct(value1, arr, 0, size1); + CopyStruct(value2, arr, size1, size2); + CopyStruct(value3, arr, size1 + size2, size3); + return arr; + } + + private static byte[] StructsToByteArray(T1 value1, T2 value2, T3 value3, T4 value4) + where T1 : struct where T2 : struct where T3 : struct where T4 : struct + { + int size1 = Marshal.SizeOf(value1); + int size2 = Marshal.SizeOf(value2); + int size3 = Marshal.SizeOf(value3); + int size4 = Marshal.SizeOf(value4); + byte[] arr = new byte[size1 + size2 + size3 + size4]; + CopyStruct(value1, arr, 0, size1); + CopyStruct(value2, arr, size1, size2); + CopyStruct(value3, arr, size1 + size2, size3); + CopyStruct(value4, arr, size1 + size2 + size3, size4); + return arr; + } + + private static byte[] StructsToByteArray(T1 value1, T2 value2, T3 value3, T4 value4, T5 value5, T6 value6) + where T1 : struct where T2 : struct where T3 : struct where T4 : struct where T5 : struct where T6 : struct + { + int size1 = Marshal.SizeOf(value1); + int size2 = Marshal.SizeOf(value2); + int size3 = Marshal.SizeOf(value3); + int size4 = 
Marshal.SizeOf(value4); + int size5 = Marshal.SizeOf(value5); + int size6 = Marshal.SizeOf(value6); + byte[] arr = new byte[size1 + size2 + size3 + size4 + size5 + size6]; + CopyStruct(value1, arr, 0, size1); + CopyStruct(value2, arr, size1, size2); + CopyStruct(value3, arr, size1 + size2, size3); + CopyStruct(value4, arr, size1 + size2 + size3, size4); + CopyStruct(value5, arr, size1 + size2 + size3 + size4, size5); + CopyStruct(value6, arr, size1 + size2 + size3 + size4 + size5, size6); + return arr; + } + + private static void CopyStruct<T>(T value, byte[] data, int startIndex, int length) where T : struct + { + int size = Marshal.SizeOf(value); + IntPtr ptr = Marshal.AllocHGlobal(size); + + try + { + Marshal.StructureToPtr(value, ptr, true); + Marshal.Copy(ptr, data, startIndex, length); + } + finally + { + Marshal.FreeHGlobal(ptr); + } + } + + private static PerformanceCounter GetPerformanceCounterLib(out PerformanceCounterLib lib) + { + PerformanceCounter counterSample = Helpers.RetryOnAllPlatformsWithClosingResources(() => + new PerformanceCounter("Processor", "Interrupts/sec", "0", ".")); + + counterSample.BeginInit(); + Assert.NotNull(counterSample); + + FieldInfo fi = typeof(PerformanceCounterLib).GetField("s_libraryTable", BindingFlags.Static | BindingFlags.NonPublic); + Hashtable libs = (Hashtable)fi.GetValue(null); + CategoryEntry category = default; + + bool found = false; + lib = default; + foreach (string key in libs.Keys) + { + lib = (PerformanceCounterLib)libs[key]; + Assert.NotNull(lib); + + category = (CategoryEntry)lib.CategoryTable["Processor"]; + if (category != null) + { + found = true; + break; + } + } + + Assert.True(found); + + s_ObjectNameTitleIndex = category.NameIndex; + + return counterSample; + } + + private static CategorySample GetCategorySample(PerformanceCounterLib lib, byte[] data) + { + CategoryEntry entry = (CategoryEntry)lib.CategoryTable["Processor"]; + return new CategorySample(data, entry, lib); + } + + private static int s_ObjectNameTitleIndex { get; set; } = -1; + + private static void VerifyInitialized() + { + if (s_ObjectNameTitleIndex == -1) + { + using (PerformanceCounter counter = GetPerformanceCounterLib(out PerformanceCounterLib _)) { } + } + + Assert.True(s_ObjectNameTitleIndex != -1); + } + + private static PERF_DATA_BLOCK CreatePerfDataBlock() => + new PERF_DATA_BLOCK + { + TotalByteLength = PERF_DATA_BLOCK.SizeOf + PERF_OBJECT_TYPE.SizeOf + PERF_COUNTER_DEFINITION.SizeOf, + HeaderLength = PERF_DATA_BLOCK.SizeOf, + Signature1 = PERF_DATA_BLOCK.Signature1Int, + Signature2 = PERF_DATA_BLOCK.Signature2Int, + NumObjectTypes = 1 + }; + + private static PERF_OBJECT_TYPE CreatePerfObjectType(int numInstances, int numCounters) => + new PERF_OBJECT_TYPE + { + TotalByteLength = PERF_OBJECT_TYPE.SizeOf + PERF_COUNTER_DEFINITION.SizeOf, + HeaderLength = PERF_OBJECT_TYPE.SizeOf, + DefinitionLength = PERF_OBJECT_TYPE.SizeOf + PERF_COUNTER_DEFINITION.SizeOf, + ObjectNameTitleIndex = s_ObjectNameTitleIndex, + NumCounters = numCounters, + NumInstances = numInstances + }; + + private static PERF_COUNTER_DEFINITION CreatePerfCounterDefinition() => + new PERF_COUNTER_DEFINITION + { + ByteLength = PERF_COUNTER_DEFINITION.SizeOf, + CounterOffset = 4, + CounterSize = 4, + CounterNameTitleIndex = s_ObjectNameTitleIndex + }; + + private static PERF_INSTANCE_DEFINITION CreatePerfInstanceDefinition() => + new() + { + ByteLength = PERF_INSTANCE_DEFINITION.SizeOf, + NameOffset = 0, + NameLength = 0, + + // Setting this calls GetInstanceNamesFromIndex() which will 
validate the real (valid) definition. + ParentObjectTitleIndex = s_ObjectNameTitleIndex + }; + } +} diff --git a/src/libraries/System.Diagnostics.Process/src/Resources/Strings.resx b/src/libraries/System.Diagnostics.Process/src/Resources/Strings.resx index d6ac4b8688cca..6a2f970d6aec8 100644 --- a/src/libraries/System.Diagnostics.Process/src/Resources/Strings.resx +++ b/src/libraries/System.Diagnostics.Process/src/Resources/Strings.resx @@ -1,16 +1,17 @@ - - @@ -332,4 +333,7 @@ sysctl {0} failed with {1} error. - + + Invalid performance counter data with type '{0}'. + + \ No newline at end of file diff --git a/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/ProcessManager.Windows.cs b/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/ProcessManager.Windows.cs index 5762874bbef77..86399634e4da7 100644 --- a/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/ProcessManager.Windows.cs +++ b/src/libraries/System.Diagnostics.Process/src/System/Diagnostics/ProcessManager.Windows.cs @@ -476,18 +476,25 @@ private static ProcessInfo[] GetProcessInfos(PerformanceCounterLib library, int List threadInfos = new List(); ref readonly PERF_DATA_BLOCK dataBlock = ref MemoryMarshal.AsRef(data); + dataBlock.Validate(data.Length); int typePos = dataBlock.HeaderLength; + ReadOnlySpan dataSpan; + for (int i = 0; i < dataBlock.NumObjectTypes; i++) { - ref readonly PERF_OBJECT_TYPE type = ref MemoryMarshal.AsRef(data.Slice(typePos)); + dataSpan = data.Slice(typePos); + ref readonly PERF_OBJECT_TYPE type = ref MemoryMarshal.AsRef(dataSpan); + type.Validate(dataSpan.Length); PERF_COUNTER_DEFINITION[] counters = new PERF_COUNTER_DEFINITION[type.NumCounters]; int counterPos = typePos + type.HeaderLength; for (int j = 0; j < type.NumCounters; j++) { - ref readonly PERF_COUNTER_DEFINITION counter = ref MemoryMarshal.AsRef(data.Slice(counterPos)); + dataSpan = data.Slice(counterPos); + ref readonly PERF_COUNTER_DEFINITION counter = ref MemoryMarshal.AsRef(dataSpan); + counter.Validate(dataSpan.Length); string counterName = library.GetCounterName(counter.CounterNameTitleIndex); @@ -503,7 +510,9 @@ private static ProcessInfo[] GetProcessInfos(PerformanceCounterLib library, int int instancePos = typePos + type.DefinitionLength; for (int j = 0; j < type.NumInstances; j++) { - ref readonly PERF_INSTANCE_DEFINITION instance = ref MemoryMarshal.AsRef(data.Slice(instancePos)); + dataSpan = data.Slice(instancePos); + ref readonly PERF_INSTANCE_DEFINITION instance = ref MemoryMarshal.AsRef(dataSpan); + instance.Validate(dataSpan.Length); ReadOnlySpan instanceName = PERF_INSTANCE_DEFINITION.GetName(in instance, data.Slice(instancePos)); @@ -563,7 +572,11 @@ private static ProcessInfo[] GetProcessInfos(PerformanceCounterLib library, int instancePos += instance.ByteLength; - instancePos += MemoryMarshal.AsRef(data.Slice(instancePos)).ByteLength; + dataSpan = data.Slice(instancePos); + ref readonly PERF_COUNTER_BLOCK perfCounterBlock = ref MemoryMarshal.AsRef(dataSpan); + perfCounterBlock.Validate(dataSpan.Length); + + instancePos += perfCounterBlock.ByteLength; } typePos += type.TotalByteLength; diff --git a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.cs b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.cs index 8bcb5495efe46..96b19871cd734 100644 --- a/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.cs +++ b/src/libraries/System.IO.Compression/src/System/IO/Compression/ZipBlocks.cs @@ -269,14 +269,12 @@ public 
static Zip64ExtraField GetAndRemoveZip64Block(List zip64Field._localHeaderOffset = null; zip64Field._startDiskNumber = null; - List markedForDelete = new List(); bool zip64FieldFound = false; - foreach (ZipGenericExtraField ef in extraFields) + extraFields.RemoveAll(ef => { if (ef.Tag == TagConstant) { - markedForDelete.Add(ef); if (!zip64FieldFound) { if (TryGetZip64BlockFromGenericExtraField(ef, readUncompressedSize, readCompressedSize, @@ -285,24 +283,18 @@ public static Zip64ExtraField GetAndRemoveZip64Block(List zip64FieldFound = true; } } + return true; } - } - foreach (ZipGenericExtraField ef in markedForDelete) - extraFields.Remove(ef); + return false; + }); return zip64Field; } public static void RemoveZip64Blocks(List extraFields) { - List markedForDelete = new List(); - foreach (ZipGenericExtraField field in extraFields) - if (field.Tag == TagConstant) - markedForDelete.Add(field); - - foreach (ZipGenericExtraField field in markedForDelete) - extraFields.Remove(field); + extraFields.RemoveAll(field => field.Tag == TagConstant); } public void WriteBlock(Stream stream) diff --git a/src/libraries/System.IO.Packaging/src/System/IO/Packaging/Package.cs b/src/libraries/System.IO.Packaging/src/System/IO/Packaging/Package.cs index f2f92345d3f7e..23aea78342647 100644 --- a/src/libraries/System.IO.Packaging/src/System/IO/Packaging/Package.cs +++ b/src/libraries/System.IO.Packaging/src/System/IO/Packaging/Package.cs @@ -380,6 +380,8 @@ public void DeletePart(Uri partUri) /// /// If this Package object has been disposed /// If the package is writeonly, no information can be retrieved from it + /// The package has a bad format. + /// The part name prefix exists. public PackagePartCollection GetParts() { ThrowIfObjectDisposed(); @@ -401,32 +403,63 @@ public PackagePartCollection GetParts() PackUriHelper.ValidatedPartUri partUri; + var uriComparer = Comparer.Default; + + //Sorting the parts array which takes O(n log n) time. + Array.Sort(parts, Comparer.Create((partA, partB) => uriComparer.Compare((PackUriHelper.ValidatedPartUri)partA.Uri, (PackUriHelper.ValidatedPartUri)partB.Uri))); + //We need this dictionary to detect any collisions that might be present in the //list of parts that was given to us from the underlying physical layer, as more than one //partnames can be mapped to the same normalized part. //Note: We cannot use the _partList member variable, as that gets updated incrementally and so its //not possible to find the collisions using that list. //PackUriHelper.ValidatedPartUri implements the IComparable interface. - Dictionary seenPartUris = new Dictionary(parts.Length); + Dictionary> partDictionary = new(parts.Length); + List partIndex = new(parts.Length); for (int i = 0; i < parts.Length; i++) { partUri = (PackUriHelper.ValidatedPartUri)parts[i].Uri; - if (seenPartUris.ContainsKey(partUri)) + string normalizedPartName = partUri.NormalizedPartUriString; + + if (partDictionary.ContainsKey(normalizedPartName)) + { throw new FileFormatException(SR.BadPackageFormat); + } else { - // Add the part to the list of URIs that we have already seen - seenPartUris.Add(partUri, parts[i]); + //since we will arrive to this line of code after the parts are already sorted + string? 
precedingPartName = null; + + if (partIndex.Count > 0) + { + precedingPartName = (partIndex[partIndex.Count - 1]); + } + + // Add the part to the dictionary + partDictionary.Add(normalizedPartName, new KeyValuePair(partUri, parts[i])); - if (!_partList.ContainsKey(partUri)) + if (precedingPartName != null + && normalizedPartName.StartsWith(precedingPartName, StringComparison.Ordinal) + && normalizedPartName.Length > precedingPartName.Length + && normalizedPartName[precedingPartName.Length] == PackUriHelper.ForwardSlashChar) { - // Add the part to the _partList if there is no prefix collision - AddIfNoPrefixCollisionDetected(partUri, parts[i]); + //Removing the invalid entry from the _partList. + partDictionary.Remove(normalizedPartName); + + throw new InvalidOperationException(SR.PartNamePrefixExists); } + + //adding entry to partIndex to keep track of last element being added. + //since parts are already sorted, last element in partIndex list will point to preceeding element to the current. + partIndex.Add(partUri.NormalizedPartUriString); } } + + //copying parts from partdictionary to partlist + CopyPartDictionaryToPartList(partDictionary, partIndex); + _partCollection = new PackagePartCollection(_partList); } return _partCollection; @@ -1173,6 +1206,23 @@ private PackageRelationshipCollection GetRelationshipsHelper(string? filterStrin return new PackageRelationshipCollection(_relationships, filterString); } + private void CopyPartDictionaryToPartList(Dictionary> partDictionary, List partIndex) + { + //Clearing _partList before copying in new data. Reassigning the variable, assuming the previous object to be garbage collected. + //ideally addition to sortedlist takes O(n) but since we have sorted data and also we defined the size, it will take O(log n) per addition + //total time complexity for this function will be O(n log n) + _partList = new SortedList(partDictionary.Count); + + //Since partIndex is created from a sorted parts array we are sure that partIndex + //will have items in same order + foreach (var id in partIndex) + { + //retrieving object from partDictionary hashtable + var keyValue = partDictionary[id]; + _partList.Add(keyValue.Key, keyValue.Value); + } + } + #endregion Private Methods #region Private Members diff --git a/src/libraries/System.IO.Packaging/src/System/IO/Packaging/ZipPackage.cs b/src/libraries/System.IO.Packaging/src/System/IO/Packaging/ZipPackage.cs index 677364d0d55b9..f3e1a416e360b 100644 --- a/src/libraries/System.IO.Packaging/src/System/IO/Packaging/ZipPackage.cs +++ b/src/libraries/System.IO.Packaging/src/System/IO/Packaging/ZipPackage.cs @@ -739,7 +739,7 @@ bool IEqualityComparer.Equals(string? extensionA, string? extensionB) //with the rules for comparing/normalizing partnames. //Refer to PackUriHelper.ValidatedPartUri.GetNormalizedPartUri method. //Currently normalization just involves upper-casing ASCII and hence the simplification. 
- return extensionA.Equals(extensionB, StringComparison.InvariantCultureIgnoreCase); + return extensionA.Equals(extensionB, StringComparison.OrdinalIgnoreCase); } int IEqualityComparer.GetHashCode(string extension) diff --git a/src/libraries/System.Private.CoreLib/src/System/Marvin.cs b/src/libraries/System.Private.CoreLib/src/System/Marvin.cs index 098ebb5260dc9..31c1b696a6801 100644 --- a/src/libraries/System.Private.CoreLib/src/System/Marvin.cs +++ b/src/libraries/System.Private.CoreLib/src/System/Marvin.cs @@ -6,6 +6,12 @@ using System.Runtime.CompilerServices; using System.Runtime.InteropServices; +#if SYSTEM_PRIVATE_CORELIB +using static System.Numerics.BitOperations; +#else +using System.Security.Cryptography; +#endif + namespace System { internal static partial class Marvin @@ -204,7 +210,7 @@ public static int ComputeHash32(ref byte data, uint count, uint p0, uint p1) else { partialResult |= (uint)Unsafe.ReadUnaligned(ref data); - partialResult = BitOperations.RotateLeft(partialResult, 16); + partialResult = RotateLeft(partialResult, 16); } } @@ -221,16 +227,16 @@ private static void Block(ref uint rp0, ref uint rp1) uint p1 = rp1; p1 ^= p0; - p0 = BitOperations.RotateLeft(p0, 20); + p0 = RotateLeft(p0, 20); p0 += p1; - p1 = BitOperations.RotateLeft(p1, 9); + p1 = RotateLeft(p1, 9); p1 ^= p0; - p0 = BitOperations.RotateLeft(p0, 27); + p0 = RotateLeft(p0, 27); p0 += p1; - p1 = BitOperations.RotateLeft(p1, 19); + p1 = RotateLeft(p1, 19); rp0 = p0; rp1 = p1; @@ -241,8 +247,29 @@ private static void Block(ref uint rp0, ref uint rp1) private static unsafe ulong GenerateSeed() { ulong seed; +#if SYSTEM_PRIVATE_CORELIB Interop.GetRandomBytes((byte*)&seed, sizeof(ulong)); +#else + byte[] seedBytes = new byte[sizeof(ulong)]; + using (RandomNumberGenerator rng = RandomNumberGenerator.Create()) + { + rng.GetBytes(seedBytes); + fixed (byte* b = seedBytes) + { + seed = *(ulong*)b; + } + } +#endif return seed; } + +#if !SYSTEM_PRIVATE_CORELIB + [MethodImpl(MethodImplOptions.AggressiveInlining)] + private static uint RotateLeft(uint value, int shift) + { + // This is expected to be optimized into a single rol (or ror with negated shift value) instruction + return (value << shift) | (value >> (32 - shift)); + } +#endif } } diff --git a/src/libraries/System.Private.CoreLib/src/System/Random.cs b/src/libraries/System.Private.CoreLib/src/System/Random.cs index 190fa3583caac..39438be55c8f0 100644 --- a/src/libraries/System.Private.CoreLib/src/System/Random.cs +++ b/src/libraries/System.Private.CoreLib/src/System/Random.cs @@ -199,11 +199,16 @@ public void GetItems(ReadOnlySpan choices, Span destination) // The most expensive part of this operation is the call to get random data. We can // do so potentially many fewer times if: + // - the instance was constructed as `new Random()` or is `Random.Shared`, such that it's not seeded nor is it + // a custom derived type. We don't want to observably change the deterministically-produced sequence from previous releases. // - the number of choices is <= 256. This let's us get a single byte per choice. // - the number of choices is a power of two. This let's us use a byte and simply mask off // unnecessary bits cheaply rather than needing to use rejection sampling. // In such a case, we can grab a bunch of random bytes in one call. 
- if (BitOperations.IsPow2(choices.Length) && choices.Length <= 256) + ImplBase impl = _impl; + if ((impl is null || impl.GetType() == typeof(XoshiroImpl)) && + BitOperations.IsPow2(choices.Length) && + choices.Length <= 256) { Span randomBytes = stackalloc byte[512]; // arbitrary size, a balance between stack consumed and number of random calls required while (!destination.IsEmpty) diff --git a/src/libraries/System.Security.Cryptography.Cose/src/System.Security.Cryptography.Cose.csproj b/src/libraries/System.Security.Cryptography.Cose/src/System.Security.Cryptography.Cose.csproj index d2322648c409e..d6b2d4433eb44 100644 --- a/src/libraries/System.Security.Cryptography.Cose/src/System.Security.Cryptography.Cose.csproj +++ b/src/libraries/System.Security.Cryptography.Cose/src/System.Security.Cryptography.Cose.csproj @@ -36,13 +36,12 @@ - - + + - - - + + diff --git a/src/mono/browser/runtime/loader/icu.ts b/src/mono/browser/runtime/loader/icu.ts index d5ad0dd3b8bbd..20b6a4578f0a3 100644 --- a/src/mono/browser/runtime/loader/icu.ts +++ b/src/mono/browser/runtime/loader/icu.ts @@ -66,8 +66,9 @@ export function getIcuResourceName (config: MonoConfig): string | null { let icuFile = null; if (config.globalizationMode === GlobalizationMode.Custom) { - if (icuFiles.length === 1) { - icuFile = icuFiles[0]; + // custom ICU file is saved in the resources with fingerprinting and does not require mapping + if (icuFiles.length >= 1) { + return icuFiles[0]; } } else if (config.globalizationMode === GlobalizationMode.Hybrid) { icuFile = "icudt_hybrid.dat";
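Note on the MemoryCache change earlier in this diff: string keys now live in their own ConcurrentDictionary behind a comparer that, on frameworks without built-in hash randomization, hashes keys with Marvin and a per-process random seed, so attacker-supplied keys cannot be precomputed to collide into a single bucket. The sketch below only illustrates that randomized-comparer pattern; the type name RandomizedStringComparer and the FNV-1a-style mixing function are hypothetical stand-ins for the Marvin.ComputeHash32/Marvin.DefaultSeed call the change actually wires up.

using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Security.Cryptography;

// Hypothetical sketch: a string comparer whose hash codes are seeded per process.
// The real change uses Marvin; the mixing below is a simplified FNV-1a-style stand-in.
internal sealed class RandomizedStringComparer : IEqualityComparer<string>
{
    // One random seed per process, so hash codes differ between runs.
    private static readonly ulong s_seed = GenerateSeed();

    public static RandomizedStringComparer Instance { get; } = new RandomizedStringComparer();

    private RandomizedStringComparer() { }

    public bool Equals(string x, string y) => string.Equals(x, y);

    public int GetHashCode(string s)
    {
        if (s is null)
        {
            return 0;
        }

        ulong hash = s_seed ^ 14695981039346656037UL; // FNV offset basis mixed with the seed
        foreach (char c in s)
        {
            hash = (hash ^ c) * 1099511628211UL; // FNV prime
        }
        return (int)(hash ^ (hash >> 32)); // fold to 32 bits
    }

    private static ulong GenerateSeed()
    {
        byte[] bytes = new byte[sizeof(ulong)];
        using (RandomNumberGenerator rng = RandomNumberGenerator.Create())
        {
            rng.GetBytes(bytes);
        }
        return BitConverter.ToUInt64(bytes, 0);
    }
}

internal static class RandomizedComparerExample
{
    private static void Main()
    {
        // Keying the dictionary on the randomized comparer, as the cache's string map does.
        var entries = new ConcurrentDictionary<string, object>(RandomizedStringComparer.Instance);
        entries.TryAdd("alpha", 1);
        Console.WriteLine(entries.ContainsKey("alpha")); // True
    }
}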